sqlparser/parser/mod.rs

// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

//! SQL Parser

#[cfg(not(feature = "std"))]
use alloc::{
    boxed::Box,
    format,
    string::{String, ToString},
    vec,
    vec::Vec,
};
use core::{
    fmt::{self, Display},
    str::FromStr,
};
use helpers::attached_token::AttachedToken;

use log::debug;

use recursion::RecursionCounter;
use IsLateral::*;
use IsOptional::*;

use crate::ast::Statement::CreatePolicy;
use crate::ast::*;
use crate::ast::{
    comments,
    helpers::{
        key_value_options::{
            KeyValueOption, KeyValueOptionKind, KeyValueOptions, KeyValueOptionsDelimiter,
        },
        stmt_create_table::{CreateTableBuilder, CreateTableConfiguration},
    },
};
use crate::dialect::*;
use crate::keywords::{Keyword, ALL_KEYWORDS};
use crate::tokenizer::*;
use sqlparser::parser::ParserState::ColumnDefinition;

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ParserError {
    TokenizerError(String),
    ParserError(String),
    RecursionLimitExceeded,
}

// Use `Parser::expected` instead, if possible
macro_rules! parser_err {
    ($MSG:expr, $loc:expr) => {
        Err(ParserError::ParserError(format!("{}{}", $MSG, $loc)))
    };
}

mod alter;
mod merge;

#[cfg(feature = "std")]
/// Implementation of [`RecursionCounter`] used when std is available
70mod recursion {
71    use std::cell::Cell;
72    use std::rc::Rc;
73
74    use super::ParserError;
75
76    /// Tracks remaining recursion depth. This value is decremented on
77    /// each call to [`RecursionCounter::try_decrease()`], when it reaches 0 an error will
78    /// be returned.
    ///
    /// Note: Uses an [`std::rc::Rc`] and [`std::cell::Cell`] in order to satisfy the Rust
    /// borrow checker, so that the [`DepthGuard`] returned by
    /// [`RecursionCounter::try_decrease`] can hold a reference to the counter
    /// and restore the remaining depth when it is dropped.
    ///
    /// Note: when the "recursive-protection" feature is enabled, this crate uses additional stack
    /// overflow protection for some of its recursive methods. See [`recursive::recursive`] for more information.
86    pub(crate) struct RecursionCounter {
87        remaining_depth: Rc<Cell<usize>>,
88    }
89
90    impl RecursionCounter {
91        /// Creates a [`RecursionCounter`] with the specified maximum
92        /// depth
93        pub fn new(remaining_depth: usize) -> Self {
94            Self {
95                remaining_depth: Rc::new(remaining_depth.into()),
96            }
97        }
98
        /// Decreases the remaining depth by 1.
        ///
        /// Returns [`Err`] if the remaining depth is already 0.
        ///
        /// Otherwise returns a [`DepthGuard`] that adds 1 back to the
        /// remaining depth when it is dropped.
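        ///
        /// A minimal illustrative sketch of the intended usage (not a doc-test;
        /// this type is crate-private):
        ///
        /// ```ignore
        /// let counter = RecursionCounter::new(2);
        /// let _g1 = counter.try_decrease().unwrap(); // remaining depth: 1
        /// let _g2 = counter.try_decrease().unwrap(); // remaining depth: 0
        /// assert!(counter.try_decrease().is_err());  // limit exceeded
        /// // dropping `_g1` and `_g2` restores the remaining depth
        /// ```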
105        pub fn try_decrease(&self) -> Result<DepthGuard, ParserError> {
106            let old_value = self.remaining_depth.get();
107            // ran out of space
108            if old_value == 0 {
109                Err(ParserError::RecursionLimitExceeded)
110            } else {
111                self.remaining_depth.set(old_value - 1);
112                Ok(DepthGuard::new(Rc::clone(&self.remaining_depth)))
113            }
114        }
115    }
116
117    /// Guard that increases the remaining depth by 1 on drop
118    pub struct DepthGuard {
119        remaining_depth: Rc<Cell<usize>>,
120    }
121
122    impl DepthGuard {
123        fn new(remaining_depth: Rc<Cell<usize>>) -> Self {
124            Self { remaining_depth }
125        }
126    }
127    impl Drop for DepthGuard {
128        fn drop(&mut self) {
129            let old_value = self.remaining_depth.get();
130            self.remaining_depth.set(old_value + 1);
131        }
132    }
133}
134
135#[cfg(not(feature = "std"))]
136mod recursion {
    /// Implementation of [`RecursionCounter`] used when std is NOT available (it does not
138    /// guard against stack overflow).
139    ///
140    /// Has the same API as the std [`RecursionCounter`] implementation
141    /// but does not actually limit stack depth.
142    pub(crate) struct RecursionCounter {}
143
144    impl RecursionCounter {
145        pub fn new(_remaining_depth: usize) -> Self {
146            Self {}
147        }
148        pub fn try_decrease(&self) -> Result<DepthGuard, super::ParserError> {
149            Ok(DepthGuard {})
150        }
151    }
152
153    pub struct DepthGuard {}
154}
155
156#[derive(PartialEq, Eq)]
157pub enum IsOptional {
158    Optional,
159    Mandatory,
160}
161
162pub enum IsLateral {
163    Lateral,
164    NotLateral,
165}
166
167pub enum WildcardExpr {
168    Expr(Expr),
169    QualifiedWildcard(ObjectName),
170    Wildcard,
171}
172
173impl From<TokenizerError> for ParserError {
174    fn from(e: TokenizerError) -> Self {
175        ParserError::TokenizerError(e.to_string())
176    }
177}
178
179impl fmt::Display for ParserError {
180    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
181        write!(
182            f,
183            "sql parser error: {}",
184            match self {
185                ParserError::TokenizerError(s) => s,
186                ParserError::ParserError(s) => s,
187                ParserError::RecursionLimitExceeded => "recursion limit exceeded",
188            }
189        )
190    }
191}
192
193#[cfg(feature = "std")]
194impl std::error::Error for ParserError {}
195
// By default, allow expressions up to this depth before erroring
197const DEFAULT_REMAINING_DEPTH: usize = 50;
198
199// A constant EOF token that can be referenced.
200const EOF_TOKEN: TokenWithSpan = TokenWithSpan {
201    token: Token::EOF,
202    span: Span {
203        start: Location { line: 0, column: 0 },
204        end: Location { line: 0, column: 0 },
205    },
206};
207
/// Composite type declarations using angle-bracket syntax can be arbitrarily
/// nested, so that the following declaration is possible:
///      `ARRAY<ARRAY<INT>>`
/// However, the tokenizer recognizes the `>>` as a single ShiftRight token.
/// We work around that limitation when parsing a data type by accepting
/// either a `>` or `>>` token in such cases, remembering which variant we
/// matched.
/// In the latter case, having matched a `>>`, the parent type will not try to
/// match its own closing `>`, since that already happened while parsing the
/// child type.
218///
219/// See [Parser::parse_data_type] for details
220struct MatchedTrailingBracket(bool);
221
222impl From<bool> for MatchedTrailingBracket {
223    fn from(value: bool) -> Self {
224        Self(value)
225    }
226}
227
228/// Options that control how the [`Parser`] parses SQL text
229#[derive(Debug, Clone, PartialEq, Eq)]
230pub struct ParserOptions {
231    pub trailing_commas: bool,
232    /// Controls how literal values are unescaped. See
233    /// [`Tokenizer::with_unescape`] for more details.
234    pub unescape: bool,
235    /// Controls if the parser expects a semi-colon token
236    /// between statements. Default is `true`.
237    pub require_semicolon_stmt_delimiter: bool,
238}
239
240impl Default for ParserOptions {
241    fn default() -> Self {
242        Self {
243            trailing_commas: false,
244            unescape: true,
245            require_semicolon_stmt_delimiter: true,
246        }
247    }
248}
249
250impl ParserOptions {
251    /// Create a new [`ParserOptions`]
252    pub fn new() -> Self {
253        Default::default()
254    }
255
256    /// Set if trailing commas are allowed.
257    ///
258    /// If this option is `false` (the default), the following SQL will
259    /// not parse. If the option is `true`, the SQL will parse.
260    ///
261    /// ```sql
262    ///  SELECT
263    ///   foo,
264    ///   bar,
265    ///  FROM baz
266    /// ```
267    pub fn with_trailing_commas(mut self, trailing_commas: bool) -> Self {
268        self.trailing_commas = trailing_commas;
269        self
270    }
271
272    /// Set if literal values are unescaped. Defaults to true. See
273    /// [`Tokenizer::with_unescape`] for more details.
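    ///
    /// A minimal usage sketch:
    ///
    /// ```
    /// # use sqlparser::parser::ParserOptions;
    /// let options = ParserOptions::new().with_unescape(false);
    /// assert!(!options.unescape);
    /// ```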
274    pub fn with_unescape(mut self, unescape: bool) -> Self {
275        self.unescape = unescape;
276        self
277    }
278}
279
280#[derive(Copy, Clone)]
281enum ParserState {
282    /// The default state of the parser.
283    Normal,
284    /// The state when parsing a CONNECT BY expression. This allows parsing
285    /// PRIOR expressions while still allowing prior as an identifier name
286    /// in other contexts.
287    ConnectBy,
288    /// The state when parsing column definitions.  This state prohibits
289    /// NOT NULL as an alias for IS NOT NULL.  For example:
290    /// ```sql
291    /// CREATE TABLE foo (abc BIGINT NOT NULL);
292    /// ```
293    ColumnDefinition,
294}
295
296/// A SQL Parser
297///
298/// This struct is the main entry point for parsing SQL queries.
299///
300/// # Functionality:
301/// * Parsing SQL: see examples on [`Parser::new`] and [`Parser::parse_sql`]
302/// * Controlling recursion: See [`Parser::with_recursion_limit`]
303/// * Controlling parser options: See [`Parser::with_options`]
304/// * Providing your own tokens: See [`Parser::with_tokens`]
305///
306/// # Internals
307///
308/// The parser uses a [`Tokenizer`] to tokenize the input SQL string into a
309/// `Vec` of [`TokenWithSpan`]s and maintains an `index` to the current token
310/// being processed. The token vec may contain multiple SQL statements.
311///
312/// * The "current" token is the token at `index - 1`
313/// * The "next" token is the token at `index`
314/// * The "previous" token is the token at `index - 2`
315///
316/// If `index` is equal to the length of the token stream, the 'next' token is
317/// [`Token::EOF`].
318///
319/// For example, the SQL string "SELECT * FROM foo" will be tokenized into
320/// following tokens:
321/// ```text
322///  [
323///    "SELECT", // token index 0
324///    " ",      // whitespace
325///    "*",
326///    " ",
327///    "FROM",
328///    " ",
329///    "foo"
330///   ]
331/// ```
332///
333///
334pub struct Parser<'a> {
335    /// The tokens
336    tokens: Vec<TokenWithSpan>,
337    /// The index of the first unprocessed token in [`Parser::tokens`].
338    index: usize,
339    /// The current state of the parser.
340    state: ParserState,
341    /// The SQL dialect to use.
342    dialect: &'a dyn Dialect,
    /// Additional options that allow you to mix & match behavior
    /// otherwise constrained to certain dialects (e.g. trailing
    /// commas) and/or how values are parsed (e.g. unescaping).
346    options: ParserOptions,
347    /// Ensures the stack does not overflow by limiting recursion depth.
348    recursion_counter: RecursionCounter,
349}
350
351impl<'a> Parser<'a> {
352    /// Create a parser for a [`Dialect`]
353    ///
354    /// See also [`Parser::parse_sql`]
355    ///
356    /// Example:
357    /// ```
358    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
359    /// # fn main() -> Result<(), ParserError> {
360    /// let dialect = GenericDialect{};
361    /// let statements = Parser::new(&dialect)
362    ///   .try_with_sql("SELECT * FROM foo")?
363    ///   .parse_statements()?;
364    /// # Ok(())
365    /// # }
366    /// ```
367    pub fn new(dialect: &'a dyn Dialect) -> Self {
368        Self {
369            tokens: vec![],
370            index: 0,
371            state: ParserState::Normal,
372            dialect,
373            recursion_counter: RecursionCounter::new(DEFAULT_REMAINING_DEPTH),
374            options: ParserOptions::new().with_trailing_commas(dialect.supports_trailing_commas()),
375        }
376    }
377
378    /// Specify the maximum recursion limit while parsing.
379    ///
380    /// [`Parser`] prevents stack overflows by returning
381    /// [`ParserError::RecursionLimitExceeded`] if the parser exceeds
382    /// this depth while processing the query.
383    ///
384    /// Example:
385    /// ```
386    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
387    /// # fn main() -> Result<(), ParserError> {
388    /// let dialect = GenericDialect{};
389    /// let result = Parser::new(&dialect)
390    ///   .with_recursion_limit(1)
391    ///   .try_with_sql("SELECT * FROM foo WHERE (a OR (b OR (c OR d)))")?
392    ///   .parse_statements();
393    ///   assert_eq!(result, Err(ParserError::RecursionLimitExceeded));
394    /// # Ok(())
395    /// # }
396    /// ```
397    ///
    /// Note: when the "recursive-protection" feature is enabled, this crate uses additional stack overflow protection
    /// for some of its recursive methods. See [`recursive::recursive`] for more information.
400    pub fn with_recursion_limit(mut self, recursion_limit: usize) -> Self {
401        self.recursion_counter = RecursionCounter::new(recursion_limit);
402        self
403    }
404
405    /// Specify additional parser options
406    ///
407    /// [`Parser`] supports additional options ([`ParserOptions`])
408    /// that allow you to mix & match behavior otherwise constrained
409    /// to certain dialects (e.g. trailing commas).
410    ///
411    /// Example:
412    /// ```
413    /// # use sqlparser::{parser::{Parser, ParserError, ParserOptions}, dialect::GenericDialect};
414    /// # fn main() -> Result<(), ParserError> {
415    /// let dialect = GenericDialect{};
416    /// let options = ParserOptions::new()
417    ///    .with_trailing_commas(true)
418    ///    .with_unescape(false);
419    /// let result = Parser::new(&dialect)
420    ///   .with_options(options)
421    ///   .try_with_sql("SELECT a, b, COUNT(*), FROM foo GROUP BY a, b,")?
422    ///   .parse_statements();
423    ///   assert!(matches!(result, Ok(_)));
424    /// # Ok(())
425    /// # }
426    /// ```
427    pub fn with_options(mut self, options: ParserOptions) -> Self {
428        self.options = options;
429        self
430    }
431
432    /// Reset this parser to parse the specified token stream
433    pub fn with_tokens_with_locations(mut self, tokens: Vec<TokenWithSpan>) -> Self {
434        self.tokens = tokens;
435        self.index = 0;
436        self
437    }
438
439    /// Reset this parser state to parse the specified tokens
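    ///
    /// A minimal sketch of supplying pre-tokenized input (the SQL string is
    /// illustrative):
    ///
    /// ```
    /// # use sqlparser::{parser::Parser, dialect::GenericDialect, tokenizer::Tokenizer};
    /// let dialect = GenericDialect{};
    /// let tokens = Tokenizer::new(&dialect, "SELECT 1").tokenize().unwrap();
    /// let statements = Parser::new(&dialect)
    ///     .with_tokens(tokens)
    ///     .parse_statements()
    ///     .unwrap();
    /// assert_eq!(statements.len(), 1);
    /// ```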
440    pub fn with_tokens(self, tokens: Vec<Token>) -> Self {
441        // Put in dummy locations
442        let tokens_with_locations: Vec<TokenWithSpan> = tokens
443            .into_iter()
444            .map(|token| TokenWithSpan {
445                token,
446                span: Span::empty(),
447            })
448            .collect();
449        self.with_tokens_with_locations(tokens_with_locations)
450    }
451
    /// Tokenizes the sql string and sets this [`Parser`]'s state to
    /// parse the resulting tokens
454    ///
455    /// Returns an error if there was an error tokenizing the SQL string.
456    ///
    /// See [`Parser::new()`] for an example
458    pub fn try_with_sql(self, sql: &str) -> Result<Self, ParserError> {
459        debug!("Parsing sql '{sql}'...");
460        let tokens = Tokenizer::new(self.dialect, sql)
461            .with_unescape(self.options.unescape)
462            .tokenize_with_location()?;
463        Ok(self.with_tokens_with_locations(tokens))
464    }
465
466    /// Parse potentially multiple statements
467    ///
468    /// Example
469    /// ```
470    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
471    /// # fn main() -> Result<(), ParserError> {
472    /// let dialect = GenericDialect{};
473    /// let statements = Parser::new(&dialect)
474    ///   // Parse a SQL string with 2 separate statements
475    ///   .try_with_sql("SELECT * FROM foo; SELECT * FROM bar;")?
476    ///   .parse_statements()?;
477    /// assert_eq!(statements.len(), 2);
478    /// # Ok(())
479    /// # }
480    /// ```
481    pub fn parse_statements(&mut self) -> Result<Vec<Statement>, ParserError> {
482        let mut stmts = Vec::new();
483        let mut expecting_statement_delimiter = false;
484        loop {
485            // ignore empty statements (between successive statement delimiters)
486            while self.consume_token(&Token::SemiColon) {
487                expecting_statement_delimiter = false;
488            }
489
490            if !self.options.require_semicolon_stmt_delimiter {
491                expecting_statement_delimiter = false;
492            }
493
494            match self.peek_token().token {
495                Token::EOF => break,
496
497                // end of statement
498                Token::Word(word) => {
499                    if expecting_statement_delimiter && word.keyword == Keyword::END {
500                        break;
501                    }
502                }
503                _ => {}
504            }
505
506            if expecting_statement_delimiter {
507                return self.expected("end of statement", self.peek_token());
508            }
509
510            let statement = self.parse_statement()?;
511            stmts.push(statement);
512            expecting_statement_delimiter = true;
513        }
514        Ok(stmts)
515    }
516
    /// Convenience method to parse a string with one or more SQL
    /// statements and produce an Abstract Syntax Tree (AST).
519    ///
520    /// Example
521    /// ```
522    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
523    /// # fn main() -> Result<(), ParserError> {
524    /// let dialect = GenericDialect{};
525    /// let statements = Parser::parse_sql(
526    ///   &dialect, "SELECT * FROM foo"
527    /// )?;
528    /// assert_eq!(statements.len(), 1);
529    /// # Ok(())
530    /// # }
531    /// ```
532    pub fn parse_sql(dialect: &dyn Dialect, sql: &str) -> Result<Vec<Statement>, ParserError> {
533        Parser::new(dialect).try_with_sql(sql)?.parse_statements()
534    }
535
    /// Parses the given `sql` into an Abstract Syntax Tree (AST), also
    /// returning any source code comments that were encountered.
538    ///
539    /// See [Parser::parse_sql].
540    pub fn parse_sql_with_comments(
541        dialect: &'a dyn Dialect,
542        sql: &str,
543    ) -> Result<(Vec<Statement>, comments::Comments), ParserError> {
544        let mut p = Parser::new(dialect).try_with_sql(sql)?;
545        p.parse_statements().map(|stmts| (stmts, p.into_comments()))
546    }
547
    /// Consumes this parser, returning the comments collected from the parsed token stream.
549    fn into_comments(self) -> comments::Comments {
550        let mut comments = comments::Comments::default();
551        for t in self.tokens.into_iter() {
552            match t.token {
553                Token::Whitespace(Whitespace::SingleLineComment { comment, prefix }) => {
554                    comments.offer(comments::CommentWithSpan {
555                        comment: comments::Comment::SingleLine {
556                            content: comment,
557                            prefix,
558                        },
559                        span: t.span,
560                    });
561                }
562                Token::Whitespace(Whitespace::MultiLineComment(comment)) => {
563                    comments.offer(comments::CommentWithSpan {
564                        comment: comments::Comment::MultiLine(comment),
565                        span: t.span,
566                    });
567                }
568                _ => {}
569            }
570        }
571        comments
572    }
573
574    /// Parse a single top-level statement (such as SELECT, INSERT, CREATE, etc.),
575    /// stopping before the statement separator, if any.
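    ///
    /// A minimal usage sketch (mirrors the [`Parser::parse_statements`] example,
    /// but for a single statement):
    ///
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let mut parser = Parser::new(&dialect).try_with_sql("SELECT * FROM foo")?;
    /// let statement = parser.parse_statement()?;
    /// # let _ = statement;
    /// # Ok(())
    /// # }
    /// ```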
576    pub fn parse_statement(&mut self) -> Result<Statement, ParserError> {
577        let _guard = self.recursion_counter.try_decrease()?;
578
579        // allow the dialect to override statement parsing
580        if let Some(statement) = self.dialect.parse_statement(self) {
581            return statement;
582        }
583
584        let next_token = self.next_token();
585        match &next_token.token {
586            Token::Word(w) => match w.keyword {
587                Keyword::KILL => self.parse_kill(),
588                Keyword::FLUSH => self.parse_flush(),
589                Keyword::DESC => self.parse_explain(DescribeAlias::Desc),
590                Keyword::DESCRIBE => self.parse_explain(DescribeAlias::Describe),
591                Keyword::EXPLAIN => self.parse_explain(DescribeAlias::Explain),
592                Keyword::ANALYZE => self.parse_analyze(),
593                Keyword::CASE => {
594                    self.prev_token();
595                    self.parse_case_stmt()
596                }
597                Keyword::IF => {
598                    self.prev_token();
599                    self.parse_if_stmt()
600                }
601                Keyword::WHILE => {
602                    self.prev_token();
603                    self.parse_while()
604                }
605                Keyword::RAISE => {
606                    self.prev_token();
607                    self.parse_raise_stmt()
608                }
609                Keyword::SELECT | Keyword::WITH | Keyword::VALUES | Keyword::FROM => {
610                    self.prev_token();
611                    self.parse_query().map(Statement::Query)
612                }
613                Keyword::TRUNCATE => self.parse_truncate(),
614                Keyword::ATTACH => {
615                    if dialect_of!(self is DuckDbDialect) {
616                        self.parse_attach_duckdb_database()
617                    } else {
618                        self.parse_attach_database()
619                    }
620                }
621                Keyword::DETACH if dialect_of!(self is DuckDbDialect | GenericDialect) => {
622                    self.parse_detach_duckdb_database()
623                }
624                Keyword::MSCK => self.parse_msck(),
625                Keyword::CREATE => self.parse_create(),
626                Keyword::CACHE => self.parse_cache_table(),
627                Keyword::DROP => self.parse_drop(),
628                Keyword::DISCARD => self.parse_discard(),
629                Keyword::DECLARE => self.parse_declare(),
630                Keyword::FETCH => self.parse_fetch_statement(),
631                Keyword::DELETE => self.parse_delete(next_token),
632                Keyword::INSERT => self.parse_insert(next_token),
633                Keyword::REPLACE => self.parse_replace(next_token),
634                Keyword::UNCACHE => self.parse_uncache_table(),
635                Keyword::UPDATE => self.parse_update(next_token),
636                Keyword::ALTER => self.parse_alter(),
637                Keyword::CALL => self.parse_call(),
638                Keyword::COPY => self.parse_copy(),
639                Keyword::OPEN => {
640                    self.prev_token();
641                    self.parse_open()
642                }
643                Keyword::CLOSE => self.parse_close(),
644                Keyword::SET => self.parse_set(),
645                Keyword::SHOW => self.parse_show(),
646                Keyword::USE => self.parse_use(),
647                Keyword::GRANT => self.parse_grant(),
648                Keyword::DENY => {
649                    self.prev_token();
650                    self.parse_deny()
651                }
652                Keyword::REVOKE => self.parse_revoke(),
653                Keyword::START => self.parse_start_transaction(),
654                Keyword::BEGIN => self.parse_begin(),
655                Keyword::END => self.parse_end(),
656                Keyword::SAVEPOINT => self.parse_savepoint(),
657                Keyword::RELEASE => self.parse_release(),
658                Keyword::COMMIT => self.parse_commit(),
659                Keyword::RAISERROR => Ok(self.parse_raiserror()?),
660                Keyword::ROLLBACK => self.parse_rollback(),
661                Keyword::ASSERT => self.parse_assert(),
                // `PREPARE`, `EXECUTE` and `DEALLOCATE` are Postgres-specific
                // syntax. They are used for Postgres prepared statements.
664                Keyword::DEALLOCATE => self.parse_deallocate(),
665                Keyword::EXECUTE | Keyword::EXEC => self.parse_execute(),
666                Keyword::PREPARE => self.parse_prepare(),
667                Keyword::MERGE => self.parse_merge(next_token),
                // `LISTEN`, `UNLISTEN` and `NOTIFY` are Postgres-specific
                // syntax. They are used for Postgres notification statements.
670                Keyword::LISTEN if self.dialect.supports_listen_notify() => self.parse_listen(),
671                Keyword::UNLISTEN if self.dialect.supports_listen_notify() => self.parse_unlisten(),
672                Keyword::NOTIFY if self.dialect.supports_listen_notify() => self.parse_notify(),
673                // `PRAGMA` is sqlite specific https://www.sqlite.org/pragma.html
674                Keyword::PRAGMA => self.parse_pragma(),
675                Keyword::UNLOAD => {
676                    self.prev_token();
677                    self.parse_unload()
678                }
679                Keyword::RENAME => self.parse_rename(),
680                // `INSTALL` is duckdb specific https://duckdb.org/docs/extensions/overview
681                Keyword::INSTALL if dialect_of!(self is DuckDbDialect | GenericDialect) => {
682                    self.parse_install()
683                }
684                Keyword::LOAD => self.parse_load(),
685                // `OPTIMIZE` is clickhouse specific https://clickhouse.tech/docs/en/sql-reference/statements/optimize/
686                Keyword::OPTIMIZE if dialect_of!(self is ClickHouseDialect | GenericDialect) => {
687                    self.parse_optimize_table()
688                }
689                // `COMMENT` is snowflake specific https://docs.snowflake.com/en/sql-reference/sql/comment
690                Keyword::COMMENT if self.dialect.supports_comment_on() => self.parse_comment(),
691                Keyword::PRINT => self.parse_print(),
692                Keyword::RETURN => self.parse_return(),
693                Keyword::EXPORT => {
694                    self.prev_token();
695                    self.parse_export_data()
696                }
697                Keyword::VACUUM => {
698                    self.prev_token();
699                    self.parse_vacuum()
700                }
701                Keyword::RESET => self.parse_reset(),
702                _ => self.expected("an SQL statement", next_token),
703            },
704            Token::LParen => {
705                self.prev_token();
706                self.parse_query().map(Statement::Query)
707            }
708            _ => self.expected("an SQL statement", next_token),
709        }
710    }
711
712    /// Parse a `CASE` statement.
713    ///
714    /// See [Statement::Case]
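    ///
    /// An illustrative example of the procedural `CASE` syntax this method
    /// accepts (the exact SQL below is an assumption for illustration only):
    /// ```sql
    /// CASE
    ///   WHEN x > 0 THEN SELECT 'positive';
    ///   ELSE SELECT 'non-positive';
    /// END CASE;
    /// ```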
715    pub fn parse_case_stmt(&mut self) -> Result<Statement, ParserError> {
716        let case_token = self.expect_keyword(Keyword::CASE)?;
717
718        let match_expr = if self.peek_keyword(Keyword::WHEN) {
719            None
720        } else {
721            Some(self.parse_expr()?)
722        };
723
724        self.expect_keyword_is(Keyword::WHEN)?;
725        let when_blocks = self.parse_keyword_separated(Keyword::WHEN, |parser| {
726            parser.parse_conditional_statement_block(&[Keyword::WHEN, Keyword::ELSE, Keyword::END])
727        })?;
728
729        let else_block = if self.parse_keyword(Keyword::ELSE) {
730            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
731        } else {
732            None
733        };
734
735        let mut end_case_token = self.expect_keyword(Keyword::END)?;
736        if self.peek_keyword(Keyword::CASE) {
737            end_case_token = self.expect_keyword(Keyword::CASE)?;
738        }
739
740        Ok(Statement::Case(CaseStatement {
741            case_token: AttachedToken(case_token),
742            match_expr,
743            when_blocks,
744            else_block,
745            end_case_token: AttachedToken(end_case_token),
746        }))
747    }
748
749    /// Parse an `IF` statement.
750    ///
751    /// See [Statement::If]
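    ///
    /// An illustrative example of accepted syntax (assumed for illustration only):
    /// ```sql
    /// IF x > 0 THEN SELECT 1; ELSEIF x < 0 THEN SELECT 2; ELSE SELECT 3; END IF;
    /// ```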
752    pub fn parse_if_stmt(&mut self) -> Result<Statement, ParserError> {
753        self.expect_keyword_is(Keyword::IF)?;
754        let if_block = self.parse_conditional_statement_block(&[
755            Keyword::ELSE,
756            Keyword::ELSEIF,
757            Keyword::END,
758        ])?;
759
760        let elseif_blocks = if self.parse_keyword(Keyword::ELSEIF) {
761            self.parse_keyword_separated(Keyword::ELSEIF, |parser| {
762                parser.parse_conditional_statement_block(&[
763                    Keyword::ELSEIF,
764                    Keyword::ELSE,
765                    Keyword::END,
766                ])
767            })?
768        } else {
769            vec![]
770        };
771
772        let else_block = if self.parse_keyword(Keyword::ELSE) {
773            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
774        } else {
775            None
776        };
777
778        self.expect_keyword_is(Keyword::END)?;
779        let end_token = self.expect_keyword(Keyword::IF)?;
780
781        Ok(Statement::If(IfStatement {
782            if_block,
783            elseif_blocks,
784            else_block,
785            end_token: Some(AttachedToken(end_token)),
786        }))
787    }
788
789    /// Parse a `WHILE` statement.
790    ///
791    /// See [Statement::While]
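    ///
    /// An illustrative example of accepted syntax (assumed for illustration only):
    /// ```sql
    /// WHILE x < 10 BEGIN SELECT 1; END
    /// ```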
792    fn parse_while(&mut self) -> Result<Statement, ParserError> {
793        self.expect_keyword_is(Keyword::WHILE)?;
794        let while_block = self.parse_conditional_statement_block(&[Keyword::END])?;
795
796        Ok(Statement::While(WhileStatement { while_block }))
797    }
798
    /// Parses an expression and an associated list of statements
    /// belonging to a conditional statement such as `IF`, `WHILE`, or `WHEN`.
801    ///
802    /// Example:
803    /// ```sql
804    /// IF condition THEN statement1; statement2;
805    /// ```
806    fn parse_conditional_statement_block(
807        &mut self,
808        terminal_keywords: &[Keyword],
809    ) -> Result<ConditionalStatementBlock, ParserError> {
810        let start_token = self.get_current_token().clone(); // self.expect_keyword(keyword)?;
811        let mut then_token = None;
812
813        let condition = match &start_token.token {
814            Token::Word(w) if w.keyword == Keyword::ELSE => None,
815            Token::Word(w) if w.keyword == Keyword::WHILE => {
816                let expr = self.parse_expr()?;
817                Some(expr)
818            }
819            _ => {
820                let expr = self.parse_expr()?;
821                then_token = Some(AttachedToken(self.expect_keyword(Keyword::THEN)?));
822                Some(expr)
823            }
824        };
825
826        let conditional_statements = self.parse_conditional_statements(terminal_keywords)?;
827
828        Ok(ConditionalStatementBlock {
829            start_token: AttachedToken(start_token),
830            condition,
831            then_token,
832            conditional_statements,
833        })
834    }
835
    /// Parse a BEGIN/END block or a sequence of statements.
    /// This could be inside of a conditional (IF, CASE, WHILE, etc.) or an object
    /// body that is optionally delimited by BEGIN/END and contains one or more statements.
838    pub(crate) fn parse_conditional_statements(
839        &mut self,
840        terminal_keywords: &[Keyword],
841    ) -> Result<ConditionalStatements, ParserError> {
842        let conditional_statements = if self.peek_keyword(Keyword::BEGIN) {
843            let begin_token = self.expect_keyword(Keyword::BEGIN)?;
844            let statements = self.parse_statement_list(terminal_keywords)?;
845            let end_token = self.expect_keyword(Keyword::END)?;
846
847            ConditionalStatements::BeginEnd(BeginEndStatements {
848                begin_token: AttachedToken(begin_token),
849                statements,
850                end_token: AttachedToken(end_token),
851            })
852        } else {
853            ConditionalStatements::Sequence {
854                statements: self.parse_statement_list(terminal_keywords)?,
855            }
856        };
857        Ok(conditional_statements)
858    }
859
860    /// Parse a `RAISE` statement.
861    ///
862    /// See [Statement::Raise]
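    ///
    /// Illustrative examples of accepted syntax (assumed for illustration only):
    /// ```sql
    /// RAISE USING MESSAGE = 'error message';
    /// RAISE 'error message';
    /// ```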
863    pub fn parse_raise_stmt(&mut self) -> Result<Statement, ParserError> {
864        self.expect_keyword_is(Keyword::RAISE)?;
865
866        let value = if self.parse_keywords(&[Keyword::USING, Keyword::MESSAGE]) {
867            self.expect_token(&Token::Eq)?;
868            Some(RaiseStatementValue::UsingMessage(self.parse_expr()?))
869        } else {
870            self.maybe_parse(|parser| parser.parse_expr().map(RaiseStatementValue::Expr))?
871        };
872
873        Ok(Statement::Raise(RaiseStatement { value }))
874    }
875
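    /// Parse a `COMMENT ON ...` statement.
    ///
    /// Illustrative examples of accepted syntax (assumed for illustration only;
    /// this statement is dialect specific, e.g. PostgreSQL and Snowflake):
    /// ```sql
    /// COMMENT ON TABLE my_table IS 'a table comment';
    /// COMMENT ON COLUMN my_table.my_column IS NULL;
    /// ```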
876    pub fn parse_comment(&mut self) -> Result<Statement, ParserError> {
877        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
878
879        self.expect_keyword_is(Keyword::ON)?;
880        let token = self.next_token();
881
882        let (object_type, object_name) = match token.token {
883            Token::Word(w) if w.keyword == Keyword::COLUMN => {
884                (CommentObject::Column, self.parse_object_name(false)?)
885            }
886            Token::Word(w) if w.keyword == Keyword::TABLE => {
887                (CommentObject::Table, self.parse_object_name(false)?)
888            }
889            Token::Word(w) if w.keyword == Keyword::EXTENSION => {
890                (CommentObject::Extension, self.parse_object_name(false)?)
891            }
892            Token::Word(w) if w.keyword == Keyword::SCHEMA => {
893                (CommentObject::Schema, self.parse_object_name(false)?)
894            }
895            Token::Word(w) if w.keyword == Keyword::DATABASE => {
896                (CommentObject::Database, self.parse_object_name(false)?)
897            }
898            Token::Word(w) if w.keyword == Keyword::USER => {
899                (CommentObject::User, self.parse_object_name(false)?)
900            }
901            Token::Word(w) if w.keyword == Keyword::ROLE => {
902                (CommentObject::Role, self.parse_object_name(false)?)
903            }
904            _ => self.expected("comment object_type", token)?,
905        };
906
907        self.expect_keyword_is(Keyword::IS)?;
908        let comment = if self.parse_keyword(Keyword::NULL) {
909            None
910        } else {
911            Some(self.parse_literal_string()?)
912        };
913        Ok(Statement::Comment {
914            object_type,
915            object_name,
916            comment,
917            if_exists,
918        })
919    }
920
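    /// Parse a MySQL `FLUSH` statement.
    ///
    /// Illustrative examples of accepted syntax (assumed for illustration only):
    /// ```sql
    /// FLUSH LOCAL BINARY LOGS;
    /// FLUSH TABLES t1, t2 WITH READ LOCK;
    /// ```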
921    pub fn parse_flush(&mut self) -> Result<Statement, ParserError> {
922        let mut channel = None;
923        let mut tables: Vec<ObjectName> = vec![];
924        let mut read_lock = false;
925        let mut export = false;
926
927        if !dialect_of!(self is MySqlDialect | GenericDialect) {
928            return parser_err!("Unsupported statement FLUSH", self.peek_token().span.start);
929        }
930
931        let location = if self.parse_keyword(Keyword::NO_WRITE_TO_BINLOG) {
932            Some(FlushLocation::NoWriteToBinlog)
933        } else if self.parse_keyword(Keyword::LOCAL) {
934            Some(FlushLocation::Local)
935        } else {
936            None
937        };
938
939        let object_type = if self.parse_keywords(&[Keyword::BINARY, Keyword::LOGS]) {
940            FlushType::BinaryLogs
941        } else if self.parse_keywords(&[Keyword::ENGINE, Keyword::LOGS]) {
942            FlushType::EngineLogs
943        } else if self.parse_keywords(&[Keyword::ERROR, Keyword::LOGS]) {
944            FlushType::ErrorLogs
945        } else if self.parse_keywords(&[Keyword::GENERAL, Keyword::LOGS]) {
946            FlushType::GeneralLogs
947        } else if self.parse_keywords(&[Keyword::HOSTS]) {
948            FlushType::Hosts
949        } else if self.parse_keyword(Keyword::PRIVILEGES) {
950            FlushType::Privileges
951        } else if self.parse_keyword(Keyword::OPTIMIZER_COSTS) {
952            FlushType::OptimizerCosts
953        } else if self.parse_keywords(&[Keyword::RELAY, Keyword::LOGS]) {
954            if self.parse_keywords(&[Keyword::FOR, Keyword::CHANNEL]) {
955                channel = Some(self.parse_object_name(false).unwrap().to_string());
956            }
957            FlushType::RelayLogs
958        } else if self.parse_keywords(&[Keyword::SLOW, Keyword::LOGS]) {
959            FlushType::SlowLogs
960        } else if self.parse_keyword(Keyword::STATUS) {
961            FlushType::Status
962        } else if self.parse_keyword(Keyword::USER_RESOURCES) {
963            FlushType::UserResources
964        } else if self.parse_keywords(&[Keyword::LOGS]) {
965            FlushType::Logs
966        } else if self.parse_keywords(&[Keyword::TABLES]) {
967            loop {
968                let next_token = self.next_token();
969                match &next_token.token {
970                    Token::Word(w) => match w.keyword {
971                        Keyword::WITH => {
972                            read_lock = self.parse_keywords(&[Keyword::READ, Keyword::LOCK]);
973                        }
974                        Keyword::FOR => {
975                            export = self.parse_keyword(Keyword::EXPORT);
976                        }
977                        Keyword::NoKeyword => {
978                            self.prev_token();
979                            tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
980                        }
981                        _ => {}
982                    },
983                    _ => {
984                        break;
985                    }
986                }
987            }
988
989            FlushType::Tables
990        } else {
991            return self.expected(
992                "BINARY LOGS, ENGINE LOGS, ERROR LOGS, GENERAL LOGS, HOSTS, LOGS, PRIVILEGES, OPTIMIZER_COSTS,\
993                 RELAY LOGS [FOR CHANNEL channel], SLOW LOGS, STATUS, USER_RESOURCES",
994                self.peek_token(),
995            );
996        };
997
998        Ok(Statement::Flush {
999            object_type,
1000            location,
1001            channel,
1002            read_lock,
1003            export,
1004            tables,
1005        })
1006    }
1007
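    /// Parse a Hive `MSCK` (metastore check) statement.
    ///
    /// Illustrative example of accepted syntax (assumed for illustration only):
    /// ```sql
    /// MSCK REPAIR TABLE my_table ADD PARTITIONS;
    /// ```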
1008    pub fn parse_msck(&mut self) -> Result<Statement, ParserError> {
1009        let repair = self.parse_keyword(Keyword::REPAIR);
1010        self.expect_keyword_is(Keyword::TABLE)?;
1011        let table_name = self.parse_object_name(false)?;
1012        let partition_action = self
1013            .maybe_parse(|parser| {
1014                let pa = match parser.parse_one_of_keywords(&[
1015                    Keyword::ADD,
1016                    Keyword::DROP,
1017                    Keyword::SYNC,
1018                ]) {
1019                    Some(Keyword::ADD) => Some(AddDropSync::ADD),
1020                    Some(Keyword::DROP) => Some(AddDropSync::DROP),
1021                    Some(Keyword::SYNC) => Some(AddDropSync::SYNC),
1022                    _ => None,
1023                };
1024                parser.expect_keyword_is(Keyword::PARTITIONS)?;
1025                Ok(pa)
1026            })?
1027            .unwrap_or_default();
1028        Ok(Msck {
1029            repair,
1030            table_name,
1031            partition_action,
1032        }
1033        .into())
1034    }
1035
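    /// Parse a `TRUNCATE` statement.
    ///
    /// Illustrative examples of accepted syntax (assumed for illustration only):
    /// ```sql
    /// TRUNCATE TABLE t1;
    /// TRUNCATE TABLE ONLY t1, t2 RESTART IDENTITY CASCADE;
    /// ```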
1036    pub fn parse_truncate(&mut self) -> Result<Statement, ParserError> {
1037        let table = self.parse_keyword(Keyword::TABLE);
1038
1039        let table_names = self
1040            .parse_comma_separated(|p| {
1041                Ok((p.parse_keyword(Keyword::ONLY), p.parse_object_name(false)?))
1042            })?
1043            .into_iter()
1044            .map(|(only, name)| TruncateTableTarget { name, only })
1045            .collect();
1046
1047        let mut partitions = None;
1048        if self.parse_keyword(Keyword::PARTITION) {
1049            self.expect_token(&Token::LParen)?;
1050            partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
1051            self.expect_token(&Token::RParen)?;
1052        }
1053
1054        let mut identity = None;
1055        let mut cascade = None;
1056
1057        if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
1058            identity = if self.parse_keywords(&[Keyword::RESTART, Keyword::IDENTITY]) {
1059                Some(TruncateIdentityOption::Restart)
1060            } else if self.parse_keywords(&[Keyword::CONTINUE, Keyword::IDENTITY]) {
1061                Some(TruncateIdentityOption::Continue)
1062            } else {
1063                None
1064            };
1065
1066            cascade = self.parse_cascade_option();
1067        };
1068
1069        let on_cluster = self.parse_optional_on_cluster()?;
1070
1071        Ok(Truncate {
1072            table_names,
1073            partitions,
1074            table,
1075            identity,
1076            cascade,
1077            on_cluster,
1078        }
1079        .into())
1080    }
1081
1082    fn parse_cascade_option(&mut self) -> Option<CascadeOption> {
1083        if self.parse_keyword(Keyword::CASCADE) {
1084            Some(CascadeOption::Cascade)
1085        } else if self.parse_keyword(Keyword::RESTRICT) {
1086            Some(CascadeOption::Restrict)
1087        } else {
1088            None
1089        }
1090    }
1091
1092    pub fn parse_attach_duckdb_database_options(
1093        &mut self,
1094    ) -> Result<Vec<AttachDuckDBDatabaseOption>, ParserError> {
1095        if !self.consume_token(&Token::LParen) {
1096            return Ok(vec![]);
1097        }
1098
1099        let mut options = vec![];
1100        loop {
1101            if self.parse_keyword(Keyword::READ_ONLY) {
1102                let boolean = if self.parse_keyword(Keyword::TRUE) {
1103                    Some(true)
1104                } else if self.parse_keyword(Keyword::FALSE) {
1105                    Some(false)
1106                } else {
1107                    None
1108                };
1109                options.push(AttachDuckDBDatabaseOption::ReadOnly(boolean));
1110            } else if self.parse_keyword(Keyword::TYPE) {
1111                let ident = self.parse_identifier()?;
1112                options.push(AttachDuckDBDatabaseOption::Type(ident));
1113            } else {
1114                return self.expected("expected one of: ), READ_ONLY, TYPE", self.peek_token());
1115            };
1116
1117            if self.consume_token(&Token::RParen) {
1118                return Ok(options);
1119            } else if self.consume_token(&Token::Comma) {
1120                continue;
1121            } else {
1122                return self.expected("expected one of: ')', ','", self.peek_token());
1123            }
1124        }
1125    }
1126
1127    pub fn parse_attach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
1128        let database = self.parse_keyword(Keyword::DATABASE);
1129        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
1130        let database_path = self.parse_identifier()?;
1131        let database_alias = if self.parse_keyword(Keyword::AS) {
1132            Some(self.parse_identifier()?)
1133        } else {
1134            None
1135        };
1136
1137        let attach_options = self.parse_attach_duckdb_database_options()?;
1138        Ok(Statement::AttachDuckDBDatabase {
1139            if_not_exists,
1140            database,
1141            database_path,
1142            database_alias,
1143            attach_options,
1144        })
1145    }
1146
1147    pub fn parse_detach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
1148        let database = self.parse_keyword(Keyword::DATABASE);
1149        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
1150        let database_alias = self.parse_identifier()?;
1151        Ok(Statement::DetachDuckDBDatabase {
1152            if_exists,
1153            database,
1154            database_alias,
1155        })
1156    }
1157
1158    pub fn parse_attach_database(&mut self) -> Result<Statement, ParserError> {
1159        let database = self.parse_keyword(Keyword::DATABASE);
1160        let database_file_name = self.parse_expr()?;
1161        self.expect_keyword_is(Keyword::AS)?;
1162        let schema_name = self.parse_identifier()?;
1163        Ok(Statement::AttachDatabase {
1164            database,
1165            schema_name,
1166            database_file_name,
1167        })
1168    }
1169
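    /// Parse an `ANALYZE` statement (Hive/Spark style).
    ///
    /// Illustrative example of accepted syntax (assumed for illustration only):
    /// ```sql
    /// ANALYZE TABLE my_table PARTITION (dt = '2024-01-01') COMPUTE STATISTICS;
    /// ```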
1170    pub fn parse_analyze(&mut self) -> Result<Statement, ParserError> {
1171        let has_table_keyword = self.parse_keyword(Keyword::TABLE);
1172        let table_name = self.parse_object_name(false)?;
1173        let mut for_columns = false;
1174        let mut cache_metadata = false;
1175        let mut noscan = false;
1176        let mut partitions = None;
1177        let mut compute_statistics = false;
1178        let mut columns = vec![];
1179        loop {
1180            match self.parse_one_of_keywords(&[
1181                Keyword::PARTITION,
1182                Keyword::FOR,
1183                Keyword::CACHE,
1184                Keyword::NOSCAN,
1185                Keyword::COMPUTE,
1186            ]) {
1187                Some(Keyword::PARTITION) => {
1188                    self.expect_token(&Token::LParen)?;
1189                    partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
1190                    self.expect_token(&Token::RParen)?;
1191                }
1192                Some(Keyword::NOSCAN) => noscan = true,
1193                Some(Keyword::FOR) => {
1194                    self.expect_keyword_is(Keyword::COLUMNS)?;
1195
1196                    columns = self
1197                        .maybe_parse(|parser| {
1198                            parser.parse_comma_separated(|p| p.parse_identifier())
1199                        })?
1200                        .unwrap_or_default();
1201                    for_columns = true
1202                }
1203                Some(Keyword::CACHE) => {
1204                    self.expect_keyword_is(Keyword::METADATA)?;
1205                    cache_metadata = true
1206                }
1207                Some(Keyword::COMPUTE) => {
1208                    self.expect_keyword_is(Keyword::STATISTICS)?;
1209                    compute_statistics = true
1210                }
1211                _ => break,
1212            }
1213        }
1214
1215        Ok(Analyze {
1216            has_table_keyword,
1217            table_name,
1218            for_columns,
1219            columns,
1220            partitions,
1221            cache_metadata,
1222            noscan,
1223            compute_statistics,
1224        }
1225        .into())
1226    }
1227
1228    /// Parse a new expression including wildcard & qualified wildcard.
1229    pub fn parse_wildcard_expr(&mut self) -> Result<Expr, ParserError> {
1230        let index = self.index;
1231
1232        let next_token = self.next_token();
1233        match next_token.token {
1234            t @ (Token::Word(_) | Token::SingleQuotedString(_)) => {
1235                if self.peek_token().token == Token::Period {
1236                    let mut id_parts: Vec<Ident> = vec![match t {
1237                        Token::Word(w) => w.into_ident(next_token.span),
1238                        Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
1239                        _ => {
1240                            return Err(ParserError::ParserError(
1241                                "Internal parser error: unexpected token type".to_string(),
1242                            ))
1243                        }
1244                    }];
1245
1246                    while self.consume_token(&Token::Period) {
1247                        let next_token = self.next_token();
1248                        match next_token.token {
1249                            Token::Word(w) => id_parts.push(w.into_ident(next_token.span)),
1250                            Token::SingleQuotedString(s) => {
1251                                // SQLite has single-quoted identifiers
1252                                id_parts.push(Ident::with_quote('\'', s))
1253                            }
1254                            Token::Mul => {
1255                                return Ok(Expr::QualifiedWildcard(
1256                                    ObjectName::from(id_parts),
1257                                    AttachedToken(next_token),
1258                                ));
1259                            }
1260                            _ => {
1261                                return self
1262                                    .expected("an identifier or a '*' after '.'", next_token);
1263                            }
1264                        }
1265                    }
1266                }
1267            }
1268            Token::Mul => {
1269                return Ok(Expr::Wildcard(AttachedToken(next_token)));
1270            }
1271            // Handle parenthesized wildcard: (*)
1272            Token::LParen => {
1273                let [maybe_mul, maybe_rparen] = self.peek_tokens_ref();
1274                if maybe_mul.token == Token::Mul && maybe_rparen.token == Token::RParen {
1275                    let mul_token = self.next_token(); // consume Mul
1276                    self.next_token(); // consume RParen
1277                    return Ok(Expr::Wildcard(AttachedToken(mul_token)));
1278                }
1279            }
1280            _ => (),
1281        };
1282
1283        self.index = index;
1284        self.parse_expr()
1285    }
1286
1287    /// Parse a new expression.
1288    pub fn parse_expr(&mut self) -> Result<Expr, ParserError> {
1289        self.parse_subexpr(self.dialect.prec_unknown())
1290    }
1291
1292    pub fn parse_expr_with_alias_and_order_by(
1293        &mut self,
1294    ) -> Result<ExprWithAliasAndOrderBy, ParserError> {
1295        let expr = self.parse_expr()?;
1296
1297        fn validator(explicit: bool, kw: &Keyword, _parser: &mut Parser) -> bool {
1298            explicit || !&[Keyword::ASC, Keyword::DESC, Keyword::GROUP].contains(kw)
1299        }
1300        let alias = self.parse_optional_alias_inner(None, validator)?;
1301        let order_by = OrderByOptions {
1302            asc: self.parse_asc_desc(),
1303            nulls_first: None,
1304        };
1305        Ok(ExprWithAliasAndOrderBy {
1306            expr: ExprWithAlias { expr, alias },
1307            order_by,
1308        })
1309    }
1310
1311    /// Parse tokens until the precedence changes.
1312    pub fn parse_subexpr(&mut self, precedence: u8) -> Result<Expr, ParserError> {
1313        let _guard = self.recursion_counter.try_decrease()?;
1314        debug!("parsing expr");
1315        let mut expr = self.parse_prefix()?;
1316
1317        expr = self.parse_compound_expr(expr, vec![])?;
1318
1319        debug!("prefix: {expr:?}");
1320        loop {
1321            let next_precedence = self.get_next_precedence()?;
1322            debug!("next precedence: {next_precedence:?}");
1323
1324            if precedence >= next_precedence {
1325                break;
1326            }
1327
1328            // The period operator is handled exclusively by the
1329            // compound field access parsing.
1330            if Token::Period == self.peek_token_ref().token {
1331                break;
1332            }
1333
1334            expr = self.parse_infix(expr, next_precedence)?;
1335        }
1336        Ok(expr)
1337    }
1338
1339    pub fn parse_assert(&mut self) -> Result<Statement, ParserError> {
1340        let condition = self.parse_expr()?;
1341        let message = if self.parse_keyword(Keyword::AS) {
1342            Some(self.parse_expr()?)
1343        } else {
1344            None
1345        };
1346
1347        Ok(Statement::Assert { condition, message })
1348    }
1349
1350    pub fn parse_savepoint(&mut self) -> Result<Statement, ParserError> {
1351        let name = self.parse_identifier()?;
1352        Ok(Statement::Savepoint { name })
1353    }
1354
1355    pub fn parse_release(&mut self) -> Result<Statement, ParserError> {
1356        let _ = self.parse_keyword(Keyword::SAVEPOINT);
1357        let name = self.parse_identifier()?;
1358
1359        Ok(Statement::ReleaseSavepoint { name })
1360    }
1361
1362    pub fn parse_listen(&mut self) -> Result<Statement, ParserError> {
1363        let channel = self.parse_identifier()?;
1364        Ok(Statement::LISTEN { channel })
1365    }
1366
1367    pub fn parse_unlisten(&mut self) -> Result<Statement, ParserError> {
1368        let channel = if self.consume_token(&Token::Mul) {
1369            Ident::new(Expr::Wildcard(AttachedToken::empty()).to_string())
1370        } else {
1371            match self.parse_identifier() {
1372                Ok(expr) => expr,
1373                _ => {
1374                    self.prev_token();
1375                    return self.expected("wildcard or identifier", self.peek_token());
1376                }
1377            }
1378        };
1379        Ok(Statement::UNLISTEN { channel })
1380    }
1381
1382    pub fn parse_notify(&mut self) -> Result<Statement, ParserError> {
1383        let channel = self.parse_identifier()?;
1384        let payload = if self.consume_token(&Token::Comma) {
1385            Some(self.parse_literal_string()?)
1386        } else {
1387            None
1388        };
1389        Ok(Statement::NOTIFY { channel, payload })
1390    }
1391
1392    /// Parses a `RENAME TABLE` statement. See [Statement::RenameTable]
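    ///
    /// Illustrative example of accepted syntax:
    /// ```sql
    /// RENAME TABLE old_table TO new_table;
    /// ```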
1393    pub fn parse_rename(&mut self) -> Result<Statement, ParserError> {
1394        if self.peek_keyword(Keyword::TABLE) {
1395            self.expect_keyword(Keyword::TABLE)?;
1396            let rename_tables = self.parse_comma_separated(|parser| {
1397                let old_name = parser.parse_object_name(false)?;
1398                parser.expect_keyword(Keyword::TO)?;
1399                let new_name = parser.parse_object_name(false)?;
1400
1401                Ok(RenameTable { old_name, new_name })
1402            })?;
1403            Ok(Statement::RenameTable(rename_tables))
1404        } else {
1405            self.expected("KEYWORD `TABLE` after RENAME", self.peek_token())
1406        }
1407    }
1408
1409    /// Tries to parse an expression by matching the specified word to known keywords that have a special meaning in the dialect.
    /// Returns `None` if no match is found.
1411    fn parse_expr_prefix_by_reserved_word(
1412        &mut self,
1413        w: &Word,
1414        w_span: Span,
1415    ) -> Result<Option<Expr>, ParserError> {
1416        match w.keyword {
1417            Keyword::TRUE | Keyword::FALSE if self.dialect.supports_boolean_literals() => {
1418                self.prev_token();
1419                Ok(Some(Expr::Value(self.parse_value()?)))
1420            }
1421            Keyword::NULL => {
1422                self.prev_token();
1423                Ok(Some(Expr::Value(self.parse_value()?)))
1424            }
1425            Keyword::CURRENT_CATALOG
1426            | Keyword::CURRENT_USER
1427            | Keyword::SESSION_USER
1428            | Keyword::USER
1429            if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
1430                {
1431                    Ok(Some(Expr::Function(Function {
1432                        name: ObjectName::from(vec![w.clone().into_ident(w_span)]),
1433                        uses_odbc_syntax: false,
1434                        parameters: FunctionArguments::None,
1435                        args: FunctionArguments::None,
1436                        null_treatment: None,
1437                        filter: None,
1438                        over: None,
1439                        within_group: vec![],
1440                    })))
1441                }
1442            Keyword::CURRENT_TIMESTAMP
1443            | Keyword::CURRENT_TIME
1444            | Keyword::CURRENT_DATE
1445            | Keyword::LOCALTIME
1446            | Keyword::LOCALTIMESTAMP => {
1447                Ok(Some(self.parse_time_functions(ObjectName::from(vec![w.clone().into_ident(w_span)]))?))
1448            }
1449            Keyword::CASE => Ok(Some(self.parse_case_expr()?)),
1450            Keyword::CONVERT => Ok(Some(self.parse_convert_expr(false)?)),
1451            Keyword::TRY_CONVERT if self.dialect.supports_try_convert() => Ok(Some(self.parse_convert_expr(true)?)),
1452            Keyword::CAST => Ok(Some(self.parse_cast_expr(CastKind::Cast)?)),
1453            Keyword::TRY_CAST => Ok(Some(self.parse_cast_expr(CastKind::TryCast)?)),
1454            Keyword::SAFE_CAST => Ok(Some(self.parse_cast_expr(CastKind::SafeCast)?)),
1455            Keyword::EXISTS
1456            // Databricks has a function named `exists`; only treat EXISTS as the subquery operator when it is followed by SELECT or WITH.
1457            if !dialect_of!(self is DatabricksDialect)
1458                || matches!(
1459                        self.peek_nth_token_ref(1).token,
1460                        Token::Word(Word {
1461                            keyword: Keyword::SELECT | Keyword::WITH,
1462                            ..
1463                        })
1464                    ) =>
1465                {
1466                    Ok(Some(self.parse_exists_expr(false)?))
1467                }
1468            Keyword::EXTRACT => Ok(Some(self.parse_extract_expr()?)),
1469            Keyword::CEIL => Ok(Some(self.parse_ceil_floor_expr(true)?)),
1470            Keyword::FLOOR => Ok(Some(self.parse_ceil_floor_expr(false)?)),
1471            Keyword::POSITION if self.peek_token_ref().token == Token::LParen => {
1472                Ok(Some(self.parse_position_expr(w.clone().into_ident(w_span))?))
1473            }
1474            Keyword::SUBSTR | Keyword::SUBSTRING => {
1475                self.prev_token();
1476                Ok(Some(self.parse_substring()?))
1477            }
1478            Keyword::OVERLAY => Ok(Some(self.parse_overlay_expr()?)),
1479            Keyword::TRIM => Ok(Some(self.parse_trim_expr()?)),
1480            Keyword::INTERVAL => Ok(Some(self.parse_interval()?)),
1481            // Treat ARRAY[1,2,3] as an array [1,2,3]; otherwise try to parse it as a subquery or a function call
1482            Keyword::ARRAY if *self.peek_token_ref() == Token::LBracket => {
1483                self.expect_token(&Token::LBracket)?;
1484                Ok(Some(self.parse_array_expr(true)?))
1485            }
1486            Keyword::ARRAY
1487            if self.peek_token() == Token::LParen
1488                && !dialect_of!(self is ClickHouseDialect | DatabricksDialect) =>
1489                {
1490                    self.expect_token(&Token::LParen)?;
1491                    let query = self.parse_query()?;
1492                    self.expect_token(&Token::RParen)?;
1493                    Ok(Some(Expr::Function(Function {
1494                        name: ObjectName::from(vec![w.clone().into_ident(w_span)]),
1495                        uses_odbc_syntax: false,
1496                        parameters: FunctionArguments::None,
1497                        args: FunctionArguments::Subquery(query),
1498                        filter: None,
1499                        null_treatment: None,
1500                        over: None,
1501                        within_group: vec![],
1502                    })))
1503                }
1504            Keyword::NOT => Ok(Some(self.parse_not()?)),
1505            Keyword::MATCH if self.dialect.supports_match_against() => {
1506                Ok(Some(self.parse_match_against()?))
1507            }
1508            Keyword::STRUCT if self.dialect.supports_struct_literal() => {
1509                let struct_expr = self.parse_struct_literal()?;
1510                Ok(Some(struct_expr))
1511            }
1512            Keyword::PRIOR if matches!(self.state, ParserState::ConnectBy) => {
1513                let expr = self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?;
1514                Ok(Some(Expr::Prior(Box::new(expr))))
1515            }
1516            Keyword::MAP if *self.peek_token_ref() == Token::LBrace && self.dialect.support_map_literal_syntax() => {
1517                Ok(Some(self.parse_duckdb_map_literal()?))
1518            }
1519            _ if self.dialect.supports_geometric_types() => match w.keyword {
1520                Keyword::CIRCLE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Circle)?)),
1521                Keyword::BOX => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricBox)?)),
1522                Keyword::PATH => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricPath)?)),
1523                Keyword::LINE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Line)?)),
1524                Keyword::LSEG => Ok(Some(self.parse_geometric_type(GeometricTypeKind::LineSegment)?)),
1525                Keyword::POINT => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Point)?)),
1526                Keyword::POLYGON => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Polygon)?)),
1527                _ => Ok(None),
1528            },
1529            _ => Ok(None),
1530        }
1531    }
1532
1533    /// Tries to parse an expression from a word that is not known to have a special meaning in the dialect.
1534    fn parse_expr_prefix_by_unreserved_word(
1535        &mut self,
1536        w: &Word,
1537        w_span: Span,
1538    ) -> Result<Expr, ParserError> {
1539        match self.peek_token().token {
1540            Token::LParen if !self.peek_outer_join_operator() => {
1541                let id_parts = vec![w.clone().into_ident(w_span)];
1542                self.parse_function(ObjectName::from(id_parts))
1543            }
1544            // string introducer https://dev.mysql.com/doc/refman/8.0/en/charset-introducer.html
1545            Token::SingleQuotedString(_)
1546            | Token::DoubleQuotedString(_)
1547            | Token::HexStringLiteral(_)
1548                if w.value.starts_with('_') =>
1549            {
1550                Ok(Expr::Prefixed {
1551                    prefix: w.clone().into_ident(w_span),
1552                    value: self.parse_introduced_string_expr()?.into(),
1553                })
1554            }
1566            Token::Arrow if self.dialect.supports_lambda_functions() => {
1567                self.expect_token(&Token::Arrow)?;
1568                Ok(Expr::Lambda(LambdaFunction {
1569                    params: OneOrManyWithParens::One(w.clone().into_ident(w_span)),
1570                    body: Box::new(self.parse_expr()?),
1571                }))
1572            }
1573            _ => Ok(Expr::Identifier(w.clone().into_ident(w_span))),
1574        }
1575    }
1576
1577    /// Parse an expression prefix.
1578    pub fn parse_prefix(&mut self) -> Result<Expr, ParserError> {
1579        // allow the dialect to override prefix parsing
1580        if let Some(prefix) = self.dialect.parse_prefix(self) {
1581            return prefix;
1582        }
1583
1584        // PostgreSQL allows any string literal to be preceded by a type name, indicating that the
1585        // string literal represents a literal of that type. Some examples:
1586        //
1587        //      DATE '2020-05-20'
1588        //      TIMESTAMP WITH TIME ZONE '2020-05-20 7:43:54'
1589        //      BOOL 'true'
1590        //
1591        // The first two are standard SQL, while the latter is a PostgreSQL extension. Complicating
1592        // matters is the fact that INTERVAL string literals may optionally be followed by special
1593        // keywords, e.g.:
1594        //
1595        //      INTERVAL '7' DAY
1596        //
1597        // Note also that naively `SELECT date` looks like a syntax error because the `date` type
1598        // name is not followed by a string literal, but in fact in PostgreSQL it is a valid
1599        // expression that should parse as the column name "date".
1600        let loc = self.peek_token_ref().span.start;
1601        let opt_expr = self.maybe_parse(|parser| {
1602            match parser.parse_data_type()? {
1603                DataType::Interval { .. } => parser.parse_interval(),
1604                // PostgreSQL allows almost any identifier to be used as custom data type name,
1605                // and we support that in `parse_data_type()`. But unlike Postgres we don't
1606                // have a list of globally reserved keywords (since they vary across dialects),
1607                // so given `NOT 'a' LIKE 'b'`, we'd accept `NOT` as a possible custom data type
1608                // name, resulting in `NOT 'a'` being recognized as a `TypedString` instead of
1609                // an unary negation `NOT ('a' LIKE 'b')`. To solve this, we don't accept the
1610                // `type 'string'` syntax for the custom data types at all.
1611                DataType::Custom(..) => parser_err!("dummy", loc),
1612                data_type => Ok(Expr::TypedString(TypedString {
1613                    data_type,
1614                    value: parser.parse_value()?,
1615                    uses_odbc_syntax: false,
1616                })),
1617            }
1618        })?;
1619
1620        if let Some(expr) = opt_expr {
1621            return Ok(expr);
1622        }
1623
1624        // Cache some dialect properties to avoid lifetime issues with the
1625        // next_token reference.
1626
1627        let dialect = self.dialect;
1628
1629        self.advance_token();
1630        let next_token_index = self.get_current_index();
1631        let next_token = self.get_current_token();
1632        let span = next_token.span;
1633        let expr = match &next_token.token {
1634            Token::Word(w) => {
1635                // The word we consumed may fall into one of two cases: it has a special meaning, or not.
1636                // For example, in Snowflake, the word `interval` may have two meanings depending on the context:
1637                // `SELECT CURRENT_DATE() + INTERVAL '1 DAY', MAX(interval) FROM tbl;`
1638                //                          ^^^^^^^^^^^^^^^^      ^^^^^^^^
1639                //                         interval expression   identifier
1640                //
1641                // We first try to parse the word and following tokens as a special expression, and if that fails,
1642                // we rollback and try to parse it as an identifier.
1643                let w = w.clone();
1644                match self.try_parse(|parser| parser.parse_expr_prefix_by_reserved_word(&w, span)) {
1645                    // This word indicated an expression prefix and parsing was successful
1646                    Ok(Some(expr)) => Ok(expr),
1647
1648                    // No expression prefix associated with this word
1649                    Ok(None) => Ok(self.parse_expr_prefix_by_unreserved_word(&w, span)?),
1650
1651                    // If parsing of the word as a special expression failed, we are facing two options:
1652                    // 1. The statement is malformed, e.g. `SELECT INTERVAL '1 DAI` (`DAI` instead of `DAY`)
1653                    // 2. The word is used as an identifier, e.g. `SELECT MAX(interval) FROM tbl`
1654                    // We first try to parse the word as an identifier and if that fails
1655                    // we rollback and return the parsing error we got from trying to parse a
1656                    // special expression (to maintain backwards compatibility of parsing errors).
1657                    Err(e) => {
1658                        if !self.dialect.is_reserved_for_identifier(w.keyword) {
1659                            if let Ok(Some(expr)) = self.maybe_parse(|parser| {
1660                                parser.parse_expr_prefix_by_unreserved_word(&w, span)
1661                            }) {
1662                                return Ok(expr);
1663                            }
1664                        }
1665                        return Err(e);
1666                    }
1667                }
1668            } // End of Token::Word
1669            // array `[1, 2, 3]`
1670            Token::LBracket => self.parse_array_expr(false),
1671            tok @ Token::Minus | tok @ Token::Plus => {
1672                let op = if *tok == Token::Plus {
1673                    UnaryOperator::Plus
1674                } else {
1675                    UnaryOperator::Minus
1676                };
1677                Ok(Expr::UnaryOp {
1678                    op,
1679                    expr: Box::new(
1680                        self.parse_subexpr(self.dialect.prec_value(Precedence::MulDivModOp))?,
1681                    ),
1682                })
1683            }
1684            Token::ExclamationMark if dialect.supports_bang_not_operator() => Ok(Expr::UnaryOp {
1685                op: UnaryOperator::BangNot,
1686                expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
1687            }),
1688            tok @ Token::DoubleExclamationMark
1689            | tok @ Token::PGSquareRoot
1690            | tok @ Token::PGCubeRoot
1691            | tok @ Token::AtSign
1692                if dialect_is!(dialect is PostgreSqlDialect) =>
1693            {
1694                let op = match tok {
1695                    Token::DoubleExclamationMark => UnaryOperator::PGPrefixFactorial,
1696                    Token::PGSquareRoot => UnaryOperator::PGSquareRoot,
1697                    Token::PGCubeRoot => UnaryOperator::PGCubeRoot,
1698                    Token::AtSign => UnaryOperator::PGAbs,
1699                    _ => {
1700                        return Err(ParserError::ParserError(
1701                            "Internal parser error: unexpected unary operator token".to_string(),
1702                        ))
1703                    }
1704                };
1705                Ok(Expr::UnaryOp {
1706                    op,
1707                    expr: Box::new(
1708                        self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1709                    ),
1710                })
1711            }
1712            Token::Tilde => Ok(Expr::UnaryOp {
1713                op: UnaryOperator::BitwiseNot,
1714                expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?),
1715            }),
1716            tok @ Token::Sharp
1717            | tok @ Token::AtDashAt
1718            | tok @ Token::AtAt
1719            | tok @ Token::QuestionMarkDash
1720            | tok @ Token::QuestionPipe
1721                if self.dialect.supports_geometric_types() =>
1722            {
1723                let op = match tok {
1724                    Token::Sharp => UnaryOperator::Hash,
1725                    Token::AtDashAt => UnaryOperator::AtDashAt,
1726                    Token::AtAt => UnaryOperator::DoubleAt,
1727                    Token::QuestionMarkDash => UnaryOperator::QuestionDash,
1728                    Token::QuestionPipe => UnaryOperator::QuestionPipe,
1729                    _ => {
1730                        return Err(ParserError::ParserError(format!(
1731                            "Unexpected token in unary operator parsing: {tok:?}"
1732                        )))
1733                    }
1734                };
1735                Ok(Expr::UnaryOp {
1736                    op,
1737                    expr: Box::new(
1738                        self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1739                    ),
1740                })
1741            }
1742            Token::EscapedStringLiteral(_) if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) =>
1743            {
1744                self.prev_token();
1745                Ok(Expr::Value(self.parse_value()?))
1746            }
1747            Token::UnicodeStringLiteral(_) => {
1748                self.prev_token();
1749                Ok(Expr::Value(self.parse_value()?))
1750            }
1751            Token::Number(_, _)
1752            | Token::SingleQuotedString(_)
1753            | Token::DoubleQuotedString(_)
1754            | Token::TripleSingleQuotedString(_)
1755            | Token::TripleDoubleQuotedString(_)
1756            | Token::DollarQuotedString(_)
1757            | Token::SingleQuotedByteStringLiteral(_)
1758            | Token::DoubleQuotedByteStringLiteral(_)
1759            | Token::TripleSingleQuotedByteStringLiteral(_)
1760            | Token::TripleDoubleQuotedByteStringLiteral(_)
1761            | Token::SingleQuotedRawStringLiteral(_)
1762            | Token::DoubleQuotedRawStringLiteral(_)
1763            | Token::TripleSingleQuotedRawStringLiteral(_)
1764            | Token::TripleDoubleQuotedRawStringLiteral(_)
1765            | Token::NationalStringLiteral(_)
1766            | Token::QuoteDelimitedStringLiteral(_)
1767            | Token::NationalQuoteDelimitedStringLiteral(_)
1768            | Token::HexStringLiteral(_) => {
1769                self.prev_token();
1770                Ok(Expr::Value(self.parse_value()?))
1771            }
1772            Token::LParen => {
1773                let expr =
1774                    if let Some(expr) = self.try_parse_expr_sub_query()? {
1775                        expr
1776                    } else if let Some(lambda) = self.try_parse_lambda()? {
1777                        return Ok(lambda);
1778                    } else {
1779                        let exprs = self.parse_comma_separated(Parser::parse_expr)?;
1780                        match exprs.len() {
1781                            0 => return Err(ParserError::ParserError(
1782                                "Internal parser error: parse_comma_separated returned empty list"
1783                                    .to_string(),
1784                            )),
1785                            1 => Expr::Nested(Box::new(exprs.into_iter().next().unwrap())),
1786                            _ => Expr::Tuple(exprs),
1787                        }
1788                    };
1789                self.expect_token(&Token::RParen)?;
1790                Ok(expr)
1791            }
1792            Token::Placeholder(_) | Token::Colon | Token::AtSign => {
1793                self.prev_token();
1794                Ok(Expr::Value(self.parse_value()?))
1795            }
1796            Token::LBrace => {
1797                self.prev_token();
1798                self.parse_lbrace_expr()
1799            }
1800            _ => self.expected_at("an expression", next_token_index),
1801        }?;
1802
1803        if !self.in_column_definition_state() && self.parse_keyword(Keyword::COLLATE) {
1804            Ok(Expr::Collate {
1805                expr: Box::new(expr),
1806                collation: self.parse_object_name(false)?,
1807            })
1808        } else {
1809            Ok(expr)
1810        }
1811    }
1812
1813    fn parse_geometric_type(&mut self, kind: GeometricTypeKind) -> Result<Expr, ParserError> {
1814        Ok(Expr::TypedString(TypedString {
1815            data_type: DataType::GeometricType(kind),
1816            value: self.parse_value()?,
1817            uses_odbc_syntax: false,
1818        }))
1819    }
1820
1821    /// Try to parse an [Expr::CompoundFieldAccess] like `a.b.c` or `a.b[1].c`.
1822    /// If all the fields are `Expr::Identifier`s, return an [Expr::CompoundIdentifier] instead.
1823    /// If only the root exists, return the root.
1824    /// Parses compound expressions which may be delimited by period
1825    /// or bracket notation.
1826    /// For example: `a.b.c`, `a.b[1]`.
1827    pub fn parse_compound_expr(
1828        &mut self,
1829        root: Expr,
1830        mut chain: Vec<AccessExpr>,
1831    ) -> Result<Expr, ParserError> {
1832        let mut ending_wildcard: Option<TokenWithSpan> = None;
1833        loop {
1834            if self.consume_token(&Token::Period) {
1835                let next_token = self.peek_token_ref();
1836                match &next_token.token {
1837                    Token::Mul => {
1838                        // Postgres explicitly allows funcnm(tablenm.*); for example, the
1839                        // function array_agg exercises this control flow
1840                        if dialect_of!(self is PostgreSqlDialect) {
1841                            ending_wildcard = Some(self.next_token());
1842                        } else {
1843                            // Put back the consumed `.` tokens before exiting.
1844                            // If this expression is being parsed in the
1845                            // context of a projection, then the `.*` could imply
1846                            // a wildcard expansion. For example:
1847                            // `SELECT STRUCT('foo').* FROM T`
1848                            self.prev_token(); // .
1849                        }
1850
1851                        break;
1852                    }
1853                    Token::SingleQuotedString(s) => {
1854                        let expr =
1855                            Expr::Identifier(Ident::with_quote_and_span('\'', next_token.span, s));
1856                        chain.push(AccessExpr::Dot(expr));
1857                        self.advance_token(); // The consumed string
1858                    }
1859                    // Fallback to parsing an arbitrary expression.
1860                    _ => match self.parse_subexpr(self.dialect.prec_value(Precedence::Period))? {
1861                        // If we get back a compound field access or identifier,
1862                        // we flatten the nested expression.
1863                        // For example if the current root is `foo`
1864                        // and we get back a compound identifier expression `bar.baz`
1865                        // The full expression should be `foo.bar.baz` (i.e.
1866                        // a root with an access chain with 2 entries) and not
1867                        // `foo.(bar.baz)` (i.e. a root with an access chain with
1868                        // 1 entry).
1869                        Expr::CompoundFieldAccess { root, access_chain } => {
1870                            chain.push(AccessExpr::Dot(*root));
1871                            chain.extend(access_chain);
1872                        }
1873                        Expr::CompoundIdentifier(parts) => chain
1874                            .extend(parts.into_iter().map(Expr::Identifier).map(AccessExpr::Dot)),
1875                        expr => {
1876                            chain.push(AccessExpr::Dot(expr));
1877                        }
1878                    },
1879                }
1880            } else if !self.dialect.supports_partiql()
1881                && self.peek_token_ref().token == Token::LBracket
1882            {
1883                self.parse_multi_dim_subscript(&mut chain)?;
1884            } else {
1885                break;
1886            }
1887        }
1888
1889        let tok_index = self.get_current_index();
1890        if let Some(wildcard_token) = ending_wildcard {
1891            if !Self::is_all_ident(&root, &chain) {
1892                return self.expected("an identifier or a '*' after '.'", self.peek_token());
1893            };
1894            Ok(Expr::QualifiedWildcard(
1895                ObjectName::from(Self::exprs_to_idents(root, chain)?),
1896                AttachedToken(wildcard_token),
1897            ))
1898        } else if self.maybe_parse_outer_join_operator() {
1899            if !Self::is_all_ident(&root, &chain) {
1900                return self.expected_at("column identifier before (+)", tok_index);
1901            };
1902            let expr = if chain.is_empty() {
1903                root
1904            } else {
1905                Expr::CompoundIdentifier(Self::exprs_to_idents(root, chain)?)
1906            };
1907            Ok(Expr::OuterJoin(expr.into()))
1908        } else {
1909            Self::build_compound_expr(root, chain)
1910        }
1911    }
1912
1913    /// Combines a root expression and access chain to form
1914    /// a compound expression. Which may be a [Expr::CompoundFieldAccess]
1915    /// or other special cased expressions like [Expr::CompoundIdentifier],
1916    /// [Expr::OuterJoin].
1917    fn build_compound_expr(
1918        root: Expr,
1919        mut access_chain: Vec<AccessExpr>,
1920    ) -> Result<Expr, ParserError> {
1921        if access_chain.is_empty() {
1922            return Ok(root);
1923        }
1924
1925        if Self::is_all_ident(&root, &access_chain) {
1926            return Ok(Expr::CompoundIdentifier(Self::exprs_to_idents(
1927                root,
1928                access_chain,
1929            )?));
1930        }
1931
1932        // Flatten qualified function calls.
1933        // For example, the expression `a.b.c.foo(1,2,3)` should
1934        // represent a function called `a.b.c.foo`, rather than
1935        // a composite expression.
1936        if matches!(root, Expr::Identifier(_))
1937            && matches!(
1938                access_chain.last(),
1939                Some(AccessExpr::Dot(Expr::Function(_)))
1940            )
1941            && access_chain
1942                .iter()
1943                .rev()
1944                .skip(1) // All except the Function
1945                .all(|access| matches!(access, AccessExpr::Dot(Expr::Identifier(_))))
1946        {
1947            let Some(AccessExpr::Dot(Expr::Function(mut func))) = access_chain.pop() else {
1948                return parser_err!("expected function expression", root.span().start);
1949            };
1950
1951            let compound_func_name = [root]
1952                .into_iter()
1953                .chain(access_chain.into_iter().flat_map(|access| match access {
1954                    AccessExpr::Dot(expr) => Some(expr),
1955                    _ => None,
1956                }))
1957                .flat_map(|expr| match expr {
1958                    Expr::Identifier(ident) => Some(ident),
1959                    _ => None,
1960                })
1961                .map(ObjectNamePart::Identifier)
1962                .chain(func.name.0)
1963                .collect::<Vec<_>>();
1964            func.name = ObjectName(compound_func_name);
1965
1966            return Ok(Expr::Function(func));
1967        }
1968
1969        // Flatten qualified outer join expressions.
1970        // For example, the expression `T.foo(+)` should
1971        // represent an outer join on the column name `T.foo`
1972        // rather than a composite expression.
1973        if access_chain.len() == 1
1974            && matches!(
1975                access_chain.last(),
1976                Some(AccessExpr::Dot(Expr::OuterJoin(_)))
1977            )
1978        {
1979            let Some(AccessExpr::Dot(Expr::OuterJoin(inner_expr))) = access_chain.pop() else {
1980                return parser_err!("expected (+) expression", root.span().start);
1981            };
1982
1983            if !Self::is_all_ident(&root, &[]) {
1984                return parser_err!("column identifier before (+)", root.span().start);
1985            };
1986
1987            let token_start = root.span().start;
1988            let mut idents = Self::exprs_to_idents(root, vec![])?;
1989            match *inner_expr {
1990                Expr::CompoundIdentifier(suffix) => idents.extend(suffix),
1991                Expr::Identifier(suffix) => idents.push(suffix),
1992                _ => {
1993                    return parser_err!("column identifier before (+)", token_start);
1994                }
1995            }
1996
1997            return Ok(Expr::OuterJoin(Expr::CompoundIdentifier(idents).into()));
1998        }
1999
2000        Ok(Expr::CompoundFieldAccess {
2001            root: Box::new(root),
2002            access_chain,
2003        })
2004    }
2005
2006    fn keyword_to_modifier(k: Keyword) -> Option<ContextModifier> {
2007        match k {
2008            Keyword::LOCAL => Some(ContextModifier::Local),
2009            Keyword::GLOBAL => Some(ContextModifier::Global),
2010            Keyword::SESSION => Some(ContextModifier::Session),
2011            _ => None,
2012        }
2013    }
2014
2015    /// Check if the root is an identifier and all fields are identifiers.
2016    fn is_all_ident(root: &Expr, fields: &[AccessExpr]) -> bool {
2017        if !matches!(root, Expr::Identifier(_)) {
2018            return false;
2019        }
2020        fields
2021            .iter()
2022            .all(|x| matches!(x, AccessExpr::Dot(Expr::Identifier(_))))
2023    }
2024
2025    /// Convert a root and a list of fields to a list of identifiers.
2026    fn exprs_to_idents(root: Expr, fields: Vec<AccessExpr>) -> Result<Vec<Ident>, ParserError> {
2027        let mut idents = vec![];
2028        if let Expr::Identifier(root) = root {
2029            idents.push(root);
2030            for x in fields {
2031                if let AccessExpr::Dot(Expr::Identifier(ident)) = x {
2032                    idents.push(ident);
2033                } else {
2034                    return parser_err!(
2035                        format!("Expected identifier, found: {}", x),
2036                        x.span().start
2037                    );
2038                }
2039            }
2040            Ok(idents)
2041        } else {
2042            parser_err!(
2043                format!("Expected identifier, found: {}", root),
2044                root.span().start
2045            )
2046        }
2047    }
2048
2049    /// Returns true if the next tokens indicate the outer join operator `(+)`.
2050    fn peek_outer_join_operator(&mut self) -> bool {
2051        if !self.dialect.supports_outer_join_operator() {
2052            return false;
2053        }
2054
2055        let [maybe_lparen, maybe_plus, maybe_rparen] = self.peek_tokens_ref();
2056        Token::LParen == maybe_lparen.token
2057            && Token::Plus == maybe_plus.token
2058            && Token::RParen == maybe_rparen.token
2059    }
2060
2061    /// If the next tokens indicate the outer join operator `(+)`, consume
2062    /// the tokens and return true.
2063    fn maybe_parse_outer_join_operator(&mut self) -> bool {
2064        self.dialect.supports_outer_join_operator()
2065            && self.consume_tokens(&[Token::LParen, Token::Plus, Token::RParen])
2066    }
2067
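    /// Parses a parenthesized, comma-separated list of utility options,
    /// e.g. an option list like `(ANALYZE, FORMAT JSON)` as used by PostgreSQL-style `EXPLAIN`.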
2068    pub fn parse_utility_options(&mut self) -> Result<Vec<UtilityOption>, ParserError> {
2069        self.expect_token(&Token::LParen)?;
2070        let options = self.parse_comma_separated(Self::parse_utility_option)?;
2071        self.expect_token(&Token::RParen)?;
2072
2073        Ok(options)
2074    }
2075
2076    fn parse_utility_option(&mut self) -> Result<UtilityOption, ParserError> {
2077        let name = self.parse_identifier()?;
2078
2079        let next_token = self.peek_token();
2080        if next_token == Token::Comma || next_token == Token::RParen {
2081            return Ok(UtilityOption { name, arg: None });
2082        }
2083        let arg = self.parse_expr()?;
2084
2085        Ok(UtilityOption {
2086            name,
2087            arg: Some(arg),
2088        })
2089    }
2090
2091    fn try_parse_expr_sub_query(&mut self) -> Result<Option<Expr>, ParserError> {
2092        if !self.peek_sub_query() {
2093            return Ok(None);
2094        }
2095
2096        Ok(Some(Expr::Subquery(self.parse_query()?)))
2097    }
2098
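    /// Tries to parse a parenthesized lambda such as `(x, y) -> x + y` for dialects that
    /// support lambda functions; the opening `(` has already been consumed by the caller.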
2099    fn try_parse_lambda(&mut self) -> Result<Option<Expr>, ParserError> {
2100        if !self.dialect.supports_lambda_functions() {
2101            return Ok(None);
2102        }
2103        self.maybe_parse(|p| {
2104            let params = p.parse_comma_separated(|p| p.parse_identifier())?;
2105            p.expect_token(&Token::RParen)?;
2106            p.expect_token(&Token::Arrow)?;
2107            let expr = p.parse_expr()?;
2108            Ok(Expr::Lambda(LambdaFunction {
2109                params: OneOrManyWithParens::Many(params),
2110                body: Box::new(expr),
2111            }))
2112        })
2113    }
2114
2115    /// Tries to parse the body of an [ODBC escape sequence],
2116    /// i.e. without the enclosing braces.
2117    /// Currently implemented:
2118    /// - Scalar function calls
2119    /// - Date, time, and timestamp literals
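    /// e.g. `{fn myfunc(1,2,3)}` or `{ts '2025-07-17 14:12:01'}`; this method parses only the part inside the braces.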
2120    /// See <https://learn.microsoft.com/en-us/sql/odbc/reference/develop-app/escape-sequences-in-odbc?view=sql-server-2017>
2121    fn maybe_parse_odbc_body(&mut self) -> Result<Option<Expr>, ParserError> {
2122        // Attempt 1: Try to parse it as a function.
2123        if let Some(expr) = self.maybe_parse_odbc_fn_body()? {
2124            return Ok(Some(expr));
2125        }
2126        // Attempt 2: Try to parse it as a Date, Time or Timestamp Literal
2127        self.maybe_parse_odbc_body_datetime()
2128    }
2129
2130    /// Tries to parse the body of an [ODBC Date, Time, and Timestamp Literals] escape sequence.
2131    ///
2132    /// ```sql
2133    /// {d '2025-07-17'}
2134    /// {t '14:12:01'}
2135    /// {ts '2025-07-17 14:12:01'}
2136    /// ```
2137    ///
2138    /// [ODBC Date, Time, and Timestamp Literals]:
2139    /// https://learn.microsoft.com/en-us/sql/odbc/reference/develop-app/date-time-and-timestamp-literals?view=sql-server-2017
2140    fn maybe_parse_odbc_body_datetime(&mut self) -> Result<Option<Expr>, ParserError> {
2141        self.maybe_parse(|p| {
2142            let token = p.next_token().clone();
2143            let word_string = token.token.to_string();
2144            let data_type = match word_string.as_str() {
2145                "t" => DataType::Time(None, TimezoneInfo::None),
2146                "d" => DataType::Date,
2147                "ts" => DataType::Timestamp(None, TimezoneInfo::None),
2148                _ => return p.expected("ODBC datetime keyword (t, d, or ts)", token),
2149            };
2150            let value = p.parse_value()?;
2151            Ok(Expr::TypedString(TypedString {
2152                data_type,
2153                value,
2154                uses_odbc_syntax: true,
2155            }))
2156        })
2157    }
2158
2159    /// Tries to parse the body of an [ODBC function] call.
2160    /// i.e. without the enclosing braces
2161    ///
2162    /// ```sql
2163    /// fn myfunc(1,2,3)
2164    /// ```
2165    ///
2166    /// [ODBC function]: https://learn.microsoft.com/en-us/sql/odbc/reference/develop-app/scalar-function-calls?view=sql-server-2017
2167    fn maybe_parse_odbc_fn_body(&mut self) -> Result<Option<Expr>, ParserError> {
2168        self.maybe_parse(|p| {
2169            p.expect_keyword(Keyword::FN)?;
2170            let fn_name = p.parse_object_name(false)?;
2171            let mut fn_call = p.parse_function_call(fn_name)?;
2172            fn_call.uses_odbc_syntax = true;
2173            Ok(Expr::Function(fn_call))
2174        })
2175    }
2176
2177    pub fn parse_function(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2178        self.parse_function_call(name).map(Expr::Function)
2179    }
2180
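    /// Parses a function call for the given, already-parsed function name: the argument list
    /// plus any trailing WITHIN GROUP, FILTER, null treatment, and OVER clauses.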
2181    fn parse_function_call(&mut self, name: ObjectName) -> Result<Function, ParserError> {
2182        self.expect_token(&Token::LParen)?;
2183
2184        // Snowflake permits a subquery to be passed as an argument without
2185        // an enclosing set of parens if it's the only argument.
2186        if dialect_of!(self is SnowflakeDialect) && self.peek_sub_query() {
2187            let subquery = self.parse_query()?;
2188            self.expect_token(&Token::RParen)?;
2189            return Ok(Function {
2190                name,
2191                uses_odbc_syntax: false,
2192                parameters: FunctionArguments::None,
2193                args: FunctionArguments::Subquery(subquery),
2194                filter: None,
2195                null_treatment: None,
2196                over: None,
2197                within_group: vec![],
2198            });
2199        }
2200
2201        let mut args = self.parse_function_argument_list()?;
2202        let mut parameters = FunctionArguments::None;
2203        // ClickHouse aggregations support parametric functions like `HISTOGRAM(0.5, 0.6)(x, y)`
2204        // where (0.5, 0.6) are the parameters to the function.
2205        if dialect_of!(self is ClickHouseDialect | GenericDialect)
2206            && self.consume_token(&Token::LParen)
2207        {
2208            parameters = FunctionArguments::List(args);
2209            args = self.parse_function_argument_list()?;
2210        }
2211
2212        let within_group = if self.parse_keywords(&[Keyword::WITHIN, Keyword::GROUP]) {
2213            self.expect_token(&Token::LParen)?;
2214            self.expect_keywords(&[Keyword::ORDER, Keyword::BY])?;
2215            let order_by = self.parse_comma_separated(Parser::parse_order_by_expr)?;
2216            self.expect_token(&Token::RParen)?;
2217            order_by
2218        } else {
2219            vec![]
2220        };
2221
2222        let filter = if self.dialect.supports_filter_during_aggregation()
2223            && self.parse_keyword(Keyword::FILTER)
2224            && self.consume_token(&Token::LParen)
2225            && self.parse_keyword(Keyword::WHERE)
2226        {
2227            let filter = Some(Box::new(self.parse_expr()?));
2228            self.expect_token(&Token::RParen)?;
2229            filter
2230        } else {
2231            None
2232        };
2233
2234        // Syntax for null treatment shows up either in the args list
2235        // or after the function call, but not both.
2236        let null_treatment = if args
2237            .clauses
2238            .iter()
2239            .all(|clause| !matches!(clause, FunctionArgumentClause::IgnoreOrRespectNulls(_)))
2240        {
2241            self.parse_null_treatment()?
2242        } else {
2243            None
2244        };
2245
2246        let over = if self.parse_keyword(Keyword::OVER) {
2247            if self.consume_token(&Token::LParen) {
2248                let window_spec = self.parse_window_spec()?;
2249                Some(WindowType::WindowSpec(window_spec))
2250            } else {
2251                Some(WindowType::NamedWindow(self.parse_identifier()?))
2252            }
2253        } else {
2254            None
2255        };
2256
2257        Ok(Function {
2258            name,
2259            uses_odbc_syntax: false,
2260            parameters,
2261            args: FunctionArguments::List(args),
2262            null_treatment,
2263            filter,
2264            over,
2265            within_group,
2266        })
2267    }
2268
2269    /// Optionally parses a null treatment clause.
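    /// e.g. `IGNORE NULLS` or `RESPECT NULLS`.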
2270    fn parse_null_treatment(&mut self) -> Result<Option<NullTreatment>, ParserError> {
2271        match self.parse_one_of_keywords(&[Keyword::RESPECT, Keyword::IGNORE]) {
2272            Some(keyword) => {
2273                self.expect_keyword_is(Keyword::NULLS)?;
2274
2275                Ok(match keyword {
2276                    Keyword::RESPECT => Some(NullTreatment::RespectNulls),
2277                    Keyword::IGNORE => Some(NullTreatment::IgnoreNulls),
2278                    _ => None,
2279                })
2280            }
2281            None => Ok(None),
2282        }
2283    }
2284
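    /// Parses datetime functions whose parentheses are optional, e.g. `CURRENT_TIMESTAMP` or `CURRENT_TIME(3)`.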
2285    pub fn parse_time_functions(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2286        let args = if self.consume_token(&Token::LParen) {
2287            FunctionArguments::List(self.parse_function_argument_list()?)
2288        } else {
2289            FunctionArguments::None
2290        };
2291        Ok(Expr::Function(Function {
2292            name,
2293            uses_odbc_syntax: false,
2294            parameters: FunctionArguments::None,
2295            args,
2296            filter: None,
2297            over: None,
2298            null_treatment: None,
2299            within_group: vec![],
2300        }))
2301    }
2302
2303    pub fn parse_window_frame_units(&mut self) -> Result<WindowFrameUnits, ParserError> {
2304        let next_token = self.next_token();
2305        match &next_token.token {
2306            Token::Word(w) => match w.keyword {
2307                Keyword::ROWS => Ok(WindowFrameUnits::Rows),
2308                Keyword::RANGE => Ok(WindowFrameUnits::Range),
2309                Keyword::GROUPS => Ok(WindowFrameUnits::Groups),
2310                _ => self.expected("ROWS, RANGE, GROUPS", next_token)?,
2311            },
2312            _ => self.expected("ROWS, RANGE, GROUPS", next_token),
2313        }
2314    }
2315
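    /// Parses a window frame clause, e.g. `ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW`.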
2316    pub fn parse_window_frame(&mut self) -> Result<WindowFrame, ParserError> {
2317        let units = self.parse_window_frame_units()?;
2318        let (start_bound, end_bound) = if self.parse_keyword(Keyword::BETWEEN) {
2319            let start_bound = self.parse_window_frame_bound()?;
2320            self.expect_keyword_is(Keyword::AND)?;
2321            let end_bound = Some(self.parse_window_frame_bound()?);
2322            (start_bound, end_bound)
2323        } else {
2324            (self.parse_window_frame_bound()?, None)
2325        };
2326        Ok(WindowFrame {
2327            units,
2328            start_bound,
2329            end_bound,
2330        })
2331    }
2332
2333    /// Parse `CURRENT ROW` or `{ <positive number> | UNBOUNDED } { PRECEDING | FOLLOWING }`
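    /// A single-quoted string bound is parsed as an interval, e.g. `'1 day' PRECEDING`.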
2334    pub fn parse_window_frame_bound(&mut self) -> Result<WindowFrameBound, ParserError> {
2335        if self.parse_keywords(&[Keyword::CURRENT, Keyword::ROW]) {
2336            Ok(WindowFrameBound::CurrentRow)
2337        } else {
2338            let rows = if self.parse_keyword(Keyword::UNBOUNDED) {
2339                None
2340            } else {
2341                Some(Box::new(match self.peek_token().token {
2342                    Token::SingleQuotedString(_) => self.parse_interval()?,
2343                    _ => self.parse_expr()?,
2344                }))
2345            };
2346            if self.parse_keyword(Keyword::PRECEDING) {
2347                Ok(WindowFrameBound::Preceding(rows))
2348            } else if self.parse_keyword(Keyword::FOLLOWING) {
2349                Ok(WindowFrameBound::Following(rows))
2350            } else {
2351                self.expected("PRECEDING or FOLLOWING", self.peek_token())
2352            }
2353        }
2354    }
2355
2356    /// Parses a GROUP BY expression, which can be a grouping set, ROLLUP, CUBE, or a simple expression.
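    /// e.g. `GROUPING SETS ((a, b), (a), ())`, `CUBE (a, b)`, `ROLLUP (a, b)`, `()`, or a plain expression.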
2357    fn parse_group_by_expr(&mut self) -> Result<Expr, ParserError> {
2358        if self.dialect.supports_group_by_expr() {
2359            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
2360                self.expect_token(&Token::LParen)?;
2361                let result = self.parse_comma_separated(|p| p.parse_tuple(false, true))?;
2362                self.expect_token(&Token::RParen)?;
2363                Ok(Expr::GroupingSets(result))
2364            } else if self.parse_keyword(Keyword::CUBE) {
2365                self.expect_token(&Token::LParen)?;
2366                let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2367                self.expect_token(&Token::RParen)?;
2368                Ok(Expr::Cube(result))
2369            } else if self.parse_keyword(Keyword::ROLLUP) {
2370                self.expect_token(&Token::LParen)?;
2371                let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2372                self.expect_token(&Token::RParen)?;
2373                Ok(Expr::Rollup(result))
2374            } else if self.consume_tokens(&[Token::LParen, Token::RParen]) {
2375                // PostgreSQL allows using an empty tuple as a group by expression,
2376                // e.g. `GROUP BY (), name`. Please refer to GROUP BY Clause section in
2377                // [PostgreSQL](https://www.postgresql.org/docs/16/sql-select.html)
2378                Ok(Expr::Tuple(vec![]))
2379            } else {
2380                self.parse_expr()
2381            }
2382        } else {
2383            // TODO parse rollup for other dialects
2384            self.parse_expr()
2385        }
2386    }
2387
2388    /// Parse a tuple with `(` and `)`.
2389    /// If `lift_singleton` is true, then a bare expression without parentheses is lifted to a tuple of length 1; otherwise the parentheses are required.
2390    /// If `allow_empty` is true, then an empty tuple is allowed.
2391    fn parse_tuple(
2392        &mut self,
2393        lift_singleton: bool,
2394        allow_empty: bool,
2395    ) -> Result<Vec<Expr>, ParserError> {
2396        if lift_singleton {
2397            if self.consume_token(&Token::LParen) {
2398                let result = if allow_empty && self.consume_token(&Token::RParen) {
2399                    vec![]
2400                } else {
2401                    let result = self.parse_comma_separated(Parser::parse_expr)?;
2402                    self.expect_token(&Token::RParen)?;
2403                    result
2404                };
2405                Ok(result)
2406            } else {
2407                Ok(vec![self.parse_expr()?])
2408            }
2409        } else {
2410            self.expect_token(&Token::LParen)?;
2411            let result = if allow_empty && self.consume_token(&Token::RParen) {
2412                vec![]
2413            } else {
2414                let result = self.parse_comma_separated(Parser::parse_expr)?;
2415                self.expect_token(&Token::RParen)?;
2416                result
2417            };
2418            Ok(result)
2419        }
2420    }
2421
2422    pub fn parse_case_expr(&mut self) -> Result<Expr, ParserError> {
2423        let case_token = AttachedToken(self.get_current_token().clone());
2424        let mut operand = None;
2425        if !self.parse_keyword(Keyword::WHEN) {
2426            operand = Some(Box::new(self.parse_expr()?));
2427            self.expect_keyword_is(Keyword::WHEN)?;
2428        }
2429        let mut conditions = vec![];
2430        loop {
2431            let condition = self.parse_expr()?;
2432            self.expect_keyword_is(Keyword::THEN)?;
2433            let result = self.parse_expr()?;
2434            conditions.push(CaseWhen { condition, result });
2435            if !self.parse_keyword(Keyword::WHEN) {
2436                break;
2437            }
2438        }
2439        let else_result = if self.parse_keyword(Keyword::ELSE) {
2440            Some(Box::new(self.parse_expr()?))
2441        } else {
2442            None
2443        };
2444        let end_token = AttachedToken(self.expect_keyword(Keyword::END)?);
2445        Ok(Expr::Case {
2446            case_token,
2447            end_token,
2448            operand,
2449            conditions,
2450            else_result,
2451        })
2452    }
2453
2454    pub fn parse_optional_cast_format(&mut self) -> Result<Option<CastFormat>, ParserError> {
2455        if self.parse_keyword(Keyword::FORMAT) {
2456            let value = self.parse_value()?.value;
2457            match self.parse_optional_time_zone()? {
2458                Some(tz) => Ok(Some(CastFormat::ValueAtTimeZone(value, tz))),
2459                None => Ok(Some(CastFormat::Value(value))),
2460            }
2461        } else {
2462            Ok(None)
2463        }
2464    }
2465
2466    pub fn parse_optional_time_zone(&mut self) -> Result<Option<Value>, ParserError> {
2467        if self.parse_keywords(&[Keyword::AT, Keyword::TIME, Keyword::ZONE]) {
2468            self.parse_value().map(|v| Some(v.value))
2469        } else {
2470            Ok(None)
2471        }
2472    }
2473
2474    /// Parses an MSSQL-style `CONVERT` function, where the target type precedes the value.
2475    fn parse_mssql_convert(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2476        self.expect_token(&Token::LParen)?;
2477        let data_type = self.parse_data_type()?;
2478        self.expect_token(&Token::Comma)?;
2479        let expr = self.parse_expr()?;
2480        let styles = if self.consume_token(&Token::Comma) {
2481            self.parse_comma_separated(Parser::parse_expr)?
2482        } else {
2483            Default::default()
2484        };
2485        self.expect_token(&Token::RParen)?;
2486        Ok(Expr::Convert {
2487            is_try,
2488            expr: Box::new(expr),
2489            data_type: Some(data_type),
2490            charset: None,
2491            target_before_value: true,
2492            styles,
2493        })
2494    }
2495
2496    /// Parse a SQL CONVERT function:
2497    ///  - `CONVERT('héhé' USING utf8mb4)` (MySQL)
2498    ///  - `CONVERT('héhé', CHAR CHARACTER SET utf8mb4)` (MySQL)
2499    ///  - `CONVERT(DECIMAL(10, 5), 42)` (MSSQL) - the type comes first
2500    pub fn parse_convert_expr(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2501        if self.dialect.convert_type_before_value() {
2502            return self.parse_mssql_convert(is_try);
2503        }
2504        self.expect_token(&Token::LParen)?;
2505        let expr = self.parse_expr()?;
2506        if self.parse_keyword(Keyword::USING) {
2507            let charset = self.parse_object_name(false)?;
2508            self.expect_token(&Token::RParen)?;
2509            return Ok(Expr::Convert {
2510                is_try,
2511                expr: Box::new(expr),
2512                data_type: None,
2513                charset: Some(charset),
2514                target_before_value: false,
2515                styles: vec![],
2516            });
2517        }
2518        self.expect_token(&Token::Comma)?;
2519        let data_type = self.parse_data_type()?;
2520        let charset = if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
2521            Some(self.parse_object_name(false)?)
2522        } else {
2523            None
2524        };
2525        self.expect_token(&Token::RParen)?;
2526        Ok(Expr::Convert {
2527            is_try,
2528            expr: Box::new(expr),
2529            data_type: Some(data_type),
2530            charset,
2531            target_before_value: false,
2532            styles: vec![],
2533        })
2534    }
2535
2536    /// Parse a SQL CAST function e.g. `CAST(expr AS FLOAT)`
2537    pub fn parse_cast_expr(&mut self, kind: CastKind) -> Result<Expr, ParserError> {
2538        self.expect_token(&Token::LParen)?;
2539        let expr = self.parse_expr()?;
2540        self.expect_keyword_is(Keyword::AS)?;
2541        let data_type = self.parse_data_type()?;
2542        let format = self.parse_optional_cast_format()?;
2543        self.expect_token(&Token::RParen)?;
2544        Ok(Expr::Cast {
2545            kind,
2546            expr: Box::new(expr),
2547            data_type,
2548            format,
2549        })
2550    }
2551
2552    /// Parse a SQL EXISTS expression e.g. `WHERE EXISTS(SELECT ...)`.
2553    pub fn parse_exists_expr(&mut self, negated: bool) -> Result<Expr, ParserError> {
2554        self.expect_token(&Token::LParen)?;
2555        let exists_node = Expr::Exists {
2556            negated,
2557            subquery: self.parse_query()?,
2558        };
2559        self.expect_token(&Token::RParen)?;
2560        Ok(exists_node)
2561    }
2562
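    /// Parses an `EXTRACT` expression, e.g. `EXTRACT(YEAR FROM ts)`; Snowflake and the generic
    /// dialect also accept the comma form, e.g. `EXTRACT(YEAR, ts)`.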
2563    pub fn parse_extract_expr(&mut self) -> Result<Expr, ParserError> {
2564        self.expect_token(&Token::LParen)?;
2565        let field = self.parse_date_time_field()?;
2566
2567        let syntax = if self.parse_keyword(Keyword::FROM) {
2568            ExtractSyntax::From
2569        } else if self.consume_token(&Token::Comma)
2570            && dialect_of!(self is SnowflakeDialect | GenericDialect)
2571        {
2572            ExtractSyntax::Comma
2573        } else {
2574            return Err(ParserError::ParserError(
2575                "Expected 'FROM' or ','".to_string(),
2576            ));
2577        };
2578
2579        let expr = self.parse_expr()?;
2580        self.expect_token(&Token::RParen)?;
2581        Ok(Expr::Extract {
2582            field,
2583            expr: Box::new(expr),
2584            syntax,
2585        })
2586    }
2587
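    /// Parses the parenthesized part of a `CEIL`/`FLOOR` call, e.g. `CEIL(ts TO SECOND)` or `FLOOR(n, 2)`.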
2588    pub fn parse_ceil_floor_expr(&mut self, is_ceil: bool) -> Result<Expr, ParserError> {
2589        self.expect_token(&Token::LParen)?;
2590        let expr = self.parse_expr()?;
2591        // Parse `CEIL/FLOOR(expr)`
2592        let field = if self.parse_keyword(Keyword::TO) {
2593            // Parse `CEIL/FLOOR(expr TO DateTimeField)`
2594            CeilFloorKind::DateTimeField(self.parse_date_time_field()?)
2595        } else if self.consume_token(&Token::Comma) {
2596            // Parse `CEIL/FLOOR(expr, scale)`
2597            match self.parse_value()?.value {
2598                Value::Number(n, s) => CeilFloorKind::Scale(Value::Number(n, s)),
2599                _ => {
2600                    return Err(ParserError::ParserError(
2601                        "Scale field can only be of number type".to_string(),
2602                    ))
2603                }
2604            }
2605        } else {
2606            CeilFloorKind::DateTimeField(DateTimeField::NoDateTime)
2607        };
2608        self.expect_token(&Token::RParen)?;
2609        if is_ceil {
2610            Ok(Expr::Ceil {
2611                expr: Box::new(expr),
2612                field,
2613            })
2614        } else {
2615            Ok(Expr::Floor {
2616                expr: Box::new(expr),
2617                field,
2618            })
2619        }
2620    }
2621
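    /// Parses a `POSITION` expression, e.g. `POSITION('@' IN field)`, falling back to an
    /// ordinary function call such as Snowflake's `POSITION('@', field)`.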
2622    pub fn parse_position_expr(&mut self, ident: Ident) -> Result<Expr, ParserError> {
2623        let between_prec = self.dialect.prec_value(Precedence::Between);
2624        let position_expr = self.maybe_parse(|p| {
2625            // Parse e.g. `SELECT POSITION('@' IN field)`
2626            p.expect_token(&Token::LParen)?;
2627
2628            // Parse the subexpr till the IN keyword
2629            let expr = p.parse_subexpr(between_prec)?;
2630            p.expect_keyword_is(Keyword::IN)?;
2631            let from = p.parse_expr()?;
2632            p.expect_token(&Token::RParen)?;
2633            Ok(Expr::Position {
2634                expr: Box::new(expr),
2635                r#in: Box::new(from),
2636            })
2637        })?;
2638        match position_expr {
2639            Some(expr) => Ok(expr),
2640            // Snowflake supports `position` as an ordinary function call
2641            // without the special `IN` syntax.
2642            None => self.parse_function(ObjectName::from(vec![ident])),
2643        }
2644    }
2645
2646    // { SUBSTRING | SUBSTR } (<EXPR> [FROM 1] [FOR 3])
2647    pub fn parse_substring(&mut self) -> Result<Expr, ParserError> {
2648        let shorthand = match self.expect_one_of_keywords(&[Keyword::SUBSTR, Keyword::SUBSTRING])? {
2649            Keyword::SUBSTR => true,
2650            Keyword::SUBSTRING => false,
2651            _ => {
2652                self.prev_token();
2653                return self.expected("SUBSTR or SUBSTRING", self.peek_token());
2654            }
2655        };
2656        self.expect_token(&Token::LParen)?;
2657        let expr = self.parse_expr()?;
2658        let mut from_expr = None;
2659        let special = self.consume_token(&Token::Comma);
2660        if special || self.parse_keyword(Keyword::FROM) {
2661            from_expr = Some(self.parse_expr()?);
2662        }
2663
2664        let mut to_expr = None;
2665        if self.parse_keyword(Keyword::FOR) || self.consume_token(&Token::Comma) {
2666            to_expr = Some(self.parse_expr()?);
2667        }
2668        self.expect_token(&Token::RParen)?;
2669
2670        Ok(Expr::Substring {
2671            expr: Box::new(expr),
2672            substring_from: from_expr.map(Box::new),
2673            substring_for: to_expr.map(Box::new),
2674            special,
2675            shorthand,
2676        })
2677    }
2678
2679    pub fn parse_overlay_expr(&mut self) -> Result<Expr, ParserError> {
2680        // Parse `OVERLAY(<expr> PLACING <expr> FROM 1 [FOR 3])`
2681        self.expect_token(&Token::LParen)?;
2682        let expr = self.parse_expr()?;
2683        self.expect_keyword_is(Keyword::PLACING)?;
2684        let what_expr = self.parse_expr()?;
2685        self.expect_keyword_is(Keyword::FROM)?;
2686        let from_expr = self.parse_expr()?;
2687        let mut for_expr = None;
2688        if self.parse_keyword(Keyword::FOR) {
2689            for_expr = Some(self.parse_expr()?);
2690        }
2691        self.expect_token(&Token::RParen)?;
2692
2693        Ok(Expr::Overlay {
2694            expr: Box::new(expr),
2695            overlay_what: Box::new(what_expr),
2696            overlay_from: Box::new(from_expr),
2697            overlay_for: for_expr.map(Box::new),
2698        })
2699    }
2700
2701    /// ```sql
2702    /// TRIM ([WHERE] ['text' FROM] 'text')
2703    /// TRIM ('text')
2704    /// TRIM(<expr> [, characters]) -- DuckDB, Snowflake, or BigQuery only
2705    /// ```
2706    pub fn parse_trim_expr(&mut self) -> Result<Expr, ParserError> {
2707        self.expect_token(&Token::LParen)?;
2708        let mut trim_where = None;
2709        if let Token::Word(word) = self.peek_token().token {
2710            if [Keyword::BOTH, Keyword::LEADING, Keyword::TRAILING].contains(&word.keyword) {
2711                trim_where = Some(self.parse_trim_where()?);
2712            }
2713        }
2714        let expr = self.parse_expr()?;
2715        if self.parse_keyword(Keyword::FROM) {
2716            let trim_what = Box::new(expr);
2717            let expr = self.parse_expr()?;
2718            self.expect_token(&Token::RParen)?;
2719            Ok(Expr::Trim {
2720                expr: Box::new(expr),
2721                trim_where,
2722                trim_what: Some(trim_what),
2723                trim_characters: None,
2724            })
2725        } else if self.consume_token(&Token::Comma)
2726            && dialect_of!(self is DuckDbDialect | SnowflakeDialect | BigQueryDialect | GenericDialect)
2727        {
2728            let characters = self.parse_comma_separated(Parser::parse_expr)?;
2729            self.expect_token(&Token::RParen)?;
2730            Ok(Expr::Trim {
2731                expr: Box::new(expr),
2732                trim_where: None,
2733                trim_what: None,
2734                trim_characters: Some(characters),
2735            })
2736        } else {
2737            self.expect_token(&Token::RParen)?;
2738            Ok(Expr::Trim {
2739                expr: Box::new(expr),
2740                trim_where,
2741                trim_what: None,
2742                trim_characters: None,
2743            })
2744        }
2745    }
2746
2747    pub fn parse_trim_where(&mut self) -> Result<TrimWhereField, ParserError> {
2748        let next_token = self.next_token();
2749        match &next_token.token {
2750            Token::Word(w) => match w.keyword {
2751                Keyword::BOTH => Ok(TrimWhereField::Both),
2752                Keyword::LEADING => Ok(TrimWhereField::Leading),
2753                Keyword::TRAILING => Ok(TrimWhereField::Trailing),
2754                _ => self.expected("trim_where field", next_token)?,
2755            },
2756            _ => self.expected("trim_where field", next_token),
2757        }
2758    }
2759
2760    /// Parses an array expression `[ex1, ex2, ..]`
2761    /// If `named` is `true`, the expression came from a form like `ARRAY[ex1, ex2]`
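    ///
    /// e.g. `ARRAY[1, 2, 3]` (named) or `[1, 2, 3]` (unnamed).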
2762    pub fn parse_array_expr(&mut self, named: bool) -> Result<Expr, ParserError> {
2763        let exprs = self.parse_comma_separated0(Parser::parse_expr, Token::RBracket)?;
2764        self.expect_token(&Token::RBracket)?;
2765        Ok(Expr::Array(Array { elem: exprs, named }))
2766    }
2767
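    /// Parses the `ON OVERFLOW` clause of `LISTAGG`, e.g. the
    /// `ON OVERFLOW TRUNCATE '...' WITH COUNT` in
    /// `LISTAGG(name, ',' ON OVERFLOW TRUNCATE '...' WITH COUNT)`.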
2768    pub fn parse_listagg_on_overflow(&mut self) -> Result<Option<ListAggOnOverflow>, ParserError> {
2769        if self.parse_keywords(&[Keyword::ON, Keyword::OVERFLOW]) {
2770            if self.parse_keyword(Keyword::ERROR) {
2771                Ok(Some(ListAggOnOverflow::Error))
2772            } else {
2773                self.expect_keyword_is(Keyword::TRUNCATE)?;
2774                let filler = match self.peek_token().token {
2775                    Token::Word(w)
2776                        if w.keyword == Keyword::WITH || w.keyword == Keyword::WITHOUT =>
2777                    {
2778                        None
2779                    }
2780                    Token::SingleQuotedString(_)
2781                    | Token::EscapedStringLiteral(_)
2782                    | Token::UnicodeStringLiteral(_)
2783                    | Token::NationalStringLiteral(_)
2784                    | Token::QuoteDelimitedStringLiteral(_)
2785                    | Token::NationalQuoteDelimitedStringLiteral(_)
2786                    | Token::HexStringLiteral(_) => Some(Box::new(self.parse_expr()?)),
2787                    _ => self.expected(
2788                        "either filler, WITH, or WITHOUT in LISTAGG",
2789                        self.peek_token(),
2790                    )?,
2791                };
2792                let with_count = self.parse_keyword(Keyword::WITH);
2793                if !with_count && !self.parse_keyword(Keyword::WITHOUT) {
2794                    self.expected("either WITH or WITHOUT in LISTAGG", self.peek_token())?;
2795                }
2796                self.expect_keyword_is(Keyword::COUNT)?;
2797                Ok(Some(ListAggOnOverflow::Truncate { filler, with_count }))
2798            }
2799        } else {
2800            Ok(None)
2801        }
2802    }
2803
2804    // This function parses date/time fields for the EXTRACT function-like
2805    // operator, interval qualifiers, and the ceil/floor operations.
2806    // EXTRACT supports a wider set of date/time fields than interval qualifiers,
2807    // so this function may need to be split in two.
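    // e.g. the `YEAR` in `EXTRACT(YEAR FROM order_date)` or the `DAY` in `INTERVAL '1' DAY`.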
2808    pub fn parse_date_time_field(&mut self) -> Result<DateTimeField, ParserError> {
2809        let next_token = self.next_token();
2810        match &next_token.token {
2811            Token::Word(w) => match w.keyword {
2812                Keyword::YEAR => Ok(DateTimeField::Year),
2813                Keyword::YEARS => Ok(DateTimeField::Years),
2814                Keyword::MONTH => Ok(DateTimeField::Month),
2815                Keyword::MONTHS => Ok(DateTimeField::Months),
2816                Keyword::WEEK => {
2817                    let week_day = if dialect_of!(self is BigQueryDialect | GenericDialect)
2818                        && self.consume_token(&Token::LParen)
2819                    {
2820                        let week_day = self.parse_identifier()?;
2821                        self.expect_token(&Token::RParen)?;
2822                        Some(week_day)
2823                    } else {
2824                        None
2825                    };
2826                    Ok(DateTimeField::Week(week_day))
2827                }
2828                Keyword::WEEKS => Ok(DateTimeField::Weeks),
2829                Keyword::DAY => Ok(DateTimeField::Day),
2830                Keyword::DAYOFWEEK => Ok(DateTimeField::DayOfWeek),
2831                Keyword::DAYOFYEAR => Ok(DateTimeField::DayOfYear),
2832                Keyword::DAYS => Ok(DateTimeField::Days),
2833                Keyword::DATE => Ok(DateTimeField::Date),
2834                Keyword::DATETIME => Ok(DateTimeField::Datetime),
2835                Keyword::HOUR => Ok(DateTimeField::Hour),
2836                Keyword::HOURS => Ok(DateTimeField::Hours),
2837                Keyword::MINUTE => Ok(DateTimeField::Minute),
2838                Keyword::MINUTES => Ok(DateTimeField::Minutes),
2839                Keyword::SECOND => Ok(DateTimeField::Second),
2840                Keyword::SECONDS => Ok(DateTimeField::Seconds),
2841                Keyword::CENTURY => Ok(DateTimeField::Century),
2842                Keyword::DECADE => Ok(DateTimeField::Decade),
2843                Keyword::DOY => Ok(DateTimeField::Doy),
2844                Keyword::DOW => Ok(DateTimeField::Dow),
2845                Keyword::EPOCH => Ok(DateTimeField::Epoch),
2846                Keyword::ISODOW => Ok(DateTimeField::Isodow),
2847                Keyword::ISOYEAR => Ok(DateTimeField::Isoyear),
2848                Keyword::ISOWEEK => Ok(DateTimeField::IsoWeek),
2849                Keyword::JULIAN => Ok(DateTimeField::Julian),
2850                Keyword::MICROSECOND => Ok(DateTimeField::Microsecond),
2851                Keyword::MICROSECONDS => Ok(DateTimeField::Microseconds),
2852                Keyword::MILLENIUM => Ok(DateTimeField::Millenium),
2853                Keyword::MILLENNIUM => Ok(DateTimeField::Millennium),
2854                Keyword::MILLISECOND => Ok(DateTimeField::Millisecond),
2855                Keyword::MILLISECONDS => Ok(DateTimeField::Milliseconds),
2856                Keyword::NANOSECOND => Ok(DateTimeField::Nanosecond),
2857                Keyword::NANOSECONDS => Ok(DateTimeField::Nanoseconds),
2858                Keyword::QUARTER => Ok(DateTimeField::Quarter),
2859                Keyword::TIME => Ok(DateTimeField::Time),
2860                Keyword::TIMEZONE => Ok(DateTimeField::Timezone),
2861                Keyword::TIMEZONE_ABBR => Ok(DateTimeField::TimezoneAbbr),
2862                Keyword::TIMEZONE_HOUR => Ok(DateTimeField::TimezoneHour),
2863                Keyword::TIMEZONE_MINUTE => Ok(DateTimeField::TimezoneMinute),
2864                Keyword::TIMEZONE_REGION => Ok(DateTimeField::TimezoneRegion),
2865                _ if self.dialect.allow_extract_custom() => {
2866                    self.prev_token();
2867                    let custom = self.parse_identifier()?;
2868                    Ok(DateTimeField::Custom(custom))
2869                }
2870                _ => self.expected("date/time field", next_token),
2871            },
2872            Token::SingleQuotedString(_) if self.dialect.allow_extract_single_quotes() => {
2873                self.prev_token();
2874                let custom = self.parse_identifier()?;
2875                Ok(DateTimeField::Custom(custom))
2876            }
2877            _ => self.expected("date/time field", next_token),
2878        }
2879    }
2880
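    /// Parses the operand of a `NOT` keyword that the caller has already consumed,
    /// e.g. the `EXISTS (SELECT 1)` in `NOT EXISTS (SELECT 1)`.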
2881    pub fn parse_not(&mut self) -> Result<Expr, ParserError> {
2882        match self.peek_token().token {
2883            Token::Word(w) => match w.keyword {
2884                Keyword::EXISTS => {
2885                    let negated = true;
2886                    let _ = self.parse_keyword(Keyword::EXISTS);
2887                    self.parse_exists_expr(negated)
2888                }
2889                _ => Ok(Expr::UnaryOp {
2890                    op: UnaryOperator::Not,
2891                    expr: Box::new(
2892                        self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?,
2893                    ),
2894                }),
2895            },
2896            _ => Ok(Expr::UnaryOp {
2897                op: UnaryOperator::Not,
2898                expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
2899            }),
2900        }
2901    }
2902
2903    /// Parse expression types that start with a left brace '{'.
2904    /// Examples:
2905    /// ```sql
2906    /// -- Dictionary expr.
2907    /// {'key1': 'value1', 'key2': 'value2'}
2908    ///
2909    /// -- Function call using the ODBC syntax.
2910    /// { fn CONCAT('foo', 'bar') }
2911    /// ```
2912    fn parse_lbrace_expr(&mut self) -> Result<Expr, ParserError> {
2913        let token = self.expect_token(&Token::LBrace)?;
2914
2915        if let Some(fn_expr) = self.maybe_parse_odbc_body()? {
2916            self.expect_token(&Token::RBrace)?;
2917            return Ok(fn_expr);
2918        }
2919
2920        if self.dialect.supports_dictionary_syntax() {
2921            self.prev_token(); // Put back the '{'
2922            return self.parse_dictionary();
2923        }
2924
2925        self.expected("an expression", token)
2926    }
2927
2928    /// Parses fulltext expressions [`sqlparser::ast::Expr::MatchAgainst`]
2929    ///
2930    /// # Errors
2931    /// This method will raise an error if the column list is empty or with invalid identifiers,
2932    /// the match expression is not a literal string, or if the search modifier is not valid.
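    ///
    /// Example (MySQL): `MATCH (title, body) AGAINST ('database' IN NATURAL LANGUAGE MODE)`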
2933    pub fn parse_match_against(&mut self) -> Result<Expr, ParserError> {
2934        let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
2935
2936        self.expect_keyword_is(Keyword::AGAINST)?;
2937
2938        self.expect_token(&Token::LParen)?;
2939
2940        // MySQL is too permissive about the value; we can't fully validate it at the syntax level.
2941        let match_value = self.parse_value()?.value;
2942
2943        let in_natural_language_mode_keywords = &[
2944            Keyword::IN,
2945            Keyword::NATURAL,
2946            Keyword::LANGUAGE,
2947            Keyword::MODE,
2948        ];
2949
2950        let with_query_expansion_keywords = &[Keyword::WITH, Keyword::QUERY, Keyword::EXPANSION];
2951
2952        let in_boolean_mode_keywords = &[Keyword::IN, Keyword::BOOLEAN, Keyword::MODE];
2953
2954        let opt_search_modifier = if self.parse_keywords(in_natural_language_mode_keywords) {
2955            if self.parse_keywords(with_query_expansion_keywords) {
2956                Some(SearchModifier::InNaturalLanguageModeWithQueryExpansion)
2957            } else {
2958                Some(SearchModifier::InNaturalLanguageMode)
2959            }
2960        } else if self.parse_keywords(in_boolean_mode_keywords) {
2961            Some(SearchModifier::InBooleanMode)
2962        } else if self.parse_keywords(with_query_expansion_keywords) {
2963            Some(SearchModifier::WithQueryExpansion)
2964        } else {
2965            None
2966        };
2967
2968        self.expect_token(&Token::RParen)?;
2969
2970        Ok(Expr::MatchAgainst {
2971            columns,
2972            match_value,
2973            opt_search_modifier,
2974        })
2975    }
2976
2977    /// Parse an `INTERVAL` expression.
2978    ///
2979    /// Some syntactically valid intervals:
2980    ///
2981    /// ```sql
2982    ///   1. INTERVAL '1' DAY
2983    ///   2. INTERVAL '1-1' YEAR TO MONTH
2984    ///   3. INTERVAL '1' SECOND
2985    ///   4. INTERVAL '1:1:1.1' HOUR (5) TO SECOND (5)
2986    ///   5. INTERVAL '1.1' SECOND (2, 2)
2987    ///   6. INTERVAL '1:1' HOUR (5) TO MINUTE (5)
2988    ///   7. (MySql & BigQuery only): INTERVAL 1 DAY
2989    /// ```
2990    ///
2991    /// Note that we do not currently attempt to parse the quoted value.
2992    pub fn parse_interval(&mut self) -> Result<Expr, ParserError> {
2993        // The SQL standard allows an optional sign before the value string, but
2994        // it is not clear if any implementations support that syntax, so we
2995        // don't currently try to parse it. (The sign can instead be included
2996        // inside the value string.)
2997
2998        // to match the different flavours of INTERVAL syntax, we only allow expressions
2999        // if the dialect requires an interval qualifier,
3000        // see https://github.com/sqlparser-rs/sqlparser-rs/pull/1398 for more details
3001        let value = if self.dialect.require_interval_qualifier() {
3002            // parse a whole expression so `INTERVAL 1 + 1 DAY` is valid
3003            self.parse_expr()?
3004        } else {
3005            // parse a prefix expression so `INTERVAL 1 DAY` is valid, but `INTERVAL 1 + 1 DAY` is not
3006            // this also means that `INTERVAL '5 days' > INTERVAL '1 day'` is treated properly
3007            self.parse_prefix()?
3008        };
3009
3010        // Following the string literal is a qualifier which indicates the units
3011        // of the duration specified in the string literal.
3012        //
3013        // Note that PostgreSQL allows omitting the qualifier, so we provide
3014        // this more general implementation.
3015        let leading_field = if self.next_token_is_temporal_unit() {
3016            Some(self.parse_date_time_field()?)
3017        } else if self.dialect.require_interval_qualifier() {
3018            return parser_err!(
3019                "INTERVAL requires a unit after the literal value",
3020                self.peek_token().span.start
3021            );
3022        } else {
3023            None
3024        };
3025
3026        let (leading_precision, last_field, fsec_precision) =
3027            if leading_field == Some(DateTimeField::Second) {
3028                // SQL mandates special syntax for `SECOND TO SECOND` literals.
3029                // Instead of
3030                //     `SECOND [(<leading precision>)] TO SECOND[(<fractional seconds precision>)]`
3031                // one must use the special format:
3032                //     `SECOND [( <leading precision> [ , <fractional seconds precision>] )]`
3033                let last_field = None;
3034                let (leading_precision, fsec_precision) = self.parse_optional_precision_scale()?;
3035                (leading_precision, last_field, fsec_precision)
3036            } else {
3037                let leading_precision = self.parse_optional_precision()?;
3038                if self.parse_keyword(Keyword::TO) {
3039                    let last_field = Some(self.parse_date_time_field()?);
3040                    let fsec_precision = if last_field == Some(DateTimeField::Second) {
3041                        self.parse_optional_precision()?
3042                    } else {
3043                        None
3044                    };
3045                    (leading_precision, last_field, fsec_precision)
3046                } else {
3047                    (leading_precision, None, None)
3048                }
3049            };
3050
3051        Ok(Expr::Interval(Interval {
3052            value: Box::new(value),
3053            leading_field,
3054            leading_precision,
3055            last_field,
3056            fractional_seconds_precision: fsec_precision,
3057        }))
3058    }
3059
3060    /// Peek at the next token and determine if it is a temporal unit
3061    /// like `second`.
3062    pub fn next_token_is_temporal_unit(&mut self) -> bool {
3063        if let Token::Word(word) = self.peek_token().token {
3064            matches!(
3065                word.keyword,
3066                Keyword::YEAR
3067                    | Keyword::YEARS
3068                    | Keyword::MONTH
3069                    | Keyword::MONTHS
3070                    | Keyword::WEEK
3071                    | Keyword::WEEKS
3072                    | Keyword::DAY
3073                    | Keyword::DAYS
3074                    | Keyword::HOUR
3075                    | Keyword::HOURS
3076                    | Keyword::MINUTE
3077                    | Keyword::MINUTES
3078                    | Keyword::SECOND
3079                    | Keyword::SECONDS
3080                    | Keyword::CENTURY
3081                    | Keyword::DECADE
3082                    | Keyword::DOW
3083                    | Keyword::DOY
3084                    | Keyword::EPOCH
3085                    | Keyword::ISODOW
3086                    | Keyword::ISOYEAR
3087                    | Keyword::JULIAN
3088                    | Keyword::MICROSECOND
3089                    | Keyword::MICROSECONDS
3090                    | Keyword::MILLENIUM
3091                    | Keyword::MILLENNIUM
3092                    | Keyword::MILLISECOND
3093                    | Keyword::MILLISECONDS
3094                    | Keyword::NANOSECOND
3095                    | Keyword::NANOSECONDS
3096                    | Keyword::QUARTER
3097                    | Keyword::TIMEZONE
3098                    | Keyword::TIMEZONE_HOUR
3099                    | Keyword::TIMEZONE_MINUTE
3100            )
3101        } else {
3102            false
3103        }
3104    }
3105
3106    /// Syntax
3107    /// ```sql
3108    /// -- typed
3109    /// STRUCT<[field_name] field_type, ...>( expr1 [, ... ])
3110    /// -- typeless
3111    /// STRUCT( expr1 [AS field_name] [, ... ])
3112    /// ```
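    ///
    /// e.g. `STRUCT<x INT64, y STRING>(1, 'foo')` (typed) or `STRUCT(1 AS x, 'foo' AS y)` (typeless).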
3113    fn parse_struct_literal(&mut self) -> Result<Expr, ParserError> {
3114        // Parse the field definitions, if present: `<[field_name] field_type, ...>`
3115        self.prev_token();
3116        let (fields, trailing_bracket) =
3117            self.parse_struct_type_def(Self::parse_struct_field_def)?;
3118        if trailing_bracket.0 {
3119            return parser_err!(
3120                "unmatched > in STRUCT literal",
3121                self.peek_token().span.start
3122            );
3123        }
3124
3125        // Parse the struct values `(expr1 [, ... ])`
3126        self.expect_token(&Token::LParen)?;
3127        let values = self
3128            .parse_comma_separated(|parser| parser.parse_struct_field_expr(!fields.is_empty()))?;
3129        self.expect_token(&Token::RParen)?;
3130
3131        Ok(Expr::Struct { values, fields })
3132    }
3133
3134    /// Parse an expression value for a struct literal
3135    /// Syntax
3136    /// ```sql
3137    /// expr [AS name]
3138    /// ```
3139    ///
3140    /// For BigQuery [1], the parameter `typed_syntax` is set to true if the expression
3141    /// is to be parsed as a field expression declared using typed
3142    /// struct syntax [2], and false if using typeless struct syntax [3].
3143    ///
3144    /// [1]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#constructing_a_struct
3145    /// [2]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#typed_struct_syntax
3146    /// [3]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#typeless_struct_syntax
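    ///
    /// e.g. the `1 AS x` in the typeless literal `STRUCT(1 AS x)`; under typed
    /// syntax `AS` is rejected and the bare expression is returned.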
3147    fn parse_struct_field_expr(&mut self, typed_syntax: bool) -> Result<Expr, ParserError> {
3148        let expr = self.parse_expr()?;
3149        if self.parse_keyword(Keyword::AS) {
3150            if typed_syntax {
3151                return parser_err!("Typed syntax does not allow AS", {
3152                    self.prev_token();
3153                    self.peek_token().span.start
3154                });
3155            }
3156            let field_name = self.parse_identifier()?;
3157            Ok(Expr::Named {
3158                expr: expr.into(),
3159                name: field_name,
3160            })
3161        } else {
3162            Ok(expr)
3163        }
3164    }
3165
3166    /// Parse a Struct type definition as a sequence of field-value pairs.
3167    /// The syntax of the Struct elem differs by dialect so it is customised
3168    /// by the `elem_parser` argument.
3169    ///
3170    /// Syntax
3171    /// ```sql
3172    /// Hive:
3173    /// STRUCT<field_name: field_type>
3174    ///
3175    /// BigQuery:
3176    /// STRUCT<[field_name] field_type>
3177    /// ```
3178    fn parse_struct_type_def<F>(
3179        &mut self,
3180        mut elem_parser: F,
3181    ) -> Result<(Vec<StructField>, MatchedTrailingBracket), ParserError>
3182    where
3183        F: FnMut(&mut Parser<'a>) -> Result<(StructField, MatchedTrailingBracket), ParserError>,
3184    {
3185        self.expect_keyword_is(Keyword::STRUCT)?;
3186
3187        // Nothing to do if we have no type information.
3188        if Token::Lt != self.peek_token() {
3189            return Ok((Default::default(), false.into()));
3190        }
3191        self.next_token();
3192
3193        let mut field_defs = vec![];
3194        let trailing_bracket = loop {
3195            let (def, trailing_bracket) = elem_parser(self)?;
3196            field_defs.push(def);
3197            // The list of field definitions ends when a `>>` is matched or there is no trailing comma.
3198            if trailing_bracket.0 || !self.consume_token(&Token::Comma) {
3199                break trailing_bracket;
3200            }
3201        };
3202
3203        Ok((
3204            field_defs,
3205            self.expect_closing_angle_bracket(trailing_bracket)?,
3206        ))
3207    }
3208
3209    /// Duckdb Struct Data Type <https://duckdb.org/docs/sql/data_types/struct.html#retrieving-from-structs>
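    ///
    /// e.g. `STRUCT(name VARCHAR, age INTEGER)`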
3210    fn parse_duckdb_struct_type_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3211        self.expect_keyword_is(Keyword::STRUCT)?;
3212        self.expect_token(&Token::LParen)?;
3213        let struct_body = self.parse_comma_separated(|parser| {
3214            let field_name = parser.parse_identifier()?;
3215            let field_type = parser.parse_data_type()?;
3216
3217            Ok(StructField {
3218                field_name: Some(field_name),
3219                field_type,
3220                options: None,
3221            })
3222        });
3223        self.expect_token(&Token::RParen)?;
3224        struct_body
3225    }
3226
3227    /// Parse a field definition in a [struct] or [tuple].
3228    /// Syntax:
3229    ///
3230    /// ```sql
3231    /// [field_name] field_type
3232    /// ```
3233    ///
3234    /// [struct]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#declaring_a_struct_type
3235    /// [tuple]: https://clickhouse.com/docs/en/sql-reference/data-types/tuple
3236    fn parse_struct_field_def(
3237        &mut self,
3238    ) -> Result<(StructField, MatchedTrailingBracket), ParserError> {
3239        // Look beyond the next item to infer whether both field name
3240        // and type are specified.
3241        let is_anonymous_field = !matches!(
3242            (self.peek_nth_token(0).token, self.peek_nth_token(1).token),
3243            (Token::Word(_), Token::Word(_))
3244        );
3245
3246        let field_name = if is_anonymous_field {
3247            None
3248        } else {
3249            Some(self.parse_identifier()?)
3250        };
3251
3252        let (field_type, trailing_bracket) = self.parse_data_type_helper()?;
3253
3254        let options = self.maybe_parse_options(Keyword::OPTIONS)?;
3255        Ok((
3256            StructField {
3257                field_name,
3258                field_type,
3259                options,
3260            },
3261            trailing_bracket,
3262        ))
3263    }
3264
3265    /// DuckDB specific: Parse a Union type definition as a sequence of field-value pairs.
3266    ///
3267    /// Syntax:
3268    ///
3269    /// ```sql
3270    /// UNION(field_name field_type[,...])
3271    /// ```
3272    ///
3273    /// [1]: https://duckdb.org/docs/sql/data_types/union.html
3274    fn parse_union_type_def(&mut self) -> Result<Vec<UnionField>, ParserError> {
3275        self.expect_keyword_is(Keyword::UNION)?;
3276
3277        self.expect_token(&Token::LParen)?;
3278
3279        let fields = self.parse_comma_separated(|p| {
3280            Ok(UnionField {
3281                field_name: p.parse_identifier()?,
3282                field_type: p.parse_data_type()?,
3283            })
3284        })?;
3285
3286        self.expect_token(&Token::RParen)?;
3287
3288        Ok(fields)
3289    }
3290
3291    /// DuckDB and ClickHouse specific: Parse a duckdb [dictionary] or a clickhouse [map] setting
3292    ///
3293    /// Syntax:
3294    ///
3295    /// ```sql
3296    /// {'field_name': expr1[, ... ]}
3297    /// ```
3298    ///
3299    /// [dictionary]: https://duckdb.org/docs/sql/data_types/struct#creating-structs
3300    /// [map]: https://clickhouse.com/docs/operations/settings/settings#additional_table_filters
3301    fn parse_dictionary(&mut self) -> Result<Expr, ParserError> {
3302        self.expect_token(&Token::LBrace)?;
3303
3304        let fields = self.parse_comma_separated0(Self::parse_dictionary_field, Token::RBrace)?;
3305
3306        self.expect_token(&Token::RBrace)?;
3307
3308        Ok(Expr::Dictionary(fields))
3309    }
3310
3311    /// Parse a field for a duckdb [dictionary] or a clickhouse [map] setting
3312    ///
3313    /// Syntax
3314    ///
3315    /// ```sql
3316    /// 'name': expr
3317    /// ```
3318    ///
3319    /// [dictionary]: https://duckdb.org/docs/sql/data_types/struct#creating-structs
3320    /// [map]: https://clickhouse.com/docs/operations/settings/settings#additional_table_filters
3321    fn parse_dictionary_field(&mut self) -> Result<DictionaryField, ParserError> {
3322        let key = self.parse_identifier()?;
3323
3324        self.expect_token(&Token::Colon)?;
3325
3326        let expr = self.parse_expr()?;
3327
3328        Ok(DictionaryField {
3329            key,
3330            value: Box::new(expr),
3331        })
3332    }
3333
3334    /// DuckDB specific: Parse a duckdb [map]
3335    ///
3336    /// Syntax:
3337    ///
3338    /// ```sql
3339    /// Map {key1: value1[, ... ]}
3340    /// ```
3341    ///
3342    /// [map]: https://duckdb.org/docs/sql/data_types/map.html#creating-maps
3343    fn parse_duckdb_map_literal(&mut self) -> Result<Expr, ParserError> {
3344        self.expect_token(&Token::LBrace)?;
3345        let fields = self.parse_comma_separated0(Self::parse_duckdb_map_field, Token::RBrace)?;
3346        self.expect_token(&Token::RBrace)?;
3347        Ok(Expr::Map(Map { entries: fields }))
3348    }
3349
3350    /// Parse a field for a duckdb [map]
3351    ///
3352    /// Syntax
3353    ///
3354    /// ```sql
3355    /// key: value
3356    /// ```
3357    ///
3358    /// [map]: https://duckdb.org/docs/sql/data_types/map.html#creating-maps
3359    fn parse_duckdb_map_field(&mut self) -> Result<MapEntry, ParserError> {
3360        let key = self.parse_expr()?;
3361
3362        self.expect_token(&Token::Colon)?;
3363
3364        let value = self.parse_expr()?;
3365
3366        Ok(MapEntry {
3367            key: Box::new(key),
3368            value: Box::new(value),
3369        })
3370    }
3371
3372    /// Parse clickhouse [map]
3373    ///
3374    /// Syntax
3375    ///
3376    /// ```sql
3377    /// Map(key_data_type, value_data_type)
3378    /// ```
3379    ///
3380    /// [map]: https://clickhouse.com/docs/en/sql-reference/data-types/map
3381    fn parse_click_house_map_def(&mut self) -> Result<(DataType, DataType), ParserError> {
3382        self.expect_keyword_is(Keyword::MAP)?;
3383        self.expect_token(&Token::LParen)?;
3384        let key_data_type = self.parse_data_type()?;
3385        self.expect_token(&Token::Comma)?;
3386        let value_data_type = self.parse_data_type()?;
3387        self.expect_token(&Token::RParen)?;
3388
3389        Ok((key_data_type, value_data_type))
3390    }
3391
3392    /// Parse clickhouse [tuple]
3393    ///
3394    /// Syntax
3395    ///
3396    /// ```sql
3397    /// Tuple([field_name] field_type, ...)
3398    /// ```
3399    ///
3400    /// [tuple]: https://clickhouse.com/docs/en/sql-reference/data-types/tuple
3401    fn parse_click_house_tuple_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3402        self.expect_keyword_is(Keyword::TUPLE)?;
3403        self.expect_token(&Token::LParen)?;
3404        let mut field_defs = vec![];
3405        loop {
3406            let (def, _) = self.parse_struct_field_def()?;
3407            field_defs.push(def);
3408            if !self.consume_token(&Token::Comma) {
3409                break;
3410            }
3411        }
3412        self.expect_token(&Token::RParen)?;
3413
3414        Ok(field_defs)
3415    }
3416
3417    /// For nested types that use the angle bracket syntax, this matches either
3418    /// `>`, `>>` or nothing depending on which variant is expected (specified by the previously
3419    /// matched `trailing_bracket` argument). It returns whether there is a trailing
3420    /// bracket left to be matched (i.e. whether `>>` was matched).
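    ///
    /// For example, when parsing the inner type in `STRUCT<a STRUCT<b INT64>>`, the
    /// `>>` token is consumed and the call reports that the outer level's closing `>`
    /// has already been matched.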
3421    fn expect_closing_angle_bracket(
3422        &mut self,
3423        trailing_bracket: MatchedTrailingBracket,
3424    ) -> Result<MatchedTrailingBracket, ParserError> {
3425        let trailing_bracket = if !trailing_bracket.0 {
3426            match self.peek_token().token {
3427                Token::Gt => {
3428                    self.next_token();
3429                    false.into()
3430                }
3431                Token::ShiftRight => {
3432                    self.next_token();
3433                    true.into()
3434                }
3435                _ => return self.expected(">", self.peek_token()),
3436            }
3437        } else {
3438            false.into()
3439        };
3440
3441        Ok(trailing_bracket)
3442    }
3443
3444    /// Parse an operator following an expression
3445    pub fn parse_infix(&mut self, expr: Expr, precedence: u8) -> Result<Expr, ParserError> {
3446        // allow the dialect to override infix parsing
3447        if let Some(infix) = self.dialect.parse_infix(self, &expr, precedence) {
3448            return infix;
3449        }
3450
3451        let dialect = self.dialect;
3452
3453        self.advance_token();
3454        let tok = self.get_current_token();
3455        debug!("infix: {tok:?}");
3456        let tok_index = self.get_current_index();
3457        let span = tok.span;
3458        let regular_binary_operator = match &tok.token {
3459            Token::Spaceship => Some(BinaryOperator::Spaceship),
3460            Token::DoubleEq => Some(BinaryOperator::Eq),
3461            Token::Assignment => Some(BinaryOperator::Assignment),
3462            Token::Eq => Some(BinaryOperator::Eq),
3463            Token::Neq => Some(BinaryOperator::NotEq),
3464            Token::Gt => Some(BinaryOperator::Gt),
3465            Token::GtEq => Some(BinaryOperator::GtEq),
3466            Token::Lt => Some(BinaryOperator::Lt),
3467            Token::LtEq => Some(BinaryOperator::LtEq),
3468            Token::Plus => Some(BinaryOperator::Plus),
3469            Token::Minus => Some(BinaryOperator::Minus),
3470            Token::Mul => Some(BinaryOperator::Multiply),
3471            Token::Mod => Some(BinaryOperator::Modulo),
3472            Token::StringConcat => Some(BinaryOperator::StringConcat),
3473            Token::Pipe => Some(BinaryOperator::BitwiseOr),
3474            Token::Caret => {
3475                // In PostgreSQL, ^ stands for the exponentiation operation,
3476                // and # stands for XOR. See https://www.postgresql.org/docs/current/functions-math.html
3477                if dialect_is!(dialect is PostgreSqlDialect) {
3478                    Some(BinaryOperator::PGExp)
3479                } else {
3480                    Some(BinaryOperator::BitwiseXor)
3481                }
3482            }
3483            Token::Ampersand => Some(BinaryOperator::BitwiseAnd),
3484            Token::Div => Some(BinaryOperator::Divide),
3485            Token::DuckIntDiv if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
3486                Some(BinaryOperator::DuckIntegerDivide)
3487            }
3488            Token::ShiftLeft if dialect_is!(dialect is PostgreSqlDialect | DuckDbDialect | GenericDialect | RedshiftSqlDialect) => {
3489                Some(BinaryOperator::PGBitwiseShiftLeft)
3490            }
3491            Token::ShiftRight if dialect_is!(dialect is PostgreSqlDialect | DuckDbDialect | GenericDialect | RedshiftSqlDialect) => {
3492                Some(BinaryOperator::PGBitwiseShiftRight)
3493            }
3494            Token::Sharp if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3495                Some(BinaryOperator::PGBitwiseXor)
3496            }
3497            Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3498                Some(BinaryOperator::PGOverlap)
3499            }
3500            Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3501                Some(BinaryOperator::PGOverlap)
3502            }
3503            Token::CaretAt if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3504                Some(BinaryOperator::PGStartsWith)
3505            }
3506            Token::Tilde => Some(BinaryOperator::PGRegexMatch),
3507            Token::TildeAsterisk => Some(BinaryOperator::PGRegexIMatch),
3508            Token::ExclamationMarkTilde => Some(BinaryOperator::PGRegexNotMatch),
3509            Token::ExclamationMarkTildeAsterisk => Some(BinaryOperator::PGRegexNotIMatch),
3510            Token::DoubleTilde => Some(BinaryOperator::PGLikeMatch),
3511            Token::DoubleTildeAsterisk => Some(BinaryOperator::PGILikeMatch),
3512            Token::ExclamationMarkDoubleTilde => Some(BinaryOperator::PGNotLikeMatch),
3513            Token::ExclamationMarkDoubleTildeAsterisk => Some(BinaryOperator::PGNotILikeMatch),
3514            Token::Arrow => Some(BinaryOperator::Arrow),
3515            Token::LongArrow => Some(BinaryOperator::LongArrow),
3516            Token::HashArrow => Some(BinaryOperator::HashArrow),
3517            Token::HashLongArrow => Some(BinaryOperator::HashLongArrow),
3518            Token::AtArrow => Some(BinaryOperator::AtArrow),
3519            Token::ArrowAt => Some(BinaryOperator::ArrowAt),
3520            Token::HashMinus => Some(BinaryOperator::HashMinus),
3521            Token::AtQuestion => Some(BinaryOperator::AtQuestion),
3522            Token::AtAt => Some(BinaryOperator::AtAt),
3523            Token::Question => Some(BinaryOperator::Question),
3524            Token::QuestionAnd => Some(BinaryOperator::QuestionAnd),
3525            Token::QuestionPipe => Some(BinaryOperator::QuestionPipe),
3526            Token::CustomBinaryOperator(s) => Some(BinaryOperator::Custom(s.clone())),
3527            Token::DoubleSharp if self.dialect.supports_geometric_types() => {
3528                Some(BinaryOperator::DoubleHash)
3529            }
3530
3531            Token::AmpersandLeftAngleBracket if self.dialect.supports_geometric_types() => {
3532                Some(BinaryOperator::AndLt)
3533            }
3534            Token::AmpersandRightAngleBracket if self.dialect.supports_geometric_types() => {
3535                Some(BinaryOperator::AndGt)
3536            }
3537            Token::QuestionMarkDash if self.dialect.supports_geometric_types() => {
3538                Some(BinaryOperator::QuestionDash)
3539            }
3540            Token::AmpersandLeftAngleBracketVerticalBar
3541                if self.dialect.supports_geometric_types() =>
3542            {
3543                Some(BinaryOperator::AndLtPipe)
3544            }
3545            Token::VerticalBarAmpersandRightAngleBracket
3546                if self.dialect.supports_geometric_types() =>
3547            {
3548                Some(BinaryOperator::PipeAndGt)
3549            }
3550            Token::TwoWayArrow if self.dialect.supports_geometric_types() => {
3551                Some(BinaryOperator::LtDashGt)
3552            }
3553            Token::LeftAngleBracketCaret if self.dialect.supports_geometric_types() => {
3554                Some(BinaryOperator::LtCaret)
3555            }
3556            Token::RightAngleBracketCaret if self.dialect.supports_geometric_types() => {
3557                Some(BinaryOperator::GtCaret)
3558            }
3559            Token::QuestionMarkSharp if self.dialect.supports_geometric_types() => {
3560                Some(BinaryOperator::QuestionHash)
3561            }
3562            Token::QuestionMarkDoubleVerticalBar if self.dialect.supports_geometric_types() => {
3563                Some(BinaryOperator::QuestionDoublePipe)
3564            }
3565            Token::QuestionMarkDashVerticalBar if self.dialect.supports_geometric_types() => {
3566                Some(BinaryOperator::QuestionDashPipe)
3567            }
3568            Token::TildeEqual if self.dialect.supports_geometric_types() => {
3569                Some(BinaryOperator::TildeEq)
3570            }
3571            Token::ShiftLeftVerticalBar if self.dialect.supports_geometric_types() => {
3572                Some(BinaryOperator::LtLtPipe)
3573            }
3574            Token::VerticalBarShiftRight if self.dialect.supports_geometric_types() => {
3575                Some(BinaryOperator::PipeGtGt)
3576            }
3577            Token::AtSign if self.dialect.supports_geometric_types() => Some(BinaryOperator::At),
3578
3579            Token::Word(w) => match w.keyword {
3580                Keyword::AND => Some(BinaryOperator::And),
3581                Keyword::OR => Some(BinaryOperator::Or),
3582                Keyword::XOR => Some(BinaryOperator::Xor),
3583                Keyword::OVERLAPS => Some(BinaryOperator::Overlaps),
3584                Keyword::OPERATOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3585                    self.expect_token(&Token::LParen)?;
3586                    // there are special rules for operator names in
3587                    // postgres so we can not use 'parse_object'
3588                    // or similar.
3589                    // See https://www.postgresql.org/docs/current/sql-createoperator.html
3590                    let mut idents = vec![];
3591                    loop {
3592                        self.advance_token();
3593                        idents.push(self.get_current_token().to_string());
3594                        if !self.consume_token(&Token::Period) {
3595                            break;
3596                        }
3597                    }
3598                    self.expect_token(&Token::RParen)?;
3599                    Some(BinaryOperator::PGCustomBinaryOperator(idents))
3600                }
3601                _ => None,
3602            },
3603            _ => None,
3604        };
3605
3606        let tok = self.token_at(tok_index);
3607        if let Some(op) = regular_binary_operator {
3608            if let Some(keyword) =
3609                self.parse_one_of_keywords(&[Keyword::ANY, Keyword::ALL, Keyword::SOME])
3610            {
3611                self.expect_token(&Token::LParen)?;
3612                let right = if self.peek_sub_query() {
3613                    // We have a subquery ahead (SELECT/WITH ...); need to rewind and
3614                    // use the parentheses for parsing the subquery as an expression.
3615                    self.prev_token(); // LParen
3616                    self.parse_subexpr(precedence)?
3617                } else {
3618                    // Non-subquery expression
3619                    let right = self.parse_subexpr(precedence)?;
3620                    self.expect_token(&Token::RParen)?;
3621                    right
3622                };
3623
3624                if !matches!(
3625                    op,
3626                    BinaryOperator::Gt
3627                        | BinaryOperator::Lt
3628                        | BinaryOperator::GtEq
3629                        | BinaryOperator::LtEq
3630                        | BinaryOperator::Eq
3631                        | BinaryOperator::NotEq
3632                        | BinaryOperator::PGRegexMatch
3633                        | BinaryOperator::PGRegexIMatch
3634                        | BinaryOperator::PGRegexNotMatch
3635                        | BinaryOperator::PGRegexNotIMatch
3636                        | BinaryOperator::PGLikeMatch
3637                        | BinaryOperator::PGILikeMatch
3638                        | BinaryOperator::PGNotLikeMatch
3639                        | BinaryOperator::PGNotILikeMatch
3640                ) {
3641                    return parser_err!(
3642                        format!(
3643                        "Expected one of [=, >, <, =>, =<, !=, ~, ~*, !~, !~*, ~~, ~~*, !~~, !~~*] as comparison operator, found: {op}"
3644                    ),
3645                        span.start
3646                    );
3647                };
3648
3649                Ok(match keyword {
3650                    Keyword::ALL => Expr::AllOp {
3651                        left: Box::new(expr),
3652                        compare_op: op,
3653                        right: Box::new(right),
3654                    },
3655                    Keyword::ANY | Keyword::SOME => Expr::AnyOp {
3656                        left: Box::new(expr),
3657                        compare_op: op,
3658                        right: Box::new(right),
3659                        is_some: keyword == Keyword::SOME,
3660                    },
3661                    unexpected_keyword => return Err(ParserError::ParserError(
3662                        format!("Internal parser error: expected any of {{ALL, ANY, SOME}}, got {unexpected_keyword:?}"),
3663                    )),
3664                })
3665            } else {
3666                Ok(Expr::BinaryOp {
3667                    left: Box::new(expr),
3668                    op,
3669                    right: Box::new(self.parse_subexpr(precedence)?),
3670                })
3671            }
3672        } else if let Token::Word(w) = &tok.token {
3673            match w.keyword {
3674                Keyword::IS => {
3675                    if self.parse_keyword(Keyword::NULL) {
3676                        Ok(Expr::IsNull(Box::new(expr)))
3677                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
3678                        Ok(Expr::IsNotNull(Box::new(expr)))
3679                    } else if self.parse_keywords(&[Keyword::TRUE]) {
3680                        Ok(Expr::IsTrue(Box::new(expr)))
3681                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::TRUE]) {
3682                        Ok(Expr::IsNotTrue(Box::new(expr)))
3683                    } else if self.parse_keywords(&[Keyword::FALSE]) {
3684                        Ok(Expr::IsFalse(Box::new(expr)))
3685                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::FALSE]) {
3686                        Ok(Expr::IsNotFalse(Box::new(expr)))
3687                    } else if self.parse_keywords(&[Keyword::UNKNOWN]) {
3688                        Ok(Expr::IsUnknown(Box::new(expr)))
3689                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::UNKNOWN]) {
3690                        Ok(Expr::IsNotUnknown(Box::new(expr)))
3691                    } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::FROM]) {
3692                        let expr2 = self.parse_expr()?;
3693                        Ok(Expr::IsDistinctFrom(Box::new(expr), Box::new(expr2)))
3694                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::DISTINCT, Keyword::FROM])
3695                    {
3696                        let expr2 = self.parse_expr()?;
3697                        Ok(Expr::IsNotDistinctFrom(Box::new(expr), Box::new(expr2)))
3698                    } else if let Ok(is_normalized) = self.parse_unicode_is_normalized(expr) {
3699                        Ok(is_normalized)
3700                    } else {
3701                        self.expected(
3702                            "[NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS",
3703                            self.peek_token(),
3704                        )
3705                    }
3706                }
3707                Keyword::AT => {
3708                    self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
3709                    Ok(Expr::AtTimeZone {
3710                        timestamp: Box::new(expr),
3711                        time_zone: Box::new(self.parse_subexpr(precedence)?),
3712                    })
3713                }
3714                Keyword::NOT
3715                | Keyword::IN
3716                | Keyword::BETWEEN
3717                | Keyword::LIKE
3718                | Keyword::ILIKE
3719                | Keyword::SIMILAR
3720                | Keyword::REGEXP
3721                | Keyword::RLIKE => {
3722                    self.prev_token();
3723                    let negated = self.parse_keyword(Keyword::NOT);
3724                    let regexp = self.parse_keyword(Keyword::REGEXP);
3725                    let rlike = self.parse_keyword(Keyword::RLIKE);
3726                    let null = if !self.in_column_definition_state() {
3727                        self.parse_keyword(Keyword::NULL)
3728                    } else {
3729                        false
3730                    };
3731                    if regexp || rlike {
3732                        Ok(Expr::RLike {
3733                            negated,
3734                            expr: Box::new(expr),
3735                            pattern: Box::new(
3736                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3737                            ),
3738                            regexp,
3739                        })
3740                    } else if negated && null {
3741                        Ok(Expr::IsNotNull(Box::new(expr)))
3742                    } else if self.parse_keyword(Keyword::IN) {
3743                        self.parse_in(expr, negated)
3744                    } else if self.parse_keyword(Keyword::BETWEEN) {
3745                        self.parse_between(expr, negated)
3746                    } else if self.parse_keyword(Keyword::LIKE) {
3747                        Ok(Expr::Like {
3748                            negated,
3749                            any: self.parse_keyword(Keyword::ANY),
3750                            expr: Box::new(expr),
3751                            pattern: Box::new(
3752                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3753                            ),
3754                            escape_char: self.parse_escape_char()?,
3755                        })
3756                    } else if self.parse_keyword(Keyword::ILIKE) {
3757                        Ok(Expr::ILike {
3758                            negated,
3759                            any: self.parse_keyword(Keyword::ANY),
3760                            expr: Box::new(expr),
3761                            pattern: Box::new(
3762                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3763                            ),
3764                            escape_char: self.parse_escape_char()?,
3765                        })
3766                    } else if self.parse_keywords(&[Keyword::SIMILAR, Keyword::TO]) {
3767                        Ok(Expr::SimilarTo {
3768                            negated,
3769                            expr: Box::new(expr),
3770                            pattern: Box::new(
3771                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3772                            ),
3773                            escape_char: self.parse_escape_char()?,
3774                        })
3775                    } else {
3776                        self.expected("IN or BETWEEN after NOT", self.peek_token())
3777                    }
3778                }
3779                Keyword::NOTNULL if dialect.supports_notnull_operator() => {
3780                    Ok(Expr::IsNotNull(Box::new(expr)))
3781                }
3782                Keyword::MEMBER => {
3783                    if self.parse_keyword(Keyword::OF) {
3784                        self.expect_token(&Token::LParen)?;
3785                        let array = self.parse_expr()?;
3786                        self.expect_token(&Token::RParen)?;
3787                        Ok(Expr::MemberOf(MemberOf {
3788                            value: Box::new(expr),
3789                            array: Box::new(array),
3790                        }))
3791                    } else {
3792                        self.expected("OF after MEMBER", self.peek_token())
3793                    }
3794                }
3795                // Can only happen if `get_next_precedence` got out of sync with this function
3796                _ => parser_err!(
3797                    format!("No infix parser for token {:?}", tok.token),
3798                    tok.span.start
3799                ),
3800            }
3801        } else if Token::DoubleColon == *tok {
3802            Ok(Expr::Cast {
3803                kind: CastKind::DoubleColon,
3804                expr: Box::new(expr),
3805                data_type: self.parse_data_type()?,
3806                format: None,
3807            })
3808        } else if Token::ExclamationMark == *tok && self.dialect.supports_factorial_operator() {
3809            Ok(Expr::UnaryOp {
3810                op: UnaryOperator::PGPostfixFactorial,
3811                expr: Box::new(expr),
3812            })
3813        } else if Token::LBracket == *tok && self.dialect.supports_partiql()
3814            || (dialect_of!(self is SnowflakeDialect | GenericDialect) && Token::Colon == *tok)
3815        {
3816            self.prev_token();
3817            self.parse_json_access(expr)
3818        } else {
3819            // Can only happen if `get_next_precedence` got out of sync with this function
3820            parser_err!(
3821                format!("No infix parser for token {:?}", tok.token),
3822                tok.span.start
3823            )
3824        }
3825    }
3826
3827    /// Parse the `ESCAPE CHAR` portion of `LIKE`, `ILIKE`, and `SIMILAR TO`
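    ///
    /// e.g. the `ESCAPE '!'` in `name LIKE 'J!_%' ESCAPE '!'`.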
3828    pub fn parse_escape_char(&mut self) -> Result<Option<Value>, ParserError> {
3829        if self.parse_keyword(Keyword::ESCAPE) {
3830            Ok(Some(self.parse_value()?.into()))
3831        } else {
3832            Ok(None)
3833        }
3834    }
3835
3836    /// Parses an array subscript like
3837    /// * `[:]`
3838    /// * `[l]`
3839    /// * `[l:]`
3840    /// * `[:u]`
3841    /// * `[l:u]`
3842    /// * `[l:u:s]`
3843    ///
3844    /// Parser is right after `[`
3845    fn parse_subscript_inner(&mut self) -> Result<Subscript, ParserError> {
3846        // at either `<lower>:(rest)` or `:(rest)]`
3847        let lower_bound = if self.consume_token(&Token::Colon) {
3848            None
3849        } else {
3850            Some(self.parse_expr()?)
3851        };
3852
3853        // check for end
3854        if self.consume_token(&Token::RBracket) {
3855            if let Some(lower_bound) = lower_bound {
3856                return Ok(Subscript::Index { index: lower_bound });
3857            };
3858            return Ok(Subscript::Slice {
3859                lower_bound,
3860                upper_bound: None,
3861                stride: None,
3862            });
3863        }
3864
3865        // consume the `:`
3866        if lower_bound.is_some() {
3867            self.expect_token(&Token::Colon)?;
3868        }
3869
3870        // we are now at either `]` or `<upper>(rest)]`
3871        let upper_bound = if self.consume_token(&Token::RBracket) {
3872            return Ok(Subscript::Slice {
3873                lower_bound,
3874                upper_bound: None,
3875                stride: None,
3876            });
3877        } else {
3878            Some(self.parse_expr()?)
3879        };
3880
3881        // check for end
3882        if self.consume_token(&Token::RBracket) {
3883            return Ok(Subscript::Slice {
3884                lower_bound,
3885                upper_bound,
3886                stride: None,
3887            });
3888        }
3889
3890        // we are now at `:]` or `:stride]`
3891        self.expect_token(&Token::Colon)?;
3892        let stride = if self.consume_token(&Token::RBracket) {
3893            None
3894        } else {
3895            Some(self.parse_expr()?)
3896        };
3897
3898        if stride.is_some() {
3899            self.expect_token(&Token::RBracket)?;
3900        }
3901
3902        Ok(Subscript::Slice {
3903            lower_bound,
3904            upper_bound,
3905            stride,
3906        })
3907    }
3908
3909    /// Parse a multi-dimension array accessing like `[1:3][1][1]`
3910    pub fn parse_multi_dim_subscript(
3911        &mut self,
3912        chain: &mut Vec<AccessExpr>,
3913    ) -> Result<(), ParserError> {
3914        while self.consume_token(&Token::LBracket) {
3915            self.parse_subscript(chain)?;
3916        }
3917        Ok(())
3918    }
3919
3920    /// Parses an array subscript like `[1:3]`
3921    ///
3922    /// Parser is right after `[`
3923    fn parse_subscript(&mut self, chain: &mut Vec<AccessExpr>) -> Result<(), ParserError> {
3924        let subscript = self.parse_subscript_inner()?;
3925        chain.push(AccessExpr::Subscript(subscript));
3926        Ok(())
3927    }
3928
3929    fn parse_json_path_object_key(&mut self) -> Result<JsonPathElem, ParserError> {
3930        let token = self.next_token();
3931        match token.token {
3932            Token::Word(Word {
3933                value,
3934                // path segments in SF dot notation can be unquoted or double-quoted
3935                quote_style: quote_style @ (Some('"') | None),
3936                // some experimentation suggests that snowflake permits
3937                // any keyword here unquoted.
3938                keyword: _,
3939            }) => Ok(JsonPathElem::Dot {
3940                key: value,
3941                quoted: quote_style.is_some(),
3942            }),
3943
3944            // This token should never be generated on snowflake or generic
3945            // dialects, but we handle it just in case this is used on future
3946            // dialects.
3947            Token::DoubleQuotedString(key) => Ok(JsonPathElem::Dot { key, quoted: true }),
3948
3949            _ => self.expected("variant object key name", token),
3950        }
3951    }
3952
3953    fn parse_json_access(&mut self, expr: Expr) -> Result<Expr, ParserError> {
3954        let path = self.parse_json_path()?;
3955        Ok(Expr::JsonAccess {
3956            value: Box::new(expr),
3957            path,
3958        })
3959    }
3960
3961    fn parse_json_path(&mut self) -> Result<JsonPath, ParserError> {
3962        let mut path = Vec::new();
3963        loop {
3964            match self.next_token().token {
3965                Token::Colon if path.is_empty() => {
3966                    path.push(self.parse_json_path_object_key()?);
3967                }
3968                Token::Period if !path.is_empty() => {
3969                    path.push(self.parse_json_path_object_key()?);
3970                }
3971                Token::LBracket => {
3972                    let key = self.parse_expr()?;
3973                    self.expect_token(&Token::RBracket)?;
3974
3975                    path.push(JsonPathElem::Bracket { key });
3976                }
3977                _ => {
3978                    self.prev_token();
3979                    break;
3980                }
3981            };
3982        }
3983
3984        debug_assert!(!path.is_empty());
3985        Ok(JsonPath { path })
3986    }
3987
3988    /// Parses the parens following the `[ NOT ] IN` operator.
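    ///
    /// A minimal usage sketch with a [`GenericDialect`], starting right after the `IN`
    /// keyword has been consumed (illustrative only; the identifier stands in for the
    /// already-parsed left-hand side):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::ast::{Expr, Ident};
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("(1, 2, 3)").unwrap();
    /// let expr = parser.parse_in(Expr::Identifier(Ident::new("x")), false).unwrap();
    /// assert!(matches!(expr, Expr::InList { negated: false, .. }));
    /// ```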
3989    pub fn parse_in(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
3990        // BigQuery allows `IN UNNEST(array_expression)`
3991        // https://cloud.google.com/bigquery/docs/reference/standard-sql/operators#in_operators
3992        if self.parse_keyword(Keyword::UNNEST) {
3993            self.expect_token(&Token::LParen)?;
3994            let array_expr = self.parse_expr()?;
3995            self.expect_token(&Token::RParen)?;
3996            return Ok(Expr::InUnnest {
3997                expr: Box::new(expr),
3998                array_expr: Box::new(array_expr),
3999                negated,
4000            });
4001        }
4002        self.expect_token(&Token::LParen)?;
4003        let in_op = match self.maybe_parse(|p| p.parse_query())? {
4004            Some(subquery) => Expr::InSubquery {
4005                expr: Box::new(expr),
4006                subquery,
4007                negated,
4008            },
4009            None => Expr::InList {
4010                expr: Box::new(expr),
4011                list: if self.dialect.supports_in_empty_list() {
4012                    self.parse_comma_separated0(Parser::parse_expr, Token::RParen)?
4013                } else {
4014                    self.parse_comma_separated(Parser::parse_expr)?
4015                },
4016                negated,
4017            },
4018        };
4019        self.expect_token(&Token::RParen)?;
4020        Ok(in_op)
4021    }
4022
4023    /// Parses `BETWEEN <low> AND <high>`, assuming the `BETWEEN` keyword was already consumed.
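    ///
    /// A minimal usage sketch with a [`GenericDialect`], starting right after `BETWEEN`
    /// has been consumed (illustrative only; the identifier stands in for the
    /// already-parsed left-hand side):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::ast::{Expr, Ident};
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("1 AND 5").unwrap();
    /// let expr = parser.parse_between(Expr::Identifier(Ident::new("x")), false).unwrap();
    /// assert!(matches!(expr, Expr::Between { negated: false, .. }));
    /// ```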
4024    pub fn parse_between(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
4025        // Stop parsing subexpressions for <low> and <high> on tokens with
4026        // precedence lower than that of `BETWEEN`, such as `AND`, `IS`, etc.
4027        let low = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
4028        self.expect_keyword_is(Keyword::AND)?;
4029        let high = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
4030        Ok(Expr::Between {
4031            expr: Box::new(expr),
4032            negated,
4033            low: Box::new(low),
4034            high: Box::new(high),
4035        })
4036    }
4037
4038    /// Parse a PostgreSQL casting style which is in the form of `expr::datatype`.
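    ///
    /// A minimal usage sketch with a [`GenericDialect`], starting right after the `::`
    /// token has been consumed (illustrative only):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::ast::{Expr, Ident};
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("INT").unwrap();
    /// let expr = parser.parse_pg_cast(Expr::Identifier(Ident::new("x"))).unwrap();
    /// assert!(matches!(expr, Expr::Cast { .. }));
    /// ```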
4039    pub fn parse_pg_cast(&mut self, expr: Expr) -> Result<Expr, ParserError> {
4040        Ok(Expr::Cast {
4041            kind: CastKind::DoubleColon,
4042            expr: Box::new(expr),
4043            data_type: self.parse_data_type()?,
4044            format: None,
4045        })
4046    }
4047
4048    /// Get the precedence of the next token
4049    pub fn get_next_precedence(&self) -> Result<u8, ParserError> {
4050        self.dialect.get_next_precedence_default(self)
4051    }
4052
4053    /// Return the token at the given index, or EOF if the index is beyond
4054    /// the length of the current set of tokens.
4055    pub fn token_at(&self, index: usize) -> &TokenWithSpan {
4056        self.tokens.get(index).unwrap_or(&EOF_TOKEN)
4057    }
4058
4059    /// Return the first non-whitespace token that has not yet been processed
4060    /// or Token::EOF
4061    ///
4062    /// See [`Self::peek_token_ref`] to avoid the copy.
4063    pub fn peek_token(&self) -> TokenWithSpan {
4064        self.peek_nth_token(0)
4065    }
4066
4067    /// Return a reference to the first non-whitespace token that has not yet
4068    /// been processed or Token::EOF
4069    pub fn peek_token_ref(&self) -> &TokenWithSpan {
4070        self.peek_nth_token_ref(0)
4071    }
4072
4073    /// Returns the `N` next non-whitespace tokens that have not yet been
4074    /// processed.
4075    ///
4076    /// Example:
4077    /// ```rust
4078    /// # use sqlparser::dialect::GenericDialect;
4079    /// # use sqlparser::parser::Parser;
4080    /// # use sqlparser::keywords::Keyword;
4081    /// # use sqlparser::tokenizer::{Token, Word};
4082    /// let dialect = GenericDialect {};
4083    /// let mut parser = Parser::new(&dialect).try_with_sql("ORDER BY foo, bar").unwrap();
4084    ///
4085    /// // Note that Rust infers the number of tokens to peek based on the
4086    /// // length of the slice pattern!
4087    /// assert!(matches!(
4088    ///     parser.peek_tokens(),
4089    ///     [
4090    ///         Token::Word(Word { keyword: Keyword::ORDER, .. }),
4091    ///         Token::Word(Word { keyword: Keyword::BY, .. }),
4092    ///     ]
4093    /// ));
4094    /// ```
4095    pub fn peek_tokens<const N: usize>(&self) -> [Token; N] {
4096        self.peek_tokens_with_location()
4097            .map(|with_loc| with_loc.token)
4098    }
4099
4100    /// Returns the `N` next non-whitespace tokens with locations that have not
4101    /// yet been processed.
4102    ///
4103    /// See [`Self::peek_tokens`] for an example.
4104    pub fn peek_tokens_with_location<const N: usize>(&self) -> [TokenWithSpan; N] {
4105        let mut index = self.index;
4106        core::array::from_fn(|_| loop {
4107            let token = self.tokens.get(index);
4108            index += 1;
4109            if let Some(TokenWithSpan {
4110                token: Token::Whitespace(_),
4111                span: _,
4112            }) = token
4113            {
4114                continue;
4115            }
4116            break token.cloned().unwrap_or(TokenWithSpan {
4117                token: Token::EOF,
4118                span: Span::empty(),
4119            });
4120        })
4121    }
4122
4123    /// Returns references to the `N` next non-whitespace tokens
4124    /// that have not yet been processed.
4125    ///
4126    /// See [`Self::peek_tokens`] for an example.
4127    pub fn peek_tokens_ref<const N: usize>(&self) -> [&TokenWithSpan; N] {
4128        let mut index = self.index;
4129        core::array::from_fn(|_| loop {
4130            let token = self.tokens.get(index);
4131            index += 1;
4132            if let Some(TokenWithSpan {
4133                token: Token::Whitespace(_),
4134                span: _,
4135            }) = token
4136            {
4137                continue;
4138            }
4139            break token.unwrap_or(&EOF_TOKEN);
4140        })
4141    }
4142
4143    /// Return the nth non-whitespace token that has not yet been processed
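    ///
    /// A minimal usage sketch with a [`GenericDialect`] (illustrative only):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::keywords::Keyword;
    /// # use sqlparser::tokenizer::{Token, Word};
    /// let dialect = GenericDialect {};
    /// let parser = Parser::new(&dialect).try_with_sql("ORDER BY foo").unwrap();
    /// // `n` is zero-based: 0 peeks `ORDER`, 1 peeks `BY`
    /// assert!(matches!(
    ///     parser.peek_nth_token(1).token,
    ///     Token::Word(Word { keyword: Keyword::BY, .. })
    /// ));
    /// ```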
4144    pub fn peek_nth_token(&self, n: usize) -> TokenWithSpan {
4145        self.peek_nth_token_ref(n).clone()
4146    }
4147
4148    /// Return the nth non-whitespace token that has not yet been processed
4149    pub fn peek_nth_token_ref(&self, mut n: usize) -> &TokenWithSpan {
4150        let mut index = self.index;
4151        loop {
4152            index += 1;
4153            match self.tokens.get(index - 1) {
4154                Some(TokenWithSpan {
4155                    token: Token::Whitespace(_),
4156                    span: _,
4157                }) => continue,
4158                non_whitespace => {
4159                    if n == 0 {
4160                        return non_whitespace.unwrap_or(&EOF_TOKEN);
4161                    }
4162                    n -= 1;
4163                }
4164            }
4165        }
4166    }
4167
4168    /// Return the first token, possibly whitespace, that has not yet been processed
4169    /// (or an EOF token if the end of the input has been reached).
4170    pub fn peek_token_no_skip(&self) -> TokenWithSpan {
4171        self.peek_nth_token_no_skip(0)
4172    }
4173
4174    /// Return nth token, possibly whitespace, that has not yet been processed.
4175    pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithSpan {
4176        self.tokens
4177            .get(self.index + n)
4178            .cloned()
4179            .unwrap_or(TokenWithSpan {
4180                token: Token::EOF,
4181                span: Span::empty(),
4182            })
4183    }
4184
4185    /// Return true if the next tokens exactly match `expected`
4186    ///
4187    /// Does not advance the current token.
4188    fn peek_keywords(&mut self, expected: &[Keyword]) -> bool {
4189        let index = self.index;
4190        let matched = self.parse_keywords(expected);
4191        self.index = index;
4192        matched
4193    }
4194
4195    /// Advances to the next non-whitespace token and returns a copy.
4196    ///
4197    /// Please use [`Self::advance_token`] and [`Self::get_current_token`] to
4198    /// avoid the copy.
4199    pub fn next_token(&mut self) -> TokenWithSpan {
4200        self.advance_token();
4201        self.get_current_token().clone()
4202    }
4203
4204    /// Returns the index of the current token
4205    ///
4206    /// This can be used with APIs that expect an index, such as
4207    /// [`Self::token_at`]
4208    pub fn get_current_index(&self) -> usize {
4209        self.index.saturating_sub(1)
4210    }
4211
4212    /// Return the next unprocessed token, possibly whitespace.
4213    pub fn next_token_no_skip(&mut self) -> Option<&TokenWithSpan> {
4214        self.index += 1;
4215        self.tokens.get(self.index - 1)
4216    }
4217
4218    /// Advances the current token to the next non-whitespace token
4219    ///
4220    /// See [`Self::get_current_token`] to get the current token after advancing
4221    pub fn advance_token(&mut self) {
4222        loop {
4223            self.index += 1;
4224            match self.tokens.get(self.index - 1) {
4225                Some(TokenWithSpan {
4226                    token: Token::Whitespace(_),
4227                    span: _,
4228                }) => continue,
4229                _ => break,
4230            }
4231        }
4232    }
4233
4234    /// Returns a reference to the current token
4235    ///
4236    /// Does not advance the current token.
4237    pub fn get_current_token(&self) -> &TokenWithSpan {
4238        self.token_at(self.index.saturating_sub(1))
4239    }
4240
4241    /// Returns a reference to the previous token
4242    ///
4243    /// Does not advance the current token.
4244    pub fn get_previous_token(&self) -> &TokenWithSpan {
4245        self.token_at(self.index.saturating_sub(2))
4246    }
4247
4248    /// Returns a reference to the next token
4249    ///
4250    /// Does not advance the current token.
4251    pub fn get_next_token(&self) -> &TokenWithSpan {
4252        self.token_at(self.index)
4253    }
4254
4255    /// Seek back to the last non-whitespace token.
4256    ///
4257    /// Must be called after `next_token()`, otherwise it might panic. It is OK to call
4258    /// it after `next_token()` has indicated an EOF.
4259    ///
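    /// A short sketch of the rewind behaviour (illustrative only):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("SELECT 1").unwrap();
    /// let first = parser.next_token();
    /// parser.prev_token();
    /// // after seeking back, the same token is returned again
    /// assert_eq!(parser.next_token().token, first.token);
    /// ```
    ///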
4260    // TODO rename to backup_token and deprecate prev_token?
4261    pub fn prev_token(&mut self) {
4262        loop {
4263            assert!(self.index > 0);
4264            self.index -= 1;
4265            if let Some(TokenWithSpan {
4266                token: Token::Whitespace(_),
4267                span: _,
4268            }) = self.tokens.get(self.index)
4269            {
4270                continue;
4271            }
4272            return;
4273        }
4274    }
4275
4276    /// Report `found` was encountered instead of `expected`
4277    pub fn expected<T>(&self, expected: &str, found: TokenWithSpan) -> Result<T, ParserError> {
4278        parser_err!(
4279            format!("Expected: {expected}, found: {found}"),
4280            found.span.start
4281        )
4282    }
4283
4284    /// Report `found` was encountered instead of `expected`
4285    pub fn expected_ref<T>(&self, expected: &str, found: &TokenWithSpan) -> Result<T, ParserError> {
4286        parser_err!(
4287            format!("Expected: {expected}, found: {found}"),
4288            found.span.start
4289        )
4290    }
4291
4292    /// Report that the token at `index` was found instead of `expected`.
4293    pub fn expected_at<T>(&self, expected: &str, index: usize) -> Result<T, ParserError> {
4294        let found = self.tokens.get(index).unwrap_or(&EOF_TOKEN);
4295        parser_err!(
4296            format!("Expected: {expected}, found: {found}"),
4297            found.span.start
4298        )
4299    }
4300
4301    /// If the current token is the `expected` keyword, consume it and return
4302    /// `true`. Otherwise, no tokens are consumed and `false` is returned.
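    ///
    /// A minimal usage sketch with a [`GenericDialect`] (illustrative only):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::keywords::Keyword;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("SELECT 1").unwrap();
    /// assert!(parser.parse_keyword(Keyword::SELECT));
    /// // the keyword has been consumed, so a second attempt fails
    /// assert!(!parser.parse_keyword(Keyword::SELECT));
    /// ```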
4303    #[must_use]
4304    pub fn parse_keyword(&mut self, expected: Keyword) -> bool {
4305        if self.peek_keyword(expected) {
4306            self.advance_token();
4307            true
4308        } else {
4309            false
4310        }
4311    }
4312
4313    #[must_use]
4314    pub fn peek_keyword(&self, expected: Keyword) -> bool {
4315        matches!(&self.peek_token_ref().token, Token::Word(w) if expected == w.keyword)
4316    }
4317
4318    /// If the current token is the `expected` keyword followed by the
4319    /// specified tokens, consume them and return `true`.
4320    /// Otherwise, no tokens are consumed and `false` is returned.
4321    ///
4322    /// Note that if `tokens` is long, this function is not efficient, as it
4323    /// loops over the tokens and peeks each one with `peek_nth_token_ref`
4324    /// on every call.
4325    pub fn parse_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
4326        self.keyword_with_tokens(expected, tokens, true)
4327    }
4328
4329    /// Peeks to see if the current token is the `expected` keyword followed by specified tokens
4330    /// without consuming them.
4331    ///
4332    /// See [Self::parse_keyword_with_tokens] for details.
4333    pub(crate) fn peek_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
4334        self.keyword_with_tokens(expected, tokens, false)
4335    }
4336
4337    fn keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token], consume: bool) -> bool {
4338        match &self.peek_token_ref().token {
4339            Token::Word(w) if expected == w.keyword => {
4340                for (idx, token) in tokens.iter().enumerate() {
4341                    if self.peek_nth_token_ref(idx + 1).token != *token {
4342                        return false;
4343                    }
4344                }
4345
4346                if consume {
4347                    for _ in 0..(tokens.len() + 1) {
4348                        self.advance_token();
4349                    }
4350                }
4351
4352                true
4353            }
4354            _ => false,
4355        }
4356    }
4357
4358    /// If the current and subsequent tokens exactly match the `keywords`
4359    /// sequence, consume them and return `true`. Otherwise, no tokens are
4360    /// consumed and `false` is returned.
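    ///
    /// A minimal usage sketch with a [`GenericDialect`] (illustrative only):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::keywords::Keyword;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("ORDER BY x").unwrap();
    /// // the whole sequence must match, otherwise nothing is consumed
    /// assert!(!parser.parse_keywords(&[Keyword::ORDER, Keyword::LIMIT]));
    /// assert!(parser.parse_keywords(&[Keyword::ORDER, Keyword::BY]));
    /// ```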
4361    #[must_use]
4362    pub fn parse_keywords(&mut self, keywords: &[Keyword]) -> bool {
4363        let index = self.index;
4364        for &keyword in keywords {
4365            if !self.parse_keyword(keyword) {
4366                // println!("parse_keywords aborting .. did not find {:?}", keyword);
4367                // reset index and return immediately
4368                self.index = index;
4369                return false;
4370            }
4371        }
4372        true
4373    }
4374
4375    /// If the current token is one of the given `keywords`, returns the keyword
4376    /// that matches, without consuming the token. Otherwise, returns [`None`].
4377    #[must_use]
4378    pub fn peek_one_of_keywords(&self, keywords: &[Keyword]) -> Option<Keyword> {
4379        for keyword in keywords {
4380            if self.peek_keyword(*keyword) {
4381                return Some(*keyword);
4382            }
4383        }
4384        None
4385    }
4386
4387    /// If the current token is one of the given `keywords`, consume the token
4388    /// and return the keyword that matches. Otherwise, no tokens are consumed
4389    /// and returns [`None`].
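    ///
    /// A minimal usage sketch with a [`GenericDialect`] (illustrative only):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::keywords::Keyword;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("TEMP TABLE t").unwrap();
    /// assert_eq!(
    ///     parser.parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY]),
    ///     Some(Keyword::TEMP)
    /// );
    /// // the next token (`TABLE`) matches none of these, so nothing is consumed
    /// assert_eq!(parser.parse_one_of_keywords(&[Keyword::VIEW]), None);
    /// ```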
4390    #[must_use]
4391    pub fn parse_one_of_keywords(&mut self, keywords: &[Keyword]) -> Option<Keyword> {
4392        match &self.peek_token_ref().token {
4393            Token::Word(w) => {
4394                keywords
4395                    .iter()
4396                    .find(|keyword| **keyword == w.keyword)
4397                    .map(|keyword| {
4398                        self.advance_token();
4399                        *keyword
4400                    })
4401            }
4402            _ => None,
4403        }
4404    }
4405
4406    /// If the current token is one of the expected keywords, consume the token
4407    /// and return the keyword that matches. Otherwise, return an error.
4408    pub fn expect_one_of_keywords(&mut self, keywords: &[Keyword]) -> Result<Keyword, ParserError> {
4409        if let Some(keyword) = self.parse_one_of_keywords(keywords) {
4410            Ok(keyword)
4411        } else {
4412            let keywords: Vec<String> = keywords.iter().map(|x| format!("{x:?}")).collect();
4413            self.expected_ref(
4414                &format!("one of {}", keywords.join(" or ")),
4415                self.peek_token_ref(),
4416            )
4417        }
4418    }
4419
4420    /// If the current token is the `expected` keyword, consume the token.
4421    /// Otherwise, return an error.
4422    ///
4423    // todo deprecate in favor of expected_keyword_is
4424    pub fn expect_keyword(&mut self, expected: Keyword) -> Result<TokenWithSpan, ParserError> {
4425        if self.parse_keyword(expected) {
4426            Ok(self.get_current_token().clone())
4427        } else {
4428            self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4429        }
4430    }
4431
4432    /// If the current token is the `expected` keyword, consume the token.
4433    /// Otherwise, return an error.
4434    ///
4435    /// This differs from expect_keyword only in that the matched keyword
4436    /// token is not returned.
4437    pub fn expect_keyword_is(&mut self, expected: Keyword) -> Result<(), ParserError> {
4438        if self.parse_keyword(expected) {
4439            Ok(())
4440        } else {
4441            self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4442        }
4443    }
4444
4445    /// If the current and subsequent tokens exactly match the `keywords`
4446    /// sequence, consume them and return `Ok`. Otherwise, return an error.
4447    pub fn expect_keywords(&mut self, expected: &[Keyword]) -> Result<(), ParserError> {
4448        for &kw in expected {
4449            self.expect_keyword_is(kw)?;
4450        }
4451        Ok(())
4452    }
4453
4454    /// Consume the next token if it matches the expected token, otherwise return false
4455    ///
4456    /// See [Self::advance_token] to consume the token unconditionally
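    ///
    /// A minimal usage sketch with a [`GenericDialect`] (illustrative only):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::tokenizer::Token;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("(1)").unwrap();
    /// assert!(parser.consume_token(&Token::LParen));
    /// // a non-matching token is left in place
    /// assert!(!parser.consume_token(&Token::Comma));
    /// ```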
4457    #[must_use]
4458    pub fn consume_token(&mut self, expected: &Token) -> bool {
4459        if self.peek_token_ref() == expected {
4460            self.advance_token();
4461            true
4462        } else {
4463            false
4464        }
4465    }
4466
4467    /// If the current and subsequent tokens exactly match the `tokens`
4468    /// sequence, consume them and return `true`. Otherwise, no tokens are
4469    /// consumed and `false` is returned.
4470    #[must_use]
4471    pub fn consume_tokens(&mut self, tokens: &[Token]) -> bool {
4472        let index = self.index;
4473        for token in tokens {
4474            if !self.consume_token(token) {
4475                self.index = index;
4476                return false;
4477            }
4478        }
4479        true
4480    }
4481
4482    /// Bail out if the current token is not the expected token, or consume it if it is
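    ///
    /// A minimal usage sketch with a [`GenericDialect`] (illustrative only):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::tokenizer::Token;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("( 1").unwrap();
    /// assert!(parser.expect_token(&Token::LParen).is_ok());
    /// assert!(parser.expect_token(&Token::RParen).is_err());
    /// ```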
4483    pub fn expect_token(&mut self, expected: &Token) -> Result<TokenWithSpan, ParserError> {
4484        if self.peek_token_ref() == expected {
4485            Ok(self.next_token())
4486        } else {
4487            self.expected_ref(&expected.to_string(), self.peek_token_ref())
4488        }
4489    }
4490
4491    fn parse<T: FromStr>(s: String, loc: Location) -> Result<T, ParserError>
4492    where
4493        <T as FromStr>::Err: Display,
4494    {
4495        s.parse::<T>().map_err(|e| {
4496            ParserError::ParserError(format!(
4497                "Could not parse '{s}' as {}: {e}{loc}",
4498                core::any::type_name::<T>()
4499            ))
4500        })
4501    }
4502
4503    /// Parse a comma-separated list of 1+ SelectItem
4504    pub fn parse_projection(&mut self) -> Result<Vec<SelectItem>, ParserError> {
4505        // BigQuery and Snowflake allow trailing commas, but only in projection lists
4506        // e.g. `SELECT 1, 2, FROM t`
4507        // https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#trailing_commas
4508        // https://docs.snowflake.com/en/release-notes/2024/8_11#select-supports-trailing-commas
4509
4510        let trailing_commas =
4511            self.options.trailing_commas | self.dialect.supports_projection_trailing_commas();
4512
4513        self.parse_comma_separated_with_trailing_commas(
4514            |p| p.parse_select_item(),
4515            trailing_commas,
4516            Self::is_reserved_for_column_alias,
4517        )
4518    }
4519
4520    pub fn parse_actions_list(&mut self) -> Result<Vec<Action>, ParserError> {
4521        let mut values = vec![];
4522        loop {
4523            values.push(self.parse_grant_permission()?);
4524            if !self.consume_token(&Token::Comma) {
4525                break;
4526            } else if self.options.trailing_commas {
4527                match self.peek_token().token {
4528                    Token::Word(kw) if kw.keyword == Keyword::ON => {
4529                        break;
4530                    }
4531                    Token::RParen
4532                    | Token::SemiColon
4533                    | Token::EOF
4534                    | Token::RBracket
4535                    | Token::RBrace => break,
4536                    _ => continue,
4537                }
4538            }
4539        }
4540        Ok(values)
4541    }
4542
4543    /// Parse a list of [TableWithJoins]
4544    fn parse_table_with_joins(&mut self) -> Result<Vec<TableWithJoins>, ParserError> {
4545        let trailing_commas = self.dialect.supports_from_trailing_commas();
4546
4547        self.parse_comma_separated_with_trailing_commas(
4548            Parser::parse_table_and_joins,
4549            trailing_commas,
4550            |kw, parser| !parser.dialect.is_table_factor(kw, parser),
4551        )
4552    }
4553
4554    /// Parse the comma of a comma-separated syntax element.
4555    /// `R` is a predicate that should return true if the next
4556    /// keyword is a reserved keyword.
4557    /// Allows for control over trailing commas.
4558    ///
4559    /// Returns true if the end of the list has been reached, i.e. there is no further element.
4560    fn is_parse_comma_separated_end_with_trailing_commas<R>(
4561        &mut self,
4562        trailing_commas: bool,
4563        is_reserved_keyword: &R,
4564    ) -> bool
4565    where
4566        R: Fn(&Keyword, &mut Parser) -> bool,
4567    {
4568        if !self.consume_token(&Token::Comma) {
4569            true
4570        } else if trailing_commas {
4571            let token = self.next_token().token;
4572            let is_end = match token {
4573                Token::Word(ref kw) if is_reserved_keyword(&kw.keyword, self) => true,
4574                Token::RParen | Token::SemiColon | Token::EOF | Token::RBracket | Token::RBrace => {
4575                    true
4576                }
4577                _ => false,
4578            };
4579            self.prev_token();
4580
4581            is_end
4582        } else {
4583            false
4584        }
4585    }
4586
4587    /// Parse the comma of a comma-separated syntax element.
4588    /// Returns true if the end of the list has been reached, i.e. there is no further element.
4589    fn is_parse_comma_separated_end(&mut self) -> bool {
4590        self.is_parse_comma_separated_end_with_trailing_commas(
4591            self.options.trailing_commas,
4592            &Self::is_reserved_for_column_alias,
4593        )
4594    }
4595
4596    /// Parse a comma-separated list of 1+ items accepted by `F`
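    ///
    /// A minimal usage sketch with a [`GenericDialect`] (illustrative only):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("a, b, c").unwrap();
    /// let idents = parser.parse_comma_separated(Parser::parse_identifier).unwrap();
    /// assert_eq!(idents.len(), 3);
    /// ```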
4597    pub fn parse_comma_separated<T, F>(&mut self, f: F) -> Result<Vec<T>, ParserError>
4598    where
4599        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4600    {
4601        self.parse_comma_separated_with_trailing_commas(
4602            f,
4603            self.options.trailing_commas,
4604            Self::is_reserved_for_column_alias,
4605        )
4606    }
4607
4608    /// Parse a comma-separated list of 1+ items accepted by `F`.
4609    /// `R` is a predicate that should return true if the next
4610    /// keyword is a reserved keyword.
4611    /// Allows for control over trailing commas.
4612    fn parse_comma_separated_with_trailing_commas<T, F, R>(
4613        &mut self,
4614        mut f: F,
4615        trailing_commas: bool,
4616        is_reserved_keyword: R,
4617    ) -> Result<Vec<T>, ParserError>
4618    where
4619        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4620        R: Fn(&Keyword, &mut Parser) -> bool,
4621    {
4622        let mut values = vec![];
4623        loop {
4624            values.push(f(self)?);
4625            if self.is_parse_comma_separated_end_with_trailing_commas(
4626                trailing_commas,
4627                &is_reserved_keyword,
4628            ) {
4629                break;
4630            }
4631        }
4632        Ok(values)
4633    }
4634
4635    /// Parse a period-separated list of 1+ items accepted by `F`
4636    fn parse_period_separated<T, F>(&mut self, mut f: F) -> Result<Vec<T>, ParserError>
4637    where
4638        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4639    {
4640        let mut values = vec![];
4641        loop {
4642            values.push(f(self)?);
4643            if !self.consume_token(&Token::Period) {
4644                break;
4645            }
4646        }
4647        Ok(values)
4648    }
4649
4650    /// Parse a keyword-separated list of 1+ items accepted by `F`
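    ///
    /// A minimal usage sketch with a [`GenericDialect`] (illustrative only):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::keywords::Keyword;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("a AND b AND c").unwrap();
    /// let idents = parser
    ///     .parse_keyword_separated(Keyword::AND, Parser::parse_identifier)
    ///     .unwrap();
    /// assert_eq!(idents.len(), 3);
    /// ```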
4651    pub fn parse_keyword_separated<T, F>(
4652        &mut self,
4653        keyword: Keyword,
4654        mut f: F,
4655    ) -> Result<Vec<T>, ParserError>
4656    where
4657        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4658    {
4659        let mut values = vec![];
4660        loop {
4661            values.push(f(self)?);
4662            if !self.parse_keyword(keyword) {
4663                break;
4664            }
4665        }
4666        Ok(values)
4667    }
4668
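    /// Parse a single item accepted by `F`, wrapped in parentheses.
    ///
    /// A minimal usage sketch with a [`GenericDialect`] (illustrative only):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("(42)").unwrap();
    /// let _expr = parser.parse_parenthesized(Parser::parse_expr).unwrap();
    /// ```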
4669    pub fn parse_parenthesized<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
4670    where
4671        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4672    {
4673        self.expect_token(&Token::LParen)?;
4674        let res = f(self)?;
4675        self.expect_token(&Token::RParen)?;
4676        Ok(res)
4677    }
4678
4679    /// Parse a comma-separated list of 0+ items accepted by `F`
4680    /// * `end_token` - expected end token for the closure (e.g. [Token::RParen], [Token::RBrace] ...)
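    ///
    /// A minimal usage sketch with a [`GenericDialect`] (illustrative only):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::tokenizer::Token;
    /// let dialect = GenericDialect {};
    /// // the list may be empty, e.g. the argument list in `f()`
    /// let mut parser = Parser::new(&dialect).try_with_sql(")").unwrap();
    /// let items = parser.parse_comma_separated0(Parser::parse_expr, Token::RParen).unwrap();
    /// assert!(items.is_empty());
    /// ```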
4681    pub fn parse_comma_separated0<T, F>(
4682        &mut self,
4683        f: F,
4684        end_token: Token,
4685    ) -> Result<Vec<T>, ParserError>
4686    where
4687        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4688    {
4689        if self.peek_token().token == end_token {
4690            return Ok(vec![]);
4691        }
4692
4693        if self.options.trailing_commas && self.peek_tokens() == [Token::Comma, end_token] {
4694            let _ = self.consume_token(&Token::Comma);
4695            return Ok(vec![]);
4696        }
4697
4698        self.parse_comma_separated(f)
4699    }
4700
4701    /// Parses 0 or more statements, each followed by a semicolon.
4702    /// If the next token is any of `terminal_keywords` then no more
4703    /// statements will be parsed.
4704    pub(crate) fn parse_statement_list(
4705        &mut self,
4706        terminal_keywords: &[Keyword],
4707    ) -> Result<Vec<Statement>, ParserError> {
4708        let mut values = vec![];
4709        loop {
4710            match &self.peek_nth_token_ref(0).token {
4711                Token::EOF => break,
4712                Token::Word(w) => {
4713                    if w.quote_style.is_none() && terminal_keywords.contains(&w.keyword) {
4714                        break;
4715                    }
4716                }
4717                _ => {}
4718            }
4719
4720            values.push(self.parse_statement()?);
4721            self.expect_token(&Token::SemiColon)?;
4722        }
4723        Ok(values)
4724    }
4725
4726    /// Default implementation of a predicate that returns true if
4727    /// the specified keyword is reserved and cannot be used as a column alias.
4728    /// See [Dialect::is_column_alias]
4729    fn is_reserved_for_column_alias(kw: &Keyword, parser: &mut Parser) -> bool {
4730        !parser.dialect.is_column_alias(kw, parser)
4731    }
4732
4733    /// Run a parser method `f`, reverting to the starting position if unsuccessful.
4734    /// Returns `ParserError::RecursionLimitExceeded` if `f` returns a `RecursionLimitExceeded`.
4735    /// Returns `Ok(None)` if `f` returns any other error.
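    ///
    /// A minimal usage sketch with a [`GenericDialect`] (illustrative only):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("42").unwrap();
    /// // `42` is not a string literal, so `Ok(None)` is returned and the position is restored
    /// assert!(parser.maybe_parse(|p| p.parse_literal_string()).unwrap().is_none());
    /// // the same tokens can still be parsed as an expression afterwards
    /// assert!(parser.parse_expr().is_ok());
    /// ```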
4736    pub fn maybe_parse<T, F>(&mut self, f: F) -> Result<Option<T>, ParserError>
4737    where
4738        F: FnMut(&mut Parser) -> Result<T, ParserError>,
4739    {
4740        match self.try_parse(f) {
4741            Ok(t) => Ok(Some(t)),
4742            Err(ParserError::RecursionLimitExceeded) => Err(ParserError::RecursionLimitExceeded),
4743            _ => Ok(None),
4744        }
4745    }
4746
4747    /// Run a parser method `f`, reverting to the starting position if unsuccessful.
4748    pub fn try_parse<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
4749    where
4750        F: FnMut(&mut Parser) -> Result<T, ParserError>,
4751    {
4752        let index = self.index;
4753        match f(self) {
4754            Ok(t) => Ok(t),
4755            Err(e) => {
4756                // Restore the parser position before propagating the error
4757                self.index = index;
4758                Err(e)
4759            }
4760        }
4761    }
4762
4763    /// Parse either `ALL`, `DISTINCT` or `DISTINCT ON (...)`. Returns [`None`] if `ALL` is parsed
4764    /// (or if neither keyword is present) and results in a [`ParserError`] if both `ALL` and `DISTINCT` are found.
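    ///
    /// A minimal usage sketch with a [`GenericDialect`] (illustrative only):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::ast::Distinct;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("DISTINCT ON (a) col").unwrap();
    /// assert!(matches!(
    ///     parser.parse_all_or_distinct().unwrap(),
    ///     Some(Distinct::On(_))
    /// ));
    /// ```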
4765    pub fn parse_all_or_distinct(&mut self) -> Result<Option<Distinct>, ParserError> {
4766        let loc = self.peek_token().span.start;
4767        let all = self.parse_keyword(Keyword::ALL);
4768        let distinct = self.parse_keyword(Keyword::DISTINCT);
4769        if !distinct {
4770            return Ok(None);
4771        }
4772        if all {
4773            return parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc);
4774        }
4775        let on = self.parse_keyword(Keyword::ON);
4776        if !on {
4777            return Ok(Some(Distinct::Distinct));
4778        }
4779
4780        self.expect_token(&Token::LParen)?;
4781        let col_names = if self.consume_token(&Token::RParen) {
4782            self.prev_token();
4783            Vec::new()
4784        } else {
4785            self.parse_comma_separated(Parser::parse_expr)?
4786        };
4787        self.expect_token(&Token::RParen)?;
4788        Ok(Some(Distinct::On(col_names)))
4789    }
4790
4791    /// Parse a SQL CREATE statement
4792    pub fn parse_create(&mut self) -> Result<Statement, ParserError> {
4793        let or_replace = self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]);
4794        let or_alter = self.parse_keywords(&[Keyword::OR, Keyword::ALTER]);
4795        let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
4796        let global = self.parse_one_of_keywords(&[Keyword::GLOBAL]).is_some();
4797        let transient = self.parse_one_of_keywords(&[Keyword::TRANSIENT]).is_some();
4798        let global: Option<bool> = if global {
4799            Some(true)
4800        } else if local {
4801            Some(false)
4802        } else {
4803            None
4804        };
4805        let temporary = self
4806            .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
4807            .is_some();
4808        let persistent = dialect_of!(self is DuckDbDialect)
4809            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
4810        let create_view_params = self.parse_create_view_params()?;
4811        if self.parse_keyword(Keyword::TABLE) {
4812            self.parse_create_table(or_replace, temporary, global, transient)
4813        } else if self.peek_keyword(Keyword::MATERIALIZED)
4814            || self.peek_keyword(Keyword::VIEW)
4815            || self.peek_keywords(&[Keyword::SECURE, Keyword::MATERIALIZED, Keyword::VIEW])
4816            || self.peek_keywords(&[Keyword::SECURE, Keyword::VIEW])
4817        {
4818            self.parse_create_view(or_alter, or_replace, temporary, create_view_params)
4819        } else if self.parse_keyword(Keyword::POLICY) {
4820            self.parse_create_policy()
4821        } else if self.parse_keyword(Keyword::EXTERNAL) {
4822            self.parse_create_external_table(or_replace)
4823        } else if self.parse_keyword(Keyword::FUNCTION) {
4824            self.parse_create_function(or_alter, or_replace, temporary)
4825        } else if self.parse_keyword(Keyword::DOMAIN) {
4826            self.parse_create_domain()
4827        } else if self.parse_keyword(Keyword::TRIGGER) {
4828            self.parse_create_trigger(temporary, or_alter, or_replace, false)
4829        } else if self.parse_keywords(&[Keyword::CONSTRAINT, Keyword::TRIGGER]) {
4830            self.parse_create_trigger(temporary, or_alter, or_replace, true)
4831        } else if self.parse_keyword(Keyword::MACRO) {
4832            self.parse_create_macro(or_replace, temporary)
4833        } else if self.parse_keyword(Keyword::SECRET) {
4834            self.parse_create_secret(or_replace, temporary, persistent)
4835        } else if self.parse_keyword(Keyword::USER) {
4836            self.parse_create_user(or_replace)
4837        } else if or_replace {
4838            self.expected(
4839                "[EXTERNAL] TABLE or [MATERIALIZED] VIEW or FUNCTION after CREATE OR REPLACE",
4840                self.peek_token(),
4841            )
4842        } else if self.parse_keyword(Keyword::EXTENSION) {
4843            self.parse_create_extension()
4844        } else if self.parse_keyword(Keyword::INDEX) {
4845            self.parse_create_index(false)
4846        } else if self.parse_keywords(&[Keyword::UNIQUE, Keyword::INDEX]) {
4847            self.parse_create_index(true)
4848        } else if self.parse_keyword(Keyword::VIRTUAL) {
4849            self.parse_create_virtual_table()
4850        } else if self.parse_keyword(Keyword::SCHEMA) {
4851            self.parse_create_schema()
4852        } else if self.parse_keyword(Keyword::DATABASE) {
4853            self.parse_create_database()
4854        } else if self.parse_keyword(Keyword::ROLE) {
4855            self.parse_create_role()
4856        } else if self.parse_keyword(Keyword::SEQUENCE) {
4857            self.parse_create_sequence(temporary)
4858        } else if self.parse_keyword(Keyword::TYPE) {
4859            self.parse_create_type()
4860        } else if self.parse_keyword(Keyword::PROCEDURE) {
4861            self.parse_create_procedure(or_alter)
4862        } else if self.parse_keyword(Keyword::CONNECTOR) {
4863            self.parse_create_connector()
4864        } else if self.parse_keyword(Keyword::OPERATOR) {
4865            // Check if this is CREATE OPERATOR FAMILY or CREATE OPERATOR CLASS
4866            if self.parse_keyword(Keyword::FAMILY) {
4867                self.parse_create_operator_family()
4868            } else if self.parse_keyword(Keyword::CLASS) {
4869                self.parse_create_operator_class()
4870            } else {
4871                self.parse_create_operator()
4872            }
4873        } else if self.parse_keyword(Keyword::SERVER) {
4874            self.parse_pg_create_server()
4875        } else {
4876            self.expected("an object type after CREATE", self.peek_token())
4877        }
4878    }
4879
4880    fn parse_create_user(&mut self, or_replace: bool) -> Result<Statement, ParserError> {
4881        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
4882        let name = self.parse_identifier()?;
4883        let options = self
4884            .parse_key_value_options(false, &[Keyword::WITH, Keyword::TAG])?
4885            .options;
4886        let with_tags = self.parse_keyword(Keyword::WITH);
4887        let tags = if self.parse_keyword(Keyword::TAG) {
4888            self.parse_key_value_options(true, &[])?.options
4889        } else {
4890            vec![]
4891        };
4892        Ok(Statement::CreateUser(CreateUser {
4893            or_replace,
4894            if_not_exists,
4895            name,
4896            options: KeyValueOptions {
4897                options,
4898                delimiter: KeyValueOptionsDelimiter::Space,
4899            },
4900            with_tags,
4901            tags: KeyValueOptions {
4902                options: tags,
4903                delimiter: KeyValueOptionsDelimiter::Comma,
4904            },
4905        }))
4906    }
4907
4908    /// See [DuckDB Docs](https://duckdb.org/docs/sql/statements/create_secret.html) for more details.
4909    pub fn parse_create_secret(
4910        &mut self,
4911        or_replace: bool,
4912        temporary: bool,
4913        persistent: bool,
4914    ) -> Result<Statement, ParserError> {
4915        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
4916
4917        let mut storage_specifier = None;
4918        let mut name = None;
4919        if self.peek_token() != Token::LParen {
4920            if self.parse_keyword(Keyword::IN) {
4921                storage_specifier = self.parse_identifier().ok()
4922            } else {
4923                name = self.parse_identifier().ok();
4924            }
4925
4926            // Storage specifier may follow the name
4927            if storage_specifier.is_none()
4928                && self.peek_token() != Token::LParen
4929                && self.parse_keyword(Keyword::IN)
4930            {
4931                storage_specifier = self.parse_identifier().ok();
4932            }
4933        }
4934
4935        self.expect_token(&Token::LParen)?;
4936        self.expect_keyword_is(Keyword::TYPE)?;
4937        let secret_type = self.parse_identifier()?;
4938
4939        let mut options = Vec::new();
4940        if self.consume_token(&Token::Comma) {
4941            options.append(&mut self.parse_comma_separated(|p| {
4942                let key = p.parse_identifier()?;
4943                let value = p.parse_identifier()?;
4944                Ok(SecretOption { key, value })
4945            })?);
4946        }
4947        self.expect_token(&Token::RParen)?;
4948
4949        let temp = match (temporary, persistent) {
4950            (true, false) => Some(true),
4951            (false, true) => Some(false),
4952            (false, false) => None,
4953            _ => self.expected("TEMPORARY or PERSISTENT", self.peek_token())?,
4954        };
4955
4956        Ok(Statement::CreateSecret {
4957            or_replace,
4958            temporary: temp,
4959            if_not_exists,
4960            name,
4961            storage_specifier,
4962            secret_type,
4963            options,
4964        })
4965    }
4966
4967    /// Parse a CACHE TABLE statement
4968    pub fn parse_cache_table(&mut self) -> Result<Statement, ParserError> {
4969        let (mut table_flag, mut options, mut has_as, mut query) = (None, vec![], false, None);
4970        if self.parse_keyword(Keyword::TABLE) {
4971            let table_name = self.parse_object_name(false)?;
4972            if self.peek_token().token != Token::EOF {
4973                if let Token::Word(word) = self.peek_token().token {
4974                    if word.keyword == Keyword::OPTIONS {
4975                        options = self.parse_options(Keyword::OPTIONS)?
4976                    }
4977                };
4978
4979                if self.peek_token().token != Token::EOF {
4980                    let (a, q) = self.parse_as_query()?;
4981                    has_as = a;
4982                    query = Some(q);
4983                }
4984
4985                Ok(Statement::Cache {
4986                    table_flag,
4987                    table_name,
4988                    has_as,
4989                    options,
4990                    query,
4991                })
4992            } else {
4993                Ok(Statement::Cache {
4994                    table_flag,
4995                    table_name,
4996                    has_as,
4997                    options,
4998                    query,
4999                })
5000            }
5001        } else {
5002            table_flag = Some(self.parse_object_name(false)?);
5003            if self.parse_keyword(Keyword::TABLE) {
5004                let table_name = self.parse_object_name(false)?;
5005                if self.peek_token() != Token::EOF {
5006                    if let Token::Word(word) = self.peek_token().token {
5007                        if word.keyword == Keyword::OPTIONS {
5008                            options = self.parse_options(Keyword::OPTIONS)?
5009                        }
5010                    };
5011
5012                    if self.peek_token() != Token::EOF {
5013                        let (a, q) = self.parse_as_query()?;
5014                        has_as = a;
5015                        query = Some(q);
5016                    }
5017
5018                    Ok(Statement::Cache {
5019                        table_flag,
5020                        table_name,
5021                        has_as,
5022                        options,
5023                        query,
5024                    })
5025                } else {
5026                    Ok(Statement::Cache {
5027                        table_flag,
5028                        table_name,
5029                        has_as,
5030                        options,
5031                        query,
5032                    })
5033                }
5034            } else {
5035                if self.peek_token() == Token::EOF {
5036                    self.prev_token();
5037                }
5038                self.expected("a `TABLE` keyword", self.peek_token())
5039            }
5040        }
5041    }
5042
5043    /// Parse an optional `AS` before a query, such as in `WITH XXX AS SELECT XXX` or `CACHE TABLE AS SELECT XXX`
5044    pub fn parse_as_query(&mut self) -> Result<(bool, Box<Query>), ParserError> {
5045        match self.peek_token().token {
5046            Token::Word(word) => match word.keyword {
5047                Keyword::AS => {
5048                    self.next_token();
5049                    Ok((true, self.parse_query()?))
5050                }
5051                _ => Ok((false, self.parse_query()?)),
5052            },
5053            _ => self.expected("a QUERY statement", self.peek_token()),
5054        }
5055    }
5056
5057    /// Parse an UNCACHE TABLE statement
5058    pub fn parse_uncache_table(&mut self) -> Result<Statement, ParserError> {
5059        self.expect_keyword_is(Keyword::TABLE)?;
5060        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5061        let table_name = self.parse_object_name(false)?;
5062        Ok(Statement::UNCache {
5063            table_name,
5064            if_exists,
5065        })
5066    }
5067
5068    /// SQLite-specific `CREATE VIRTUAL TABLE`
5069    pub fn parse_create_virtual_table(&mut self) -> Result<Statement, ParserError> {
5070        self.expect_keyword_is(Keyword::TABLE)?;
5071        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5072        let table_name = self.parse_object_name(false)?;
5073        self.expect_keyword_is(Keyword::USING)?;
5074        let module_name = self.parse_identifier()?;
5075        // SQLite docs note that module "arguments syntax is sufficiently
5076        // general that the arguments can be made to appear as column
5077        // definitions in a traditional CREATE TABLE statement", but
5078        // we don't implement that.
5079        let module_args = self.parse_parenthesized_column_list(Optional, false)?;
5080        Ok(Statement::CreateVirtualTable {
5081            name: table_name,
5082            if_not_exists,
5083            module_name,
5084            module_args,
5085        })
5086    }
5087
5088    pub fn parse_create_schema(&mut self) -> Result<Statement, ParserError> {
5089        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5090
5091        let schema_name = self.parse_schema_name()?;
5092
5093        let default_collate_spec = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
5094            Some(self.parse_expr()?)
5095        } else {
5096            None
5097        };
5098
5099        let with = if self.peek_keyword(Keyword::WITH) {
5100            Some(self.parse_options(Keyword::WITH)?)
5101        } else {
5102            None
5103        };
5104
5105        let options = if self.peek_keyword(Keyword::OPTIONS) {
5106            Some(self.parse_options(Keyword::OPTIONS)?)
5107        } else {
5108            None
5109        };
5110
5111        let clone = if self.parse_keyword(Keyword::CLONE) {
5112            Some(self.parse_object_name(false)?)
5113        } else {
5114            None
5115        };
5116
5117        Ok(Statement::CreateSchema {
5118            schema_name,
5119            if_not_exists,
5120            with,
5121            options,
5122            default_collate_spec,
5123            clone,
5124        })
5125    }
5126
5127    fn parse_schema_name(&mut self) -> Result<SchemaName, ParserError> {
5128        if self.parse_keyword(Keyword::AUTHORIZATION) {
5129            Ok(SchemaName::UnnamedAuthorization(self.parse_identifier()?))
5130        } else {
5131            let name = self.parse_object_name(false)?;
5132
5133            if self.parse_keyword(Keyword::AUTHORIZATION) {
5134                Ok(SchemaName::NamedAuthorization(
5135                    name,
5136                    self.parse_identifier()?,
5137                ))
5138            } else {
5139                Ok(SchemaName::Simple(name))
5140            }
5141        }
5142    }
5143
5144    pub fn parse_create_database(&mut self) -> Result<Statement, ParserError> {
5145        let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5146        let db_name = self.parse_object_name(false)?;
5147        let mut location = None;
5148        let mut managed_location = None;
5149        loop {
5150            match self.parse_one_of_keywords(&[Keyword::LOCATION, Keyword::MANAGEDLOCATION]) {
5151                Some(Keyword::LOCATION) => location = Some(self.parse_literal_string()?),
5152                Some(Keyword::MANAGEDLOCATION) => {
5153                    managed_location = Some(self.parse_literal_string()?)
5154                }
5155                _ => break,
5156            }
5157        }
5158        let clone = if self.parse_keyword(Keyword::CLONE) {
5159            Some(self.parse_object_name(false)?)
5160        } else {
5161            None
5162        };
5163
5164        Ok(Statement::CreateDatabase {
5165            db_name,
5166            if_not_exists: ine,
5167            location,
5168            managed_location,
5169            or_replace: false,
5170            transient: false,
5171            clone,
5172            data_retention_time_in_days: None,
5173            max_data_extension_time_in_days: None,
5174            external_volume: None,
5175            catalog: None,
5176            replace_invalid_characters: None,
5177            default_ddl_collation: None,
5178            storage_serialization_policy: None,
5179            comment: None,
5180            catalog_sync: None,
5181            catalog_sync_namespace_mode: None,
5182            catalog_sync_namespace_flatten_delimiter: None,
5183            with_tags: None,
5184            with_contacts: None,
5185        })
5186    }
5187
5188    pub fn parse_optional_create_function_using(
5189        &mut self,
5190    ) -> Result<Option<CreateFunctionUsing>, ParserError> {
5191        if !self.parse_keyword(Keyword::USING) {
5192            return Ok(None);
5193        };
5194        let keyword =
5195            self.expect_one_of_keywords(&[Keyword::JAR, Keyword::FILE, Keyword::ARCHIVE])?;
5196
5197        let uri = self.parse_literal_string()?;
5198
5199        match keyword {
5200            Keyword::JAR => Ok(Some(CreateFunctionUsing::Jar(uri))),
5201            Keyword::FILE => Ok(Some(CreateFunctionUsing::File(uri))),
5202            Keyword::ARCHIVE => Ok(Some(CreateFunctionUsing::Archive(uri))),
5203            _ => self.expected(
5204                "JAR, FILE or ARCHIVE, got {:?}",
5205                TokenWithSpan::wrap(Token::make_keyword(format!("{keyword:?}").as_str())),
5206            ),
5207        }
5208    }
5209
5210    pub fn parse_create_function(
5211        &mut self,
5212        or_alter: bool,
5213        or_replace: bool,
5214        temporary: bool,
5215    ) -> Result<Statement, ParserError> {
5216        if dialect_of!(self is HiveDialect) {
5217            self.parse_hive_create_function(or_replace, temporary)
5218        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
5219            self.parse_postgres_create_function(or_replace, temporary)
5220        } else if dialect_of!(self is DuckDbDialect) {
5221            self.parse_create_macro(or_replace, temporary)
5222        } else if dialect_of!(self is BigQueryDialect) {
5223            self.parse_bigquery_create_function(or_replace, temporary)
5224        } else if dialect_of!(self is MsSqlDialect) {
5225            self.parse_mssql_create_function(or_alter, or_replace, temporary)
5226        } else {
5227            self.prev_token();
5228            self.expected("an object type after CREATE", self.peek_token())
5229        }
5230    }
5231
5232    /// Parse `CREATE FUNCTION` for [PostgreSQL]
5233    ///
5234    /// [PostgreSQL]: https://www.postgresql.org/docs/15/sql-createfunction.html
5235    fn parse_postgres_create_function(
5236        &mut self,
5237        or_replace: bool,
5238        temporary: bool,
5239    ) -> Result<Statement, ParserError> {
5240        let name = self.parse_object_name(false)?;
5241
5242        self.expect_token(&Token::LParen)?;
5243        let args = if Token::RParen != self.peek_token_ref().token {
5244            self.parse_comma_separated(Parser::parse_function_arg)?
5245        } else {
5246            vec![]
5247        };
5248        self.expect_token(&Token::RParen)?;
5249
5250        let return_type = if self.parse_keyword(Keyword::RETURNS) {
5251            let is_setof = self.parse_keyword(Keyword::SETOF);
5252            let base_type = self.parse_data_type()?;
5253            Some(if is_setof {
5254                DataType::SetOf(Box::new(base_type))
5255            } else {
5256                base_type
5257            })
5258        } else {
5259            None
5260        };
5261
5262        #[derive(Default)]
5263        struct Body {
5264            language: Option<Ident>,
5265            behavior: Option<FunctionBehavior>,
5266            function_body: Option<CreateFunctionBody>,
5267            called_on_null: Option<FunctionCalledOnNull>,
5268            parallel: Option<FunctionParallel>,
5269            security: Option<FunctionSecurity>,
5270        }
5271        let mut body = Body::default();
5272        let mut set_params: Vec<FunctionDefinitionSetParam> = Vec::new();
5273        loop {
5274            fn ensure_not_set<T>(field: &Option<T>, name: &str) -> Result<(), ParserError> {
5275                if field.is_some() {
5276                    return Err(ParserError::ParserError(format!(
5277                        "{name} specified more than once",
5278                    )));
5279                }
5280                Ok(())
5281            }
5282            if self.parse_keyword(Keyword::AS) {
5283                ensure_not_set(&body.function_body, "AS")?;
5284                body.function_body = Some(self.parse_create_function_body_string()?);
5285            } else if self.parse_keyword(Keyword::LANGUAGE) {
5286                ensure_not_set(&body.language, "LANGUAGE")?;
5287                body.language = Some(self.parse_identifier()?);
5288            } else if self.parse_keyword(Keyword::IMMUTABLE) {
5289                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
5290                body.behavior = Some(FunctionBehavior::Immutable);
5291            } else if self.parse_keyword(Keyword::STABLE) {
5292                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
5293                body.behavior = Some(FunctionBehavior::Stable);
5294            } else if self.parse_keyword(Keyword::VOLATILE) {
5295                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
5296                body.behavior = Some(FunctionBehavior::Volatile);
5297            } else if self.parse_keywords(&[
5298                Keyword::CALLED,
5299                Keyword::ON,
5300                Keyword::NULL,
5301                Keyword::INPUT,
5302            ]) {
5303                ensure_not_set(
5304                    &body.called_on_null,
5305                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
5306                )?;
5307                body.called_on_null = Some(FunctionCalledOnNull::CalledOnNullInput);
5308            } else if self.parse_keywords(&[
5309                Keyword::RETURNS,
5310                Keyword::NULL,
5311                Keyword::ON,
5312                Keyword::NULL,
5313                Keyword::INPUT,
5314            ]) {
5315                ensure_not_set(
5316                    &body.called_on_null,
5317                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
5318                )?;
5319                body.called_on_null = Some(FunctionCalledOnNull::ReturnsNullOnNullInput);
5320            } else if self.parse_keyword(Keyword::STRICT) {
5321                ensure_not_set(
5322                    &body.called_on_null,
5323                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
5324                )?;
5325                body.called_on_null = Some(FunctionCalledOnNull::Strict);
5326            } else if self.parse_keyword(Keyword::PARALLEL) {
5327                ensure_not_set(&body.parallel, "PARALLEL { UNSAFE | RESTRICTED | SAFE }")?;
5328                if self.parse_keyword(Keyword::UNSAFE) {
5329                    body.parallel = Some(FunctionParallel::Unsafe);
5330                } else if self.parse_keyword(Keyword::RESTRICTED) {
5331                    body.parallel = Some(FunctionParallel::Restricted);
5332                } else if self.parse_keyword(Keyword::SAFE) {
5333                    body.parallel = Some(FunctionParallel::Safe);
5334                } else {
5335                    return self.expected("one of UNSAFE | RESTRICTED | SAFE", self.peek_token());
5336                }
5337            } else if self.parse_keyword(Keyword::SECURITY) {
5338                ensure_not_set(&body.security, "SECURITY { DEFINER | INVOKER }")?;
5339                if self.parse_keyword(Keyword::DEFINER) {
5340                    body.security = Some(FunctionSecurity::Definer);
5341                } else if self.parse_keyword(Keyword::INVOKER) {
5342                    body.security = Some(FunctionSecurity::Invoker);
5343                } else {
5344                    return self.expected("DEFINER or INVOKER", self.peek_token());
5345                }
5346            } else if self.parse_keyword(Keyword::SET) {
5347                let name = self.parse_identifier()?;
5348                let value = if self.parse_keywords(&[Keyword::FROM, Keyword::CURRENT]) {
5349                    FunctionSetValue::FromCurrent
5350                } else {
5351                    if !self.consume_token(&Token::Eq) && !self.parse_keyword(Keyword::TO) {
5352                        return self.expected("= or TO", self.peek_token());
5353                    }
5354                    let values = self.parse_comma_separated(Parser::parse_expr)?;
5355                    FunctionSetValue::Values(values)
5356                };
5357                set_params.push(FunctionDefinitionSetParam { name, value });
5358            } else if self.parse_keyword(Keyword::RETURN) {
5359                ensure_not_set(&body.function_body, "RETURN")?;
5360                body.function_body = Some(CreateFunctionBody::Return(self.parse_expr()?));
5361            } else {
5362                break;
5363            }
5364        }
5365
5366        Ok(Statement::CreateFunction(CreateFunction {
5367            or_alter: false,
5368            or_replace,
5369            temporary,
5370            name,
5371            args: Some(args),
5372            return_type,
5373            behavior: body.behavior,
5374            called_on_null: body.called_on_null,
5375            parallel: body.parallel,
5376            security: body.security,
5377            set_params,
5378            language: body.language,
5379            function_body: body.function_body,
5380            if_not_exists: false,
5381            using: None,
5382            determinism_specifier: None,
5383            options: None,
5384            remote_connection: None,
5385        }))
5386    }
5387
5388    /// Parse `CREATE FUNCTION` for [Hive]
5389    ///
5390    /// [Hive]: https://cwiki.apache.org/confluence/display/hive/languagemanual+ddl#LanguageManualDDL-Create/Drop/ReloadFunction
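    ///
    /// A minimal illustrative example of the form this method accepts (the
    /// function name, class name, and JAR path are placeholders):
    ///
    /// ```sql
    /// CREATE TEMPORARY FUNCTION mydb.myfunc AS 'org.example.MyUDF' USING JAR 'hdfs:///lib/myudf.jar'
    /// ```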
5391    fn parse_hive_create_function(
5392        &mut self,
5393        or_replace: bool,
5394        temporary: bool,
5395    ) -> Result<Statement, ParserError> {
5396        let name = self.parse_object_name(false)?;
5397        self.expect_keyword_is(Keyword::AS)?;
5398
5399        let body = self.parse_create_function_body_string()?;
5400        let using = self.parse_optional_create_function_using()?;
5401
5402        Ok(Statement::CreateFunction(CreateFunction {
5403            or_alter: false,
5404            or_replace,
5405            temporary,
5406            name,
5407            function_body: Some(body),
5408            using,
5409            if_not_exists: false,
5410            args: None,
5411            return_type: None,
5412            behavior: None,
5413            called_on_null: None,
5414            parallel: None,
5415            security: None,
5416            set_params: vec![],
5417            language: None,
5418            determinism_specifier: None,
5419            options: None,
5420            remote_connection: None,
5421        }))
5422    }
5423
5424    /// Parse `CREATE FUNCTION` for [BigQuery]
5425    ///
5426    /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_function_statement
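    ///
    /// A minimal illustrative example of a statement this method accepts
    /// (dataset and function names are placeholders):
    ///
    /// ```sql
    /// CREATE FUNCTION mydataset.add_one(x INT64) RETURNS INT64 AS (x + 1)
    /// ```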
5427    fn parse_bigquery_create_function(
5428        &mut self,
5429        or_replace: bool,
5430        temporary: bool,
5431    ) -> Result<Statement, ParserError> {
5432        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5433        let (name, args) = self.parse_create_function_name_and_params()?;
5434
5435        let return_type = if self.parse_keyword(Keyword::RETURNS) {
5436            Some(self.parse_data_type()?)
5437        } else {
5438            None
5439        };
5440
5441        let determinism_specifier = if self.parse_keyword(Keyword::DETERMINISTIC) {
5442            Some(FunctionDeterminismSpecifier::Deterministic)
5443        } else if self.parse_keywords(&[Keyword::NOT, Keyword::DETERMINISTIC]) {
5444            Some(FunctionDeterminismSpecifier::NotDeterministic)
5445        } else {
5446            None
5447        };
5448
5449        let language = if self.parse_keyword(Keyword::LANGUAGE) {
5450            Some(self.parse_identifier()?)
5451        } else {
5452            None
5453        };
5454
5455        let remote_connection =
5456            if self.parse_keywords(&[Keyword::REMOTE, Keyword::WITH, Keyword::CONNECTION]) {
5457                Some(self.parse_object_name(false)?)
5458            } else {
5459                None
5460            };
5461
5462        // `OPTIONS` may come before or after the function body, but
5463        // may be specified at most once.
5464        let mut options = self.maybe_parse_options(Keyword::OPTIONS)?;
5465
5466        let function_body = if remote_connection.is_none() {
5467            self.expect_keyword_is(Keyword::AS)?;
5468            let expr = self.parse_expr()?;
5469            if options.is_none() {
5470                options = self.maybe_parse_options(Keyword::OPTIONS)?;
5471                Some(CreateFunctionBody::AsBeforeOptions {
5472                    body: expr,
5473                    link_symbol: None,
5474                })
5475            } else {
5476                Some(CreateFunctionBody::AsAfterOptions(expr))
5477            }
5478        } else {
5479            None
5480        };
5481
5482        Ok(Statement::CreateFunction(CreateFunction {
5483            or_alter: false,
5484            or_replace,
5485            temporary,
5486            if_not_exists,
5487            name,
5488            args: Some(args),
5489            return_type,
5490            function_body,
5491            language,
5492            determinism_specifier,
5493            options,
5494            remote_connection,
5495            using: None,
5496            behavior: None,
5497            called_on_null: None,
5498            parallel: None,
5499            security: None,
5500            set_params: vec![],
5501        }))
5502    }
5503
5504    /// Parse `CREATE FUNCTION` for [MsSql]
5505    ///
5506    /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/statements/create-function-transact-sql
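    ///
    /// A minimal illustrative example of a scalar function this method accepts
    /// (object and parameter names are placeholders):
    ///
    /// ```sql
    /// CREATE FUNCTION dbo.add_one(@a INT) RETURNS INT AS BEGIN RETURN @a; END
    /// ```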
5507    fn parse_mssql_create_function(
5508        &mut self,
5509        or_alter: bool,
5510        or_replace: bool,
5511        temporary: bool,
5512    ) -> Result<Statement, ParserError> {
5513        let (name, args) = self.parse_create_function_name_and_params()?;
5514
5515        self.expect_keyword(Keyword::RETURNS)?;
5516
5517        let return_table = self.maybe_parse(|p| {
5518            let return_table_name = p.parse_identifier()?;
5519
5520            p.expect_keyword_is(Keyword::TABLE)?;
5521            p.prev_token();
5522
5523            let table_column_defs = match p.parse_data_type()? {
5524                DataType::Table(Some(table_column_defs)) if !table_column_defs.is_empty() => {
5525                    table_column_defs
5526                }
5527                _ => parser_err!(
5528                    "Expected table column definitions after TABLE keyword",
5529                    p.peek_token().span.start
5530                )?,
5531            };
5532
5533            Ok(DataType::NamedTable {
5534                name: ObjectName(vec![ObjectNamePart::Identifier(return_table_name)]),
5535                columns: table_column_defs,
5536            })
5537        })?;
5538
5539        let return_type = if return_table.is_some() {
5540            return_table
5541        } else {
5542            Some(self.parse_data_type()?)
5543        };
5544
5545        let _ = self.parse_keyword(Keyword::AS);
5546
5547        let function_body = if self.peek_keyword(Keyword::BEGIN) {
5548            let begin_token = self.expect_keyword(Keyword::BEGIN)?;
5549            let statements = self.parse_statement_list(&[Keyword::END])?;
5550            let end_token = self.expect_keyword(Keyword::END)?;
5551
5552            Some(CreateFunctionBody::AsBeginEnd(BeginEndStatements {
5553                begin_token: AttachedToken(begin_token),
5554                statements,
5555                end_token: AttachedToken(end_token),
5556            }))
5557        } else if self.parse_keyword(Keyword::RETURN) {
5558            if self.peek_token() == Token::LParen {
5559                Some(CreateFunctionBody::AsReturnExpr(self.parse_expr()?))
5560            } else if self.peek_keyword(Keyword::SELECT) {
5561                let select = self.parse_select()?;
5562                Some(CreateFunctionBody::AsReturnSelect(select))
5563            } else {
5564                parser_err!(
5565                    "Expected a subquery (or bare SELECT statement) after RETURN",
5566                    self.peek_token().span.start
5567                )?
5568            }
5569        } else {
5570            parser_err!("Unparsable function body", self.peek_token().span.start)?
5571        };
5572
5573        Ok(Statement::CreateFunction(CreateFunction {
5574            or_alter,
5575            or_replace,
5576            temporary,
5577            if_not_exists: false,
5578            name,
5579            args: Some(args),
5580            return_type,
5581            function_body,
5582            language: None,
5583            determinism_specifier: None,
5584            options: None,
5585            remote_connection: None,
5586            using: None,
5587            behavior: None,
5588            called_on_null: None,
5589            parallel: None,
5590            security: None,
5591            set_params: vec![],
5592        }))
5593    }
5594
5595    fn parse_create_function_name_and_params(
5596        &mut self,
5597    ) -> Result<(ObjectName, Vec<OperateFunctionArg>), ParserError> {
5598        let name = self.parse_object_name(false)?;
5599        let parse_function_param =
5600            |parser: &mut Parser| -> Result<OperateFunctionArg, ParserError> {
5601                let name = parser.parse_identifier()?;
5602                let data_type = parser.parse_data_type()?;
5603                let default_expr = if parser.consume_token(&Token::Eq) {
5604                    Some(parser.parse_expr()?)
5605                } else {
5606                    None
5607                };
5608
5609                Ok(OperateFunctionArg {
5610                    mode: None,
5611                    name: Some(name),
5612                    data_type,
5613                    default_expr,
5614                })
5615            };
5616        self.expect_token(&Token::LParen)?;
5617        let args = self.parse_comma_separated0(parse_function_param, Token::RParen)?;
5618        self.expect_token(&Token::RParen)?;
5619        Ok((name, args))
5620    }
5621
5622    fn parse_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
5623        let mode = if self.parse_keyword(Keyword::IN) {
5624            Some(ArgMode::In)
5625        } else if self.parse_keyword(Keyword::OUT) {
5626            Some(ArgMode::Out)
5627        } else if self.parse_keyword(Keyword::INOUT) {
5628            Some(ArgMode::InOut)
5629        } else {
5630            None
5631        };
5632
5633        // parse: [ argname ] argtype
5634        let mut name = None;
5635        let mut data_type = self.parse_data_type()?;
5636
5637        // To decide whether the first token is a name or a type, we peek at the
5638        // next token: if it also parses as a type, then the first token was the
5639        // argument name rather than a type (e.g. `foo INTEGER` vs. a bare `INTEGER`).
5640        let data_type_idx = self.get_current_index();
5641
5642        // DEFAULT will be parsed as `DataType::Custom`, which is undesirable in this context
5643        fn parse_data_type_no_default(parser: &mut Parser) -> Result<DataType, ParserError> {
5644            if parser.peek_keyword(Keyword::DEFAULT) {
5645                // This dummy error is ignored in `maybe_parse`
5646                parser_err!(
5647                    "The DEFAULT keyword is not a type",
5648                    parser.peek_token().span.start
5649                )
5650            } else {
5651                parser.parse_data_type()
5652            }
5653        }
5654
5655        if let Some(next_data_type) = self.maybe_parse(parse_data_type_no_default)? {
5656            let token = self.token_at(data_type_idx);
5657
5658            // We ensure that the token is a `Word` token, and not other special tokens.
5659            if !matches!(token.token, Token::Word(_)) {
5660                return self.expected("a name or type", token.clone());
5661            }
5662
5663            name = Some(Ident::new(token.to_string()));
5664            data_type = next_data_type;
5665        }
5666
5667        let default_expr = if self.parse_keyword(Keyword::DEFAULT) || self.consume_token(&Token::Eq)
5668        {
5669            Some(self.parse_expr()?)
5670        } else {
5671            None
5672        };
5673        Ok(OperateFunctionArg {
5674            mode,
5675            name,
5676            data_type,
5677            default_expr,
5678        })
5679    }
5680
5681    /// Parse statements of the DropTrigger type such as:
5682    ///
5683    /// ```sql
5684    /// DROP TRIGGER [ IF EXISTS ] name ON table_name [ CASCADE | RESTRICT ]
5685    /// ```
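    ///
    /// For example (illustrative; trigger and table names are placeholders):
    ///
    /// ```sql
    /// DROP TRIGGER IF EXISTS check_update ON accounts CASCADE
    /// ```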
5686    pub fn parse_drop_trigger(&mut self) -> Result<Statement, ParserError> {
5687        if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
5688        {
5689            self.prev_token();
5690            return self.expected("an object type after DROP", self.peek_token());
5691        }
5692        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5693        let trigger_name = self.parse_object_name(false)?;
5694        let table_name = if self.parse_keyword(Keyword::ON) {
5695            Some(self.parse_object_name(false)?)
5696        } else {
5697            None
5698        };
5699        let option = match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
5700            Some(Keyword::CASCADE) => Some(ReferentialAction::Cascade),
5701            Some(Keyword::RESTRICT) => Some(ReferentialAction::Restrict),
5702            Some(unexpected_keyword) => return Err(ParserError::ParserError(
5703                format!("Internal parser error: expected any of {{CASCADE, RESTRICT}}, got {unexpected_keyword:?}"),
5704            )),
5705            None => None,
5706        };
5707        Ok(Statement::DropTrigger(DropTrigger {
5708            if_exists,
5709            trigger_name,
5710            table_name,
5711            option,
5712        }))
5713    }
5714
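    /// Parse a `CREATE TRIGGER` statement.
    ///
    /// A minimal illustrative, PostgreSQL-style example (trigger, table, and
    /// function names are placeholders):
    ///
    /// ```sql
    /// CREATE TRIGGER check_update BEFORE UPDATE ON accounts
    /// FOR EACH ROW EXECUTE FUNCTION check_account_update()
    /// ```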
5715    pub fn parse_create_trigger(
5716        &mut self,
5717        temporary: bool,
5718        or_alter: bool,
5719        or_replace: bool,
5720        is_constraint: bool,
5721    ) -> Result<Statement, ParserError> {
5722        if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
5723        {
5724            self.prev_token();
5725            return self.expected("an object type after CREATE", self.peek_token());
5726        }
5727
5728        let name = self.parse_object_name(false)?;
5729        let period = self.maybe_parse(|parser| parser.parse_trigger_period())?;
5730
5731        let events = self.parse_keyword_separated(Keyword::OR, Parser::parse_trigger_event)?;
5732        self.expect_keyword_is(Keyword::ON)?;
5733        let table_name = self.parse_object_name(false)?;
5734
5735        let referenced_table_name = if self.parse_keyword(Keyword::FROM) {
5736            self.parse_object_name(true).ok()
5737        } else {
5738            None
5739        };
5740
5741        let characteristics = self.parse_constraint_characteristics()?;
5742
5743        let mut referencing = vec![];
5744        if self.parse_keyword(Keyword::REFERENCING) {
5745            while let Some(refer) = self.parse_trigger_referencing()? {
5746                referencing.push(refer);
5747            }
5748        }
5749
5750        let trigger_object = if self.parse_keyword(Keyword::FOR) {
5751            let include_each = self.parse_keyword(Keyword::EACH);
5752            let trigger_object =
5753                match self.expect_one_of_keywords(&[Keyword::ROW, Keyword::STATEMENT])? {
5754                    Keyword::ROW => TriggerObject::Row,
5755                    Keyword::STATEMENT => TriggerObject::Statement,
5756                    unexpected_keyword => return Err(ParserError::ParserError(
5757                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in ROW/STATEMENT"),
5758                    )),
5759                };
5760
5761            Some(if include_each {
5762                TriggerObjectKind::ForEach(trigger_object)
5763            } else {
5764                TriggerObjectKind::For(trigger_object)
5765            })
5766        } else {
5767            let _ = self.parse_keyword(Keyword::FOR);
5768
5769            None
5770        };
5771
5772        let condition = self
5773            .parse_keyword(Keyword::WHEN)
5774            .then(|| self.parse_expr())
5775            .transpose()?;
5776
5777        let mut exec_body = None;
5778        let mut statements = None;
5779        if self.parse_keyword(Keyword::EXECUTE) {
5780            exec_body = Some(self.parse_trigger_exec_body()?);
5781        } else {
5782            statements = Some(self.parse_conditional_statements(&[Keyword::END])?);
5783        }
5784
5785        Ok(CreateTrigger {
5786            or_alter,
5787            temporary,
5788            or_replace,
5789            is_constraint,
5790            name,
5791            period,
5792            period_before_table: true,
5793            events,
5794            table_name,
5795            referenced_table_name,
5796            referencing,
5797            trigger_object,
5798            condition,
5799            exec_body,
5800            statements_as: false,
5801            statements,
5802            characteristics,
5803        }
5804        .into())
5805    }
5806
5807    pub fn parse_trigger_period(&mut self) -> Result<TriggerPeriod, ParserError> {
5808        Ok(
5809            match self.expect_one_of_keywords(&[
5810                Keyword::FOR,
5811                Keyword::BEFORE,
5812                Keyword::AFTER,
5813                Keyword::INSTEAD,
5814            ])? {
5815                Keyword::FOR => TriggerPeriod::For,
5816                Keyword::BEFORE => TriggerPeriod::Before,
5817                Keyword::AFTER => TriggerPeriod::After,
5818                Keyword::INSTEAD => self
5819                    .expect_keyword_is(Keyword::OF)
5820                    .map(|_| TriggerPeriod::InsteadOf)?,
5821                unexpected_keyword => return Err(ParserError::ParserError(
5822                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger period"),
5823                )),
5824            },
5825        )
5826    }
5827
5828    pub fn parse_trigger_event(&mut self) -> Result<TriggerEvent, ParserError> {
5829        Ok(
5830            match self.expect_one_of_keywords(&[
5831                Keyword::INSERT,
5832                Keyword::UPDATE,
5833                Keyword::DELETE,
5834                Keyword::TRUNCATE,
5835            ])? {
5836                Keyword::INSERT => TriggerEvent::Insert,
5837                Keyword::UPDATE => {
5838                    if self.parse_keyword(Keyword::OF) {
5839                        let cols = self.parse_comma_separated(Parser::parse_identifier)?;
5840                        TriggerEvent::Update(cols)
5841                    } else {
5842                        TriggerEvent::Update(vec![])
5843                    }
5844                }
5845                Keyword::DELETE => TriggerEvent::Delete,
5846                Keyword::TRUNCATE => TriggerEvent::Truncate,
5847                unexpected_keyword => return Err(ParserError::ParserError(
5848                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger event"),
5849                )),
5850            },
5851        )
5852    }
5853
5854    pub fn parse_trigger_referencing(&mut self) -> Result<Option<TriggerReferencing>, ParserError> {
5855        let refer_type = match self.parse_one_of_keywords(&[Keyword::OLD, Keyword::NEW]) {
5856            Some(Keyword::OLD) if self.parse_keyword(Keyword::TABLE) => {
5857                TriggerReferencingType::OldTable
5858            }
5859            Some(Keyword::NEW) if self.parse_keyword(Keyword::TABLE) => {
5860                TriggerReferencingType::NewTable
5861            }
5862            _ => {
5863                return Ok(None);
5864            }
5865        };
5866
5867        let is_as = self.parse_keyword(Keyword::AS);
5868        let transition_relation_name = self.parse_object_name(false)?;
5869        Ok(Some(TriggerReferencing {
5870            refer_type,
5871            is_as,
5872            transition_relation_name,
5873        }))
5874    }
5875
5876    pub fn parse_trigger_exec_body(&mut self) -> Result<TriggerExecBody, ParserError> {
5877        Ok(TriggerExecBody {
5878            exec_type: match self
5879                .expect_one_of_keywords(&[Keyword::FUNCTION, Keyword::PROCEDURE])?
5880            {
5881                Keyword::FUNCTION => TriggerExecBodyType::Function,
5882                Keyword::PROCEDURE => TriggerExecBodyType::Procedure,
5883                unexpected_keyword => return Err(ParserError::ParserError(
5884                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger exec body"),
5885                )),
5886            },
5887            func_desc: self.parse_function_desc()?,
5888        })
5889    }
5890
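    /// Parse a `CREATE MACRO` statement (DuckDB).
    ///
    /// A minimal illustrative example (macro and parameter names are placeholders):
    ///
    /// ```sql
    /// CREATE MACRO add(a, b) AS a + b
    /// ```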
5891    pub fn parse_create_macro(
5892        &mut self,
5893        or_replace: bool,
5894        temporary: bool,
5895    ) -> Result<Statement, ParserError> {
5896        if dialect_of!(self is DuckDbDialect | GenericDialect) {
5897            let name = self.parse_object_name(false)?;
5898            self.expect_token(&Token::LParen)?;
5899            let args = if self.consume_token(&Token::RParen) {
5900                self.prev_token();
5901                None
5902            } else {
5903                Some(self.parse_comma_separated(Parser::parse_macro_arg)?)
5904            };
5905
5906            self.expect_token(&Token::RParen)?;
5907            self.expect_keyword_is(Keyword::AS)?;
5908
5909            Ok(Statement::CreateMacro {
5910                or_replace,
5911                temporary,
5912                name,
5913                args,
5914                definition: if self.parse_keyword(Keyword::TABLE) {
5915                    MacroDefinition::Table(self.parse_query()?)
5916                } else {
5917                    MacroDefinition::Expr(self.parse_expr()?)
5918                },
5919            })
5920        } else {
5921            self.prev_token();
5922            self.expected("an object type after CREATE", self.peek_token())
5923        }
5924    }
5925
5926    fn parse_macro_arg(&mut self) -> Result<MacroArg, ParserError> {
5927        let name = self.parse_identifier()?;
5928
5929        let default_expr =
5930            if self.consume_token(&Token::Assignment) || self.consume_token(&Token::RArrow) {
5931                Some(self.parse_expr()?)
5932            } else {
5933                None
5934            };
5935        Ok(MacroArg { name, default_expr })
5936    }
5937
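    /// Parse a `CREATE EXTERNAL TABLE` statement (Hive-style).
    ///
    /// A minimal illustrative example (table and column names, and the location,
    /// are placeholders):
    ///
    /// ```sql
    /// CREATE EXTERNAL TABLE uk_cities (name VARCHAR(100), lat DOUBLE) STORED AS TEXTFILE LOCATION '/tmp/example.csv'
    /// ```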
5938    pub fn parse_create_external_table(
5939        &mut self,
5940        or_replace: bool,
5941    ) -> Result<Statement, ParserError> {
5942        self.expect_keyword_is(Keyword::TABLE)?;
5943        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5944        let table_name = self.parse_object_name(false)?;
5945        let (columns, constraints) = self.parse_columns()?;
5946
5947        let hive_distribution = self.parse_hive_distribution()?;
5948        let hive_formats = self.parse_hive_formats()?;
5949
5950        let file_format = if let Some(ref hf) = hive_formats {
5951            if let Some(ref ff) = hf.storage {
5952                match ff {
5953                    HiveIOFormat::FileFormat { format } => Some(*format),
5954                    _ => None,
5955                }
5956            } else {
5957                None
5958            }
5959        } else {
5960            None
5961        };
5962        let location = hive_formats.as_ref().and_then(|hf| hf.location.clone());
5963        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
5964        let table_options = if !table_properties.is_empty() {
5965            CreateTableOptions::TableProperties(table_properties)
5966        } else {
5967            CreateTableOptions::None
5968        };
5969        Ok(CreateTableBuilder::new(table_name)
5970            .columns(columns)
5971            .constraints(constraints)
5972            .hive_distribution(hive_distribution)
5973            .hive_formats(hive_formats)
5974            .table_options(table_options)
5975            .or_replace(or_replace)
5976            .if_not_exists(if_not_exists)
5977            .external(true)
5978            .file_format(file_format)
5979            .location(location)
5980            .build())
5981    }
5982
5983    pub fn parse_file_format(&mut self) -> Result<FileFormat, ParserError> {
5984        let next_token = self.next_token();
5985        match &next_token.token {
5986            Token::Word(w) => match w.keyword {
5987                Keyword::AVRO => Ok(FileFormat::AVRO),
5988                Keyword::JSONFILE => Ok(FileFormat::JSONFILE),
5989                Keyword::ORC => Ok(FileFormat::ORC),
5990                Keyword::PARQUET => Ok(FileFormat::PARQUET),
5991                Keyword::RCFILE => Ok(FileFormat::RCFILE),
5992                Keyword::SEQUENCEFILE => Ok(FileFormat::SEQUENCEFILE),
5993                Keyword::TEXTFILE => Ok(FileFormat::TEXTFILE),
5994                _ => self.expected("fileformat", next_token),
5995            },
5996            _ => self.expected("fileformat", next_token),
5997        }
5998    }
5999
6000    fn parse_analyze_format_kind(&mut self) -> Result<AnalyzeFormatKind, ParserError> {
6001        if self.consume_token(&Token::Eq) {
6002            Ok(AnalyzeFormatKind::Assignment(self.parse_analyze_format()?))
6003        } else {
6004            Ok(AnalyzeFormatKind::Keyword(self.parse_analyze_format()?))
6005        }
6006    }
6007
6008    pub fn parse_analyze_format(&mut self) -> Result<AnalyzeFormat, ParserError> {
6009        let next_token = self.next_token();
6010        match &next_token.token {
6011            Token::Word(w) => match w.keyword {
6012                Keyword::TEXT => Ok(AnalyzeFormat::TEXT),
6013                Keyword::GRAPHVIZ => Ok(AnalyzeFormat::GRAPHVIZ),
6014                Keyword::JSON => Ok(AnalyzeFormat::JSON),
6015                _ => self.expected("TEXT, GRAPHVIZ or JSON", next_token),
6016            },
6017            _ => self.expected("TEXT, GRAPHVIZ or JSON", next_token),
6018        }
6019    }
6020
6021    pub fn parse_create_view(
6022        &mut self,
6023        or_alter: bool,
6024        or_replace: bool,
6025        temporary: bool,
6026        create_view_params: Option<CreateViewParams>,
6027    ) -> Result<Statement, ParserError> {
6028        let secure = self.parse_keyword(Keyword::SECURE);
6029        let materialized = self.parse_keyword(Keyword::MATERIALIZED);
6030        self.expect_keyword_is(Keyword::VIEW)?;
6031        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
6032        // Tries to parse IF NOT EXISTS either before or after the view name.
6033        // The name appearing before IF NOT EXISTS is accepted by Snowflake but undocumented.
6034        let if_not_exists_first =
6035            self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6036        let name = self.parse_object_name(allow_unquoted_hyphen)?;
6037        let name_before_not_exists = !if_not_exists_first
6038            && self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6039        let if_not_exists = if_not_exists_first || name_before_not_exists;
6040        // Many dialects support `OR ALTER` right after `CREATE`, but we don't (yet).
6041        // ANSI SQL and Postgres support RECURSIVE here, but we don't support it either.
6042        let columns = self.parse_view_columns()?;
6043        let mut options = CreateTableOptions::None;
6044        let with_options = self.parse_options(Keyword::WITH)?;
6045        if !with_options.is_empty() {
6046            options = CreateTableOptions::With(with_options);
6047        }
6048
6049        let cluster_by = if self.parse_keyword(Keyword::CLUSTER) {
6050            self.expect_keyword_is(Keyword::BY)?;
6051            self.parse_parenthesized_column_list(Optional, false)?
6052        } else {
6053            vec![]
6054        };
6055
6056        if dialect_of!(self is BigQueryDialect | GenericDialect) {
6057            if let Some(opts) = self.maybe_parse_options(Keyword::OPTIONS)? {
6058                if !opts.is_empty() {
6059                    options = CreateTableOptions::Options(opts);
6060                }
6061            };
6062        }
6063
6064        let to = if dialect_of!(self is ClickHouseDialect | GenericDialect)
6065            && self.parse_keyword(Keyword::TO)
6066        {
6067            Some(self.parse_object_name(false)?)
6068        } else {
6069            None
6070        };
6071
6072        let comment = if dialect_of!(self is SnowflakeDialect | GenericDialect)
6073            && self.parse_keyword(Keyword::COMMENT)
6074        {
6075            self.expect_token(&Token::Eq)?;
6076            Some(self.parse_comment_value()?)
6077        } else {
6078            None
6079        };
6080
6081        self.expect_keyword_is(Keyword::AS)?;
6082        let query = self.parse_query()?;
6083        // Optional `WITH [ CASCADED | LOCAL ] CHECK OPTION` is widely supported here.
6084
6085        let with_no_schema_binding = dialect_of!(self is RedshiftSqlDialect | GenericDialect)
6086            && self.parse_keywords(&[
6087                Keyword::WITH,
6088                Keyword::NO,
6089                Keyword::SCHEMA,
6090                Keyword::BINDING,
6091            ]);
6092
6093        Ok(CreateView {
6094            or_alter,
6095            name,
6096            columns,
6097            query,
6098            materialized,
6099            secure,
6100            or_replace,
6101            options,
6102            cluster_by,
6103            comment,
6104            with_no_schema_binding,
6105            if_not_exists,
6106            temporary,
6107            to,
6108            params: create_view_params,
6109            name_before_not_exists,
6110        }
6111        .into())
6112    }
6113
6114    /// Parse optional parameters for the `CREATE VIEW` statement supported by [MySQL].
6115    ///
6116    /// [MySQL]: https://dev.mysql.com/doc/refman/9.1/en/create-view.html
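    ///
    /// These parameters appear between `CREATE` and `VIEW`, as in this
    /// illustrative example (definer and view name are placeholders):
    ///
    /// ```sql
    /// CREATE ALGORITHM = MERGE DEFINER = `root`@`localhost` SQL SECURITY DEFINER VIEW v AS SELECT 1
    /// ```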
6117    fn parse_create_view_params(&mut self) -> Result<Option<CreateViewParams>, ParserError> {
6118        let algorithm = if self.parse_keyword(Keyword::ALGORITHM) {
6119            self.expect_token(&Token::Eq)?;
6120            Some(
6121                match self.expect_one_of_keywords(&[
6122                    Keyword::UNDEFINED,
6123                    Keyword::MERGE,
6124                    Keyword::TEMPTABLE,
6125                ])? {
6126                    Keyword::UNDEFINED => CreateViewAlgorithm::Undefined,
6127                    Keyword::MERGE => CreateViewAlgorithm::Merge,
6128                    Keyword::TEMPTABLE => CreateViewAlgorithm::TempTable,
6129                    _ => {
6130                        self.prev_token();
6131                        let found = self.next_token();
6132                        return self
6133                            .expected("UNDEFINED or MERGE or TEMPTABLE after ALGORITHM =", found);
6134                    }
6135                },
6136            )
6137        } else {
6138            None
6139        };
6140        let definer = if self.parse_keyword(Keyword::DEFINER) {
6141            self.expect_token(&Token::Eq)?;
6142            Some(self.parse_grantee_name()?)
6143        } else {
6144            None
6145        };
6146        let security = if self.parse_keywords(&[Keyword::SQL, Keyword::SECURITY]) {
6147            Some(
6148                match self.expect_one_of_keywords(&[Keyword::DEFINER, Keyword::INVOKER])? {
6149                    Keyword::DEFINER => CreateViewSecurity::Definer,
6150                    Keyword::INVOKER => CreateViewSecurity::Invoker,
6151                    _ => {
6152                        self.prev_token();
6153                        let found = self.next_token();
6154                        return self.expected("DEFINER or INVOKER after SQL SECURITY", found);
6155                    }
6156                },
6157            )
6158        } else {
6159            None
6160        };
6161        if algorithm.is_some() || definer.is_some() || security.is_some() {
6162            Ok(Some(CreateViewParams {
6163                algorithm,
6164                definer,
6165                security,
6166            }))
6167        } else {
6168            Ok(None)
6169        }
6170    }
6171
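    /// Parse a `CREATE ROLE` statement.
    ///
    /// A minimal illustrative, PostgreSQL-style example (role name and password
    /// are placeholders):
    ///
    /// ```sql
    /// CREATE ROLE admin WITH LOGIN PASSWORD 'secret' CONNECTION LIMIT 10
    /// ```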
6172    pub fn parse_create_role(&mut self) -> Result<Statement, ParserError> {
6173        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6174        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
6175
6176        let _ = self.parse_keyword(Keyword::WITH); // [ WITH ]
6177
6178        let optional_keywords = if dialect_of!(self is MsSqlDialect) {
6179            vec![Keyword::AUTHORIZATION]
6180        } else if dialect_of!(self is PostgreSqlDialect) {
6181            vec![
6182                Keyword::LOGIN,
6183                Keyword::NOLOGIN,
6184                Keyword::INHERIT,
6185                Keyword::NOINHERIT,
6186                Keyword::BYPASSRLS,
6187                Keyword::NOBYPASSRLS,
6188                Keyword::PASSWORD,
6189                Keyword::CREATEDB,
6190                Keyword::NOCREATEDB,
6191                Keyword::CREATEROLE,
6192                Keyword::NOCREATEROLE,
6193                Keyword::SUPERUSER,
6194                Keyword::NOSUPERUSER,
6195                Keyword::REPLICATION,
6196                Keyword::NOREPLICATION,
6197                Keyword::CONNECTION,
6198                Keyword::VALID,
6199                Keyword::IN,
6200                Keyword::ROLE,
6201                Keyword::ADMIN,
6202                Keyword::USER,
6203            ]
6204        } else {
6205            vec![]
6206        };
6207
6208        // MSSQL
6209        let mut authorization_owner = None;
6210        // Postgres
6211        let mut login = None;
6212        let mut inherit = None;
6213        let mut bypassrls = None;
6214        let mut password = None;
6215        let mut create_db = None;
6216        let mut create_role = None;
6217        let mut superuser = None;
6218        let mut replication = None;
6219        let mut connection_limit = None;
6220        let mut valid_until = None;
6221        let mut in_role = vec![];
6222        let mut in_group = vec![];
6223        let mut role = vec![];
6224        let mut user = vec![];
6225        let mut admin = vec![];
6226
6227        while let Some(keyword) = self.parse_one_of_keywords(&optional_keywords) {
6228            let loc = self
6229                .tokens
6230                .get(self.index - 1)
6231                .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
6232            match keyword {
6233                Keyword::AUTHORIZATION => {
6234                    if authorization_owner.is_some() {
6235                        parser_err!("Found multiple AUTHORIZATION", loc)
6236                    } else {
6237                        authorization_owner = Some(self.parse_object_name(false)?);
6238                        Ok(())
6239                    }
6240                }
6241                Keyword::LOGIN | Keyword::NOLOGIN => {
6242                    if login.is_some() {
6243                        parser_err!("Found multiple LOGIN or NOLOGIN", loc)
6244                    } else {
6245                        login = Some(keyword == Keyword::LOGIN);
6246                        Ok(())
6247                    }
6248                }
6249                Keyword::INHERIT | Keyword::NOINHERIT => {
6250                    if inherit.is_some() {
6251                        parser_err!("Found multiple INHERIT or NOINHERIT", loc)
6252                    } else {
6253                        inherit = Some(keyword == Keyword::INHERIT);
6254                        Ok(())
6255                    }
6256                }
6257                Keyword::BYPASSRLS | Keyword::NOBYPASSRLS => {
6258                    if bypassrls.is_some() {
6259                        parser_err!("Found multiple BYPASSRLS or NOBYPASSRLS", loc)
6260                    } else {
6261                        bypassrls = Some(keyword == Keyword::BYPASSRLS);
6262                        Ok(())
6263                    }
6264                }
6265                Keyword::CREATEDB | Keyword::NOCREATEDB => {
6266                    if create_db.is_some() {
6267                        parser_err!("Found multiple CREATEDB or NOCREATEDB", loc)
6268                    } else {
6269                        create_db = Some(keyword == Keyword::CREATEDB);
6270                        Ok(())
6271                    }
6272                }
6273                Keyword::CREATEROLE | Keyword::NOCREATEROLE => {
6274                    if create_role.is_some() {
6275                        parser_err!("Found multiple CREATEROLE or NOCREATEROLE", loc)
6276                    } else {
6277                        create_role = Some(keyword == Keyword::CREATEROLE);
6278                        Ok(())
6279                    }
6280                }
6281                Keyword::SUPERUSER | Keyword::NOSUPERUSER => {
6282                    if superuser.is_some() {
6283                        parser_err!("Found multiple SUPERUSER or NOSUPERUSER", loc)
6284                    } else {
6285                        superuser = Some(keyword == Keyword::SUPERUSER);
6286                        Ok(())
6287                    }
6288                }
6289                Keyword::REPLICATION | Keyword::NOREPLICATION => {
6290                    if replication.is_some() {
6291                        parser_err!("Found multiple REPLICATION or NOREPLICATION", loc)
6292                    } else {
6293                        replication = Some(keyword == Keyword::REPLICATION);
6294                        Ok(())
6295                    }
6296                }
6297                Keyword::PASSWORD => {
6298                    if password.is_some() {
6299                        parser_err!("Found multiple PASSWORD", loc)
6300                    } else {
6301                        password = if self.parse_keyword(Keyword::NULL) {
6302                            Some(Password::NullPassword)
6303                        } else {
6304                            Some(Password::Password(Expr::Value(self.parse_value()?)))
6305                        };
6306                        Ok(())
6307                    }
6308                }
6309                Keyword::CONNECTION => {
6310                    self.expect_keyword_is(Keyword::LIMIT)?;
6311                    if connection_limit.is_some() {
6312                        parser_err!("Found multiple CONNECTION LIMIT", loc)
6313                    } else {
6314                        connection_limit = Some(Expr::Value(self.parse_number_value()?));
6315                        Ok(())
6316                    }
6317                }
6318                Keyword::VALID => {
6319                    self.expect_keyword_is(Keyword::UNTIL)?;
6320                    if valid_until.is_some() {
6321                        parser_err!("Found multiple VALID UNTIL", loc)
6322                    } else {
6323                        valid_until = Some(Expr::Value(self.parse_value()?));
6324                        Ok(())
6325                    }
6326                }
6327                Keyword::IN => {
6328                    if self.parse_keyword(Keyword::ROLE) {
6329                        if !in_role.is_empty() {
6330                            parser_err!("Found multiple IN ROLE", loc)
6331                        } else {
6332                            in_role = self.parse_comma_separated(|p| p.parse_identifier())?;
6333                            Ok(())
6334                        }
6335                    } else if self.parse_keyword(Keyword::GROUP) {
6336                        if !in_group.is_empty() {
6337                            parser_err!("Found multiple IN GROUP", loc)
6338                        } else {
6339                            in_group = self.parse_comma_separated(|p| p.parse_identifier())?;
6340                            Ok(())
6341                        }
6342                    } else {
6343                        self.expected("ROLE or GROUP after IN", self.peek_token())
6344                    }
6345                }
6346                Keyword::ROLE => {
6347                    if !role.is_empty() {
6348                        parser_err!("Found multiple ROLE", loc)
6349                    } else {
6350                        role = self.parse_comma_separated(|p| p.parse_identifier())?;
6351                        Ok(())
6352                    }
6353                }
6354                Keyword::USER => {
6355                    if !user.is_empty() {
6356                        parser_err!("Found multiple USER", loc)
6357                    } else {
6358                        user = self.parse_comma_separated(|p| p.parse_identifier())?;
6359                        Ok(())
6360                    }
6361                }
6362                Keyword::ADMIN => {
6363                    if !admin.is_empty() {
6364                        parser_err!("Found multiple ADMIN", loc)
6365                    } else {
6366                        admin = self.parse_comma_separated(|p| p.parse_identifier())?;
6367                        Ok(())
6368                    }
6369                }
6370                _ => break,
6371            }?
6372        }
6373
6374        Ok(CreateRole {
6375            names,
6376            if_not_exists,
6377            login,
6378            inherit,
6379            bypassrls,
6380            password,
6381            create_db,
6382            create_role,
6383            replication,
6384            superuser,
6385            connection_limit,
6386            valid_until,
6387            in_role,
6388            in_group,
6389            role,
6390            user,
6391            admin,
6392            authorization_owner,
6393        }
6394        .into())
6395    }
6396
6397    pub fn parse_owner(&mut self) -> Result<Owner, ParserError> {
6398        let owner = match self.parse_one_of_keywords(&[Keyword::CURRENT_USER, Keyword::CURRENT_ROLE, Keyword::SESSION_USER]) {
6399            Some(Keyword::CURRENT_USER) => Owner::CurrentUser,
6400            Some(Keyword::CURRENT_ROLE) => Owner::CurrentRole,
6401            Some(Keyword::SESSION_USER) => Owner::SessionUser,
6402            Some(unexpected_keyword) => return Err(ParserError::ParserError(
6403                format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in owner"),
6404            )),
6405            None => {
6406                match self.parse_identifier() {
6407                    Ok(ident) => Owner::Ident(ident),
6408                    Err(e) => {
6409                        return Err(ParserError::ParserError(format!("Expected: CURRENT_USER, CURRENT_ROLE, SESSION_USER or identifier after OWNER TO. {e}")))
6410                    }
6411                }
6412            }
6413        };
6414        Ok(owner)
6415    }
6416
6417    /// Parses a [Statement::CreateDomain] statement.
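    ///
    /// A minimal illustrative, PostgreSQL-style example (the domain name is a placeholder):
    ///
    /// ```sql
    /// CREATE DOMAIN positive_int AS INTEGER CHECK (VALUE > 0)
    /// ```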
6418    fn parse_create_domain(&mut self) -> Result<Statement, ParserError> {
6419        let name = self.parse_object_name(false)?;
6420        self.expect_keyword_is(Keyword::AS)?;
6421        let data_type = self.parse_data_type()?;
6422        let collation = if self.parse_keyword(Keyword::COLLATE) {
6423            Some(self.parse_identifier()?)
6424        } else {
6425            None
6426        };
6427        let default = if self.parse_keyword(Keyword::DEFAULT) {
6428            Some(self.parse_expr()?)
6429        } else {
6430            None
6431        };
6432        let mut constraints = Vec::new();
6433        while let Some(constraint) = self.parse_optional_table_constraint()? {
6434            constraints.push(constraint);
6435        }
6436
6437        Ok(Statement::CreateDomain(CreateDomain {
6438            name,
6439            data_type,
6440            collation,
6441            default,
6442            constraints,
6443        }))
6444    }
6445
6446    /// ```sql
6447    ///     CREATE POLICY name ON table_name [ AS { PERMISSIVE | RESTRICTIVE } ]
6448    ///     [ FOR { ALL | SELECT | INSERT | UPDATE | DELETE } ]
6449    ///     [ TO { role_name | PUBLIC | CURRENT_USER | CURRENT_ROLE | SESSION_USER } [, ...] ]
6450    ///     [ USING ( using_expression ) ]
6451    ///     [ WITH CHECK ( with_check_expression ) ]
6452    /// ```
6453    ///
6454    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createpolicy.html)
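    ///
    /// A minimal illustrative example (policy, table, and column names are placeholders):
    ///
    /// ```sql
    /// CREATE POLICY my_policy ON my_table AS PERMISSIVE FOR SELECT TO CURRENT_USER USING (c0 = 1) WITH CHECK (true)
    /// ```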
6455    pub fn parse_create_policy(&mut self) -> Result<Statement, ParserError> {
6456        let name = self.parse_identifier()?;
6457        self.expect_keyword_is(Keyword::ON)?;
6458        let table_name = self.parse_object_name(false)?;
6459
6460        let policy_type = if self.parse_keyword(Keyword::AS) {
6461            let keyword =
6462                self.expect_one_of_keywords(&[Keyword::PERMISSIVE, Keyword::RESTRICTIVE])?;
6463            Some(match keyword {
6464                Keyword::PERMISSIVE => CreatePolicyType::Permissive,
6465                Keyword::RESTRICTIVE => CreatePolicyType::Restrictive,
6466                unexpected_keyword => return Err(ParserError::ParserError(
6467                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in policy type"),
6468                )),
6469            })
6470        } else {
6471            None
6472        };
6473
6474        let command = if self.parse_keyword(Keyword::FOR) {
6475            let keyword = self.expect_one_of_keywords(&[
6476                Keyword::ALL,
6477                Keyword::SELECT,
6478                Keyword::INSERT,
6479                Keyword::UPDATE,
6480                Keyword::DELETE,
6481            ])?;
6482            Some(match keyword {
6483                Keyword::ALL => CreatePolicyCommand::All,
6484                Keyword::SELECT => CreatePolicyCommand::Select,
6485                Keyword::INSERT => CreatePolicyCommand::Insert,
6486                Keyword::UPDATE => CreatePolicyCommand::Update,
6487                Keyword::DELETE => CreatePolicyCommand::Delete,
6488                unexpected_keyword => return Err(ParserError::ParserError(
6489                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in policy command"),
6490                )),
6491            })
6492        } else {
6493            None
6494        };
6495
6496        let to = if self.parse_keyword(Keyword::TO) {
6497            Some(self.parse_comma_separated(|p| p.parse_owner())?)
6498        } else {
6499            None
6500        };
6501
6502        let using = if self.parse_keyword(Keyword::USING) {
6503            self.expect_token(&Token::LParen)?;
6504            let expr = self.parse_expr()?;
6505            self.expect_token(&Token::RParen)?;
6506            Some(expr)
6507        } else {
6508            None
6509        };
6510
6511        let with_check = if self.parse_keywords(&[Keyword::WITH, Keyword::CHECK]) {
6512            self.expect_token(&Token::LParen)?;
6513            let expr = self.parse_expr()?;
6514            self.expect_token(&Token::RParen)?;
6515            Some(expr)
6516        } else {
6517            None
6518        };
6519
6520        Ok(CreatePolicy {
6521            name,
6522            table_name,
6523            policy_type,
6524            command,
6525            to,
6526            using,
6527            with_check,
6528        })
6529    }
6530
6531    /// ```sql
6532    /// CREATE CONNECTOR [IF NOT EXISTS] connector_name
6533    /// [TYPE datasource_type]
6534    /// [URL datasource_url]
6535    /// [COMMENT connector_comment]
6536    /// [WITH DCPROPERTIES(property_name=property_value, ...)]
6537    /// ```
6538    ///
6539    /// [Hive Documentation](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362034#LanguageManualDDL-CreateDataConnectorCreateConnector)
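    ///
    /// A minimal illustrative example (connector name, URL, and properties are placeholders):
    ///
    /// ```sql
    /// CREATE CONNECTOR IF NOT EXISTS my_connector TYPE 'mysql' URL 'jdbc:mysql://localhost:3306/hive' WITH DCPROPERTIES('user'='hive')
    /// ```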
6540    pub fn parse_create_connector(&mut self) -> Result<Statement, ParserError> {
6541        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6542        let name = self.parse_identifier()?;
6543
6544        let connector_type = if self.parse_keyword(Keyword::TYPE) {
6545            Some(self.parse_literal_string()?)
6546        } else {
6547            None
6548        };
6549
6550        let url = if self.parse_keyword(Keyword::URL) {
6551            Some(self.parse_literal_string()?)
6552        } else {
6553            None
6554        };
6555
6556        let comment = self.parse_optional_inline_comment()?;
6557
6558        let with_dcproperties =
6559            match self.parse_options_with_keywords(&[Keyword::WITH, Keyword::DCPROPERTIES])? {
6560                properties if !properties.is_empty() => Some(properties),
6561                _ => None,
6562            };
6563
6564        Ok(Statement::CreateConnector(CreateConnector {
6565            name,
6566            if_not_exists,
6567            connector_type,
6568            url,
6569            comment,
6570            with_dcproperties,
6571        }))
6572    }
6573
6574    /// Parse an operator name, which can contain special characters like +, -, <, >, =
6575    /// that are tokenized as operator tokens rather than identifiers.
6576    /// This is used for PostgreSQL CREATE OPERATOR statements.
6577    ///
6578    /// Examples: `+`, `myschema.+`, `pg_catalog.<=`
6579    fn parse_operator_name(&mut self) -> Result<ObjectName, ParserError> {
6580        let mut parts = vec![];
6581        loop {
6582            parts.push(ObjectNamePart::Identifier(Ident::new(
6583                self.next_token().to_string(),
6584            )));
6585            if !self.consume_token(&Token::Period) {
6586                break;
6587            }
6588        }
6589        Ok(ObjectName(parts))
6590    }
6591
6592    /// Parse a [Statement::CreateOperator]
6593    ///
6594    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createoperator.html)
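    ///
    /// A minimal illustrative example (schema and function names are placeholders):
    ///
    /// ```sql
    /// CREATE OPERATOR myschema.+ (FUNCTION = my_add, LEFTARG = integer, RIGHTARG = integer)
    /// ```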
6595    pub fn parse_create_operator(&mut self) -> Result<Statement, ParserError> {
6596        let name = self.parse_operator_name()?;
6597        self.expect_token(&Token::LParen)?;
6598
6599        let mut function: Option<ObjectName> = None;
6600        let mut is_procedure = false;
6601        let mut left_arg: Option<DataType> = None;
6602        let mut right_arg: Option<DataType> = None;
6603        let mut options: Vec<OperatorOption> = Vec::new();
6604
6605        loop {
6606            let keyword = self.expect_one_of_keywords(&[
6607                Keyword::FUNCTION,
6608                Keyword::PROCEDURE,
6609                Keyword::LEFTARG,
6610                Keyword::RIGHTARG,
6611                Keyword::COMMUTATOR,
6612                Keyword::NEGATOR,
6613                Keyword::RESTRICT,
6614                Keyword::JOIN,
6615                Keyword::HASHES,
6616                Keyword::MERGES,
6617            ])?;
6618
6619            match keyword {
6620                Keyword::HASHES if !options.iter().any(|o| matches!(o, OperatorOption::Hashes)) => {
6621                    options.push(OperatorOption::Hashes);
6622                }
6623                Keyword::MERGES if !options.iter().any(|o| matches!(o, OperatorOption::Merges)) => {
6624                    options.push(OperatorOption::Merges);
6625                }
6626                Keyword::FUNCTION | Keyword::PROCEDURE if function.is_none() => {
6627                    self.expect_token(&Token::Eq)?;
6628                    function = Some(self.parse_object_name(false)?);
6629                    is_procedure = keyword == Keyword::PROCEDURE;
6630                }
6631                Keyword::LEFTARG if left_arg.is_none() => {
6632                    self.expect_token(&Token::Eq)?;
6633                    left_arg = Some(self.parse_data_type()?);
6634                }
6635                Keyword::RIGHTARG if right_arg.is_none() => {
6636                    self.expect_token(&Token::Eq)?;
6637                    right_arg = Some(self.parse_data_type()?);
6638                }
6639                Keyword::COMMUTATOR
6640                    if !options
6641                        .iter()
6642                        .any(|o| matches!(o, OperatorOption::Commutator(_))) =>
6643                {
6644                    self.expect_token(&Token::Eq)?;
6645                    if self.parse_keyword(Keyword::OPERATOR) {
6646                        self.expect_token(&Token::LParen)?;
6647                        let op = self.parse_operator_name()?;
6648                        self.expect_token(&Token::RParen)?;
6649                        options.push(OperatorOption::Commutator(op));
6650                    } else {
6651                        options.push(OperatorOption::Commutator(self.parse_operator_name()?));
6652                    }
6653                }
6654                Keyword::NEGATOR
6655                    if !options
6656                        .iter()
6657                        .any(|o| matches!(o, OperatorOption::Negator(_))) =>
6658                {
6659                    self.expect_token(&Token::Eq)?;
6660                    if self.parse_keyword(Keyword::OPERATOR) {
6661                        self.expect_token(&Token::LParen)?;
6662                        let op = self.parse_operator_name()?;
6663                        self.expect_token(&Token::RParen)?;
6664                        options.push(OperatorOption::Negator(op));
6665                    } else {
6666                        options.push(OperatorOption::Negator(self.parse_operator_name()?));
6667                    }
6668                }
6669                Keyword::RESTRICT
6670                    if !options
6671                        .iter()
6672                        .any(|o| matches!(o, OperatorOption::Restrict(_))) =>
6673                {
6674                    self.expect_token(&Token::Eq)?;
6675                    options.push(OperatorOption::Restrict(Some(
6676                        self.parse_object_name(false)?,
6677                    )));
6678                }
6679                Keyword::JOIN if !options.iter().any(|o| matches!(o, OperatorOption::Join(_))) => {
6680                    self.expect_token(&Token::Eq)?;
6681                    options.push(OperatorOption::Join(Some(self.parse_object_name(false)?)));
6682                }
6683                _ => {
6684                    return Err(ParserError::ParserError(format!(
6685                        "Duplicate or unexpected keyword {:?} in CREATE OPERATOR",
6686                        keyword
6687                    )))
6688                }
6689            }
6690
6691            if !self.consume_token(&Token::Comma) {
6692                break;
6693            }
6694        }
6695
6696        // Expect closing parenthesis
6697        self.expect_token(&Token::RParen)?;
6698
6699        // FUNCTION is required
6700        let function = function.ok_or_else(|| {
6701            ParserError::ParserError("CREATE OPERATOR requires FUNCTION parameter".to_string())
6702        })?;
6703
6704        Ok(Statement::CreateOperator(CreateOperator {
6705            name,
6706            function,
6707            is_procedure,
6708            left_arg,
6709            right_arg,
6710            options,
6711        }))
6712    }
6713
6714    /// Parse a [Statement::CreateOperatorFamily]
6715    ///
6716    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createopfamily.html)
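    ///
    /// For illustration, a statement of this shape should parse (names are arbitrary):
    /// ```sql
    /// CREATE OPERATOR FAMILY my_opfamily USING btree
    /// ```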
6717    pub fn parse_create_operator_family(&mut self) -> Result<Statement, ParserError> {
6718        let name = self.parse_object_name(false)?;
6719        self.expect_keyword(Keyword::USING)?;
6720        let using = self.parse_identifier()?;
6721
6722        Ok(Statement::CreateOperatorFamily(CreateOperatorFamily {
6723            name,
6724            using,
6725        }))
6726    }
6727
6728    /// Parse a [Statement::CreateOperatorClass]
6729    ///
6730    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createopclass.html)
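    ///
    /// For illustration, a statement of this shape should parse (names and types are
    /// arbitrary examples, loosely modeled on the PostgreSQL docs):
    /// ```sql
    /// CREATE OPERATOR CLASS my_opclass DEFAULT FOR TYPE INT4 USING btree AS
    ///     OPERATOR 1 <,
    ///     FUNCTION 1 btint4cmp(INT4, INT4),
    ///     STORAGE INT4
    /// ```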
6731    pub fn parse_create_operator_class(&mut self) -> Result<Statement, ParserError> {
6732        let name = self.parse_object_name(false)?;
6733        let default = self.parse_keyword(Keyword::DEFAULT);
6734        self.expect_keywords(&[Keyword::FOR, Keyword::TYPE])?;
6735        let for_type = self.parse_data_type()?;
6736        self.expect_keyword(Keyword::USING)?;
6737        let using = self.parse_identifier()?;
6738
6739        let family = if self.parse_keyword(Keyword::FAMILY) {
6740            Some(self.parse_object_name(false)?)
6741        } else {
6742            None
6743        };
6744
6745        self.expect_keyword(Keyword::AS)?;
6746
6747        let mut items = vec![];
6748        loop {
6749            if self.parse_keyword(Keyword::OPERATOR) {
6750                let strategy_number = self.parse_literal_uint()?;
6751                let operator_name = self.parse_operator_name()?;
6752
6753                // Optional operator argument types
6754                let op_types = if self.consume_token(&Token::LParen) {
6755                    let left = self.parse_data_type()?;
6756                    self.expect_token(&Token::Comma)?;
6757                    let right = self.parse_data_type()?;
6758                    self.expect_token(&Token::RParen)?;
6759                    Some(OperatorArgTypes { left, right })
6760                } else {
6761                    None
6762                };
6763
6764                // Optional purpose
6765                let purpose = if self.parse_keyword(Keyword::FOR) {
6766                    if self.parse_keyword(Keyword::SEARCH) {
6767                        Some(OperatorPurpose::ForSearch)
6768                    } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
6769                        let sort_family = self.parse_object_name(false)?;
6770                        Some(OperatorPurpose::ForOrderBy { sort_family })
6771                    } else {
6772                        return self.expected("SEARCH or ORDER BY after FOR", self.peek_token());
6773                    }
6774                } else {
6775                    None
6776                };
6777
6778                items.push(OperatorClassItem::Operator {
6779                    strategy_number,
6780                    operator_name,
6781                    op_types,
6782                    purpose,
6783                });
6784            } else if self.parse_keyword(Keyword::FUNCTION) {
6785                let support_number = self.parse_literal_uint()?;
6786
6787                // Optional operator types
6788                let op_types = if self.consume_token(&Token::LParen) {
6789                    // After `(` is consumed, accept an empty list as well as a
6790                    // comma-separated list of types, then require the closing `)`.
6791                    let mut types = vec![];
6792                    if self.peek_token() != Token::RParen {
6793                        loop {
6794                            types.push(self.parse_data_type()?);
6795                            if !self.consume_token(&Token::Comma) {
6796                                break;
6797                            }
6798                        }
6799                    }
6800                    self.expect_token(&Token::RParen)?;
6801                    Some(types)
6802                } else {
6803                    None
6804                };
6805
6806                let function_name = self.parse_object_name(false)?;
6807
6808                // Function argument types
6809                let argument_types = if self.consume_token(&Token::LParen) {
6810                    let mut types = vec![];
6811                    loop {
6812                        if self.peek_token() == Token::RParen {
6813                            break;
6814                        }
6815                        types.push(self.parse_data_type()?);
6816                        if !self.consume_token(&Token::Comma) {
6817                            break;
6818                        }
6819                    }
6820                    self.expect_token(&Token::RParen)?;
6821                    types
6822                } else {
6823                    vec![]
6824                };
6825
6826                items.push(OperatorClassItem::Function {
6827                    support_number,
6828                    op_types,
6829                    function_name,
6830                    argument_types,
6831                });
6832            } else if self.parse_keyword(Keyword::STORAGE) {
6833                let storage_type = self.parse_data_type()?;
6834                items.push(OperatorClassItem::Storage { storage_type });
6835            } else {
6836                break;
6837            }
6838
6839            // Check for comma separator
6840            if !self.consume_token(&Token::Comma) {
6841                break;
6842            }
6843        }
6844
6845        Ok(Statement::CreateOperatorClass(CreateOperatorClass {
6846            name,
6847            default,
6848            for_type,
6849            using,
6850            family,
6851            items,
6852        }))
6853    }
6854
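    /// Parse a `DROP` statement; the `DROP` keyword is expected to have been consumed by
    /// the caller. Object types with their own grammar (functions, policies, operators,
    /// secrets, ...) are delegated to dedicated `parse_drop_*` helpers.
    ///
    /// For illustration, a statement of this shape should parse (names are arbitrary):
    /// ```sql
    /// DROP TABLE IF EXISTS t1, t2 CASCADE
    /// ```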
6855    pub fn parse_drop(&mut self) -> Result<Statement, ParserError> {
6856        // MySQL dialect supports `TEMPORARY`
6857        let temporary = dialect_of!(self is MySqlDialect | GenericDialect | DuckDbDialect)
6858            && self.parse_keyword(Keyword::TEMPORARY);
6859        let persistent = dialect_of!(self is DuckDbDialect)
6860            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
6861
6862        let object_type = if self.parse_keyword(Keyword::TABLE) {
6863            ObjectType::Table
6864        } else if self.parse_keyword(Keyword::VIEW) {
6865            ObjectType::View
6866        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEW]) {
6867            ObjectType::MaterializedView
6868        } else if self.parse_keyword(Keyword::INDEX) {
6869            ObjectType::Index
6870        } else if self.parse_keyword(Keyword::ROLE) {
6871            ObjectType::Role
6872        } else if self.parse_keyword(Keyword::SCHEMA) {
6873            ObjectType::Schema
6874        } else if self.parse_keyword(Keyword::DATABASE) {
6875            ObjectType::Database
6876        } else if self.parse_keyword(Keyword::SEQUENCE) {
6877            ObjectType::Sequence
6878        } else if self.parse_keyword(Keyword::STAGE) {
6879            ObjectType::Stage
6880        } else if self.parse_keyword(Keyword::TYPE) {
6881            ObjectType::Type
6882        } else if self.parse_keyword(Keyword::USER) {
6883            ObjectType::User
6884        } else if self.parse_keyword(Keyword::STREAM) {
6885            ObjectType::Stream
6886        } else if self.parse_keyword(Keyword::FUNCTION) {
6887            return self.parse_drop_function();
6888        } else if self.parse_keyword(Keyword::POLICY) {
6889            return self.parse_drop_policy();
6890        } else if self.parse_keyword(Keyword::CONNECTOR) {
6891            return self.parse_drop_connector();
6892        } else if self.parse_keyword(Keyword::DOMAIN) {
6893            return self.parse_drop_domain();
6894        } else if self.parse_keyword(Keyword::PROCEDURE) {
6895            return self.parse_drop_procedure();
6896        } else if self.parse_keyword(Keyword::SECRET) {
6897            return self.parse_drop_secret(temporary, persistent);
6898        } else if self.parse_keyword(Keyword::TRIGGER) {
6899            return self.parse_drop_trigger();
6900        } else if self.parse_keyword(Keyword::EXTENSION) {
6901            return self.parse_drop_extension();
6902        } else if self.parse_keyword(Keyword::OPERATOR) {
6903            // Check if this is DROP OPERATOR FAMILY or DROP OPERATOR CLASS
6904            return if self.parse_keyword(Keyword::FAMILY) {
6905                self.parse_drop_operator_family()
6906            } else if self.parse_keyword(Keyword::CLASS) {
6907                self.parse_drop_operator_class()
6908            } else {
6909                self.parse_drop_operator()
6910            };
6911        } else {
6912            return self.expected(
6913                "CONNECTOR, DATABASE, EXTENSION, FUNCTION, INDEX, OPERATOR, POLICY, PROCEDURE, ROLE, SCHEMA, SECRET, SEQUENCE, STAGE, TABLE, TRIGGER, TYPE, VIEW, MATERIALIZED VIEW or USER after DROP",
6914                self.peek_token(),
6915            );
6916        };
6917        // Many dialects support the non-standard `IF EXISTS` clause and allow
6918        // specifying multiple objects to delete in a single statement
6919        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6920        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
6921
6922        let loc = self.peek_token().span.start;
6923        let cascade = self.parse_keyword(Keyword::CASCADE);
6924        let restrict = self.parse_keyword(Keyword::RESTRICT);
6925        let purge = self.parse_keyword(Keyword::PURGE);
6926        if cascade && restrict {
6927            return parser_err!("Cannot specify both CASCADE and RESTRICT in DROP", loc);
6928        }
6929        if object_type == ObjectType::Role && (cascade || restrict || purge) {
6930            return parser_err!(
6931                "Cannot specify CASCADE, RESTRICT, or PURGE in DROP ROLE",
6932                loc
6933            );
6934        }
6935        let table = if self.parse_keyword(Keyword::ON) {
6936            Some(self.parse_object_name(false)?)
6937        } else {
6938            None
6939        };
6940        Ok(Statement::Drop {
6941            object_type,
6942            if_exists,
6943            names,
6944            cascade,
6945            restrict,
6946            purge,
6947            temporary,
6948            table,
6949        })
6950    }
6951
6952    fn parse_optional_drop_behavior(&mut self) -> Option<DropBehavior> {
6953        match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
6954            Some(Keyword::CASCADE) => Some(DropBehavior::Cascade),
6955            Some(Keyword::RESTRICT) => Some(DropBehavior::Restrict),
6956            _ => None,
6957        }
6958    }
6959
6960    /// ```sql
6961    /// DROP FUNCTION [ IF EXISTS ] name [ ( [ [ argmode ] [ argname ] argtype [, ...] ] ) ] [, ...]
6962    /// [ CASCADE | RESTRICT ]
6963    /// ```
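    ///
    /// For illustration, a statement of this shape should parse (names and types are
    /// arbitrary examples):
    /// ```sql
    /// DROP FUNCTION IF EXISTS my_func(INTEGER, TEXT) RESTRICT
    /// ```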
6964    fn parse_drop_function(&mut self) -> Result<Statement, ParserError> {
6965        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6966        let func_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
6967        let drop_behavior = self.parse_optional_drop_behavior();
6968        Ok(Statement::DropFunction(DropFunction {
6969            if_exists,
6970            func_desc,
6971            drop_behavior,
6972        }))
6973    }
6974
6975    /// ```sql
6976    /// DROP POLICY [ IF EXISTS ] name ON table_name [ CASCADE | RESTRICT ]
6977    /// ```
6978    ///
6979    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-droppolicy.html)
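    ///
    /// For illustration, a statement of this shape should parse (names are arbitrary):
    /// ```sql
    /// DROP POLICY IF EXISTS my_policy ON my_table RESTRICT
    /// ```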
6980    fn parse_drop_policy(&mut self) -> Result<Statement, ParserError> {
6981        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6982        let name = self.parse_identifier()?;
6983        self.expect_keyword_is(Keyword::ON)?;
6984        let table_name = self.parse_object_name(false)?;
6985        let drop_behavior = self.parse_optional_drop_behavior();
6986        Ok(Statement::DropPolicy {
6987            if_exists,
6988            name,
6989            table_name,
6990            drop_behavior,
6991        })
6992    }
6993    /// ```sql
6994    /// DROP CONNECTOR [IF EXISTS] name
6995    /// ```
6996    ///
6997    /// See [Hive](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362034#LanguageManualDDL-DropConnector)
6998    fn parse_drop_connector(&mut self) -> Result<Statement, ParserError> {
6999        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7000        let name = self.parse_identifier()?;
7001        Ok(Statement::DropConnector { if_exists, name })
7002    }
7003
7004    /// ```sql
7005    /// DROP DOMAIN [ IF EXISTS ] name [ CASCADE | RESTRICT ]
7006    /// ```
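    ///
    /// For illustration, a statement of this shape should parse (names are arbitrary):
    /// ```sql
    /// DROP DOMAIN IF EXISTS my_domain CASCADE
    /// ```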
7007    fn parse_drop_domain(&mut self) -> Result<Statement, ParserError> {
7008        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7009        let name = self.parse_object_name(false)?;
7010        let drop_behavior = self.parse_optional_drop_behavior();
7011        Ok(Statement::DropDomain(DropDomain {
7012            if_exists,
7013            name,
7014            drop_behavior,
7015        }))
7016    }
7017
7018    /// ```sql
7019    /// DROP PROCEDURE [ IF EXISTS ] name [ ( [ [ argmode ] [ argname ] argtype [, ...] ] ) ] [, ...]
7020    /// [ CASCADE | RESTRICT ]
7021    /// ```
7022    fn parse_drop_procedure(&mut self) -> Result<Statement, ParserError> {
7023        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7024        let proc_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
7025        let drop_behavior = self.parse_optional_drop_behavior();
7026        Ok(Statement::DropProcedure {
7027            if_exists,
7028            proc_desc,
7029            drop_behavior,
7030        })
7031    }
7032
7033    fn parse_function_desc(&mut self) -> Result<FunctionDesc, ParserError> {
7034        let name = self.parse_object_name(false)?;
7035
7036        let args = if self.consume_token(&Token::LParen) {
7037            if self.consume_token(&Token::RParen) {
7038                Some(vec![])
7039            } else {
7040                let args = self.parse_comma_separated(Parser::parse_function_arg)?;
7041                self.expect_token(&Token::RParen)?;
7042                Some(args)
7043            }
7044        } else {
7045            None
7046        };
7047
7048        Ok(FunctionDesc { name, args })
7049    }
7050
7051    /// See [DuckDB Docs](https://duckdb.org/docs/sql/statements/create_secret.html) for more details.
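    ///
    /// For illustration, a statement of this shape should parse with the DuckDB dialect
    /// (names are arbitrary):
    /// ```sql
    /// DROP PERSISTENT SECRET IF EXISTS my_secret FROM my_storage
    /// ```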
7052    fn parse_drop_secret(
7053        &mut self,
7054        temporary: bool,
7055        persistent: bool,
7056    ) -> Result<Statement, ParserError> {
7057        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7058        let name = self.parse_identifier()?;
7059        let storage_specifier = if self.parse_keyword(Keyword::FROM) {
7060            self.parse_identifier().ok()
7061        } else {
7062            None
7063        };
7064        let temp = match (temporary, persistent) {
7065            (true, false) => Some(true),
7066            (false, true) => Some(false),
7067            (false, false) => None,
7068            _ => self.expected("TEMPORARY or PERSISTENT", self.peek_token())?,
7069        };
7070
7071        Ok(Statement::DropSecret {
7072            if_exists,
7073            temporary: temp,
7074            name,
7075            storage_specifier,
7076        })
7077    }
7078
7079    /// Parse a `DECLARE` statement.
7080    ///
7081    /// ```sql
7082    /// DECLARE name [ BINARY ] [ ASENSITIVE | INSENSITIVE ] [ [ NO ] SCROLL ]
7083    ///     CURSOR [ { WITH | WITHOUT } HOLD ] FOR query
7084    /// ```
7085    ///
7086    /// The syntax can vary significantly between warehouses; see the grammar
7087    /// documented on the dialect-specific parsing function in such cases.
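    ///
    /// For illustration, a statement of this shape should parse (names are arbitrary):
    /// ```sql
    /// DECLARE my_cursor INSENSITIVE SCROLL CURSOR WITH HOLD FOR SELECT * FROM t
    /// ```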
7088    pub fn parse_declare(&mut self) -> Result<Statement, ParserError> {
7089        if dialect_of!(self is BigQueryDialect) {
7090            return self.parse_big_query_declare();
7091        }
7092        if dialect_of!(self is SnowflakeDialect) {
7093            return self.parse_snowflake_declare();
7094        }
7095        if dialect_of!(self is MsSqlDialect) {
7096            return self.parse_mssql_declare();
7097        }
7098
7099        let name = self.parse_identifier()?;
7100
7101        let binary = Some(self.parse_keyword(Keyword::BINARY));
7102        let sensitive = if self.parse_keyword(Keyword::INSENSITIVE) {
7103            Some(true)
7104        } else if self.parse_keyword(Keyword::ASENSITIVE) {
7105            Some(false)
7106        } else {
7107            None
7108        };
7109        let scroll = if self.parse_keyword(Keyword::SCROLL) {
7110            Some(true)
7111        } else if self.parse_keywords(&[Keyword::NO, Keyword::SCROLL]) {
7112            Some(false)
7113        } else {
7114            None
7115        };
7116
7117        self.expect_keyword_is(Keyword::CURSOR)?;
7118        let declare_type = Some(DeclareType::Cursor);
7119
7120        let hold = match self.parse_one_of_keywords(&[Keyword::WITH, Keyword::WITHOUT]) {
7121            Some(keyword) => {
7122                self.expect_keyword_is(Keyword::HOLD)?;
7123
7124                match keyword {
7125                    Keyword::WITH => Some(true),
7126                    Keyword::WITHOUT => Some(false),
7127                    unexpected_keyword => return Err(ParserError::ParserError(
7128                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in cursor hold"),
7129                    )),
7130                }
7131            }
7132            None => None,
7133        };
7134
7135        self.expect_keyword_is(Keyword::FOR)?;
7136
7137        let query = Some(self.parse_query()?);
7138
7139        Ok(Statement::Declare {
7140            stmts: vec![Declare {
7141                names: vec![name],
7142                data_type: None,
7143                assignment: None,
7144                declare_type,
7145                binary,
7146                sensitive,
7147                scroll,
7148                hold,
7149                for_query: query,
7150            }],
7151        })
7152    }
7153
7154    /// Parse a [BigQuery] `DECLARE` statement.
7155    ///
7156    /// Syntax:
7157    /// ```text
7158    /// DECLARE variable_name[, ...] [{ <variable_type> | <DEFAULT expression> }];
7159    /// ```
7160    /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#declare
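    ///
    /// For illustration, a statement of this shape should parse (names are arbitrary):
    /// ```sql
    /// DECLARE x, y INT64 DEFAULT 0
    /// ```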
7161    pub fn parse_big_query_declare(&mut self) -> Result<Statement, ParserError> {
7162        let names = self.parse_comma_separated(Parser::parse_identifier)?;
7163
7164        let data_type = match self.peek_token().token {
7165            Token::Word(w) if w.keyword == Keyword::DEFAULT => None,
7166            _ => Some(self.parse_data_type()?),
7167        };
7168
7169        let expr = if data_type.is_some() {
7170            if self.parse_keyword(Keyword::DEFAULT) {
7171                Some(self.parse_expr()?)
7172            } else {
7173                None
7174            }
7175        } else {
7176            // If no variable type is given, a default expression must be specified per BQ docs,
7177            // i.e. `DECLARE foo;` is invalid.
7178            self.expect_keyword_is(Keyword::DEFAULT)?;
7179            Some(self.parse_expr()?)
7180        };
7181
7182        Ok(Statement::Declare {
7183            stmts: vec![Declare {
7184                names,
7185                data_type,
7186                assignment: expr.map(|expr| DeclareAssignment::Default(Box::new(expr))),
7187                declare_type: None,
7188                binary: None,
7189                sensitive: None,
7190                scroll: None,
7191                hold: None,
7192                for_query: None,
7193            }],
7194        })
7195    }
7196
7197    /// Parse a [Snowflake] `DECLARE` statement.
7198    ///
7199    /// Syntax:
7200    /// ```text
7201    /// DECLARE
7202    ///   [{ <variable_declaration>
7203    ///      | <cursor_declaration>
7204    ///      | <resultset_declaration>
7205    ///      | <exception_declaration> }; ... ]
7206    ///
7207    /// <variable_declaration>
7208    /// <variable_name> [<type>] [ { DEFAULT | := } <expression>]
7209    ///
7210    /// <cursor_declaration>
7211    /// <cursor_name> CURSOR FOR <query>
7212    ///
7213    /// <resultset_declaration>
7214    /// <resultset_name> RESULTSET [ { DEFAULT | := } ( <query> ) ] ;
7215    ///
7216    /// <exception_declaration>
7217    /// <exception_name> EXCEPTION [ ( <exception_number> , '<exception_message>' ) ] ;
7218    /// ```
7219    ///
7220    /// [Snowflake]: https://docs.snowflake.com/en/sql-reference/snowflake-scripting/declare
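    ///
    /// For illustration, a statement of this shape should parse (names are arbitrary):
    /// ```sql
    /// DECLARE
    ///   profit NUMBER(38, 2) DEFAULT 0.0;
    ///   c1 CURSOR FOR SELECT price FROM invoices;
    /// ```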
7221    pub fn parse_snowflake_declare(&mut self) -> Result<Statement, ParserError> {
7222        let mut stmts = vec![];
7223        loop {
7224            let name = self.parse_identifier()?;
7225            let (declare_type, for_query, assigned_expr, data_type) =
7226                if self.parse_keyword(Keyword::CURSOR) {
7227                    self.expect_keyword_is(Keyword::FOR)?;
7228                    match self.peek_token().token {
7229                        Token::Word(w) if w.keyword == Keyword::SELECT => (
7230                            Some(DeclareType::Cursor),
7231                            Some(self.parse_query()?),
7232                            None,
7233                            None,
7234                        ),
7235                        _ => (
7236                            Some(DeclareType::Cursor),
7237                            None,
7238                            Some(DeclareAssignment::For(Box::new(self.parse_expr()?))),
7239                            None,
7240                        ),
7241                    }
7242                } else if self.parse_keyword(Keyword::RESULTSET) {
7243                    let assigned_expr = if self.peek_token().token != Token::SemiColon {
7244                        self.parse_snowflake_variable_declaration_expression()?
7245                    } else {
7246                        // Nothing more to do. The statement has no further parameters.
7247                        None
7248                    };
7249
7250                    (Some(DeclareType::ResultSet), None, assigned_expr, None)
7251                } else if self.parse_keyword(Keyword::EXCEPTION) {
7252                    let assigned_expr = if self.peek_token().token == Token::LParen {
7253                        Some(DeclareAssignment::Expr(Box::new(self.parse_expr()?)))
7254                    } else {
7255                        // Nothing more to do. The statement has no further parameters.
7256                        None
7257                    };
7258
7259                    (Some(DeclareType::Exception), None, assigned_expr, None)
7260                } else {
7261                    // Without an explicit keyword, the only valid option is variable declaration.
7262                    let (assigned_expr, data_type) = if let Some(assigned_expr) =
7263                        self.parse_snowflake_variable_declaration_expression()?
7264                    {
7265                        (Some(assigned_expr), None)
7266                    } else if let Token::Word(_) = self.peek_token().token {
7267                        let data_type = self.parse_data_type()?;
7268                        (
7269                            self.parse_snowflake_variable_declaration_expression()?,
7270                            Some(data_type),
7271                        )
7272                    } else {
7273                        (None, None)
7274                    };
7275                    (None, None, assigned_expr, data_type)
7276                };
7277            let stmt = Declare {
7278                names: vec![name],
7279                data_type,
7280                assignment: assigned_expr,
7281                declare_type,
7282                binary: None,
7283                sensitive: None,
7284                scroll: None,
7285                hold: None,
7286                for_query,
7287            };
7288
7289            stmts.push(stmt);
7290            if self.consume_token(&Token::SemiColon) {
7291                match self.peek_token().token {
7292                    Token::Word(w)
7293                        if ALL_KEYWORDS
7294                            .binary_search(&w.value.to_uppercase().as_str())
7295                            .is_err() =>
7296                    {
7297                        // Not a keyword - start of a new declaration.
7298                        continue;
7299                    }
7300                    _ => {
7301                        // Put back the semicolon, this is the end of the DECLARE statement.
7302                        self.prev_token();
7303                    }
7304                }
7305            }
7306
7307            break;
7308        }
7309
7310        Ok(Statement::Declare { stmts })
7311    }
7312
7313    /// Parse a [MsSql] `DECLARE` statement.
7314    ///
7315    /// Syntax:
7316    /// ```text
7317    /// DECLARE
7318    /// {
7319    ///   { @local_variable [AS] data_type [ = value ] }
7320    ///   | { @cursor_variable_name CURSOR [ FOR ] }
7321    /// } [ ,...n ]
7322    /// ```
7323    /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/language-elements/declare-local-variable-transact-sql?view=sql-server-ver16
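    ///
    /// For illustration, a statement of this shape should parse (names are arbitrary):
    /// ```sql
    /// DECLARE @foo INT = 5, @bar NVARCHAR(50) = 'hello'
    /// ```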
7324    pub fn parse_mssql_declare(&mut self) -> Result<Statement, ParserError> {
7325        let stmts = self.parse_comma_separated(Parser::parse_mssql_declare_stmt)?;
7326
7327        Ok(Statement::Declare { stmts })
7328    }
7329
7330    /// Parse the body of a [MsSql] `DECLARE` statement.
7331    ///
7332    /// Syntax:
7333    /// ```text
7334    /// {
7335    ///   { @local_variable [AS] data_type [ = value ] }
7336    ///   | { @cursor_variable_name CURSOR [ FOR ] }
7337    /// } [ ,...n ]
7338    /// ```
7339    /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/language-elements/declare-local-variable-transact-sql?view=sql-server-ver16
7340    pub fn parse_mssql_declare_stmt(&mut self) -> Result<Declare, ParserError> {
7341        let name = {
7342            let ident = self.parse_identifier()?;
7343            if !ident.value.starts_with('@')
7344                && !matches!(
7345                    self.peek_token().token,
7346                    Token::Word(w) if w.keyword == Keyword::CURSOR
7347                )
7348            {
7349                Err(ParserError::TokenizerError(
7350                    "Invalid MsSql variable declaration.".to_string(),
7351                ))
7352            } else {
7353                Ok(ident)
7354            }
7355        }?;
7356
7357        let (declare_type, data_type) = match self.peek_token().token {
7358            Token::Word(w) => match w.keyword {
7359                Keyword::CURSOR => {
7360                    self.next_token();
7361                    (Some(DeclareType::Cursor), None)
7362                }
7363                Keyword::AS => {
7364                    self.next_token();
7365                    (None, Some(self.parse_data_type()?))
7366                }
7367                _ => (None, Some(self.parse_data_type()?)),
7368            },
7369            _ => (None, Some(self.parse_data_type()?)),
7370        };
7371
7372        let (for_query, assignment) = if self.peek_keyword(Keyword::FOR) {
7373            self.next_token();
7374            let query = Some(self.parse_query()?);
7375            (query, None)
7376        } else {
7377            let assignment = self.parse_mssql_variable_declaration_expression()?;
7378            (None, assignment)
7379        };
7380
7381        Ok(Declare {
7382            names: vec![name],
7383            data_type,
7384            assignment,
7385            declare_type,
7386            binary: None,
7387            sensitive: None,
7388            scroll: None,
7389            hold: None,
7390            for_query,
7391        })
7392    }
7393
7394    /// Parses the assigned expression in a variable declaration.
7395    ///
7396    /// Syntax:
7397    /// ```text
7398    /// [ { DEFAULT | := } <expression>]
7399    /// ```
7400    /// <https://docs.snowflake.com/en/sql-reference/snowflake-scripting/declare#variable-declaration-syntax>
7401    pub fn parse_snowflake_variable_declaration_expression(
7402        &mut self,
7403    ) -> Result<Option<DeclareAssignment>, ParserError> {
7404        Ok(match self.peek_token().token {
7405            Token::Word(w) if w.keyword == Keyword::DEFAULT => {
7406                self.next_token(); // Skip `DEFAULT`
7407                Some(DeclareAssignment::Default(Box::new(self.parse_expr()?)))
7408            }
7409            Token::Assignment => {
7410                self.next_token(); // Skip `:=`
7411                Some(DeclareAssignment::DuckAssignment(Box::new(
7412                    self.parse_expr()?,
7413                )))
7414            }
7415            _ => None,
7416        })
7417    }
7418
7419    /// Parses the assigned expression in a variable declaration.
7420    ///
7421    /// Syntax:
7422    /// ```text
7423    /// [ = <expression>]
7424    /// ```
7425    pub fn parse_mssql_variable_declaration_expression(
7426        &mut self,
7427    ) -> Result<Option<DeclareAssignment>, ParserError> {
7428        Ok(match self.peek_token().token {
7429            Token::Eq => {
7430                self.next_token(); // Skip `=`
7431                Some(DeclareAssignment::MsSqlAssignment(Box::new(
7432                    self.parse_expr()?,
7433                )))
7434            }
7435            _ => None,
7436        })
7437    }
7438
7439    /// Parse a `FETCH` statement: `FETCH [ direction { FROM | IN } ] cursor INTO target;`
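    ///
    /// For illustration, a statement of this shape should parse (names are arbitrary):
    /// ```sql
    /// FETCH NEXT FROM my_cursor INTO my_target
    /// ```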
7440    pub fn parse_fetch_statement(&mut self) -> Result<Statement, ParserError> {
7441        let direction = if self.parse_keyword(Keyword::NEXT) {
7442            FetchDirection::Next
7443        } else if self.parse_keyword(Keyword::PRIOR) {
7444            FetchDirection::Prior
7445        } else if self.parse_keyword(Keyword::FIRST) {
7446            FetchDirection::First
7447        } else if self.parse_keyword(Keyword::LAST) {
7448            FetchDirection::Last
7449        } else if self.parse_keyword(Keyword::ABSOLUTE) {
7450            FetchDirection::Absolute {
7451                limit: self.parse_number_value()?.value,
7452            }
7453        } else if self.parse_keyword(Keyword::RELATIVE) {
7454            FetchDirection::Relative {
7455                limit: self.parse_number_value()?.value,
7456            }
7457        } else if self.parse_keyword(Keyword::FORWARD) {
7458            if self.parse_keyword(Keyword::ALL) {
7459                FetchDirection::ForwardAll
7460            } else {
7461                FetchDirection::Forward {
7462                    // TODO: Support optional
7463                    limit: Some(self.parse_number_value()?.value),
7464                }
7465            }
7466        } else if self.parse_keyword(Keyword::BACKWARD) {
7467            if self.parse_keyword(Keyword::ALL) {
7468                FetchDirection::BackwardAll
7469            } else {
7470                FetchDirection::Backward {
7471                    // TODO: Support optional
7472                    limit: Some(self.parse_number_value()?.value),
7473                }
7474            }
7475        } else if self.parse_keyword(Keyword::ALL) {
7476            FetchDirection::All
7477        } else {
7478            FetchDirection::Count {
7479                limit: self.parse_number_value()?.value,
7480            }
7481        };
7482
7483        let position = if self.peek_keyword(Keyword::FROM) {
7484            self.expect_keyword(Keyword::FROM)?;
7485            FetchPosition::From
7486        } else if self.peek_keyword(Keyword::IN) {
7487            self.expect_keyword(Keyword::IN)?;
7488            FetchPosition::In
7489        } else {
7490            return parser_err!("Expected FROM or IN", self.peek_token().span.start);
7491        };
7492
7493        let name = self.parse_identifier()?;
7494
7495        let into = if self.parse_keyword(Keyword::INTO) {
7496            Some(self.parse_object_name(false)?)
7497        } else {
7498            None
7499        };
7500
7501        Ok(Statement::Fetch {
7502            name,
7503            direction,
7504            position,
7505            into,
7506        })
7507    }
7508
7509    pub fn parse_discard(&mut self) -> Result<Statement, ParserError> {
7510        let object_type = if self.parse_keyword(Keyword::ALL) {
7511            DiscardObject::ALL
7512        } else if self.parse_keyword(Keyword::PLANS) {
7513            DiscardObject::PLANS
7514        } else if self.parse_keyword(Keyword::SEQUENCES) {
7515            DiscardObject::SEQUENCES
7516        } else if self.parse_keyword(Keyword::TEMP) || self.parse_keyword(Keyword::TEMPORARY) {
7517            DiscardObject::TEMP
7518        } else {
7519            return self.expected(
7520                "ALL, PLANS, SEQUENCES, TEMP or TEMPORARY after DISCARD",
7521                self.peek_token(),
7522            );
7523        };
7524        Ok(Statement::Discard { object_type })
7525    }
7526
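    /// Parse a `CREATE [UNIQUE] INDEX` statement; the `CREATE [UNIQUE] INDEX` keywords are
    /// expected to have been consumed by the caller, which passes the `unique` flag in.
    ///
    /// For illustration, a statement of this shape should parse (names are arbitrary):
    /// ```sql
    /// CREATE UNIQUE INDEX IF NOT EXISTS my_idx ON my_table(col1, col2) WHERE col1 > 0
    /// ```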
7527    pub fn parse_create_index(&mut self, unique: bool) -> Result<Statement, ParserError> {
7528        let concurrently = self.parse_keyword(Keyword::CONCURRENTLY);
7529        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7530
7531        let mut using = None;
7532
7533        let index_name = if if_not_exists || !self.parse_keyword(Keyword::ON) {
7534            let index_name = self.parse_object_name(false)?;
7535            // MySQL allows `USING index_type` either before or after `ON table_name`
7536            using = self.parse_optional_using_then_index_type()?;
7537            self.expect_keyword_is(Keyword::ON)?;
7538            Some(index_name)
7539        } else {
7540            None
7541        };
7542
7543        let table_name = self.parse_object_name(false)?;
7544
7545        // MySQL allows having two `USING` clauses.
7546        // In that case, the second clause overwrites the first.
7547        using = self.parse_optional_using_then_index_type()?.or(using);
7548
7549        let columns = self.parse_parenthesized_index_column_list()?;
7550
7551        let include = if self.parse_keyword(Keyword::INCLUDE) {
7552            self.expect_token(&Token::LParen)?;
7553            let columns = self.parse_comma_separated(|p| p.parse_identifier())?;
7554            self.expect_token(&Token::RParen)?;
7555            columns
7556        } else {
7557            vec![]
7558        };
7559
7560        let nulls_distinct = if self.parse_keyword(Keyword::NULLS) {
7561            let not = self.parse_keyword(Keyword::NOT);
7562            self.expect_keyword_is(Keyword::DISTINCT)?;
7563            Some(!not)
7564        } else {
7565            None
7566        };
7567
7568        let with = if self.dialect.supports_create_index_with_clause()
7569            && self.parse_keyword(Keyword::WITH)
7570        {
7571            self.expect_token(&Token::LParen)?;
7572            let with_params = self.parse_comma_separated(Parser::parse_expr)?;
7573            self.expect_token(&Token::RParen)?;
7574            with_params
7575        } else {
7576            Vec::new()
7577        };
7578
7579        let predicate = if self.parse_keyword(Keyword::WHERE) {
7580            Some(self.parse_expr()?)
7581        } else {
7582            None
7583        };
7584
7585        // MySQL options (including the modern style of `USING` after the column list instead of
7586        // before, which is deprecated) shouldn't conflict with other preceding options (e.g. `WITH
7587        // PARSER` won't be caught by the above `WITH` clause parsing because MySQL doesn't set that
7588        // support flag). This is probably invalid syntax for other dialects, but it is simpler to
7589        // parse it anyway (as we do inside `ALTER TABLE` and `CREATE TABLE` parsing).
7590        let index_options = self.parse_index_options()?;
7591
7592        // MySQL allows `ALGORITHM` and `LOCK` options. Unlike in `ALTER TABLE`, they need not be comma separated.
7593        let mut alter_options = Vec::new();
7594        while self
7595            .peek_one_of_keywords(&[Keyword::ALGORITHM, Keyword::LOCK])
7596            .is_some()
7597        {
7598            alter_options.push(self.parse_alter_table_operation()?)
7599        }
7600
7601        Ok(Statement::CreateIndex(CreateIndex {
7602            name: index_name,
7603            table_name,
7604            using,
7605            columns,
7606            unique,
7607            concurrently,
7608            if_not_exists,
7609            include,
7610            nulls_distinct,
7611            with,
7612            predicate,
7613            index_options,
7614            alter_options,
7615        }))
7616    }
7617
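    /// Parse a PostgreSQL `CREATE EXTENSION` statement; the `CREATE EXTENSION` keywords are
    /// expected to have been consumed by the caller.
    ///
    /// For illustration, a statement of this shape should parse (name and version are
    /// arbitrary):
    /// ```sql
    /// CREATE EXTENSION IF NOT EXISTS hstore WITH SCHEMA public VERSION '1.4' CASCADE
    /// ```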
7618    pub fn parse_create_extension(&mut self) -> Result<Statement, ParserError> {
7619        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7620        let name = self.parse_identifier()?;
7621
7622        let (schema, version, cascade) = if self.parse_keyword(Keyword::WITH) {
7623            let schema = if self.parse_keyword(Keyword::SCHEMA) {
7624                Some(self.parse_identifier()?)
7625            } else {
7626                None
7627            };
7628
7629            let version = if self.parse_keyword(Keyword::VERSION) {
7630                Some(self.parse_identifier()?)
7631            } else {
7632                None
7633            };
7634
7635            let cascade = self.parse_keyword(Keyword::CASCADE);
7636
7637            (schema, version, cascade)
7638        } else {
7639            (None, None, false)
7640        };
7641
7642        Ok(CreateExtension {
7643            name,
7644            if_not_exists,
7645            schema,
7646            version,
7647            cascade,
7648        }
7649        .into())
7650    }
7651
7652    /// Parse a PostgreSQL-specific [Statement::DropExtension] statement.
7653    pub fn parse_drop_extension(&mut self) -> Result<Statement, ParserError> {
7654        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7655        let names = self.parse_comma_separated(|p| p.parse_identifier())?;
7656        let cascade_or_restrict =
7657            self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]);
7658        Ok(Statement::DropExtension(DropExtension {
7659            names,
7660            if_exists,
7661            cascade_or_restrict: cascade_or_restrict
7662                .map(|k| match k {
7663                    Keyword::CASCADE => Ok(ReferentialAction::Cascade),
7664                    Keyword::RESTRICT => Ok(ReferentialAction::Restrict),
7665                    _ => self.expected("CASCADE or RESTRICT", self.peek_token()),
7666                })
7667                .transpose()?,
7668        }))
7669    }
7670
7671    /// Parse a [Statement::DropOperator] statement.
7672    ///
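    /// For illustration, a statement of this shape should parse (types are arbitrary;
    /// `NONE` marks the missing operand of a prefix operator):
    /// ```sql
    /// DROP OPERATOR IF EXISTS ^ (INTEGER, INTEGER), - (NONE, INTEGER) CASCADE
    /// ```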
7673    pub fn parse_drop_operator(&mut self) -> Result<Statement, ParserError> {
7674        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7675        let operators = self.parse_comma_separated(|p| p.parse_drop_operator_signature())?;
7676        let drop_behavior = self.parse_optional_drop_behavior();
7677        Ok(Statement::DropOperator(DropOperator {
7678            if_exists,
7679            operators,
7680            drop_behavior,
7681        }))
7682    }
7683
7684    /// Parse an operator signature for a [Statement::DropOperator]
7685    /// Format: `name ( { left_type | NONE } , right_type )`
7686    fn parse_drop_operator_signature(&mut self) -> Result<DropOperatorSignature, ParserError> {
7687        let name = self.parse_operator_name()?;
7688        self.expect_token(&Token::LParen)?;
7689
7690        // Parse left operand type (or NONE for prefix operators)
7691        let left_type = if self.parse_keyword(Keyword::NONE) {
7692            None
7693        } else {
7694            Some(self.parse_data_type()?)
7695        };
7696
7697        self.expect_token(&Token::Comma)?;
7698
7699        // Parse right operand type (always required)
7700        let right_type = self.parse_data_type()?;
7701
7702        self.expect_token(&Token::RParen)?;
7703
7704        Ok(DropOperatorSignature {
7705            name,
7706            left_type,
7707            right_type,
7708        })
7709    }
7710
7711    /// Parse a [Statement::DropOperatorFamily]
7712    ///
7713    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-dropopfamily.html)
7714    pub fn parse_drop_operator_family(&mut self) -> Result<Statement, ParserError> {
7715        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7716        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
7717        self.expect_keyword(Keyword::USING)?;
7718        let using = self.parse_identifier()?;
7719        let drop_behavior = self.parse_optional_drop_behavior();
7720        Ok(Statement::DropOperatorFamily(DropOperatorFamily {
7721            if_exists,
7722            names,
7723            using,
7724            drop_behavior,
7725        }))
7726    }
7727
7728    /// Parse a [Statement::DropOperatorClass]
7729    ///
7730    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-dropopclass.html)
7731    pub fn parse_drop_operator_class(&mut self) -> Result<Statement, ParserError> {
7732        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7733        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
7734        self.expect_keyword(Keyword::USING)?;
7735        let using = self.parse_identifier()?;
7736        let drop_behavior = self.parse_optional_drop_behavior();
7737        Ok(Statement::DropOperatorClass(DropOperatorClass {
7738            if_exists,
7739            names,
7740            using,
7741            drop_behavior,
7742        }))
7743    }
7744
7745    // TODO: Implement parsing for Skewed
7746    pub fn parse_hive_distribution(&mut self) -> Result<HiveDistributionStyle, ParserError> {
7747        if self.parse_keywords(&[Keyword::PARTITIONED, Keyword::BY]) {
7748            self.expect_token(&Token::LParen)?;
7749            let columns = self.parse_comma_separated(Parser::parse_column_def)?;
7750            self.expect_token(&Token::RParen)?;
7751            Ok(HiveDistributionStyle::PARTITIONED { columns })
7752        } else {
7753            Ok(HiveDistributionStyle::NONE)
7754        }
7755    }
7756
7757    pub fn parse_hive_formats(&mut self) -> Result<Option<HiveFormat>, ParserError> {
7758        let mut hive_format: Option<HiveFormat> = None;
7759        loop {
7760            match self.parse_one_of_keywords(&[
7761                Keyword::ROW,
7762                Keyword::STORED,
7763                Keyword::LOCATION,
7764                Keyword::WITH,
7765            ]) {
7766                Some(Keyword::ROW) => {
7767                    hive_format
7768                        .get_or_insert_with(HiveFormat::default)
7769                        .row_format = Some(self.parse_row_format()?);
7770                }
7771                Some(Keyword::STORED) => {
7772                    self.expect_keyword_is(Keyword::AS)?;
7773                    if self.parse_keyword(Keyword::INPUTFORMAT) {
7774                        let input_format = self.parse_expr()?;
7775                        self.expect_keyword_is(Keyword::OUTPUTFORMAT)?;
7776                        let output_format = self.parse_expr()?;
7777                        hive_format.get_or_insert_with(HiveFormat::default).storage =
7778                            Some(HiveIOFormat::IOF {
7779                                input_format,
7780                                output_format,
7781                            });
7782                    } else {
7783                        let format = self.parse_file_format()?;
7784                        hive_format.get_or_insert_with(HiveFormat::default).storage =
7785                            Some(HiveIOFormat::FileFormat { format });
7786                    }
7787                }
7788                Some(Keyword::LOCATION) => {
7789                    hive_format.get_or_insert_with(HiveFormat::default).location =
7790                        Some(self.parse_literal_string()?);
7791                }
7792                Some(Keyword::WITH) => {
7793                    self.prev_token();
7794                    let properties = self
7795                        .parse_options_with_keywords(&[Keyword::WITH, Keyword::SERDEPROPERTIES])?;
7796                    if !properties.is_empty() {
7797                        hive_format
7798                            .get_or_insert_with(HiveFormat::default)
7799                            .serde_properties = Some(properties);
7800                    } else {
7801                        break;
7802                    }
7803                }
7804                None => break,
7805                _ => break,
7806            }
7807        }
7808
7809        Ok(hive_format)
7810    }
7811
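    /// Parse a Hive `ROW FORMAT` clause; the `ROW` keyword is expected to have been
    /// consumed by the caller (see [`Self::parse_hive_formats`]).
    ///
    /// For illustration, a clause of this shape should parse (delimiters are arbitrary):
    /// ```sql
    /// ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' LINES TERMINATED BY '\n'
    /// ```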
7812    pub fn parse_row_format(&mut self) -> Result<HiveRowFormat, ParserError> {
7813        self.expect_keyword_is(Keyword::FORMAT)?;
7814        match self.parse_one_of_keywords(&[Keyword::SERDE, Keyword::DELIMITED]) {
7815            Some(Keyword::SERDE) => {
7816                let class = self.parse_literal_string()?;
7817                Ok(HiveRowFormat::SERDE { class })
7818            }
7819            _ => {
7820                let mut row_delimiters = vec![];
7821
7822                loop {
7823                    match self.parse_one_of_keywords(&[
7824                        Keyword::FIELDS,
7825                        Keyword::COLLECTION,
7826                        Keyword::MAP,
7827                        Keyword::LINES,
7828                        Keyword::NULL,
7829                    ]) {
7830                        Some(Keyword::FIELDS) => {
7831                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
7832                                row_delimiters.push(HiveRowDelimiter {
7833                                    delimiter: HiveDelimiter::FieldsTerminatedBy,
7834                                    char: self.parse_identifier()?,
7835                                });
7836
7837                                if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) {
7838                                    row_delimiters.push(HiveRowDelimiter {
7839                                        delimiter: HiveDelimiter::FieldsEscapedBy,
7840                                        char: self.parse_identifier()?,
7841                                    });
7842                                }
7843                            } else {
7844                                break;
7845                            }
7846                        }
7847                        Some(Keyword::COLLECTION) => {
7848                            if self.parse_keywords(&[
7849                                Keyword::ITEMS,
7850                                Keyword::TERMINATED,
7851                                Keyword::BY,
7852                            ]) {
7853                                row_delimiters.push(HiveRowDelimiter {
7854                                    delimiter: HiveDelimiter::CollectionItemsTerminatedBy,
7855                                    char: self.parse_identifier()?,
7856                                });
7857                            } else {
7858                                break;
7859                            }
7860                        }
7861                        Some(Keyword::MAP) => {
7862                            if self.parse_keywords(&[
7863                                Keyword::KEYS,
7864                                Keyword::TERMINATED,
7865                                Keyword::BY,
7866                            ]) {
7867                                row_delimiters.push(HiveRowDelimiter {
7868                                    delimiter: HiveDelimiter::MapKeysTerminatedBy,
7869                                    char: self.parse_identifier()?,
7870                                });
7871                            } else {
7872                                break;
7873                            }
7874                        }
7875                        Some(Keyword::LINES) => {
7876                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
7877                                row_delimiters.push(HiveRowDelimiter {
7878                                    delimiter: HiveDelimiter::LinesTerminatedBy,
7879                                    char: self.parse_identifier()?,
7880                                });
7881                            } else {
7882                                break;
7883                            }
7884                        }
7885                        Some(Keyword::NULL) => {
7886                            if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) {
7887                                row_delimiters.push(HiveRowDelimiter {
7888                                    delimiter: HiveDelimiter::NullDefinedAs,
7889                                    char: self.parse_identifier()?,
7890                                });
7891                            } else {
7892                                break;
7893                            }
7894                        }
7895                        _ => {
7896                            break;
7897                        }
7898                    }
7899                }
7900
7901                Ok(HiveRowFormat::DELIMITED {
7902                    delimiters: row_delimiters,
7903                })
7904            }
7905        }
7906    }
7907
7908    fn parse_optional_on_cluster(&mut self) -> Result<Option<Ident>, ParserError> {
7909        if self.parse_keywords(&[Keyword::ON, Keyword::CLUSTER]) {
7910            Ok(Some(self.parse_identifier()?))
7911        } else {
7912            Ok(None)
7913        }
7914    }
7915
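    /// Parse the body of a `CREATE TABLE` statement; the leading `CREATE [OR REPLACE]
    /// [TEMPORARY] [GLOBAL | LOCAL] [TRANSIENT] TABLE` keywords are expected to have been
    /// consumed by the caller and are passed in via the flags.
    ///
    /// For illustration, a statement of this shape should parse (names and types are
    /// arbitrary):
    /// ```sql
    /// CREATE TABLE IF NOT EXISTS t (id INT PRIMARY KEY, name TEXT NOT NULL)
    /// ```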
7916    pub fn parse_create_table(
7917        &mut self,
7918        or_replace: bool,
7919        temporary: bool,
7920        global: Option<bool>,
7921        transient: bool,
7922    ) -> Result<Statement, ParserError> {
7923        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
7924        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7925        let table_name = self.parse_object_name(allow_unquoted_hyphen)?;
7926
7927        // PostgreSQL PARTITION OF for child partition tables
7928        // Note: This is a PostgreSQL-specific feature, but the dialect check was intentionally
7929        // removed to allow GenericDialect and other dialects to parse this syntax. This enables
7930        // multi-dialect SQL tools to work with PostgreSQL-specific DDL statements.
7931        //
7932        // PARTITION OF can be combined with other table definition clauses in the AST,
7933        // though PostgreSQL itself prohibits PARTITION OF with AS SELECT or LIKE clauses.
7934        // The parser accepts these combinations for flexibility; semantic validation
7935        // is left to downstream tools.
7936        // Child partitions can have their own constraints and indexes.
7937        let partition_of = if self.parse_keywords(&[Keyword::PARTITION, Keyword::OF]) {
7938            Some(self.parse_object_name(allow_unquoted_hyphen)?)
7939        } else {
7940            None
7941        };
7942
7943        // Clickhouse has `ON CLUSTER 'cluster'` syntax for DDLs
7944        let on_cluster = self.parse_optional_on_cluster()?;
7945
7946        let like = self.maybe_parse_create_table_like(allow_unquoted_hyphen)?;
7947
7948        let clone = if self.parse_keyword(Keyword::CLONE) {
7949            self.parse_object_name(allow_unquoted_hyphen).ok()
7950        } else {
7951            None
7952        };
7953
7954        // parse optional column list (schema)
7955        let (columns, constraints) = self.parse_columns()?;
7956        let comment_after_column_def =
7957            if dialect_of!(self is HiveDialect) && self.parse_keyword(Keyword::COMMENT) {
7958                let next_token = self.next_token();
7959                match next_token.token {
7960                    Token::SingleQuotedString(str) => Some(CommentDef::WithoutEq(str)),
7961                    _ => self.expected("comment", next_token)?,
7962                }
7963            } else {
7964                None
7965            };
7966
7967        // PostgreSQL PARTITION OF: partition bound specification
7968        let for_values = if partition_of.is_some() {
7969            if self.peek_keyword(Keyword::FOR) || self.peek_keyword(Keyword::DEFAULT) {
7970                Some(self.parse_partition_for_values()?)
7971            } else {
7972                return self.expected(
7973                    "FOR VALUES or DEFAULT after PARTITION OF",
7974                    self.peek_token(),
7975                );
7976            }
7977        } else {
7978            None
7979        };
7980
7981        // SQLite supports `WITHOUT ROWID` at the end of `CREATE TABLE`
7982        let without_rowid = self.parse_keywords(&[Keyword::WITHOUT, Keyword::ROWID]);
7983
7984        let hive_distribution = self.parse_hive_distribution()?;
7985        let clustered_by = self.parse_optional_clustered_by()?;
7986        let hive_formats = self.parse_hive_formats()?;
7987
7988        let create_table_config = self.parse_optional_create_table_config()?;
7989
7990        // ClickHouse supports `PRIMARY KEY`, before `ORDER BY`
7991        // https://clickhouse.com/docs/en/sql-reference/statements/create/table#primary-key
7992        let primary_key = if dialect_of!(self is ClickHouseDialect | GenericDialect)
7993            && self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY])
7994        {
7995            Some(Box::new(self.parse_expr()?))
7996        } else {
7997            None
7998        };
7999
8000        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
8001            if self.consume_token(&Token::LParen) {
8002                let columns = if self.peek_token() != Token::RParen {
8003                    self.parse_comma_separated(|p| p.parse_expr())?
8004                } else {
8005                    vec![]
8006                };
8007                self.expect_token(&Token::RParen)?;
8008                Some(OneOrManyWithParens::Many(columns))
8009            } else {
8010                Some(OneOrManyWithParens::One(self.parse_expr()?))
8011            }
8012        } else {
8013            None
8014        };
8015
8016        let on_commit = if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT]) {
8017            Some(self.parse_create_table_on_commit()?)
8018        } else {
8019            None
8020        };
8021
8022        let strict = self.parse_keyword(Keyword::STRICT);
8023
8024        // Parse optional `AS ( query )`
8025        let query = if self.parse_keyword(Keyword::AS) {
8026            Some(self.parse_query()?)
8027        } else if self.dialect.supports_create_table_select() && self.parse_keyword(Keyword::SELECT)
8028        {
8029            // rewind the SELECT keyword
8030            self.prev_token();
8031            Some(self.parse_query()?)
8032        } else {
8033            None
8034        };
8035
8036        Ok(CreateTableBuilder::new(table_name)
8037            .temporary(temporary)
8038            .columns(columns)
8039            .constraints(constraints)
8040            .or_replace(or_replace)
8041            .if_not_exists(if_not_exists)
8042            .transient(transient)
8043            .hive_distribution(hive_distribution)
8044            .hive_formats(hive_formats)
8045            .global(global)
8046            .query(query)
8047            .without_rowid(without_rowid)
8048            .like(like)
8049            .clone_clause(clone)
8050            .comment_after_column_def(comment_after_column_def)
8051            .order_by(order_by)
8052            .on_commit(on_commit)
8053            .on_cluster(on_cluster)
8054            .clustered_by(clustered_by)
8055            .partition_by(create_table_config.partition_by)
8056            .cluster_by(create_table_config.cluster_by)
8057            .inherits(create_table_config.inherits)
8058            .partition_of(partition_of)
8059            .for_values(for_values)
8060            .table_options(create_table_config.table_options)
8061            .primary_key(primary_key)
8062            .strict(strict)
8063            .build())
8064    }
8065
8066    fn maybe_parse_create_table_like(
8067        &mut self,
8068        allow_unquoted_hyphen: bool,
8069    ) -> Result<Option<CreateTableLikeKind>, ParserError> {
8070        let like = if self.dialect.supports_create_table_like_parenthesized()
8071            && self.consume_token(&Token::LParen)
8072        {
8073            if self.parse_keyword(Keyword::LIKE) {
8074                let name = self.parse_object_name(allow_unquoted_hyphen)?;
8075                let defaults = if self.parse_keywords(&[Keyword::INCLUDING, Keyword::DEFAULTS]) {
8076                    Some(CreateTableLikeDefaults::Including)
8077                } else if self.parse_keywords(&[Keyword::EXCLUDING, Keyword::DEFAULTS]) {
8078                    Some(CreateTableLikeDefaults::Excluding)
8079                } else {
8080                    None
8081                };
8082                self.expect_token(&Token::RParen)?;
8083                Some(CreateTableLikeKind::Parenthesized(CreateTableLike {
8084                    name,
8085                    defaults,
8086                }))
8087            } else {
8088                // Roll back the '('; it's probably the start of the columns list
8089                self.prev_token();
8090                None
8091            }
8092        } else if self.parse_keyword(Keyword::LIKE) || self.parse_keyword(Keyword::ILIKE) {
8093            let name = self.parse_object_name(allow_unquoted_hyphen)?;
8094            Some(CreateTableLikeKind::Plain(CreateTableLike {
8095                name,
8096                defaults: None,
8097            }))
8098        } else {
8099            None
8100        };
8101        Ok(like)
8102    }
8103
8104    pub(crate) fn parse_create_table_on_commit(&mut self) -> Result<OnCommit, ParserError> {
8105        if self.parse_keywords(&[Keyword::DELETE, Keyword::ROWS]) {
8106            Ok(OnCommit::DeleteRows)
8107        } else if self.parse_keywords(&[Keyword::PRESERVE, Keyword::ROWS]) {
8108            Ok(OnCommit::PreserveRows)
8109        } else if self.parse_keywords(&[Keyword::DROP]) {
8110            Ok(OnCommit::Drop)
8111        } else {
8112            parser_err!(
8113                "Expecting DELETE ROWS, PRESERVE ROWS or DROP",
8114                self.peek_token()
8115            )
8116        }
8117    }
8118
8119    /// Parse [ForValues] of a `PARTITION OF` clause.
8120    ///
8121    /// Parses: `FOR VALUES partition_bound_spec | DEFAULT`
8122    ///
8123    /// [PostgreSQL](https://www.postgresql.org/docs/current/sql-createtable.html)
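    ///
    /// A few illustrative bound specifications accepted here (an informal sketch
    /// mirroring the branches below; exact dialect behavior may vary):
    /// ```sql
    /// FOR VALUES IN ('us', 'eu')
    /// FOR VALUES FROM (MINVALUE) TO (100)
    /// FOR VALUES WITH (MODULUS 4, REMAINDER 0)
    /// DEFAULT
    /// ```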
8124    fn parse_partition_for_values(&mut self) -> Result<ForValues, ParserError> {
8125        if self.parse_keyword(Keyword::DEFAULT) {
8126            return Ok(ForValues::Default);
8127        }
8128
8129        self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;
8130
8131        if self.parse_keyword(Keyword::IN) {
8132            // FOR VALUES IN (expr, ...)
8133            self.expect_token(&Token::LParen)?;
8134            if self.peek_token() == Token::RParen {
8135                return self.expected("at least one value", self.peek_token());
8136            }
8137            let values = self.parse_comma_separated(Parser::parse_expr)?;
8138            self.expect_token(&Token::RParen)?;
8139            Ok(ForValues::In(values))
8140        } else if self.parse_keyword(Keyword::FROM) {
8141            // FOR VALUES FROM (...) TO (...)
8142            self.expect_token(&Token::LParen)?;
8143            if self.peek_token() == Token::RParen {
8144                return self.expected("at least one value", self.peek_token());
8145            }
8146            let from = self.parse_comma_separated(Parser::parse_partition_bound_value)?;
8147            self.expect_token(&Token::RParen)?;
8148            self.expect_keyword(Keyword::TO)?;
8149            self.expect_token(&Token::LParen)?;
8150            if self.peek_token() == Token::RParen {
8151                return self.expected("at least one value", self.peek_token());
8152            }
8153            let to = self.parse_comma_separated(Parser::parse_partition_bound_value)?;
8154            self.expect_token(&Token::RParen)?;
8155            Ok(ForValues::From { from, to })
8156        } else if self.parse_keyword(Keyword::WITH) {
8157            // FOR VALUES WITH (MODULUS n, REMAINDER r)
8158            self.expect_token(&Token::LParen)?;
8159            self.expect_keyword(Keyword::MODULUS)?;
8160            let modulus = self.parse_literal_uint()?;
8161            self.expect_token(&Token::Comma)?;
8162            self.expect_keyword(Keyword::REMAINDER)?;
8163            let remainder = self.parse_literal_uint()?;
8164            self.expect_token(&Token::RParen)?;
8165            Ok(ForValues::With { modulus, remainder })
8166        } else {
8167            self.expected("IN, FROM, or WITH after FOR VALUES", self.peek_token())
8168        }
8169    }
8170
8171    /// Parse a single [PartitionBoundValue].
8172    fn parse_partition_bound_value(&mut self) -> Result<PartitionBoundValue, ParserError> {
8173        if self.parse_keyword(Keyword::MINVALUE) {
8174            Ok(PartitionBoundValue::MinValue)
8175        } else if self.parse_keyword(Keyword::MAXVALUE) {
8176            Ok(PartitionBoundValue::MaxValue)
8177        } else {
8178            Ok(PartitionBoundValue::Expr(self.parse_expr()?))
8179        }
8180    }
8181
8182    /// Parse table creation configuration such as inheritance, partitioning, and clustering information.
8183    ///
8184    /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#syntax_2)
8185    /// [PostgreSQL](https://www.postgresql.org/docs/current/ddl-partitioning.html)
8186    /// [MySql](https://dev.mysql.com/doc/refman/8.4/en/create-table.html)
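    ///
    /// Illustrative clauses handled here (a sketch based on the branches below;
    /// which clauses are accepted depends on the dialect):
    /// ```sql
    /// INHERITS (parent_table)
    /// WITH (fillfactor = 70)
    /// TBLPROPERTIES (classification = 'parquet')
    /// PARTITION BY DATE(created_at)
    /// CLUSTER BY user_id
    /// OPTIONS (description = 'demo table')
    /// ```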
8187    fn parse_optional_create_table_config(
8188        &mut self,
8189    ) -> Result<CreateTableConfiguration, ParserError> {
8190        let mut table_options = CreateTableOptions::None;
8191
8192        let inherits = if self.parse_keyword(Keyword::INHERITS) {
8193            Some(self.parse_parenthesized_qualified_column_list(IsOptional::Mandatory, false)?)
8194        } else {
8195            None
8196        };
8197
8198        // PostgreSQL supports `WITH ( options )` before `AS`
8199        let with_options = self.parse_options(Keyword::WITH)?;
8200        if !with_options.is_empty() {
8201            table_options = CreateTableOptions::With(with_options)
8202        }
8203
8204        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
8205        if !table_properties.is_empty() {
8206            table_options = CreateTableOptions::TableProperties(table_properties);
8207        }
8208        let partition_by = if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
8209            && self.parse_keywords(&[Keyword::PARTITION, Keyword::BY])
8210        {
8211            Some(Box::new(self.parse_expr()?))
8212        } else {
8213            None
8214        };
8215
8216        let mut cluster_by = None;
8217        if dialect_of!(self is BigQueryDialect | GenericDialect) {
8218            if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
8219                cluster_by = Some(WrappedCollection::NoWrapping(
8220                    self.parse_comma_separated(|p| p.parse_expr())?,
8221                ));
8222            };
8223
8224            if let Token::Word(word) = self.peek_token().token {
8225                if word.keyword == Keyword::OPTIONS {
8226                    table_options =
8227                        CreateTableOptions::Options(self.parse_options(Keyword::OPTIONS)?)
8228                }
8229            };
8230        }
8231
8232        if !dialect_of!(self is HiveDialect) && table_options == CreateTableOptions::None {
8233            let plain_options = self.parse_plain_options()?;
8234            if !plain_options.is_empty() {
8235                table_options = CreateTableOptions::Plain(plain_options)
8236            }
8237        };
8238
8239        Ok(CreateTableConfiguration {
8240            partition_by,
8241            cluster_by,
8242            inherits,
8243            table_options,
8244        })
8245    }
8246
8247    fn parse_plain_option(&mut self) -> Result<Option<SqlOption>, ParserError> {
8248        // Single parameter option
8249        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
8250        if self.parse_keywords(&[Keyword::START, Keyword::TRANSACTION]) {
8251            return Ok(Some(SqlOption::Ident(Ident::new("START TRANSACTION"))));
8252        }
8253
8254        // Custom option
8255        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
8256        if self.parse_keywords(&[Keyword::COMMENT]) {
8257            let has_eq = self.consume_token(&Token::Eq);
8258            let value = self.next_token();
8259
8260            let comment = match (has_eq, value.token) {
8261                (true, Token::SingleQuotedString(s)) => {
8262                    Ok(Some(SqlOption::Comment(CommentDef::WithEq(s))))
8263                }
8264                (false, Token::SingleQuotedString(s)) => {
8265                    Ok(Some(SqlOption::Comment(CommentDef::WithoutEq(s))))
8266                }
8267                (_, token) => {
8268                    self.expected("Token::SingleQuotedString", TokenWithSpan::wrap(token))
8269                }
8270            };
8271            return comment;
8272        }
8273
8274        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
8275        // <https://clickhouse.com/docs/sql-reference/statements/create/table>
8276        if self.parse_keywords(&[Keyword::ENGINE]) {
8277            let _ = self.consume_token(&Token::Eq);
8278            let value = self.next_token();
8279
8280            let engine = match value.token {
8281                Token::Word(w) => {
8282                    let parameters = if self.peek_token() == Token::LParen {
8283                        self.parse_parenthesized_identifiers()?
8284                    } else {
8285                        vec![]
8286                    };
8287
8288                    Ok(Some(SqlOption::NamedParenthesizedList(
8289                        NamedParenthesizedList {
8290                            key: Ident::new("ENGINE"),
8291                            name: Some(Ident::new(w.value)),
8292                            values: parameters,
8293                        },
8294                    )))
8295                }
8296                _ => {
8297                    return self.expected("Token::Word", value)?;
8298                }
8299            };
8300
8301            return engine;
8302        }
8303
8304        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
8305        if self.parse_keywords(&[Keyword::TABLESPACE]) {
8306            let _ = self.consume_token(&Token::Eq);
8307            let value = self.next_token();
8308
8309            let tablespace = match value.token {
8310                Token::Word(Word { value: name, .. }) | Token::SingleQuotedString(name) => {
8311                    let storage = match self.parse_keyword(Keyword::STORAGE) {
8312                        true => {
8313                            let _ = self.consume_token(&Token::Eq);
8314                            let storage_token = self.next_token();
8315                            match &storage_token.token {
8316                                Token::Word(w) => match w.value.to_uppercase().as_str() {
8317                                    "DISK" => Some(StorageType::Disk),
8318                                    "MEMORY" => Some(StorageType::Memory),
8319                                    _ => self
8320                                        .expected("Storage type (DISK or MEMORY)", storage_token)?,
8321                                },
8322                                _ => self.expected("Token::Word", storage_token)?,
8323                            }
8324                        }
8325                        false => None,
8326                    };
8327
8328                    Ok(Some(SqlOption::TableSpace(TablespaceOption {
8329                        name,
8330                        storage,
8331                    })))
8332                }
8333                _ => {
8334                    return self.expected("Token::Word", value)?;
8335                }
8336            };
8337
8338            return tablespace;
8339        }
8340
8341        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
8342        if self.parse_keyword(Keyword::UNION) {
8343            let _ = self.consume_token(&Token::Eq);
8344            let value = self.next_token();
8345
8346            match value.token {
8347                Token::LParen => {
8348                    let tables: Vec<Ident> =
8349                        self.parse_comma_separated0(Parser::parse_identifier, Token::RParen)?;
8350                    self.expect_token(&Token::RParen)?;
8351
8352                    return Ok(Some(SqlOption::NamedParenthesizedList(
8353                        NamedParenthesizedList {
8354                            key: Ident::new("UNION"),
8355                            name: None,
8356                            values: tables,
8357                        },
8358                    )));
8359                }
8360                _ => {
8361                    return self.expected("Token::LParen", value)?;
8362                }
8363            }
8364        }
8365
8366        // Key/Value parameter option
8367        let key = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARSET]) {
8368            Ident::new("DEFAULT CHARSET")
8369        } else if self.parse_keyword(Keyword::CHARSET) {
8370            Ident::new("CHARSET")
8371        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARACTER, Keyword::SET]) {
8372            Ident::new("DEFAULT CHARACTER SET")
8373        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
8374            Ident::new("CHARACTER SET")
8375        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
8376            Ident::new("DEFAULT COLLATE")
8377        } else if self.parse_keyword(Keyword::COLLATE) {
8378            Ident::new("COLLATE")
8379        } else if self.parse_keywords(&[Keyword::DATA, Keyword::DIRECTORY]) {
8380            Ident::new("DATA DIRECTORY")
8381        } else if self.parse_keywords(&[Keyword::INDEX, Keyword::DIRECTORY]) {
8382            Ident::new("INDEX DIRECTORY")
8383        } else if self.parse_keyword(Keyword::KEY_BLOCK_SIZE) {
8384            Ident::new("KEY_BLOCK_SIZE")
8385        } else if self.parse_keyword(Keyword::ROW_FORMAT) {
8386            Ident::new("ROW_FORMAT")
8387        } else if self.parse_keyword(Keyword::PACK_KEYS) {
8388            Ident::new("PACK_KEYS")
8389        } else if self.parse_keyword(Keyword::STATS_AUTO_RECALC) {
8390            Ident::new("STATS_AUTO_RECALC")
8391        } else if self.parse_keyword(Keyword::STATS_PERSISTENT) {
8392            Ident::new("STATS_PERSISTENT")
8393        } else if self.parse_keyword(Keyword::STATS_SAMPLE_PAGES) {
8394            Ident::new("STATS_SAMPLE_PAGES")
8395        } else if self.parse_keyword(Keyword::DELAY_KEY_WRITE) {
8396            Ident::new("DELAY_KEY_WRITE")
8397        } else if self.parse_keyword(Keyword::COMPRESSION) {
8398            Ident::new("COMPRESSION")
8399        } else if self.parse_keyword(Keyword::ENCRYPTION) {
8400            Ident::new("ENCRYPTION")
8401        } else if self.parse_keyword(Keyword::MAX_ROWS) {
8402            Ident::new("MAX_ROWS")
8403        } else if self.parse_keyword(Keyword::MIN_ROWS) {
8404            Ident::new("MIN_ROWS")
8405        } else if self.parse_keyword(Keyword::AUTOEXTEND_SIZE) {
8406            Ident::new("AUTOEXTEND_SIZE")
8407        } else if self.parse_keyword(Keyword::AVG_ROW_LENGTH) {
8408            Ident::new("AVG_ROW_LENGTH")
8409        } else if self.parse_keyword(Keyword::CHECKSUM) {
8410            Ident::new("CHECKSUM")
8411        } else if self.parse_keyword(Keyword::CONNECTION) {
8412            Ident::new("CONNECTION")
8413        } else if self.parse_keyword(Keyword::ENGINE_ATTRIBUTE) {
8414            Ident::new("ENGINE_ATTRIBUTE")
8415        } else if self.parse_keyword(Keyword::PASSWORD) {
8416            Ident::new("PASSWORD")
8417        } else if self.parse_keyword(Keyword::SECONDARY_ENGINE_ATTRIBUTE) {
8418            Ident::new("SECONDARY_ENGINE_ATTRIBUTE")
8419        } else if self.parse_keyword(Keyword::INSERT_METHOD) {
8420            Ident::new("INSERT_METHOD")
8421        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
8422            Ident::new("AUTO_INCREMENT")
8423        } else {
8424            return Ok(None);
8425        };
8426
8427        let _ = self.consume_token(&Token::Eq);
8428
8429        let value = match self
8430            .maybe_parse(|parser| parser.parse_value())?
8431            .map(Expr::Value)
8432        {
8433            Some(expr) => expr,
8434            None => Expr::Identifier(self.parse_identifier()?),
8435        };
8436
8437        Ok(Some(SqlOption::KeyValue { key, value }))
8438    }
8439
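    /// Parse a sequence of "plain" table options, optionally comma-separated, as they
    /// appear at the end of e.g. a MySQL `CREATE TABLE`. An illustrative input
    /// (hypothetical values):
    /// ```sql
    /// ENGINE=InnoDB DEFAULT CHARSET=utf8mb4, AUTO_INCREMENT=100, COMMENT='audit log'
    /// ```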
8440    pub fn parse_plain_options(&mut self) -> Result<Vec<SqlOption>, ParserError> {
8441        let mut options = Vec::new();
8442
8443        while let Some(option) = self.parse_plain_option()? {
8444            options.push(option);
8445            // Some dialects support comma-separated options; consuming the comma for all
8446            // dialects shouldn't introduce ambiguity.
8447            let _ = self.consume_token(&Token::Comma);
8448        }
8449
8450        Ok(options)
8451    }
8452
8453    pub fn parse_optional_inline_comment(&mut self) -> Result<Option<CommentDef>, ParserError> {
8454        let comment = if self.parse_keyword(Keyword::COMMENT) {
8455            let has_eq = self.consume_token(&Token::Eq);
8456            let comment = self.parse_comment_value()?;
8457            Some(if has_eq {
8458                CommentDef::WithEq(comment)
8459            } else {
8460                CommentDef::WithoutEq(comment)
8461            })
8462        } else {
8463            None
8464        };
8465        Ok(comment)
8466    }
8467
8468    pub fn parse_comment_value(&mut self) -> Result<String, ParserError> {
8469        let next_token = self.next_token();
8470        let value = match next_token.token {
8471            Token::SingleQuotedString(str) => str,
8472            Token::DollarQuotedString(str) => str.value,
8473            _ => self.expected("string literal", next_token)?,
8474        };
8475        Ok(value)
8476    }
8477
8478    pub fn parse_optional_procedure_parameters(
8479        &mut self,
8480    ) -> Result<Option<Vec<ProcedureParam>>, ParserError> {
8481        let mut params = vec![];
8482        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
8483            return Ok(Some(params));
8484        }
8485        loop {
8486            if let Token::Word(_) = self.peek_token().token {
8487                params.push(self.parse_procedure_param()?)
8488            }
8489            let comma = self.consume_token(&Token::Comma);
8490            if self.consume_token(&Token::RParen) {
8491                // allow a trailing comma, even though it's not in the standard
8492                break;
8493            } else if !comma {
8494                return self.expected("',' or ')' after parameter definition", self.peek_token());
8495            }
8496        }
8497        Ok(Some(params))
8498    }
8499
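    /// Parse a parenthesized, comma-separated list of column definitions and
    /// table-level constraints. An illustrative input (hypothetical schema):
    /// ```sql
    /// (id INT NOT NULL, email TEXT, CONSTRAINT pk_id PRIMARY KEY (id))
    /// ```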
8500    pub fn parse_columns(&mut self) -> Result<(Vec<ColumnDef>, Vec<TableConstraint>), ParserError> {
8501        let mut columns = vec![];
8502        let mut constraints = vec![];
8503        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
8504            return Ok((columns, constraints));
8505        }
8506
8507        loop {
8508            if let Some(constraint) = self.parse_optional_table_constraint()? {
8509                constraints.push(constraint);
8510            } else if let Token::Word(_) = self.peek_token().token {
8511                columns.push(self.parse_column_def()?);
8512            } else {
8513                return self.expected("column name or constraint definition", self.peek_token());
8514            }
8515
8516            let comma = self.consume_token(&Token::Comma);
8517            let rparen = self.peek_token().token == Token::RParen;
8518
8519            if !comma && !rparen {
8520                return self.expected("',' or ')' after column definition", self.peek_token());
8521            };
8522
8523            if rparen
8524                && (!comma
8525                    || self.dialect.supports_column_definition_trailing_commas()
8526                    || self.options.trailing_commas)
8527            {
8528                let _ = self.consume_token(&Token::RParen);
8529                break;
8530            }
8531        }
8532
8533        Ok((columns, constraints))
8534    }
8535
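    /// Parse a single procedure parameter: an optional mode (`IN`, `OUT`, `INOUT`),
    /// a name, a data type, and an optional `=` default, e.g. (illustrative):
    /// ```sql
    /// IN retry_count INT = 3
    /// ```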
8536    pub fn parse_procedure_param(&mut self) -> Result<ProcedureParam, ParserError> {
8537        let mode = if self.parse_keyword(Keyword::IN) {
8538            Some(ArgMode::In)
8539        } else if self.parse_keyword(Keyword::OUT) {
8540            Some(ArgMode::Out)
8541        } else if self.parse_keyword(Keyword::INOUT) {
8542            Some(ArgMode::InOut)
8543        } else {
8544            None
8545        };
8546        let name = self.parse_identifier()?;
8547        let data_type = self.parse_data_type()?;
8548        let default = if self.consume_token(&Token::Eq) {
8549            Some(self.parse_expr()?)
8550        } else {
8551            None
8552        };
8553
8554        Ok(ProcedureParam {
8555            name,
8556            data_type,
8557            mode,
8558            default,
8559        })
8560    }
8561
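    /// Parse a single column definition: a name, a data type (which SQLite may omit),
    /// and zero or more column options, each optionally prefixed with `CONSTRAINT <name>`.
    /// For example (illustrative):
    /// ```sql
    /// email VARCHAR(255) CONSTRAINT uq_email UNIQUE NOT NULL
    /// ```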
8562    pub fn parse_column_def(&mut self) -> Result<ColumnDef, ParserError> {
8563        let col_name = self.parse_identifier()?;
8564        let data_type = if self.is_column_type_sqlite_unspecified() {
8565            DataType::Unspecified
8566        } else {
8567            self.parse_data_type()?
8568        };
8569        let mut options = vec![];
8570        loop {
8571            if self.parse_keyword(Keyword::CONSTRAINT) {
8572                let name = Some(self.parse_identifier()?);
8573                if let Some(option) = self.parse_optional_column_option()? {
8574                    options.push(ColumnOptionDef { name, option });
8575                } else {
8576                    return self.expected(
8577                        "constraint details after CONSTRAINT <name>",
8578                        self.peek_token(),
8579                    );
8580                }
8581            } else if let Some(option) = self.parse_optional_column_option()? {
8582                options.push(ColumnOptionDef { name: None, option });
8583            } else {
8584                break;
8585            };
8586        }
8587        Ok(ColumnDef {
8588            name: col_name,
8589            data_type,
8590            options,
8591        })
8592    }
8593
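    /// Returns true when, for SQLite, the next token cannot begin a data type and the
    /// column type should therefore be treated as [DataType::Unspecified]. SQLite
    /// permits omitting the type entirely, e.g. (illustrative):
    /// ```sql
    /// CREATE TABLE t (a, b PRIMARY KEY)
    /// ```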
8594    fn is_column_type_sqlite_unspecified(&mut self) -> bool {
8595        if dialect_of!(self is SQLiteDialect) {
8596            match self.peek_token().token {
8597                Token::Word(word) => matches!(
8598                    word.keyword,
8599                    Keyword::CONSTRAINT
8600                        | Keyword::PRIMARY
8601                        | Keyword::NOT
8602                        | Keyword::UNIQUE
8603                        | Keyword::CHECK
8604                        | Keyword::DEFAULT
8605                        | Keyword::COLLATE
8606                        | Keyword::REFERENCES
8607                        | Keyword::GENERATED
8608                        | Keyword::AS
8609                ),
8610                _ => true, // e.g. comma immediately after column name
8611            }
8612        } else {
8613            false
8614        }
8615    }
8616
8617    pub fn parse_optional_column_option(&mut self) -> Result<Option<ColumnOption>, ParserError> {
8618        if let Some(option) = self.dialect.parse_column_option(self)? {
8619            return option;
8620        }
8621
8622        self.with_state(
8623            ColumnDefinition,
8624            |parser| -> Result<Option<ColumnOption>, ParserError> {
8625                parser.parse_optional_column_option_inner()
8626            },
8627        )
8628    }
8629
8630    fn parse_optional_column_option_inner(&mut self) -> Result<Option<ColumnOption>, ParserError> {
8631        if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
8632            Ok(Some(ColumnOption::CharacterSet(
8633                self.parse_object_name(false)?,
8634            )))
8635        } else if self.parse_keywords(&[Keyword::COLLATE]) {
8636            Ok(Some(ColumnOption::Collation(
8637                self.parse_object_name(false)?,
8638            )))
8639        } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
8640            Ok(Some(ColumnOption::NotNull))
8641        } else if self.parse_keywords(&[Keyword::COMMENT]) {
8642            Ok(Some(ColumnOption::Comment(self.parse_comment_value()?)))
8643        } else if self.parse_keyword(Keyword::NULL) {
8644            Ok(Some(ColumnOption::Null))
8645        } else if self.parse_keyword(Keyword::DEFAULT) {
8646            Ok(Some(ColumnOption::Default(
8647                self.parse_column_option_expr()?,
8648            )))
8649        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
8650            && self.parse_keyword(Keyword::MATERIALIZED)
8651        {
8652            Ok(Some(ColumnOption::Materialized(
8653                self.parse_column_option_expr()?,
8654            )))
8655        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
8656            && self.parse_keyword(Keyword::ALIAS)
8657        {
8658            Ok(Some(ColumnOption::Alias(self.parse_column_option_expr()?)))
8659        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
8660            && self.parse_keyword(Keyword::EPHEMERAL)
8661        {
8662            // The expression is optional for the EPHEMERAL syntax, so we need to check
8663            // if the column definition has remaining tokens before parsing the expression.
8664            if matches!(self.peek_token().token, Token::Comma | Token::RParen) {
8665                Ok(Some(ColumnOption::Ephemeral(None)))
8666            } else {
8667                Ok(Some(ColumnOption::Ephemeral(Some(
8668                    self.parse_column_option_expr()?,
8669                ))))
8670            }
8671        } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
8672            let characteristics = self.parse_constraint_characteristics()?;
8673            Ok(Some(
8674                PrimaryKeyConstraint {
8675                    name: None,
8676                    index_name: None,
8677                    index_type: None,
8678                    columns: vec![],
8679                    index_options: vec![],
8680                    characteristics,
8681                }
8682                .into(),
8683            ))
8684        } else if self.parse_keyword(Keyword::UNIQUE) {
8685            let characteristics = self.parse_constraint_characteristics()?;
8686            Ok(Some(
8687                UniqueConstraint {
8688                    name: None,
8689                    index_name: None,
8690                    index_type_display: KeyOrIndexDisplay::None,
8691                    index_type: None,
8692                    columns: vec![],
8693                    index_options: vec![],
8694                    characteristics,
8695                    nulls_distinct: NullsDistinctOption::None,
8696                }
8697                .into(),
8698            ))
8699        } else if self.parse_keyword(Keyword::REFERENCES) {
8700            let foreign_table = self.parse_object_name(false)?;
8701            // PostgreSQL allows omitting the column list and
8702            // uses the primary key column of the foreign table by default
8703            let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
8704            let mut match_kind = None;
8705            let mut on_delete = None;
8706            let mut on_update = None;
8707            loop {
8708                if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
8709                    match_kind = Some(self.parse_match_kind()?);
8710                } else if on_delete.is_none()
8711                    && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
8712                {
8713                    on_delete = Some(self.parse_referential_action()?);
8714                } else if on_update.is_none()
8715                    && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
8716                {
8717                    on_update = Some(self.parse_referential_action()?);
8718                } else {
8719                    break;
8720                }
8721            }
8722            let characteristics = self.parse_constraint_characteristics()?;
8723
8724            Ok(Some(
8725                ForeignKeyConstraint {
8726                    name: None,       // Column-level constraints don't have names
8727                    index_name: None, // Not applicable for column-level constraints
8728                    columns: vec![],  // Not applicable for column-level constraints
8729                    foreign_table,
8730                    referred_columns,
8731                    on_delete,
8732                    on_update,
8733                    match_kind,
8734                    characteristics,
8735                }
8736                .into(),
8737            ))
8738        } else if self.parse_keyword(Keyword::CHECK) {
8739            self.expect_token(&Token::LParen)?;
8740            // since `CHECK` requires parentheses, we can parse the inner expression in ParserState::Normal
8741            let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
8742            self.expect_token(&Token::RParen)?;
8743            Ok(Some(
8744                CheckConstraint {
8745                    name: None, // Column-level check constraints don't have names
8746                    expr: Box::new(expr),
8747                    enforced: None, // Could be extended later to support MySQL ENFORCED/NOT ENFORCED
8748                }
8749                .into(),
8750            ))
8751        } else if self.parse_keyword(Keyword::AUTO_INCREMENT)
8752            && dialect_of!(self is MySqlDialect | GenericDialect)
8753        {
8754            // Support AUTO_INCREMENT for MySQL
8755            Ok(Some(ColumnOption::DialectSpecific(vec![
8756                Token::make_keyword("AUTO_INCREMENT"),
8757            ])))
8758        } else if self.parse_keyword(Keyword::AUTOINCREMENT)
8759            && dialect_of!(self is SQLiteDialect |  GenericDialect)
8760        {
8761            // Support AUTOINCREMENT for SQLite
8762            Ok(Some(ColumnOption::DialectSpecific(vec![
8763                Token::make_keyword("AUTOINCREMENT"),
8764            ])))
8765        } else if self.parse_keyword(Keyword::ASC)
8766            && self.dialect.supports_asc_desc_in_column_definition()
8767        {
8768            // Support ASC for SQLite
8769            Ok(Some(ColumnOption::DialectSpecific(vec![
8770                Token::make_keyword("ASC"),
8771            ])))
8772        } else if self.parse_keyword(Keyword::DESC)
8773            && self.dialect.supports_asc_desc_in_column_definition()
8774        {
8775            // Support DESC for SQLite
8776            Ok(Some(ColumnOption::DialectSpecific(vec![
8777                Token::make_keyword("DESC"),
8778            ])))
8779        } else if self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
8780            && dialect_of!(self is MySqlDialect | GenericDialect)
8781        {
8782            let expr = self.parse_column_option_expr()?;
8783            Ok(Some(ColumnOption::OnUpdate(expr)))
8784        } else if self.parse_keyword(Keyword::GENERATED) {
8785            self.parse_optional_column_option_generated()
8786        } else if dialect_of!(self is BigQueryDialect | GenericDialect)
8787            && self.parse_keyword(Keyword::OPTIONS)
8788        {
8789            self.prev_token();
8790            Ok(Some(ColumnOption::Options(
8791                self.parse_options(Keyword::OPTIONS)?,
8792            )))
8793        } else if self.parse_keyword(Keyword::AS)
8794            && dialect_of!(self is MySqlDialect | SQLiteDialect | DuckDbDialect | GenericDialect)
8795        {
8796            self.parse_optional_column_option_as()
8797        } else if self.parse_keyword(Keyword::SRID)
8798            && dialect_of!(self is MySqlDialect | GenericDialect)
8799        {
8800            Ok(Some(ColumnOption::Srid(Box::new(
8801                self.parse_column_option_expr()?,
8802            ))))
8803        } else if self.parse_keyword(Keyword::IDENTITY)
8804            && dialect_of!(self is MsSqlDialect | GenericDialect)
8805        {
8806            let parameters = if self.consume_token(&Token::LParen) {
8807                let seed = self.parse_number()?;
8808                self.expect_token(&Token::Comma)?;
8809                let increment = self.parse_number()?;
8810                self.expect_token(&Token::RParen)?;
8811
8812                Some(IdentityPropertyFormatKind::FunctionCall(
8813                    IdentityParameters { seed, increment },
8814                ))
8815            } else {
8816                None
8817            };
8818            Ok(Some(ColumnOption::Identity(
8819                IdentityPropertyKind::Identity(IdentityProperty {
8820                    parameters,
8821                    order: None,
8822                }),
8823            )))
8824        } else if dialect_of!(self is SQLiteDialect | GenericDialect)
8825            && self.parse_keywords(&[Keyword::ON, Keyword::CONFLICT])
8826        {
8827            // Support ON CONFLICT for SQLite
8828            Ok(Some(ColumnOption::OnConflict(
8829                self.expect_one_of_keywords(&[
8830                    Keyword::ROLLBACK,
8831                    Keyword::ABORT,
8832                    Keyword::FAIL,
8833                    Keyword::IGNORE,
8834                    Keyword::REPLACE,
8835                ])?,
8836            )))
8837        } else if self.parse_keyword(Keyword::INVISIBLE) {
8838            Ok(Some(ColumnOption::Invisible))
8839        } else {
8840            Ok(None)
8841        }
8842    }
8843
8844    /// When parsing some column option expressions we need to revert to [ParserState::Normal] since
8845    /// `NOT NULL` is allowed as an alias for `IS NOT NULL`.
8846    /// In those cases we use this helper instead of calling [Parser::parse_expr] directly.
8847    ///
8848    /// For example, consider these `CREATE TABLE` statements:
8849    /// ```sql
8850    /// CREATE TABLE foo (abc BOOL DEFAULT (42 NOT NULL) NOT NULL);
8851    /// ```
8852    /// vs
8853    /// ```sql
8854    /// CREATE TABLE foo (abc BOOL NOT NULL);
8855    /// ```
8856    ///
8857    /// In the first statement we should parse the inner portion of `(42 NOT NULL)` as [Expr::IsNotNull],
8858    /// whereas in both statements the trailing `NOT NULL` should only be parsed as a
8859    /// [ColumnOption::NotNull].
8860    fn parse_column_option_expr(&mut self) -> Result<Expr, ParserError> {
8861        if self.peek_token_ref().token == Token::LParen {
8862            let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_prefix())?;
8863            Ok(expr)
8864        } else {
8865            Ok(self.parse_expr()?)
8866        }
8867    }
8868
8869    pub(crate) fn parse_tag(&mut self) -> Result<Tag, ParserError> {
8870        let name = self.parse_object_name(false)?;
8871        self.expect_token(&Token::Eq)?;
8872        let value = self.parse_literal_string()?;
8873
8874        Ok(Tag::new(name, value))
8875    }
8876
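    /// Parse the remainder of a `GENERATED ...` column option (the `GENERATED` keyword
    /// has already been consumed). A few illustrative forms (hypothetical columns):
    /// ```sql
    /// GENERATED ALWAYS AS IDENTITY (START WITH 1 INCREMENT BY 1)
    /// GENERATED BY DEFAULT AS IDENTITY
    /// GENERATED ALWAYS AS (price * quantity) STORED
    /// ```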
8877    fn parse_optional_column_option_generated(
8878        &mut self,
8879    ) -> Result<Option<ColumnOption>, ParserError> {
8880        if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS, Keyword::IDENTITY]) {
8881            let mut sequence_options = vec![];
8882            if self.expect_token(&Token::LParen).is_ok() {
8883                sequence_options = self.parse_create_sequence_options()?;
8884                self.expect_token(&Token::RParen)?;
8885            }
8886            Ok(Some(ColumnOption::Generated {
8887                generated_as: GeneratedAs::Always,
8888                sequence_options: Some(sequence_options),
8889                generation_expr: None,
8890                generation_expr_mode: None,
8891                generated_keyword: true,
8892            }))
8893        } else if self.parse_keywords(&[
8894            Keyword::BY,
8895            Keyword::DEFAULT,
8896            Keyword::AS,
8897            Keyword::IDENTITY,
8898        ]) {
8899            let mut sequence_options = vec![];
8900            if self.expect_token(&Token::LParen).is_ok() {
8901                sequence_options = self.parse_create_sequence_options()?;
8902                self.expect_token(&Token::RParen)?;
8903            }
8904            Ok(Some(ColumnOption::Generated {
8905                generated_as: GeneratedAs::ByDefault,
8906                sequence_options: Some(sequence_options),
8907                generation_expr: None,
8908                generation_expr_mode: None,
8909                generated_keyword: true,
8910            }))
8911        } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS]) {
8912            if self.expect_token(&Token::LParen).is_ok() {
8913                let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
8914                self.expect_token(&Token::RParen)?;
8915                let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
8916                    Ok((
8917                        GeneratedAs::ExpStored,
8918                        Some(GeneratedExpressionMode::Stored),
8919                    ))
8920                } else if dialect_of!(self is PostgreSqlDialect) {
8921                    // Postgres' `AS IDENTITY` branches are handled above; this one requires STORED
8922                    self.expected("STORED", self.peek_token())
8923                } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
8924                    Ok((GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual)))
8925                } else {
8926                    Ok((GeneratedAs::Always, None))
8927                }?;
8928
8929                Ok(Some(ColumnOption::Generated {
8930                    generated_as: gen_as,
8931                    sequence_options: None,
8932                    generation_expr: Some(expr),
8933                    generation_expr_mode: expr_mode,
8934                    generated_keyword: true,
8935                }))
8936            } else {
8937                Ok(None)
8938            }
8939        } else {
8940            Ok(None)
8941        }
8942    }
8943
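    /// Parse the `AS (expr) [STORED | VIRTUAL]` shorthand for a generated column
    /// (the `AS` keyword has already been consumed), e.g. (illustrative):
    /// ```sql
    /// total DECIMAL(10, 2) AS (price * quantity) STORED
    /// ```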
8944    fn parse_optional_column_option_as(&mut self) -> Result<Option<ColumnOption>, ParserError> {
8945        // Some DBs allow 'AS (expr)', shorthand for GENERATED ALWAYS AS
8946        self.expect_token(&Token::LParen)?;
8947        let expr = self.parse_expr()?;
8948        self.expect_token(&Token::RParen)?;
8949
8950        let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
8951            (
8952                GeneratedAs::ExpStored,
8953                Some(GeneratedExpressionMode::Stored),
8954            )
8955        } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
8956            (GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual))
8957        } else {
8958            (GeneratedAs::Always, None)
8959        };
8960
8961        Ok(Some(ColumnOption::Generated {
8962            generated_as: gen_as,
8963            sequence_options: None,
8964            generation_expr: Some(expr),
8965            generation_expr_mode: expr_mode,
8966            generated_keyword: false,
8967        }))
8968    }
8969
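    /// Parse an optional Hive-style `CLUSTERED BY` clause, e.g. (illustrative):
    /// ```sql
    /// CLUSTERED BY (user_id) SORTED BY (created_at DESC) INTO 32 BUCKETS
    /// ```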
8970    pub fn parse_optional_clustered_by(&mut self) -> Result<Option<ClusteredBy>, ParserError> {
8971        let clustered_by = if dialect_of!(self is HiveDialect|GenericDialect)
8972            && self.parse_keywords(&[Keyword::CLUSTERED, Keyword::BY])
8973        {
8974            let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
8975
8976            let sorted_by = if self.parse_keywords(&[Keyword::SORTED, Keyword::BY]) {
8977                self.expect_token(&Token::LParen)?;
8978                let sorted_by_columns = self.parse_comma_separated(|p| p.parse_order_by_expr())?;
8979                self.expect_token(&Token::RParen)?;
8980                Some(sorted_by_columns)
8981            } else {
8982                None
8983            };
8984
8985            self.expect_keyword_is(Keyword::INTO)?;
8986            let num_buckets = self.parse_number_value()?.value;
8987            self.expect_keyword_is(Keyword::BUCKETS)?;
8988            Some(ClusteredBy {
8989                columns,
8990                sorted_by,
8991                num_buckets,
8992            })
8993        } else {
8994            None
8995        };
8996        Ok(clustered_by)
8997    }
8998
8999    pub fn parse_referential_action(&mut self) -> Result<ReferentialAction, ParserError> {
9000        if self.parse_keyword(Keyword::RESTRICT) {
9001            Ok(ReferentialAction::Restrict)
9002        } else if self.parse_keyword(Keyword::CASCADE) {
9003            Ok(ReferentialAction::Cascade)
9004        } else if self.parse_keywords(&[Keyword::SET, Keyword::NULL]) {
9005            Ok(ReferentialAction::SetNull)
9006        } else if self.parse_keywords(&[Keyword::NO, Keyword::ACTION]) {
9007            Ok(ReferentialAction::NoAction)
9008        } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
9009            Ok(ReferentialAction::SetDefault)
9010        } else {
9011            self.expected(
9012                "one of RESTRICT, CASCADE, SET NULL, NO ACTION or SET DEFAULT",
9013                self.peek_token(),
9014            )
9015        }
9016    }
9017
9018    pub fn parse_match_kind(&mut self) -> Result<ConstraintReferenceMatchKind, ParserError> {
9019        if self.parse_keyword(Keyword::FULL) {
9020            Ok(ConstraintReferenceMatchKind::Full)
9021        } else if self.parse_keyword(Keyword::PARTIAL) {
9022            Ok(ConstraintReferenceMatchKind::Partial)
9023        } else if self.parse_keyword(Keyword::SIMPLE) {
9024            Ok(ConstraintReferenceMatchKind::Simple)
9025        } else {
9026            self.expected("one of FULL, PARTIAL or SIMPLE", self.peek_token())
9027        }
9028    }
9029
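    /// Parse optional constraint characteristics, which may appear in any order,
    /// e.g. (illustrative combinations):
    /// ```sql
    /// DEFERRABLE INITIALLY DEFERRED
    /// NOT DEFERRABLE INITIALLY IMMEDIATE
    /// NOT ENFORCED
    /// ```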
9030    pub fn parse_constraint_characteristics(
9031        &mut self,
9032    ) -> Result<Option<ConstraintCharacteristics>, ParserError> {
9033        let mut cc = ConstraintCharacteristics::default();
9034
9035        loop {
9036            if cc.deferrable.is_none() && self.parse_keywords(&[Keyword::NOT, Keyword::DEFERRABLE])
9037            {
9038                cc.deferrable = Some(false);
9039            } else if cc.deferrable.is_none() && self.parse_keyword(Keyword::DEFERRABLE) {
9040                cc.deferrable = Some(true);
9041            } else if cc.initially.is_none() && self.parse_keyword(Keyword::INITIALLY) {
9042                if self.parse_keyword(Keyword::DEFERRED) {
9043                    cc.initially = Some(DeferrableInitial::Deferred);
9044                } else if self.parse_keyword(Keyword::IMMEDIATE) {
9045                    cc.initially = Some(DeferrableInitial::Immediate);
9046                } else {
9047                    self.expected("one of DEFERRED or IMMEDIATE", self.peek_token())?;
9048                }
9049            } else if cc.enforced.is_none() && self.parse_keyword(Keyword::ENFORCED) {
9050                cc.enforced = Some(true);
9051            } else if cc.enforced.is_none()
9052                && self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED])
9053            {
9054                cc.enforced = Some(false);
9055            } else {
9056                break;
9057            }
9058        }
9059
9060        if cc.deferrable.is_some() || cc.initially.is_some() || cc.enforced.is_some() {
9061            Ok(Some(cc))
9062        } else {
9063            Ok(None)
9064        }
9065    }
9066
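    /// Parse an optional table-level constraint. A few illustrative inputs
    /// (hypothetical names and columns):
    /// ```sql
    /// CONSTRAINT uq_email UNIQUE (email)
    /// PRIMARY KEY (id)
    /// FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE
    /// CHECK (quantity > 0)
    /// FULLTEXT INDEX ft_body (body)
    /// ```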
9067    pub fn parse_optional_table_constraint(
9068        &mut self,
9069    ) -> Result<Option<TableConstraint>, ParserError> {
9070        let name = if self.parse_keyword(Keyword::CONSTRAINT) {
9071            Some(self.parse_identifier()?)
9072        } else {
9073            None
9074        };
9075
9076        let next_token = self.next_token();
9077        match next_token.token {
9078            Token::Word(w) if w.keyword == Keyword::UNIQUE => {
9079                let index_type_display = self.parse_index_type_display();
9080                if !dialect_of!(self is GenericDialect | MySqlDialect)
9081                    && !index_type_display.is_none()
9082                {
9083                    return self
9084                        .expected("`index_name` or `(column_name [, ...])`", self.peek_token());
9085                }
9086
9087                let nulls_distinct = self.parse_optional_nulls_distinct()?;
9088
9089                // optional index name
9090                let index_name = self.parse_optional_ident()?;
9091                let index_type = self.parse_optional_using_then_index_type()?;
9092
9093                let columns = self.parse_parenthesized_index_column_list()?;
9094                let index_options = self.parse_index_options()?;
9095                let characteristics = self.parse_constraint_characteristics()?;
9096                Ok(Some(
9097                    UniqueConstraint {
9098                        name,
9099                        index_name,
9100                        index_type_display,
9101                        index_type,
9102                        columns,
9103                        index_options,
9104                        characteristics,
9105                        nulls_distinct,
9106                    }
9107                    .into(),
9108                ))
9109            }
9110            Token::Word(w) if w.keyword == Keyword::PRIMARY => {
9111                // `PRIMARY` is always followed by `KEY`
9112                self.expect_keyword_is(Keyword::KEY)?;
9113
9114                // optional index name
9115                let index_name = self.parse_optional_ident()?;
9116                let index_type = self.parse_optional_using_then_index_type()?;
9117
9118                let columns = self.parse_parenthesized_index_column_list()?;
9119                let index_options = self.parse_index_options()?;
9120                let characteristics = self.parse_constraint_characteristics()?;
9121                Ok(Some(
9122                    PrimaryKeyConstraint {
9123                        name,
9124                        index_name,
9125                        index_type,
9126                        columns,
9127                        index_options,
9128                        characteristics,
9129                    }
9130                    .into(),
9131                ))
9132            }
9133            Token::Word(w) if w.keyword == Keyword::FOREIGN => {
9134                self.expect_keyword_is(Keyword::KEY)?;
9135                let index_name = self.parse_optional_ident()?;
9136                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
9137                self.expect_keyword_is(Keyword::REFERENCES)?;
9138                let foreign_table = self.parse_object_name(false)?;
9139                let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
9140                let mut match_kind = None;
9141                let mut on_delete = None;
9142                let mut on_update = None;
9143                loop {
9144                    if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
9145                        match_kind = Some(self.parse_match_kind()?);
9146                    } else if on_delete.is_none()
9147                        && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
9148                    {
9149                        on_delete = Some(self.parse_referential_action()?);
9150                    } else if on_update.is_none()
9151                        && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
9152                    {
9153                        on_update = Some(self.parse_referential_action()?);
9154                    } else {
9155                        break;
9156                    }
9157                }
9158
9159                let characteristics = self.parse_constraint_characteristics()?;
9160
9161                Ok(Some(
9162                    ForeignKeyConstraint {
9163                        name,
9164                        index_name,
9165                        columns,
9166                        foreign_table,
9167                        referred_columns,
9168                        on_delete,
9169                        on_update,
9170                        match_kind,
9171                        characteristics,
9172                    }
9173                    .into(),
9174                ))
9175            }
9176            Token::Word(w) if w.keyword == Keyword::CHECK => {
9177                self.expect_token(&Token::LParen)?;
9178                let expr = Box::new(self.parse_expr()?);
9179                self.expect_token(&Token::RParen)?;
9180
9181                let enforced = if self.parse_keyword(Keyword::ENFORCED) {
9182                    Some(true)
9183                } else if self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED]) {
9184                    Some(false)
9185                } else {
9186                    None
9187                };
9188
9189                Ok(Some(
9190                    CheckConstraint {
9191                        name,
9192                        expr,
9193                        enforced,
9194                    }
9195                    .into(),
9196                ))
9197            }
9198            Token::Word(w)
9199                if (w.keyword == Keyword::INDEX || w.keyword == Keyword::KEY)
9200                    && dialect_of!(self is GenericDialect | MySqlDialect)
9201                    && name.is_none() =>
9202            {
9203                let display_as_key = w.keyword == Keyword::KEY;
9204
9205                let name = match self.peek_token().token {
9206                    Token::Word(word) if word.keyword == Keyword::USING => None,
9207                    _ => self.parse_optional_ident()?,
9208                };
9209
9210                let index_type = self.parse_optional_using_then_index_type()?;
9211                let columns = self.parse_parenthesized_index_column_list()?;
9212                let index_options = self.parse_index_options()?;
9213
9214                Ok(Some(
9215                    IndexConstraint {
9216                        display_as_key,
9217                        name,
9218                        index_type,
9219                        columns,
9220                        index_options,
9221                    }
9222                    .into(),
9223                ))
9224            }
9225            Token::Word(w)
9226                if (w.keyword == Keyword::FULLTEXT || w.keyword == Keyword::SPATIAL)
9227                    && dialect_of!(self is GenericDialect | MySqlDialect) =>
9228            {
9229                if let Some(name) = name {
9230                    return self.expected(
9231                        "FULLTEXT or SPATIAL option without constraint name",
9232                        TokenWithSpan {
9233                            token: Token::make_keyword(&name.to_string()),
9234                            span: next_token.span,
9235                        },
9236                    );
9237                }
9238
9239                let fulltext = w.keyword == Keyword::FULLTEXT;
9240
9241                let index_type_display = self.parse_index_type_display();
9242
9243                let opt_index_name = self.parse_optional_ident()?;
9244
9245                let columns = self.parse_parenthesized_index_column_list()?;
9246
9247                Ok(Some(
9248                    FullTextOrSpatialConstraint {
9249                        fulltext,
9250                        index_type_display,
9251                        opt_index_name,
9252                        columns,
9253                    }
9254                    .into(),
9255                ))
9256            }
9257            _ => {
9258                if name.is_some() {
9259                    self.expected("PRIMARY, UNIQUE, FOREIGN, or CHECK", next_token)
9260                } else {
9261                    self.prev_token();
9262                    Ok(None)
9263                }
9264            }
9265        }
9266    }
9267
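    /// Parse an optional PostgreSQL-style `NULLS [NOT] DISTINCT` modifier of a
    /// `UNIQUE` constraint, e.g. (illustrative):
    /// ```sql
    /// UNIQUE NULLS NOT DISTINCT (email)
    /// ```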
9268    fn parse_optional_nulls_distinct(&mut self) -> Result<NullsDistinctOption, ParserError> {
9269        Ok(if self.parse_keyword(Keyword::NULLS) {
9270            let not = self.parse_keyword(Keyword::NOT);
9271            self.expect_keyword_is(Keyword::DISTINCT)?;
9272            if not {
9273                NullsDistinctOption::NotDistinct
9274            } else {
9275                NullsDistinctOption::Distinct
9276            }
9277        } else {
9278            NullsDistinctOption::None
9279        })
9280    }
9281
9282    pub fn maybe_parse_options(
9283        &mut self,
9284        keyword: Keyword,
9285    ) -> Result<Option<Vec<SqlOption>>, ParserError> {
9286        if let Token::Word(word) = self.peek_token().token {
9287            if word.keyword == keyword {
9288                return Ok(Some(self.parse_options(keyword)?));
9289            }
9290        };
9291        Ok(None)
9292    }
9293
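    /// Parse `<keyword> ( <option> [, ...] )` into a list of [SqlOption]s, returning an
    /// empty list when the keyword is absent. For example, with `Keyword::WITH`
    /// (illustrative):
    /// ```sql
    /// WITH (fillfactor = 70, autovacuum_enabled = false)
    /// ```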
9294    pub fn parse_options(&mut self, keyword: Keyword) -> Result<Vec<SqlOption>, ParserError> {
9295        if self.parse_keyword(keyword) {
9296            self.expect_token(&Token::LParen)?;
9297            let options = self.parse_comma_separated0(Parser::parse_sql_option, Token::RParen)?;
9298            self.expect_token(&Token::RParen)?;
9299            Ok(options)
9300        } else {
9301            Ok(vec![])
9302        }
9303    }
9304
9305    pub fn parse_options_with_keywords(
9306        &mut self,
9307        keywords: &[Keyword],
9308    ) -> Result<Vec<SqlOption>, ParserError> {
9309        if self.parse_keywords(keywords) {
9310            self.expect_token(&Token::LParen)?;
9311            let options = self.parse_comma_separated(Parser::parse_sql_option)?;
9312            self.expect_token(&Token::RParen)?;
9313            Ok(options)
9314        } else {
9315            Ok(vec![])
9316        }
9317    }
9318
9319    pub fn parse_index_type(&mut self) -> Result<IndexType, ParserError> {
9320        Ok(if self.parse_keyword(Keyword::BTREE) {
9321            IndexType::BTree
9322        } else if self.parse_keyword(Keyword::HASH) {
9323            IndexType::Hash
9324        } else if self.parse_keyword(Keyword::GIN) {
9325            IndexType::GIN
9326        } else if self.parse_keyword(Keyword::GIST) {
9327            IndexType::GiST
9328        } else if self.parse_keyword(Keyword::SPGIST) {
9329            IndexType::SPGiST
9330        } else if self.parse_keyword(Keyword::BRIN) {
9331            IndexType::BRIN
9332        } else if self.parse_keyword(Keyword::BLOOM) {
9333            IndexType::Bloom
9334        } else {
9335            IndexType::Custom(self.parse_identifier()?)
9336        })
9337    }
9338
9339    /// Optionally parse the `USING` keyword, followed by an [IndexType].
9340    /// Example:
9341    /// ```sql
9342    /// USING BTREE (name, age DESC)
9343    /// ```
9344    pub fn parse_optional_using_then_index_type(
9345        &mut self,
9346    ) -> Result<Option<IndexType>, ParserError> {
9347        if self.parse_keyword(Keyword::USING) {
9348            Ok(Some(self.parse_index_type()?))
9349        } else {
9350            Ok(None)
9351        }
9352    }
9353
9354    /// Parse an optional identifier, typically a name such as
9355    /// `window_name`, `index_name`, ...
9356    pub fn parse_optional_ident(&mut self) -> Result<Option<Ident>, ParserError> {
9357        self.maybe_parse(|parser| parser.parse_identifier())
9358    }
9359
9360    #[must_use]
9361    pub fn parse_index_type_display(&mut self) -> KeyOrIndexDisplay {
9362        if self.parse_keyword(Keyword::KEY) {
9363            KeyOrIndexDisplay::Key
9364        } else if self.parse_keyword(Keyword::INDEX) {
9365            KeyOrIndexDisplay::Index
9366        } else {
9367            KeyOrIndexDisplay::None
9368        }
9369    }
9370
9371    pub fn parse_optional_index_option(&mut self) -> Result<Option<IndexOption>, ParserError> {
9372        if let Some(index_type) = self.parse_optional_using_then_index_type()? {
9373            Ok(Some(IndexOption::Using(index_type)))
9374        } else if self.parse_keyword(Keyword::COMMENT) {
9375            let s = self.parse_literal_string()?;
9376            Ok(Some(IndexOption::Comment(s)))
9377        } else {
9378            Ok(None)
9379        }
9380    }
9381
9382    pub fn parse_index_options(&mut self) -> Result<Vec<IndexOption>, ParserError> {
9383        let mut options = Vec::new();
9384
9385        loop {
9386            match self.parse_optional_index_option()? {
9387                Some(index_option) => options.push(index_option),
9388                None => return Ok(options),
9389            }
9390        }
9391    }
9392
9393    pub fn parse_sql_option(&mut self) -> Result<SqlOption, ParserError> {
9394        let is_mssql = dialect_of!(self is MsSqlDialect|GenericDialect);
9395
9396        match self.peek_token().token {
9397            Token::Word(w) if w.keyword == Keyword::HEAP && is_mssql => {
9398                Ok(SqlOption::Ident(self.parse_identifier()?))
9399            }
9400            Token::Word(w) if w.keyword == Keyword::PARTITION && is_mssql => {
9401                self.parse_option_partition()
9402            }
9403            Token::Word(w) if w.keyword == Keyword::CLUSTERED && is_mssql => {
9404                self.parse_option_clustered()
9405            }
9406            _ => {
9407                let name = self.parse_identifier()?;
9408                self.expect_token(&Token::Eq)?;
9409                let value = self.parse_expr()?;
9410
9411                Ok(SqlOption::KeyValue { key: name, value })
9412            }
9413        }
9414    }
9415
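    /// Parses an MsSQL-style `CLUSTERED ...` table option, e.g. (illustrative):
    /// ```sql
    /// WITH (CLUSTERED COLUMNSTORE INDEX ORDER (a, b))
    /// ```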
9416    pub fn parse_option_clustered(&mut self) -> Result<SqlOption, ParserError> {
9417        if self.parse_keywords(&[
9418            Keyword::CLUSTERED,
9419            Keyword::COLUMNSTORE,
9420            Keyword::INDEX,
9421            Keyword::ORDER,
9422        ]) {
9423            Ok(SqlOption::Clustered(
9424                TableOptionsClustered::ColumnstoreIndexOrder(
9425                    self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
9426                ),
9427            ))
9428        } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::COLUMNSTORE, Keyword::INDEX]) {
9429            Ok(SqlOption::Clustered(
9430                TableOptionsClustered::ColumnstoreIndex,
9431            ))
9432        } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::INDEX]) {
9433            self.expect_token(&Token::LParen)?;
9434
9435            let columns = self.parse_comma_separated(|p| {
9436                let name = p.parse_identifier()?;
9437                let asc = p.parse_asc_desc();
9438
9439                Ok(ClusteredIndex { name, asc })
9440            })?;
9441
9442            self.expect_token(&Token::RParen)?;
9443
9444            Ok(SqlOption::Clustered(TableOptionsClustered::Index(columns)))
9445        } else {
9446            Err(ParserError::ParserError(
9447                "invalid CLUSTERED sequence".to_string(),
9448            ))
9449        }
9450    }
9451
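    /// Parses an MsSQL-style `PARTITION (...)` table option, e.g. (illustrative):
    /// ```sql
    /// WITH (PARTITION (id RANGE LEFT FOR VALUES (10, 20, 30)))
    /// ```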
9452    pub fn parse_option_partition(&mut self) -> Result<SqlOption, ParserError> {
9453        self.expect_keyword_is(Keyword::PARTITION)?;
9454        self.expect_token(&Token::LParen)?;
9455        let column_name = self.parse_identifier()?;
9456
9457        self.expect_keyword_is(Keyword::RANGE)?;
9458        let range_direction = if self.parse_keyword(Keyword::LEFT) {
9459            Some(PartitionRangeDirection::Left)
9460        } else if self.parse_keyword(Keyword::RIGHT) {
9461            Some(PartitionRangeDirection::Right)
9462        } else {
9463            None
9464        };
9465
9466        self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;
9467        self.expect_token(&Token::LParen)?;
9468
9469        let for_values = self.parse_comma_separated(Parser::parse_expr)?;
9470
9471        self.expect_token(&Token::RParen)?;
9472        self.expect_token(&Token::RParen)?;
9473
9474        Ok(SqlOption::Partition {
9475            column_name,
9476            range_direction,
9477            for_values,
9478        })
9479    }
9480
9481    pub fn parse_partition(&mut self) -> Result<Partition, ParserError> {
9482        self.expect_token(&Token::LParen)?;
9483        let partitions = self.parse_comma_separated(Parser::parse_expr)?;
9484        self.expect_token(&Token::RParen)?;
9485        Ok(Partition::Partitions(partitions))
9486    }
9487
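    /// Parses the parenthesized `SELECT` body of a ClickHouse-style projection,
    /// e.g. the trailing part of (illustrative):
    /// ```sql
    /// ALTER TABLE t ADD PROJECTION p (SELECT a, sum(b) GROUP BY a ORDER BY a)
    /// ```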
9488    pub fn parse_projection_select(&mut self) -> Result<ProjectionSelect, ParserError> {
9489        self.expect_token(&Token::LParen)?;
9490        self.expect_keyword_is(Keyword::SELECT)?;
9491        let projection = self.parse_projection()?;
9492        let group_by = self.parse_optional_group_by()?;
9493        let order_by = self.parse_optional_order_by()?;
9494        self.expect_token(&Token::RParen)?;
9495        Ok(ProjectionSelect {
9496            projection,
9497            group_by,
9498            order_by,
9499        })
9500    }
9501    pub fn parse_alter_table_add_projection(&mut self) -> Result<AlterTableOperation, ParserError> {
9502        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
9503        let name = self.parse_identifier()?;
9504        let query = self.parse_projection_select()?;
9505        Ok(AlterTableOperation::AddProjection {
9506            if_not_exists,
9507            name,
9508            select: query,
9509        })
9510    }
9511
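    /// Parses a single `ALTER TABLE` operation, e.g. the `ADD CONSTRAINT ...`
    /// part of (illustrative):
    /// ```sql
    /// ALTER TABLE users ADD CONSTRAINT uq_email UNIQUE (email)
    /// ```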
9512    pub fn parse_alter_table_operation(&mut self) -> Result<AlterTableOperation, ParserError> {
9513        let operation = if self.parse_keyword(Keyword::ADD) {
9514            if let Some(constraint) = self.parse_optional_table_constraint()? {
9515                let not_valid = self.parse_keywords(&[Keyword::NOT, Keyword::VALID]);
9516                AlterTableOperation::AddConstraint {
9517                    constraint,
9518                    not_valid,
9519                }
9520            } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
9521                && self.parse_keyword(Keyword::PROJECTION)
9522            {
9523                return self.parse_alter_table_add_projection();
9524            } else {
9525                let if_not_exists =
9526                    self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
9527                let mut new_partitions = vec![];
9528                loop {
9529                    if self.parse_keyword(Keyword::PARTITION) {
9530                        new_partitions.push(self.parse_partition()?);
9531                    } else {
9532                        break;
9533                    }
9534                }
9535                if !new_partitions.is_empty() {
9536                    AlterTableOperation::AddPartitions {
9537                        if_not_exists,
9538                        new_partitions,
9539                    }
9540                } else {
9541                    let column_keyword = self.parse_keyword(Keyword::COLUMN);
9542
9543                    let if_not_exists = if dialect_of!(self is PostgreSqlDialect | BigQueryDialect | DuckDbDialect | GenericDialect)
9544                    {
9545                        self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS])
9546                            || if_not_exists
9547                    } else {
9548                        false
9549                    };
9550
9551                    let column_def = self.parse_column_def()?;
9552
9553                    let column_position = self.parse_column_position()?;
9554
9555                    AlterTableOperation::AddColumn {
9556                        column_keyword,
9557                        if_not_exists,
9558                        column_def,
9559                        column_position,
9560                    }
9561                }
9562            }
9563        } else if self.parse_keyword(Keyword::RENAME) {
9564            if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::CONSTRAINT) {
9565                let old_name = self.parse_identifier()?;
9566                self.expect_keyword_is(Keyword::TO)?;
9567                let new_name = self.parse_identifier()?;
9568                AlterTableOperation::RenameConstraint { old_name, new_name }
9569            } else if self.parse_keyword(Keyword::TO) {
9570                let table_name = self.parse_object_name(false)?;
9571                AlterTableOperation::RenameTable {
9572                    table_name: RenameTableNameKind::To(table_name),
9573                }
9574            } else if self.parse_keyword(Keyword::AS) {
9575                let table_name = self.parse_object_name(false)?;
9576                AlterTableOperation::RenameTable {
9577                    table_name: RenameTableNameKind::As(table_name),
9578                }
9579            } else {
9580                let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
9581                let old_column_name = self.parse_identifier()?;
9582                self.expect_keyword_is(Keyword::TO)?;
9583                let new_column_name = self.parse_identifier()?;
9584                AlterTableOperation::RenameColumn {
9585                    old_column_name,
9586                    new_column_name,
9587                }
9588            }
9589        } else if self.parse_keyword(Keyword::DISABLE) {
9590            if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
9591                AlterTableOperation::DisableRowLevelSecurity {}
9592            } else if self.parse_keyword(Keyword::RULE) {
9593                let name = self.parse_identifier()?;
9594                AlterTableOperation::DisableRule { name }
9595            } else if self.parse_keyword(Keyword::TRIGGER) {
9596                let name = self.parse_identifier()?;
9597                AlterTableOperation::DisableTrigger { name }
9598            } else {
9599                return self.expected(
9600                    "ROW LEVEL SECURITY, RULE, or TRIGGER after DISABLE",
9601                    self.peek_token(),
9602                );
9603            }
9604        } else if self.parse_keyword(Keyword::ENABLE) {
9605            if self.parse_keywords(&[Keyword::ALWAYS, Keyword::RULE]) {
9606                let name = self.parse_identifier()?;
9607                AlterTableOperation::EnableAlwaysRule { name }
9608            } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::TRIGGER]) {
9609                let name = self.parse_identifier()?;
9610                AlterTableOperation::EnableAlwaysTrigger { name }
9611            } else if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
9612                AlterTableOperation::EnableRowLevelSecurity {}
9613            } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::RULE]) {
9614                let name = self.parse_identifier()?;
9615                AlterTableOperation::EnableReplicaRule { name }
9616            } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::TRIGGER]) {
9617                let name = self.parse_identifier()?;
9618                AlterTableOperation::EnableReplicaTrigger { name }
9619            } else if self.parse_keyword(Keyword::RULE) {
9620                let name = self.parse_identifier()?;
9621                AlterTableOperation::EnableRule { name }
9622            } else if self.parse_keyword(Keyword::TRIGGER) {
9623                let name = self.parse_identifier()?;
9624                AlterTableOperation::EnableTrigger { name }
9625            } else {
9626                return self.expected(
9627                    "ALWAYS, REPLICA, ROW LEVEL SECURITY, RULE, or TRIGGER after ENABLE",
9628                    self.peek_token(),
9629                );
9630            }
9631        } else if self.parse_keywords(&[Keyword::CLEAR, Keyword::PROJECTION])
9632            && dialect_of!(self is ClickHouseDialect|GenericDialect)
9633        {
9634            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9635            let name = self.parse_identifier()?;
9636            let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
9637                Some(self.parse_identifier()?)
9638            } else {
9639                None
9640            };
9641            AlterTableOperation::ClearProjection {
9642                if_exists,
9643                name,
9644                partition,
9645            }
9646        } else if self.parse_keywords(&[Keyword::MATERIALIZE, Keyword::PROJECTION])
9647            && dialect_of!(self is ClickHouseDialect|GenericDialect)
9648        {
9649            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9650            let name = self.parse_identifier()?;
9651            let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
9652                Some(self.parse_identifier()?)
9653            } else {
9654                None
9655            };
9656            AlterTableOperation::MaterializeProjection {
9657                if_exists,
9658                name,
9659                partition,
9660            }
9661        } else if self.parse_keyword(Keyword::DROP) {
9662            if self.parse_keywords(&[Keyword::IF, Keyword::EXISTS, Keyword::PARTITION]) {
9663                self.expect_token(&Token::LParen)?;
9664                let partitions = self.parse_comma_separated(Parser::parse_expr)?;
9665                self.expect_token(&Token::RParen)?;
9666                AlterTableOperation::DropPartitions {
9667                    partitions,
9668                    if_exists: true,
9669                }
9670            } else if self.parse_keyword(Keyword::PARTITION) {
9671                self.expect_token(&Token::LParen)?;
9672                let partitions = self.parse_comma_separated(Parser::parse_expr)?;
9673                self.expect_token(&Token::RParen)?;
9674                AlterTableOperation::DropPartitions {
9675                    partitions,
9676                    if_exists: false,
9677                }
9678            } else if self.parse_keyword(Keyword::CONSTRAINT) {
9679                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9680                let name = self.parse_identifier()?;
9681                let drop_behavior = self.parse_optional_drop_behavior();
9682                AlterTableOperation::DropConstraint {
9683                    if_exists,
9684                    name,
9685                    drop_behavior,
9686                }
9687            } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
9688                let drop_behavior = self.parse_optional_drop_behavior();
9689                AlterTableOperation::DropPrimaryKey { drop_behavior }
9690            } else if self.parse_keywords(&[Keyword::FOREIGN, Keyword::KEY]) {
9691                let name = self.parse_identifier()?;
9692                let drop_behavior = self.parse_optional_drop_behavior();
9693                AlterTableOperation::DropForeignKey {
9694                    name,
9695                    drop_behavior,
9696                }
9697            } else if self.parse_keyword(Keyword::INDEX) {
9698                let name = self.parse_identifier()?;
9699                AlterTableOperation::DropIndex { name }
9700            } else if self.parse_keyword(Keyword::PROJECTION)
9701                && dialect_of!(self is ClickHouseDialect|GenericDialect)
9702            {
9703                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9704                let name = self.parse_identifier()?;
9705                AlterTableOperation::DropProjection { if_exists, name }
9706            } else if self.parse_keywords(&[Keyword::CLUSTERING, Keyword::KEY]) {
9707                AlterTableOperation::DropClusteringKey
9708            } else {
9709                let has_column_keyword = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
9710                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9711                let column_names = if self.dialect.supports_comma_separated_drop_column_list() {
9712                    self.parse_comma_separated(Parser::parse_identifier)?
9713                } else {
9714                    vec![self.parse_identifier()?]
9715                };
9716                let drop_behavior = self.parse_optional_drop_behavior();
9717                AlterTableOperation::DropColumn {
9718                    has_column_keyword,
9719                    column_names,
9720                    if_exists,
9721                    drop_behavior,
9722                }
9723            }
9724        } else if self.parse_keyword(Keyword::PARTITION) {
9725            self.expect_token(&Token::LParen)?;
9726            let before = self.parse_comma_separated(Parser::parse_expr)?;
9727            self.expect_token(&Token::RParen)?;
9728            self.expect_keyword_is(Keyword::RENAME)?;
9729            self.expect_keywords(&[Keyword::TO, Keyword::PARTITION])?;
9730            self.expect_token(&Token::LParen)?;
9731            let renames = self.parse_comma_separated(Parser::parse_expr)?;
9732            self.expect_token(&Token::RParen)?;
9733            AlterTableOperation::RenamePartitions {
9734                old_partitions: before,
9735                new_partitions: renames,
9736            }
9737        } else if self.parse_keyword(Keyword::CHANGE) {
9738            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
9739            let old_name = self.parse_identifier()?;
9740            let new_name = self.parse_identifier()?;
9741            let data_type = self.parse_data_type()?;
9742            let mut options = vec![];
9743            while let Some(option) = self.parse_optional_column_option()? {
9744                options.push(option);
9745            }
9746
9747            let column_position = self.parse_column_position()?;
9748
9749            AlterTableOperation::ChangeColumn {
9750                old_name,
9751                new_name,
9752                data_type,
9753                options,
9754                column_position,
9755            }
9756        } else if self.parse_keyword(Keyword::MODIFY) {
9757            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
9758            let col_name = self.parse_identifier()?;
9759            let data_type = self.parse_data_type()?;
9760            let mut options = vec![];
9761            while let Some(option) = self.parse_optional_column_option()? {
9762                options.push(option);
9763            }
9764
9765            let column_position = self.parse_column_position()?;
9766
9767            AlterTableOperation::ModifyColumn {
9768                col_name,
9769                data_type,
9770                options,
9771                column_position,
9772            }
9773        } else if self.parse_keyword(Keyword::ALTER) {
9774            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
9775            let column_name = self.parse_identifier()?;
9776            let is_postgresql = dialect_of!(self is PostgreSqlDialect);
9777
9778            let op: AlterColumnOperation = if self.parse_keywords(&[
9779                Keyword::SET,
9780                Keyword::NOT,
9781                Keyword::NULL,
9782            ]) {
9783                AlterColumnOperation::SetNotNull {}
9784            } else if self.parse_keywords(&[Keyword::DROP, Keyword::NOT, Keyword::NULL]) {
9785                AlterColumnOperation::DropNotNull {}
9786            } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
9787                AlterColumnOperation::SetDefault {
9788                    value: self.parse_expr()?,
9789                }
9790            } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
9791                AlterColumnOperation::DropDefault {}
9792            } else if self.parse_keywords(&[Keyword::SET, Keyword::DATA, Keyword::TYPE]) {
9793                self.parse_set_data_type(true)?
9794            } else if self.parse_keyword(Keyword::TYPE) {
9795                self.parse_set_data_type(false)?
9796            } else if self.parse_keywords(&[Keyword::ADD, Keyword::GENERATED]) {
9797                let generated_as = if self.parse_keyword(Keyword::ALWAYS) {
9798                    Some(GeneratedAs::Always)
9799                } else if self.parse_keywords(&[Keyword::BY, Keyword::DEFAULT]) {
9800                    Some(GeneratedAs::ByDefault)
9801                } else {
9802                    None
9803                };
9804
9805                self.expect_keywords(&[Keyword::AS, Keyword::IDENTITY])?;
9806
9807                let mut sequence_options: Option<Vec<SequenceOptions>> = None;
9808
9809                if self.peek_token().token == Token::LParen {
9810                    self.expect_token(&Token::LParen)?;
9811                    sequence_options = Some(self.parse_create_sequence_options()?);
9812                    self.expect_token(&Token::RParen)?;
9813                }
9814
9815                AlterColumnOperation::AddGenerated {
9816                    generated_as,
9817                    sequence_options,
9818                }
9819            } else {
9820                let message = if is_postgresql {
9821                    "SET/DROP NOT NULL, SET DEFAULT, SET DATA TYPE, or ADD GENERATED after ALTER COLUMN"
9822                } else {
9823                    "SET/DROP NOT NULL, SET DEFAULT, or SET DATA TYPE after ALTER COLUMN"
9824                };
9825
9826                return self.expected(message, self.peek_token());
9827            };
9828            AlterTableOperation::AlterColumn { column_name, op }
9829        } else if self.parse_keyword(Keyword::SWAP) {
9830            self.expect_keyword_is(Keyword::WITH)?;
9831            let table_name = self.parse_object_name(false)?;
9832            AlterTableOperation::SwapWith { table_name }
9833        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect)
9834            && self.parse_keywords(&[Keyword::OWNER, Keyword::TO])
9835        {
9836            let new_owner = self.parse_owner()?;
9837            AlterTableOperation::OwnerTo { new_owner }
9838        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
9839            && self.parse_keyword(Keyword::ATTACH)
9840        {
9841            AlterTableOperation::AttachPartition {
9842                partition: self.parse_part_or_partition()?,
9843            }
9844        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
9845            && self.parse_keyword(Keyword::DETACH)
9846        {
9847            AlterTableOperation::DetachPartition {
9848                partition: self.parse_part_or_partition()?,
9849            }
9850        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
9851            && self.parse_keyword(Keyword::FREEZE)
9852        {
9853            let partition = self.parse_part_or_partition()?;
9854            let with_name = if self.parse_keyword(Keyword::WITH) {
9855                self.expect_keyword_is(Keyword::NAME)?;
9856                Some(self.parse_identifier()?)
9857            } else {
9858                None
9859            };
9860            AlterTableOperation::FreezePartition {
9861                partition,
9862                with_name,
9863            }
9864        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
9865            && self.parse_keyword(Keyword::UNFREEZE)
9866        {
9867            let partition = self.parse_part_or_partition()?;
9868            let with_name = if self.parse_keyword(Keyword::WITH) {
9869                self.expect_keyword_is(Keyword::NAME)?;
9870                Some(self.parse_identifier()?)
9871            } else {
9872                None
9873            };
9874            AlterTableOperation::UnfreezePartition {
9875                partition,
9876                with_name,
9877            }
9878        } else if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
9879            self.expect_token(&Token::LParen)?;
9880            let exprs = self.parse_comma_separated(|parser| parser.parse_expr())?;
9881            self.expect_token(&Token::RParen)?;
9882            AlterTableOperation::ClusterBy { exprs }
9883        } else if self.parse_keywords(&[Keyword::SUSPEND, Keyword::RECLUSTER]) {
9884            AlterTableOperation::SuspendRecluster
9885        } else if self.parse_keywords(&[Keyword::RESUME, Keyword::RECLUSTER]) {
9886            AlterTableOperation::ResumeRecluster
9887        } else if self.parse_keyword(Keyword::LOCK) {
9888            let equals = self.consume_token(&Token::Eq);
9889            let lock = match self.parse_one_of_keywords(&[
9890                Keyword::DEFAULT,
9891                Keyword::EXCLUSIVE,
9892                Keyword::NONE,
9893                Keyword::SHARED,
9894            ]) {
9895                Some(Keyword::DEFAULT) => AlterTableLock::Default,
9896                Some(Keyword::EXCLUSIVE) => AlterTableLock::Exclusive,
9897                Some(Keyword::NONE) => AlterTableLock::None,
9898                Some(Keyword::SHARED) => AlterTableLock::Shared,
9899                _ => self.expected(
9900                    "DEFAULT, EXCLUSIVE, NONE or SHARED after LOCK [=]",
9901                    self.peek_token(),
9902                )?,
9903            };
9904            AlterTableOperation::Lock { equals, lock }
9905        } else if self.parse_keyword(Keyword::ALGORITHM) {
9906            let equals = self.consume_token(&Token::Eq);
9907            let algorithm = match self.parse_one_of_keywords(&[
9908                Keyword::DEFAULT,
9909                Keyword::INSTANT,
9910                Keyword::INPLACE,
9911                Keyword::COPY,
9912            ]) {
9913                Some(Keyword::DEFAULT) => AlterTableAlgorithm::Default,
9914                Some(Keyword::INSTANT) => AlterTableAlgorithm::Instant,
9915                Some(Keyword::INPLACE) => AlterTableAlgorithm::Inplace,
9916                Some(Keyword::COPY) => AlterTableAlgorithm::Copy,
9917                _ => self.expected(
9918                    "DEFAULT, INSTANT, INPLACE, or COPY after ALGORITHM [=]",
9919                    self.peek_token(),
9920                )?,
9921            };
9922            AlterTableOperation::Algorithm { equals, algorithm }
9923        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
9924            let equals = self.consume_token(&Token::Eq);
9925            let value = self.parse_number_value()?;
9926            AlterTableOperation::AutoIncrement { equals, value }
9927        } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::IDENTITY]) {
9928            let identity = if self.parse_keyword(Keyword::NONE) {
9929                ReplicaIdentity::None
9930            } else if self.parse_keyword(Keyword::FULL) {
9931                ReplicaIdentity::Full
9932            } else if self.parse_keyword(Keyword::DEFAULT) {
9933                ReplicaIdentity::Default
9934            } else if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
9935                ReplicaIdentity::Index(self.parse_identifier()?)
9936            } else {
9937                return self.expected(
9938                    "NONE, FULL, DEFAULT, or USING INDEX index_name after REPLICA IDENTITY",
9939                    self.peek_token(),
9940                );
9941            };
9942
9943            AlterTableOperation::ReplicaIdentity { identity }
9944        } else if self.parse_keywords(&[Keyword::VALIDATE, Keyword::CONSTRAINT]) {
9945            let name = self.parse_identifier()?;
9946            AlterTableOperation::ValidateConstraint { name }
9947        } else {
9948            let mut options =
9949                self.parse_options_with_keywords(&[Keyword::SET, Keyword::TBLPROPERTIES])?;
9950            if !options.is_empty() {
9951                AlterTableOperation::SetTblProperties {
9952                    table_properties: options,
9953                }
9954            } else {
9955                options = self.parse_options(Keyword::SET)?;
9956                if !options.is_empty() {
9957                    AlterTableOperation::SetOptionsParens { options }
9958                } else {
9959                    return self.expected(
9960                        "ADD, RENAME, PARTITION, SWAP, DROP, REPLICA IDENTITY, SET, or SET TBLPROPERTIES after ALTER TABLE",
9961                        self.peek_token(),
9962                    );
9963                }
9964            }
9965        };
9966        Ok(operation)
9967    }
9968
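    /// Parses the remainder of an `ALTER COLUMN ... [SET DATA] TYPE` operation,
    /// e.g. the part after `TYPE` in (illustrative; the `USING` clause is only
    /// accepted by dialects that support it):
    /// ```sql
    /// ALTER TABLE t ALTER COLUMN c SET DATA TYPE BIGINT USING c::BIGINT
    /// ```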
9969    fn parse_set_data_type(&mut self, had_set: bool) -> Result<AlterColumnOperation, ParserError> {
9970        let data_type = self.parse_data_type()?;
9971        let using = if self.dialect.supports_alter_column_type_using()
9972            && self.parse_keyword(Keyword::USING)
9973        {
9974            Some(self.parse_expr()?)
9975        } else {
9976            None
9977        };
9978        Ok(AlterColumnOperation::SetDataType {
9979            data_type,
9980            using,
9981            had_set,
9982        })
9983    }
9984
9985    fn parse_part_or_partition(&mut self) -> Result<Partition, ParserError> {
9986        let keyword = self.expect_one_of_keywords(&[Keyword::PART, Keyword::PARTITION])?;
9987        match keyword {
9988            Keyword::PART => Ok(Partition::Part(self.parse_expr()?)),
9989            Keyword::PARTITION => Ok(Partition::Expr(self.parse_expr()?)),
9990            // unreachable because expect_one_of_keywords used above
9991            unexpected_keyword => Err(ParserError::ParserError(
9992                format!("Internal parser error: expected any of {{PART, PARTITION}}, got {unexpected_keyword:?}"),
9993            )),
9994        }
9995    }
9996
9997    pub fn parse_alter(&mut self) -> Result<Statement, ParserError> {
9998        let object_type = self.expect_one_of_keywords(&[
9999            Keyword::VIEW,
10000            Keyword::TYPE,
10001            Keyword::TABLE,
10002            Keyword::INDEX,
10003            Keyword::ROLE,
10004            Keyword::POLICY,
10005            Keyword::CONNECTOR,
10006            Keyword::ICEBERG,
10007            Keyword::SCHEMA,
10008            Keyword::USER,
10009            Keyword::OPERATOR,
10010        ])?;
10011        match object_type {
10012            Keyword::SCHEMA => {
10013                self.prev_token();
10014                self.prev_token();
10015                self.parse_alter_schema()
10016            }
10017            Keyword::VIEW => self.parse_alter_view(),
10018            Keyword::TYPE => self.parse_alter_type(),
10019            Keyword::TABLE => self.parse_alter_table(false),
10020            Keyword::ICEBERG => {
10021                self.expect_keyword(Keyword::TABLE)?;
10022                self.parse_alter_table(true)
10023            }
10024            Keyword::INDEX => {
10025                let index_name = self.parse_object_name(false)?;
10026                let operation = if self.parse_keyword(Keyword::RENAME) {
10027                    if self.parse_keyword(Keyword::TO) {
10028                        let index_name = self.parse_object_name(false)?;
10029                        AlterIndexOperation::RenameIndex { index_name }
10030                    } else {
10031                        return self.expected("TO after RENAME", self.peek_token());
10032                    }
10033                } else {
10034                    return self.expected("RENAME after ALTER INDEX", self.peek_token());
10035                };
10036
10037                Ok(Statement::AlterIndex {
10038                    name: index_name,
10039                    operation,
10040                })
10041            }
10042            Keyword::OPERATOR => {
10043                if self.parse_keyword(Keyword::FAMILY) {
10044                    self.parse_alter_operator_family()
10045                } else {
10046                    self.parse_alter_operator()
10047                }
10048            }
10049            Keyword::ROLE => self.parse_alter_role(),
10050            Keyword::POLICY => self.parse_alter_policy(),
10051            Keyword::CONNECTOR => self.parse_alter_connector(),
10052            Keyword::USER => self.parse_alter_user(),
10053            // unreachable because expect_one_of_keywords used above
10054            unexpected_keyword => Err(ParserError::ParserError(
10055                format!("Internal parser error: expected any of {{VIEW, TYPE, TABLE, INDEX, ROLE, POLICY, CONNECTOR, ICEBERG, SCHEMA, USER, OPERATOR}}, got {unexpected_keyword:?}"),
10056            )),
10057        }
10058    }
10059
10060    /// Parse a [Statement::AlterTable]
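    ///
    /// For example (illustrative):
    /// ```sql
    /// ALTER TABLE IF EXISTS users ADD COLUMN last_login TIMESTAMP
    /// ```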
10061    pub fn parse_alter_table(&mut self, iceberg: bool) -> Result<Statement, ParserError> {
10062        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10063        let only = self.parse_keyword(Keyword::ONLY); // [ ONLY ]
10064        let table_name = self.parse_object_name(false)?;
10065        let on_cluster = self.parse_optional_on_cluster()?;
10066        let operations = self.parse_comma_separated(Parser::parse_alter_table_operation)?;
10067
10068        let mut location = None;
10069        if self.parse_keyword(Keyword::LOCATION) {
10070            location = Some(HiveSetLocation {
10071                has_set: false,
10072                location: self.parse_identifier()?,
10073            });
10074        } else if self.parse_keywords(&[Keyword::SET, Keyword::LOCATION]) {
10075            location = Some(HiveSetLocation {
10076                has_set: true,
10077                location: self.parse_identifier()?,
10078            });
10079        }
10080
10081        let end_token = if self.peek_token_ref().token == Token::SemiColon {
10082            self.peek_token_ref().clone()
10083        } else {
10084            self.get_current_token().clone()
10085        };
10086
10087        Ok(AlterTable {
10088            name: table_name,
10089            if_exists,
10090            only,
10091            operations,
10092            location,
10093            on_cluster,
10094            table_type: if iceberg {
10095                Some(AlterTableType::Iceberg)
10096            } else {
10097                None
10098            },
10099            end_token: AttachedToken(end_token),
10100        }
10101        .into())
10102    }
10103
10104    pub fn parse_alter_view(&mut self) -> Result<Statement, ParserError> {
10105        let name = self.parse_object_name(false)?;
10106        let columns = self.parse_parenthesized_column_list(Optional, false)?;
10107
10108        let with_options = self.parse_options(Keyword::WITH)?;
10109
10110        self.expect_keyword_is(Keyword::AS)?;
10111        let query = self.parse_query()?;
10112
10113        Ok(Statement::AlterView {
10114            name,
10115            columns,
10116            query,
10117            with_options,
10118        })
10119    }
10120
10121    /// Parse a [Statement::AlterType]
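    ///
    /// For example (illustrative, PostgreSQL-style):
    /// ```sql
    /// ALTER TYPE mood ADD VALUE IF NOT EXISTS 'curious' AFTER 'happy'
    /// ```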
10122    pub fn parse_alter_type(&mut self) -> Result<Statement, ParserError> {
10123        let name = self.parse_object_name(false)?;
10124
10125        if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
10126            let new_name = self.parse_identifier()?;
10127            Ok(Statement::AlterType(AlterType {
10128                name,
10129                operation: AlterTypeOperation::Rename(AlterTypeRename { new_name }),
10130            }))
10131        } else if self.parse_keywords(&[Keyword::ADD, Keyword::VALUE]) {
10132            let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
10133            let new_enum_value = self.parse_identifier()?;
10134            let position = if self.parse_keyword(Keyword::BEFORE) {
10135                Some(AlterTypeAddValuePosition::Before(self.parse_identifier()?))
10136            } else if self.parse_keyword(Keyword::AFTER) {
10137                Some(AlterTypeAddValuePosition::After(self.parse_identifier()?))
10138            } else {
10139                None
10140            };
10141
10142            Ok(Statement::AlterType(AlterType {
10143                name,
10144                operation: AlterTypeOperation::AddValue(AlterTypeAddValue {
10145                    if_not_exists,
10146                    value: new_enum_value,
10147                    position,
10148                }),
10149            }))
10150        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::VALUE]) {
10151            let existing_enum_value = self.parse_identifier()?;
10152            self.expect_keyword(Keyword::TO)?;
10153            let new_enum_value = self.parse_identifier()?;
10154
10155            Ok(Statement::AlterType(AlterType {
10156                name,
10157                operation: AlterTypeOperation::RenameValue(AlterTypeRenameValue {
10158                    from: existing_enum_value,
10159                    to: new_enum_value,
10160                }),
10161            }))
10162        } else {
10163            self.expected_ref(
10164                "{RENAME TO | { RENAME | ADD } VALUE}",
10165                self.peek_token_ref(),
10166            )
10167        }
10168    }
10169
10170    /// Parse a [Statement::AlterOperator]
10171    ///
10172    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-alteroperator.html)
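    ///
    /// For example (illustrative, adapted from the PostgreSQL docs):
    /// ```sql
    /// ALTER OPERATOR @@ (text, text) OWNER TO joe
    /// ```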
10173    pub fn parse_alter_operator(&mut self) -> Result<Statement, ParserError> {
10174        let name = self.parse_operator_name()?;
10175
10176        // Parse (left_type, right_type)
10177        self.expect_token(&Token::LParen)?;
10178
10179        let left_type = if self.parse_keyword(Keyword::NONE) {
10180            None
10181        } else {
10182            Some(self.parse_data_type()?)
10183        };
10184
10185        self.expect_token(&Token::Comma)?;
10186        let right_type = self.parse_data_type()?;
10187        self.expect_token(&Token::RParen)?;
10188
10189        // Parse the operation
10190        let operation = if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
10191            let owner = if self.parse_keyword(Keyword::CURRENT_ROLE) {
10192                Owner::CurrentRole
10193            } else if self.parse_keyword(Keyword::CURRENT_USER) {
10194                Owner::CurrentUser
10195            } else if self.parse_keyword(Keyword::SESSION_USER) {
10196                Owner::SessionUser
10197            } else {
10198                Owner::Ident(self.parse_identifier()?)
10199            };
10200            AlterOperatorOperation::OwnerTo(owner)
10201        } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
10202            let schema_name = self.parse_object_name(false)?;
10203            AlterOperatorOperation::SetSchema { schema_name }
10204        } else if self.parse_keyword(Keyword::SET) {
10205            self.expect_token(&Token::LParen)?;
10206
10207            let mut options = Vec::new();
10208            loop {
10209                let keyword = self.expect_one_of_keywords(&[
10210                    Keyword::RESTRICT,
10211                    Keyword::JOIN,
10212                    Keyword::COMMUTATOR,
10213                    Keyword::NEGATOR,
10214                    Keyword::HASHES,
10215                    Keyword::MERGES,
10216                ])?;
10217
10218                match keyword {
10219                    Keyword::RESTRICT => {
10220                        self.expect_token(&Token::Eq)?;
10221                        let proc_name = if self.parse_keyword(Keyword::NONE) {
10222                            None
10223                        } else {
10224                            Some(self.parse_object_name(false)?)
10225                        };
10226                        options.push(OperatorOption::Restrict(proc_name));
10227                    }
10228                    Keyword::JOIN => {
10229                        self.expect_token(&Token::Eq)?;
10230                        let proc_name = if self.parse_keyword(Keyword::NONE) {
10231                            None
10232                        } else {
10233                            Some(self.parse_object_name(false)?)
10234                        };
10235                        options.push(OperatorOption::Join(proc_name));
10236                    }
10237                    Keyword::COMMUTATOR => {
10238                        self.expect_token(&Token::Eq)?;
10239                        let op_name = self.parse_operator_name()?;
10240                        options.push(OperatorOption::Commutator(op_name));
10241                    }
10242                    Keyword::NEGATOR => {
10243                        self.expect_token(&Token::Eq)?;
10244                        let op_name = self.parse_operator_name()?;
10245                        options.push(OperatorOption::Negator(op_name));
10246                    }
10247                    Keyword::HASHES => {
10248                        options.push(OperatorOption::Hashes);
10249                    }
10250                    Keyword::MERGES => {
10251                        options.push(OperatorOption::Merges);
10252                    }
10253                    unexpected_keyword => return Err(ParserError::ParserError(
10254                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in operator option"),
10255                    )),
10256                }
10257
10258                if !self.consume_token(&Token::Comma) {
10259                    break;
10260                }
10261            }
10262
10263            self.expect_token(&Token::RParen)?;
10264            AlterOperatorOperation::Set { options }
10265        } else {
10266            return self.expected_ref(
10267                "OWNER TO, SET SCHEMA, or SET after ALTER OPERATOR",
10268                self.peek_token_ref(),
10269            );
10270        };
10271
10272        Ok(Statement::AlterOperator(AlterOperator {
10273            name,
10274            left_type,
10275            right_type,
10276            operation,
10277        }))
10278    }
10279
10280    /// Parse an operator item for ALTER OPERATOR FAMILY ADD operations
10281    fn parse_operator_family_add_operator(&mut self) -> Result<OperatorFamilyItem, ParserError> {
10282        let strategy_number = self.parse_literal_uint()?;
10283        let operator_name = self.parse_operator_name()?;
10284
10285        // Operator argument types (required for ALTER OPERATOR FAMILY)
10286        self.expect_token(&Token::LParen)?;
10287        let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
10288        self.expect_token(&Token::RParen)?;
10289
10290        // Optional purpose
10291        let purpose = if self.parse_keyword(Keyword::FOR) {
10292            if self.parse_keyword(Keyword::SEARCH) {
10293                Some(OperatorPurpose::ForSearch)
10294            } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
10295                let sort_family = self.parse_object_name(false)?;
10296                Some(OperatorPurpose::ForOrderBy { sort_family })
10297            } else {
10298                return self.expected("SEARCH or ORDER BY after FOR", self.peek_token());
10299            }
10300        } else {
10301            None
10302        };
10303
10304        Ok(OperatorFamilyItem::Operator {
10305            strategy_number,
10306            operator_name,
10307            op_types,
10308            purpose,
10309        })
10310    }
10311
10312    /// Parse a function item for ALTER OPERATOR FAMILY ADD operations
10313    fn parse_operator_family_add_function(&mut self) -> Result<OperatorFamilyItem, ParserError> {
10314        let support_number = self.parse_literal_uint()?;
10315
10316        // Optional operator types
10317        let op_types = if self.consume_token(&Token::LParen) && self.peek_token() != Token::RParen {
10318            let types = self.parse_comma_separated(Parser::parse_data_type)?;
10319            self.expect_token(&Token::RParen)?;
10320            Some(types)
10321        } else if self.consume_token(&Token::RParen) {
10322            // The `(` was already consumed by the check above, so `()` means no operator types.
10323            Some(vec![])
10324        } else {
10325            None
10326        };
10327
10328        let function_name = self.parse_object_name(false)?;
10329
10330        // Function argument types
10331        let argument_types = if self.consume_token(&Token::LParen) {
10332            if self.peek_token() == Token::RParen {
10333                self.expect_token(&Token::RParen)?;
10334                vec![]
10335            } else {
10336                let types = self.parse_comma_separated(Parser::parse_data_type)?;
10337                self.expect_token(&Token::RParen)?;
10338                types
10339            }
10340        } else {
10341            vec![]
10342        };
10343
10344        Ok(OperatorFamilyItem::Function {
10345            support_number,
10346            op_types,
10347            function_name,
10348            argument_types,
10349        })
10350    }
10351
10352    /// Parse an operator item for ALTER OPERATOR FAMILY DROP operations
10353    fn parse_operator_family_drop_operator(
10354        &mut self,
10355    ) -> Result<OperatorFamilyDropItem, ParserError> {
10356        let strategy_number = self.parse_literal_uint()?;
10357
10358        // Operator argument types (required for DROP)
10359        self.expect_token(&Token::LParen)?;
10360        let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
10361        self.expect_token(&Token::RParen)?;
10362
10363        Ok(OperatorFamilyDropItem::Operator {
10364            strategy_number,
10365            op_types,
10366        })
10367    }
10368
10369    /// Parse a function item for ALTER OPERATOR FAMILY DROP operations
10370    fn parse_operator_family_drop_function(
10371        &mut self,
10372    ) -> Result<OperatorFamilyDropItem, ParserError> {
10373        let support_number = self.parse_literal_uint()?;
10374
10375        // Operator types (required for DROP)
10376        self.expect_token(&Token::LParen)?;
10377        let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
10378        self.expect_token(&Token::RParen)?;
10379
10380        Ok(OperatorFamilyDropItem::Function {
10381            support_number,
10382            op_types,
10383        })
10384    }
10385
10386    /// Parse an operator family item for ADD operations (dispatches to operator or function parsing)
10387    fn parse_operator_family_add_item(&mut self) -> Result<OperatorFamilyItem, ParserError> {
10388        if self.parse_keyword(Keyword::OPERATOR) {
10389            self.parse_operator_family_add_operator()
10390        } else if self.parse_keyword(Keyword::FUNCTION) {
10391            self.parse_operator_family_add_function()
10392        } else {
10393            self.expected("OPERATOR or FUNCTION", self.peek_token())
10394        }
10395    }
10396
10397    /// Parse an operator family item for DROP operations (dispatches to operator or function parsing)
10398    fn parse_operator_family_drop_item(&mut self) -> Result<OperatorFamilyDropItem, ParserError> {
10399        if self.parse_keyword(Keyword::OPERATOR) {
10400            self.parse_operator_family_drop_operator()
10401        } else if self.parse_keyword(Keyword::FUNCTION) {
10402            self.parse_operator_family_drop_function()
10403        } else {
10404            self.expected("OPERATOR or FUNCTION", self.peek_token())
10405        }
10406    }
10407
10408    /// Parse a [Statement::AlterOperatorFamily]
10409    /// See <https://www.postgresql.org/docs/current/sql-alteropfamily.html>
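    ///
    /// For example (illustrative, adapted from the PostgreSQL docs):
    /// ```sql
    /// ALTER OPERATOR FAMILY integer_ops USING btree ADD OPERATOR 1 < (int4, int2)
    /// ```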
10410    pub fn parse_alter_operator_family(&mut self) -> Result<Statement, ParserError> {
10411        let name = self.parse_object_name(false)?;
10412        self.expect_keyword(Keyword::USING)?;
10413        let using = self.parse_identifier()?;
10414
10415        let operation = if self.parse_keyword(Keyword::ADD) {
10416            let items = self.parse_comma_separated(Parser::parse_operator_family_add_item)?;
10417            AlterOperatorFamilyOperation::Add { items }
10418        } else if self.parse_keyword(Keyword::DROP) {
10419            let items = self.parse_comma_separated(Parser::parse_operator_family_drop_item)?;
10420            AlterOperatorFamilyOperation::Drop { items }
10421        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
10422            let new_name = self.parse_object_name(false)?;
10423            AlterOperatorFamilyOperation::RenameTo { new_name }
10424        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
10425            let owner = self.parse_owner()?;
10426            AlterOperatorFamilyOperation::OwnerTo(owner)
10427        } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
10428            let schema_name = self.parse_object_name(false)?;
10429            AlterOperatorFamilyOperation::SetSchema { schema_name }
10430        } else {
10431            return self.expected_ref(
10432                "ADD, DROP, RENAME TO, OWNER TO, or SET SCHEMA after ALTER OPERATOR FAMILY",
10433                self.peek_token_ref(),
10434            );
10435        };
10436
10437        Ok(Statement::AlterOperatorFamily(AlterOperatorFamily {
10438            name,
10439            using,
10440            operation,
10441        }))
10442    }
10443
10444    /// Parse a [Statement::AlterSchema]
10445    /// `ALTER SCHEMA [ IF EXISTS ] schema_name`
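    ///
    /// For example (illustrative, BigQuery-style):
    /// ```sql
    /// ALTER SCHEMA IF EXISTS mydataset SET OPTIONS (description = 'archived')
    /// ```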
10446    pub fn parse_alter_schema(&mut self) -> Result<Statement, ParserError> {
10447        self.expect_keywords(&[Keyword::ALTER, Keyword::SCHEMA])?;
10448        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10449        let name = self.parse_object_name(false)?;
10450        let operation = if self.parse_keywords(&[Keyword::SET, Keyword::OPTIONS]) {
10451            self.prev_token();
10452            let options = self.parse_options(Keyword::OPTIONS)?;
10453            AlterSchemaOperation::SetOptionsParens { options }
10454        } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT, Keyword::COLLATE]) {
10455            let collate = self.parse_expr()?;
10456            AlterSchemaOperation::SetDefaultCollate { collate }
10457        } else if self.parse_keywords(&[Keyword::ADD, Keyword::REPLICA]) {
10458            let replica = self.parse_identifier()?;
10459            let options = if self.peek_keyword(Keyword::OPTIONS) {
10460                Some(self.parse_options(Keyword::OPTIONS)?)
10461            } else {
10462                None
10463            };
10464            AlterSchemaOperation::AddReplica { replica, options }
10465        } else if self.parse_keywords(&[Keyword::DROP, Keyword::REPLICA]) {
10466            let replica = self.parse_identifier()?;
10467            AlterSchemaOperation::DropReplica { replica }
10468        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
10469            let new_name = self.parse_object_name(false)?;
10470            AlterSchemaOperation::Rename { name: new_name }
10471        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
10472            let owner = self.parse_owner()?;
10473            AlterSchemaOperation::OwnerTo { owner }
10474        } else {
10475            return self.expected_ref("ALTER SCHEMA operation", self.peek_token_ref());
10476        };
10477        Ok(Statement::AlterSchema(AlterSchema {
10478            name,
10479            if_exists,
10480            operations: vec![operation],
10481        }))
10482    }
10483
10484    /// Parse a `CALL procedure_name(arg1, arg2, ...)`
10485    /// or `CALL procedure_name` statement
10486    pub fn parse_call(&mut self) -> Result<Statement, ParserError> {
10487        let object_name = self.parse_object_name(false)?;
10488        if self.peek_token().token == Token::LParen {
10489            match self.parse_function(object_name)? {
10490                Expr::Function(f) => Ok(Statement::Call(f)),
10491                other => parser_err!(
10492                    format!("Expected a simple procedure call but found: {other}"),
10493                    self.peek_token().span.start
10494                ),
10495            }
10496        } else {
10497            Ok(Statement::Call(Function {
10498                name: object_name,
10499                uses_odbc_syntax: false,
10500                parameters: FunctionArguments::None,
10501                args: FunctionArguments::None,
10502                over: None,
10503                filter: None,
10504                null_treatment: None,
10505                within_group: vec![],
10506            }))
10507        }
10508    }
10509
10510    /// Parse a copy statement
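    ///
    /// For example (illustrative, PostgreSQL-style):
    /// ```sql
    /// COPY users (id, name) FROM 'users.csv' WITH (FORMAT csv, HEADER true)
    /// ```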
10511    pub fn parse_copy(&mut self) -> Result<Statement, ParserError> {
10512        let source;
10513        if self.consume_token(&Token::LParen) {
10514            source = CopySource::Query(self.parse_query()?);
10515            self.expect_token(&Token::RParen)?;
10516        } else {
10517            let table_name = self.parse_object_name(false)?;
10518            let columns = self.parse_parenthesized_column_list(Optional, false)?;
10519            source = CopySource::Table {
10520                table_name,
10521                columns,
10522            };
10523        }
10524        let to = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::TO]) {
10525            Some(Keyword::FROM) => false,
10526            Some(Keyword::TO) => true,
10527            _ => self.expected("FROM or TO", self.peek_token())?,
10528        };
10529        if !to {
10530            // Use a separate if statement to prevent Rust compiler from complaining about
10531            // "if statement in this position is unstable: https://github.com/rust-lang/rust/issues/53667"
10532            if let CopySource::Query(_) = source {
10533                return Err(ParserError::ParserError(
10534                    "COPY ... FROM does not support query as a source".to_string(),
10535                ));
10536            }
10537        }
10538        let target = if self.parse_keyword(Keyword::STDIN) {
10539            CopyTarget::Stdin
10540        } else if self.parse_keyword(Keyword::STDOUT) {
10541            CopyTarget::Stdout
10542        } else if self.parse_keyword(Keyword::PROGRAM) {
10543            CopyTarget::Program {
10544                command: self.parse_literal_string()?,
10545            }
10546        } else {
10547            CopyTarget::File {
10548                filename: self.parse_literal_string()?,
10549            }
10550        };
10551        let _ = self.parse_keyword(Keyword::WITH); // [ WITH ]
10552        let mut options = vec![];
10553        if self.consume_token(&Token::LParen) {
10554            options = self.parse_comma_separated(Parser::parse_copy_option)?;
10555            self.expect_token(&Token::RParen)?;
10556        }
10557        let mut legacy_options = vec![];
10558        while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
10559            legacy_options.push(opt);
10560        }
10561        let values = if let CopyTarget::Stdin = target {
10562            self.expect_token(&Token::SemiColon)?;
10563            self.parse_tsv()
10564        } else {
10565            vec![]
10566        };
10567        Ok(Statement::Copy {
10568            source,
10569            to,
10570            target,
10571            options,
10572            legacy_options,
10573            values,
10574        })
10575    }
10576
10577    /// Parse [Statement::Open]
10578    fn parse_open(&mut self) -> Result<Statement, ParserError> {
10579        self.expect_keyword(Keyword::OPEN)?;
10580        Ok(Statement::Open(OpenStatement {
10581            cursor_name: self.parse_identifier()?,
10582        }))
10583    }
10584
10585    pub fn parse_close(&mut self) -> Result<Statement, ParserError> {
10586        let cursor = if self.parse_keyword(Keyword::ALL) {
10587            CloseCursor::All
10588        } else {
10589            let name = self.parse_identifier()?;
10590
10591            CloseCursor::Specific { name }
10592        };
10593
10594        Ok(Statement::Close { cursor })
10595    }
10596
10597    fn parse_copy_option(&mut self) -> Result<CopyOption, ParserError> {
10598        let ret = match self.parse_one_of_keywords(&[
10599            Keyword::FORMAT,
10600            Keyword::FREEZE,
10601            Keyword::DELIMITER,
10602            Keyword::NULL,
10603            Keyword::HEADER,
10604            Keyword::QUOTE,
10605            Keyword::ESCAPE,
10606            Keyword::FORCE_QUOTE,
10607            Keyword::FORCE_NOT_NULL,
10608            Keyword::FORCE_NULL,
10609            Keyword::ENCODING,
10610        ]) {
10611            Some(Keyword::FORMAT) => CopyOption::Format(self.parse_identifier()?),
10612            Some(Keyword::FREEZE) => CopyOption::Freeze(!matches!(
10613                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
10614                Some(Keyword::FALSE)
10615            )),
10616            Some(Keyword::DELIMITER) => CopyOption::Delimiter(self.parse_literal_char()?),
10617            Some(Keyword::NULL) => CopyOption::Null(self.parse_literal_string()?),
10618            Some(Keyword::HEADER) => CopyOption::Header(!matches!(
10619                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
10620                Some(Keyword::FALSE)
10621            )),
10622            Some(Keyword::QUOTE) => CopyOption::Quote(self.parse_literal_char()?),
10623            Some(Keyword::ESCAPE) => CopyOption::Escape(self.parse_literal_char()?),
10624            Some(Keyword::FORCE_QUOTE) => {
10625                CopyOption::ForceQuote(self.parse_parenthesized_column_list(Mandatory, false)?)
10626            }
10627            Some(Keyword::FORCE_NOT_NULL) => {
10628                CopyOption::ForceNotNull(self.parse_parenthesized_column_list(Mandatory, false)?)
10629            }
10630            Some(Keyword::FORCE_NULL) => {
10631                CopyOption::ForceNull(self.parse_parenthesized_column_list(Mandatory, false)?)
10632            }
10633            Some(Keyword::ENCODING) => CopyOption::Encoding(self.parse_literal_string()?),
10634            _ => self.expected("option", self.peek_token())?,
10635        };
10636        Ok(ret)
10637    }
10638
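    /// Parse one legacy (non-parenthesized) COPY option, e.g. the older
    /// PostgreSQL `WITH CSV HEADER ...` form or Redshift COPY/UNLOAD options.
    ///
    /// A rough sketch of a statement that exercises this path (the file name
    /// and table are made up):
    ///
    /// ```ignore
    /// use sqlparser::dialect::PostgreSqlDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "COPY t FROM 'data.csv' WITH CSV HEADER DELIMITER ','";
    /// let stmts = Parser::parse_sql(&PostgreSqlDialect {}, sql).unwrap();
    /// ```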
10639    fn parse_copy_legacy_option(&mut self) -> Result<CopyLegacyOption, ParserError> {
10640        // FORMAT [ AS ] is optional
10641        if self.parse_keyword(Keyword::FORMAT) {
10642            let _ = self.parse_keyword(Keyword::AS);
10643        }
10644
10645        let ret = match self.parse_one_of_keywords(&[
10646            Keyword::ACCEPTANYDATE,
10647            Keyword::ACCEPTINVCHARS,
10648            Keyword::ADDQUOTES,
10649            Keyword::ALLOWOVERWRITE,
10650            Keyword::BINARY,
10651            Keyword::BLANKSASNULL,
10652            Keyword::BZIP2,
10653            Keyword::CLEANPATH,
10654            Keyword::COMPUPDATE,
10655            Keyword::CSV,
10656            Keyword::DATEFORMAT,
10657            Keyword::DELIMITER,
10658            Keyword::EMPTYASNULL,
10659            Keyword::ENCRYPTED,
10660            Keyword::ESCAPE,
10661            Keyword::EXTENSION,
10662            Keyword::FIXEDWIDTH,
10663            Keyword::GZIP,
10664            Keyword::HEADER,
10665            Keyword::IAM_ROLE,
10666            Keyword::IGNOREHEADER,
10667            Keyword::JSON,
10668            Keyword::MANIFEST,
10669            Keyword::MAXFILESIZE,
10670            Keyword::NULL,
10671            Keyword::PARALLEL,
10672            Keyword::PARQUET,
10673            Keyword::PARTITION,
10674            Keyword::REGION,
10675            Keyword::REMOVEQUOTES,
10676            Keyword::ROWGROUPSIZE,
10677            Keyword::STATUPDATE,
10678            Keyword::TIMEFORMAT,
10679            Keyword::TRUNCATECOLUMNS,
10680            Keyword::ZSTD,
10681        ]) {
10682            Some(Keyword::ACCEPTANYDATE) => CopyLegacyOption::AcceptAnyDate,
10683            Some(Keyword::ACCEPTINVCHARS) => {
10684                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
10685                let ch = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) {
10686                    Some(self.parse_literal_string()?)
10687                } else {
10688                    None
10689                };
10690                CopyLegacyOption::AcceptInvChars(ch)
10691            }
10692            Some(Keyword::ADDQUOTES) => CopyLegacyOption::AddQuotes,
10693            Some(Keyword::ALLOWOVERWRITE) => CopyLegacyOption::AllowOverwrite,
10694            Some(Keyword::BINARY) => CopyLegacyOption::Binary,
10695            Some(Keyword::BLANKSASNULL) => CopyLegacyOption::BlankAsNull,
10696            Some(Keyword::BZIP2) => CopyLegacyOption::Bzip2,
10697            Some(Keyword::CLEANPATH) => CopyLegacyOption::CleanPath,
10698            Some(Keyword::COMPUPDATE) => {
10699                let preset = self.parse_keyword(Keyword::PRESET);
10700                let enabled = match self.parse_one_of_keywords(&[
10701                    Keyword::TRUE,
10702                    Keyword::FALSE,
10703                    Keyword::ON,
10704                    Keyword::OFF,
10705                ]) {
10706                    Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
10707                    Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
10708                    _ => None,
10709                };
10710                CopyLegacyOption::CompUpdate { preset, enabled }
10711            }
10712            Some(Keyword::CSV) => CopyLegacyOption::Csv({
10713                let mut opts = vec![];
10714                while let Some(opt) =
10715                    self.maybe_parse(|parser| parser.parse_copy_legacy_csv_option())?
10716                {
10717                    opts.push(opt);
10718                }
10719                opts
10720            }),
10721            Some(Keyword::DATEFORMAT) => {
10722                let _ = self.parse_keyword(Keyword::AS);
10723                let fmt = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) {
10724                    Some(self.parse_literal_string()?)
10725                } else {
10726                    None
10727                };
10728                CopyLegacyOption::DateFormat(fmt)
10729            }
10730            Some(Keyword::DELIMITER) => {
10731                let _ = self.parse_keyword(Keyword::AS);
10732                CopyLegacyOption::Delimiter(self.parse_literal_char()?)
10733            }
10734            Some(Keyword::EMPTYASNULL) => CopyLegacyOption::EmptyAsNull,
10735            Some(Keyword::ENCRYPTED) => {
10736                let auto = self.parse_keyword(Keyword::AUTO);
10737                CopyLegacyOption::Encrypted { auto }
10738            }
10739            Some(Keyword::ESCAPE) => CopyLegacyOption::Escape,
10740            Some(Keyword::EXTENSION) => {
10741                let ext = self.parse_literal_string()?;
10742                CopyLegacyOption::Extension(ext)
10743            }
10744            Some(Keyword::FIXEDWIDTH) => {
10745                let spec = self.parse_literal_string()?;
10746                CopyLegacyOption::FixedWidth(spec)
10747            }
10748            Some(Keyword::GZIP) => CopyLegacyOption::Gzip,
10749            Some(Keyword::HEADER) => CopyLegacyOption::Header,
10750            Some(Keyword::IAM_ROLE) => CopyLegacyOption::IamRole(self.parse_iam_role_kind()?),
10751            Some(Keyword::IGNOREHEADER) => {
10752                let _ = self.parse_keyword(Keyword::AS);
10753                let num_rows = self.parse_literal_uint()?;
10754                CopyLegacyOption::IgnoreHeader(num_rows)
10755            }
10756            Some(Keyword::JSON) => CopyLegacyOption::Json,
10757            Some(Keyword::MANIFEST) => {
10758                let verbose = self.parse_keyword(Keyword::VERBOSE);
10759                CopyLegacyOption::Manifest { verbose }
10760            }
10761            Some(Keyword::MAXFILESIZE) => {
10762                let _ = self.parse_keyword(Keyword::AS);
10763                let size = self.parse_number_value()?.value;
10764                let unit = match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
10765                    Some(Keyword::MB) => Some(FileSizeUnit::MB),
10766                    Some(Keyword::GB) => Some(FileSizeUnit::GB),
10767                    _ => None,
10768                };
10769                CopyLegacyOption::MaxFileSize(FileSize { size, unit })
10770            }
10771            Some(Keyword::NULL) => {
10772                let _ = self.parse_keyword(Keyword::AS);
10773                CopyLegacyOption::Null(self.parse_literal_string()?)
10774            }
10775            Some(Keyword::PARALLEL) => {
10776                let enabled = match self.parse_one_of_keywords(&[
10777                    Keyword::TRUE,
10778                    Keyword::FALSE,
10779                    Keyword::ON,
10780                    Keyword::OFF,
10781                ]) {
10782                    Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
10783                    Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
10784                    _ => None,
10785                };
10786                CopyLegacyOption::Parallel(enabled)
10787            }
10788            Some(Keyword::PARQUET) => CopyLegacyOption::Parquet,
10789            Some(Keyword::PARTITION) => {
10790                self.expect_keyword(Keyword::BY)?;
10791                let columns = self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?;
10792                let include = self.parse_keyword(Keyword::INCLUDE);
10793                CopyLegacyOption::PartitionBy(UnloadPartitionBy { columns, include })
10794            }
10795            Some(Keyword::REGION) => {
10796                let _ = self.parse_keyword(Keyword::AS);
10797                let region = self.parse_literal_string()?;
10798                CopyLegacyOption::Region(region)
10799            }
10800            Some(Keyword::REMOVEQUOTES) => CopyLegacyOption::RemoveQuotes,
10801            Some(Keyword::ROWGROUPSIZE) => {
10802                let _ = self.parse_keyword(Keyword::AS);
10803                let file_size = self.parse_file_size()?;
10804                CopyLegacyOption::RowGroupSize(file_size)
10805            }
10806            Some(Keyword::STATUPDATE) => {
10807                let enabled = match self.parse_one_of_keywords(&[
10808                    Keyword::TRUE,
10809                    Keyword::FALSE,
10810                    Keyword::ON,
10811                    Keyword::OFF,
10812                ]) {
10813                    Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
10814                    Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
10815                    _ => None,
10816                };
10817                CopyLegacyOption::StatUpdate(enabled)
10818            }
10819            Some(Keyword::TIMEFORMAT) => {
10820                let _ = self.parse_keyword(Keyword::AS);
10821                let fmt = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) {
10822                    Some(self.parse_literal_string()?)
10823                } else {
10824                    None
10825                };
10826                CopyLegacyOption::TimeFormat(fmt)
10827            }
10828            Some(Keyword::TRUNCATECOLUMNS) => CopyLegacyOption::TruncateColumns,
10829            Some(Keyword::ZSTD) => CopyLegacyOption::Zstd,
10830            _ => self.expected("option", self.peek_token())?,
10831        };
10832        Ok(ret)
10833    }
10834
10835    fn parse_file_size(&mut self) -> Result<FileSize, ParserError> {
10836        let size = self.parse_number_value()?.value;
10837        let unit = self.maybe_parse_file_size_unit();
10838        Ok(FileSize { size, unit })
10839    }
10840
10841    fn maybe_parse_file_size_unit(&mut self) -> Option<FileSizeUnit> {
10842        match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
10843            Some(Keyword::MB) => Some(FileSizeUnit::MB),
10844            Some(Keyword::GB) => Some(FileSizeUnit::GB),
10845            _ => None,
10846        }
10847    }
10848
10849    fn parse_iam_role_kind(&mut self) -> Result<IamRoleKind, ParserError> {
10850        if self.parse_keyword(Keyword::DEFAULT) {
10851            Ok(IamRoleKind::Default)
10852        } else {
10853            let arn = self.parse_literal_string()?;
10854            Ok(IamRoleKind::Arn(arn))
10855        }
10856    }
10857
10858    fn parse_copy_legacy_csv_option(&mut self) -> Result<CopyLegacyCsvOption, ParserError> {
10859        let ret = match self.parse_one_of_keywords(&[
10860            Keyword::HEADER,
10861            Keyword::QUOTE,
10862            Keyword::ESCAPE,
10863            Keyword::FORCE,
10864        ]) {
10865            Some(Keyword::HEADER) => CopyLegacyCsvOption::Header,
10866            Some(Keyword::QUOTE) => {
10867                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
10868                CopyLegacyCsvOption::Quote(self.parse_literal_char()?)
10869            }
10870            Some(Keyword::ESCAPE) => {
10871                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
10872                CopyLegacyCsvOption::Escape(self.parse_literal_char()?)
10873            }
10874            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) => {
10875                CopyLegacyCsvOption::ForceNotNull(
10876                    self.parse_comma_separated(|p| p.parse_identifier())?,
10877                )
10878            }
10879            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::QUOTE]) => {
10880                CopyLegacyCsvOption::ForceQuote(
10881                    self.parse_comma_separated(|p| p.parse_identifier())?,
10882                )
10883            }
10884            _ => self.expected("csv option", self.peek_token())?,
10885        };
10886        Ok(ret)
10887    }
10888
10889    fn parse_literal_char(&mut self) -> Result<char, ParserError> {
10890        let s = self.parse_literal_string()?;
10891        if s.len() != 1 {
10892            let loc = self
10893                .tokens
10894                .get(self.index - 1)
10895                .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
10896            return parser_err!(format!("Expected a char, found {s:?}"), loc);
10897        }
10898        Ok(s.chars().next().unwrap())
10899    }
10900
10901    /// Parse tab-separated values in a
10902    /// COPY payload
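    ///
    /// A rough sketch (made-up table and rows): the text following the `;` is
    /// read as tab-separated lines, with `\N` for NULL cells and `\.`
    /// terminating the payload:
    ///
    /// ```ignore
    /// use sqlparser::dialect::PostgreSqlDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "COPY t (a, b) FROM STDIN;\n1\talice\n2\t\\N\n\\.";
    /// let stmts = Parser::parse_sql(&PostgreSqlDialect {}, sql).unwrap();
    /// ```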
10903    pub fn parse_tsv(&mut self) -> Vec<Option<String>> {
10904        self.parse_tab_value()
10905    }
10906
10907    pub fn parse_tab_value(&mut self) -> Vec<Option<String>> {
10908        let mut values = vec![];
10909        let mut content = String::from("");
10910        while let Some(t) = self.next_token_no_skip().map(|t| &t.token) {
10911            match t {
10912                Token::Whitespace(Whitespace::Tab) => {
10913                    values.push(Some(content.to_string()));
10914                    content.clear();
10915                }
10916                Token::Whitespace(Whitespace::Newline) => {
10917                    values.push(Some(content.to_string()));
10918                    content.clear();
10919                }
10920                Token::Backslash => {
10921                    if self.consume_token(&Token::Period) {
10922                        return values;
10923                    }
10924                    if let Token::Word(w) = self.next_token().token {
10925                        if w.value == "N" {
10926                            values.push(None);
10927                        }
10928                    }
10929                }
10930                _ => {
10931                    content.push_str(&t.to_string());
10932                }
10933            }
10934        }
10935        values
10936    }
10937
10938    /// Parse a literal value (numbers, strings, date/time, booleans)
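    ///
    /// A rough sketch of literal shapes accepted here when reached through the
    /// expression parser (boolean literal support depends on the dialect):
    ///
    /// ```ignore
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "SELECT 1, 2.5, 'text', TRUE, NULL";
    /// let stmts = Parser::parse_sql(&GenericDialect {}, sql).unwrap();
    /// ```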
10939    pub fn parse_value(&mut self) -> Result<ValueWithSpan, ParserError> {
10940        let next_token = self.next_token();
10941        let span = next_token.span;
10942        let ok_value = |value: Value| Ok(value.with_span(span));
10943        match next_token.token {
10944            Token::Word(w) => match w.keyword {
10945                Keyword::TRUE if self.dialect.supports_boolean_literals() => {
10946                    ok_value(Value::Boolean(true))
10947                }
10948                Keyword::FALSE if self.dialect.supports_boolean_literals() => {
10949                    ok_value(Value::Boolean(false))
10950                }
10951                Keyword::NULL => ok_value(Value::Null),
10952                Keyword::NoKeyword if w.quote_style.is_some() => match w.quote_style {
10953                    Some('"') => ok_value(Value::DoubleQuotedString(w.value)),
10954                    Some('\'') => ok_value(Value::SingleQuotedString(w.value)),
10955                    _ => self.expected(
10956                        "a value",
10957                        TokenWithSpan {
10958                            token: Token::Word(w),
10959                            span,
10960                        },
10961                    )?,
10962                },
10963                _ => self.expected(
10964                    "a concrete value",
10965                    TokenWithSpan {
10966                        token: Token::Word(w),
10967                        span,
10968                    },
10969                ),
10970            },
10971            // `Self::parse` returns a bigdecimal when the
10972            // bigdecimal feature is enabled, and is otherwise a no-op
10973            // (i.e., it returns the input string unchanged).
10974            Token::Number(n, l) => ok_value(Value::Number(Self::parse(n, span.start)?, l)),
10975            Token::SingleQuotedString(ref s) => ok_value(Value::SingleQuotedString(
10976                self.maybe_concat_string_literal(s.to_string()),
10977            )),
10978            Token::DoubleQuotedString(ref s) => ok_value(Value::DoubleQuotedString(
10979                self.maybe_concat_string_literal(s.to_string()),
10980            )),
10981            Token::TripleSingleQuotedString(ref s) => {
10982                ok_value(Value::TripleSingleQuotedString(s.to_string()))
10983            }
10984            Token::TripleDoubleQuotedString(ref s) => {
10985                ok_value(Value::TripleDoubleQuotedString(s.to_string()))
10986            }
10987            Token::DollarQuotedString(ref s) => ok_value(Value::DollarQuotedString(s.clone())),
10988            Token::SingleQuotedByteStringLiteral(ref s) => {
10989                ok_value(Value::SingleQuotedByteStringLiteral(s.clone()))
10990            }
10991            Token::DoubleQuotedByteStringLiteral(ref s) => {
10992                ok_value(Value::DoubleQuotedByteStringLiteral(s.clone()))
10993            }
10994            Token::TripleSingleQuotedByteStringLiteral(ref s) => {
10995                ok_value(Value::TripleSingleQuotedByteStringLiteral(s.clone()))
10996            }
10997            Token::TripleDoubleQuotedByteStringLiteral(ref s) => {
10998                ok_value(Value::TripleDoubleQuotedByteStringLiteral(s.clone()))
10999            }
11000            Token::SingleQuotedRawStringLiteral(ref s) => {
11001                ok_value(Value::SingleQuotedRawStringLiteral(s.clone()))
11002            }
11003            Token::DoubleQuotedRawStringLiteral(ref s) => {
11004                ok_value(Value::DoubleQuotedRawStringLiteral(s.clone()))
11005            }
11006            Token::TripleSingleQuotedRawStringLiteral(ref s) => {
11007                ok_value(Value::TripleSingleQuotedRawStringLiteral(s.clone()))
11008            }
11009            Token::TripleDoubleQuotedRawStringLiteral(ref s) => {
11010                ok_value(Value::TripleDoubleQuotedRawStringLiteral(s.clone()))
11011            }
11012            Token::NationalStringLiteral(ref s) => {
11013                ok_value(Value::NationalStringLiteral(s.to_string()))
11014            }
11015            Token::QuoteDelimitedStringLiteral(v) => {
11016                ok_value(Value::QuoteDelimitedStringLiteral(v))
11017            }
11018            Token::NationalQuoteDelimitedStringLiteral(v) => {
11019                ok_value(Value::NationalQuoteDelimitedStringLiteral(v))
11020            }
11021            Token::EscapedStringLiteral(ref s) => {
11022                ok_value(Value::EscapedStringLiteral(s.to_string()))
11023            }
11024            Token::UnicodeStringLiteral(ref s) => {
11025                ok_value(Value::UnicodeStringLiteral(s.to_string()))
11026            }
11027            Token::HexStringLiteral(ref s) => ok_value(Value::HexStringLiteral(s.to_string())),
11028            Token::Placeholder(ref s) => ok_value(Value::Placeholder(s.to_string())),
11029            tok @ Token::Colon | tok @ Token::AtSign => {
11030                // 1. Not calling self.parse_identifier(false)?
11031                //    because only for placeholders do we want to accept
11032                //    numbers as identifiers. This is because Snowflake
11033                //    allows numbers as placeholders.
11034                // 2. Not calling self.next_token() so that `tok` must
11035                //    be followed immediately by a word/number, i.e.
11036                //    without any whitespace in between.
11037                let next_token = self.next_token_no_skip().unwrap_or(&EOF_TOKEN).clone();
11038                let ident = match next_token.token {
11039                    Token::Word(w) => Ok(w.into_ident(next_token.span)),
11040                    Token::Number(w, false) => Ok(Ident::with_span(next_token.span, w)),
11041                    _ => self.expected("placeholder", next_token),
11042                }?;
11043                Ok(Value::Placeholder(tok.to_string() + &ident.value)
11044                    .with_span(Span::new(span.start, ident.span.end)))
11045            }
11046            unexpected => self.expected(
11047                "a value",
11048                TokenWithSpan {
11049                    token: unexpected,
11050                    span,
11051                },
11052            ),
11053        }
11054    }
11055
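    /// If the active dialect supports implicit string literal concatenation,
    /// consume any immediately following string literals and append their
    /// contents, so that e.g. `'foo' 'bar'` yields a single `foobar` value
    /// (dialect-gated; this only describes the loop below).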
11056    fn maybe_concat_string_literal(&mut self, mut str: String) -> String {
11057        if self.dialect.supports_string_literal_concatenation() {
11058            while let Token::SingleQuotedString(ref s) | Token::DoubleQuotedString(ref s) =
11059                self.peek_token_ref().token
11060            {
11061                str.push_str(s.clone().as_str());
11062                self.advance_token();
11063            }
11064        }
11065        str
11066    }
11067
11068    /// Parse an unsigned numeric literal
11069    pub fn parse_number_value(&mut self) -> Result<ValueWithSpan, ParserError> {
11070        let value_wrapper = self.parse_value()?;
11071        match &value_wrapper.value {
11072            Value::Number(_, _) => Ok(value_wrapper),
11073            Value::Placeholder(_) => Ok(value_wrapper),
11074            _ => {
11075                self.prev_token();
11076                self.expected("literal number", self.peek_token())
11077            }
11078        }
11079    }
11080
11081    /// Parse a numeric literal as an expression. Returns an [`Expr::UnaryOp`] if the number is signed,
11082    /// otherwise returns an [`Expr::Value`]
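    ///
    /// A rough sketch (assumes the generic dialect):
    ///
    /// ```ignore
    /// use sqlparser::ast::{Expr, UnaryOperator};
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let expr = Parser::new(&dialect).try_with_sql("-42").unwrap().parse_number().unwrap();
    /// assert!(matches!(expr, Expr::UnaryOp { op: UnaryOperator::Minus, .. }));
    /// ```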
11083    pub fn parse_number(&mut self) -> Result<Expr, ParserError> {
11084        let next_token = self.next_token();
11085        match next_token.token {
11086            Token::Plus => Ok(Expr::UnaryOp {
11087                op: UnaryOperator::Plus,
11088                expr: Box::new(Expr::Value(self.parse_number_value()?)),
11089            }),
11090            Token::Minus => Ok(Expr::UnaryOp {
11091                op: UnaryOperator::Minus,
11092                expr: Box::new(Expr::Value(self.parse_number_value()?)),
11093            }),
11094            _ => {
11095                self.prev_token();
11096                Ok(Expr::Value(self.parse_number_value()?))
11097            }
11098        }
11099    }
11100
11101    fn parse_introduced_string_expr(&mut self) -> Result<Expr, ParserError> {
11102        let next_token = self.next_token();
11103        let span = next_token.span;
11104        match next_token.token {
11105            Token::SingleQuotedString(ref s) => Ok(Expr::Value(
11106                Value::SingleQuotedString(s.to_string()).with_span(span),
11107            )),
11108            Token::DoubleQuotedString(ref s) => Ok(Expr::Value(
11109                Value::DoubleQuotedString(s.to_string()).with_span(span),
11110            )),
11111            Token::HexStringLiteral(ref s) => Ok(Expr::Value(
11112                Value::HexStringLiteral(s.to_string()).with_span(span),
11113            )),
11114            unexpected => self.expected(
11115                "a string value",
11116                TokenWithSpan {
11117                    token: unexpected,
11118                    span,
11119                },
11120            ),
11121        }
11122    }
11123
11124    /// Parse an unsigned literal integer/long
11125    pub fn parse_literal_uint(&mut self) -> Result<u64, ParserError> {
11126        let next_token = self.next_token();
11127        match next_token.token {
11128            Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start),
11129            _ => self.expected("literal int", next_token),
11130        }
11131    }
11132
11133    /// Parse the body of a `CREATE FUNCTION` specified as a string.
11134    /// e.g. `CREATE FUNCTION ... AS $$ body $$`.
11135    fn parse_create_function_body_string(&mut self) -> Result<CreateFunctionBody, ParserError> {
11136        let parse_string_expr = |parser: &mut Parser| -> Result<Expr, ParserError> {
11137            let peek_token = parser.peek_token();
11138            let span = peek_token.span;
11139            match peek_token.token {
11140                Token::DollarQuotedString(s) if dialect_of!(parser is PostgreSqlDialect | GenericDialect) =>
11141                {
11142                    parser.next_token();
11143                    Ok(Expr::Value(Value::DollarQuotedString(s).with_span(span)))
11144                }
11145                _ => Ok(Expr::Value(
11146                    Value::SingleQuotedString(parser.parse_literal_string()?).with_span(span),
11147                )),
11148            }
11149        };
11150
11151        Ok(CreateFunctionBody::AsBeforeOptions {
11152            body: parse_string_expr(self)?,
11153            link_symbol: if self.consume_token(&Token::Comma) {
11154                Some(parse_string_expr(self)?)
11155            } else {
11156                None
11157            },
11158        })
11159    }
11160
11161    /// Parse a literal string
11162    pub fn parse_literal_string(&mut self) -> Result<String, ParserError> {
11163        let next_token = self.next_token();
11164        match next_token.token {
11165            Token::Word(Word {
11166                value,
11167                keyword: Keyword::NoKeyword,
11168                ..
11169            }) => Ok(value),
11170            Token::SingleQuotedString(s) => Ok(s),
11171            Token::DoubleQuotedString(s) => Ok(s),
11172            Token::EscapedStringLiteral(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
11173                Ok(s)
11174            }
11175            Token::UnicodeStringLiteral(s) => Ok(s),
11176            _ => self.expected("literal string", next_token),
11177        }
11178    }
11179
11180    /// Parse a boolean string
11181    pub(crate) fn parse_boolean_string(&mut self) -> Result<bool, ParserError> {
11182        match self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]) {
11183            Some(Keyword::TRUE) => Ok(true),
11184            Some(Keyword::FALSE) => Ok(false),
11185            _ => self.expected("TRUE or FALSE", self.peek_token()),
11186        }
11187    }
11188
11189    /// Parse a Unicode normalization clause: `[NOT] [NFC | NFD | NFKC | NFKD] NORMALIZED`
11190    pub fn parse_unicode_is_normalized(&mut self, expr: Expr) -> Result<Expr, ParserError> {
11191        let neg = self.parse_keyword(Keyword::NOT);
11192        let normalized_form = self.maybe_parse(|parser| {
11193            match parser.parse_one_of_keywords(&[
11194                Keyword::NFC,
11195                Keyword::NFD,
11196                Keyword::NFKC,
11197                Keyword::NFKD,
11198            ]) {
11199                Some(Keyword::NFC) => Ok(NormalizationForm::NFC),
11200                Some(Keyword::NFD) => Ok(NormalizationForm::NFD),
11201                Some(Keyword::NFKC) => Ok(NormalizationForm::NFKC),
11202                Some(Keyword::NFKD) => Ok(NormalizationForm::NFKD),
11203                _ => parser.expected("unicode normalization form", parser.peek_token()),
11204            }
11205        })?;
11206        if self.parse_keyword(Keyword::NORMALIZED) {
11207            return Ok(Expr::IsNormalized {
11208                expr: Box::new(expr),
11209                form: normalized_form,
11210                negated: neg,
11211            });
11212        }
11213        self.expected("unicode normalization form", self.peek_token())
11214    }
11215
11216    pub fn parse_enum_values(&mut self) -> Result<Vec<EnumMember>, ParserError> {
11217        self.expect_token(&Token::LParen)?;
11218        let values = self.parse_comma_separated(|parser| {
11219            let name = parser.parse_literal_string()?;
11220            let e = if parser.consume_token(&Token::Eq) {
11221                let value = parser.parse_number()?;
11222                EnumMember::NamedValue(name, value)
11223            } else {
11224                EnumMember::Name(name)
11225            };
11226            Ok(e)
11227        })?;
11228        self.expect_token(&Token::RParen)?;
11229
11230        Ok(values)
11231    }
11232
11233    /// Parse a SQL datatype (in the context of a CREATE TABLE statement for example)
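    ///
    /// A rough sketch (the table and columns below are made up):
    ///
    /// ```ignore
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "CREATE TABLE t (a NUMERIC(10, 2), b VARCHAR(255), c TIMESTAMP WITH TIME ZONE)";
    /// let stmts = Parser::parse_sql(&GenericDialect {}, sql).unwrap();
    /// ```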
11234    pub fn parse_data_type(&mut self) -> Result<DataType, ParserError> {
11235        let (ty, trailing_bracket) = self.parse_data_type_helper()?;
11236        if trailing_bracket.0 {
11237            return parser_err!(
11238                format!("unmatched > after parsing data type {ty}"),
11239                self.peek_token()
11240            );
11241        }
11242
11243        Ok(ty)
11244    }
11245
11246    fn parse_data_type_helper(
11247        &mut self,
11248    ) -> Result<(DataType, MatchedTrailingBracket), ParserError> {
11249        let dialect = self.dialect;
11250        self.advance_token();
11251        let next_token = self.get_current_token();
11252        let next_token_index = self.get_current_index();
11253
11254        let mut trailing_bracket: MatchedTrailingBracket = false.into();
11255        let mut data = match &next_token.token {
11256            Token::Word(w) => match w.keyword {
11257                Keyword::BOOLEAN => Ok(DataType::Boolean),
11258                Keyword::BOOL => Ok(DataType::Bool),
11259                Keyword::FLOAT => {
11260                    let precision = self.parse_exact_number_optional_precision_scale()?;
11261
11262                    if self.parse_keyword(Keyword::UNSIGNED) {
11263                        Ok(DataType::FloatUnsigned(precision))
11264                    } else {
11265                        Ok(DataType::Float(precision))
11266                    }
11267                }
11268                Keyword::REAL => {
11269                    if self.parse_keyword(Keyword::UNSIGNED) {
11270                        Ok(DataType::RealUnsigned)
11271                    } else {
11272                        Ok(DataType::Real)
11273                    }
11274                }
11275                Keyword::FLOAT4 => Ok(DataType::Float4),
11276                Keyword::FLOAT32 => Ok(DataType::Float32),
11277                Keyword::FLOAT64 => Ok(DataType::Float64),
11278                Keyword::FLOAT8 => Ok(DataType::Float8),
11279                Keyword::DOUBLE => {
11280                    if self.parse_keyword(Keyword::PRECISION) {
11281                        if self.parse_keyword(Keyword::UNSIGNED) {
11282                            Ok(DataType::DoublePrecisionUnsigned)
11283                        } else {
11284                            Ok(DataType::DoublePrecision)
11285                        }
11286                    } else {
11287                        let precision = self.parse_exact_number_optional_precision_scale()?;
11288
11289                        if self.parse_keyword(Keyword::UNSIGNED) {
11290                            Ok(DataType::DoubleUnsigned(precision))
11291                        } else {
11292                            Ok(DataType::Double(precision))
11293                        }
11294                    }
11295                }
11296                Keyword::TINYINT => {
11297                    let optional_precision = self.parse_optional_precision();
11298                    if self.parse_keyword(Keyword::UNSIGNED) {
11299                        Ok(DataType::TinyIntUnsigned(optional_precision?))
11300                    } else {
11301                        if dialect.supports_data_type_signed_suffix() {
11302                            let _ = self.parse_keyword(Keyword::SIGNED);
11303                        }
11304                        Ok(DataType::TinyInt(optional_precision?))
11305                    }
11306                }
11307                Keyword::INT2 => {
11308                    let optional_precision = self.parse_optional_precision();
11309                    if self.parse_keyword(Keyword::UNSIGNED) {
11310                        Ok(DataType::Int2Unsigned(optional_precision?))
11311                    } else {
11312                        Ok(DataType::Int2(optional_precision?))
11313                    }
11314                }
11315                Keyword::SMALLINT => {
11316                    let optional_precision = self.parse_optional_precision();
11317                    if self.parse_keyword(Keyword::UNSIGNED) {
11318                        Ok(DataType::SmallIntUnsigned(optional_precision?))
11319                    } else {
11320                        if dialect.supports_data_type_signed_suffix() {
11321                            let _ = self.parse_keyword(Keyword::SIGNED);
11322                        }
11323                        Ok(DataType::SmallInt(optional_precision?))
11324                    }
11325                }
11326                Keyword::MEDIUMINT => {
11327                    let optional_precision = self.parse_optional_precision();
11328                    if self.parse_keyword(Keyword::UNSIGNED) {
11329                        Ok(DataType::MediumIntUnsigned(optional_precision?))
11330                    } else {
11331                        if dialect.supports_data_type_signed_suffix() {
11332                            let _ = self.parse_keyword(Keyword::SIGNED);
11333                        }
11334                        Ok(DataType::MediumInt(optional_precision?))
11335                    }
11336                }
11337                Keyword::INT => {
11338                    let optional_precision = self.parse_optional_precision();
11339                    if self.parse_keyword(Keyword::UNSIGNED) {
11340                        Ok(DataType::IntUnsigned(optional_precision?))
11341                    } else {
11342                        if dialect.supports_data_type_signed_suffix() {
11343                            let _ = self.parse_keyword(Keyword::SIGNED);
11344                        }
11345                        Ok(DataType::Int(optional_precision?))
11346                    }
11347                }
11348                Keyword::INT4 => {
11349                    let optional_precision = self.parse_optional_precision();
11350                    if self.parse_keyword(Keyword::UNSIGNED) {
11351                        Ok(DataType::Int4Unsigned(optional_precision?))
11352                    } else {
11353                        Ok(DataType::Int4(optional_precision?))
11354                    }
11355                }
11356                Keyword::INT8 => {
11357                    let optional_precision = self.parse_optional_precision();
11358                    if self.parse_keyword(Keyword::UNSIGNED) {
11359                        Ok(DataType::Int8Unsigned(optional_precision?))
11360                    } else {
11361                        Ok(DataType::Int8(optional_precision?))
11362                    }
11363                }
11364                Keyword::INT16 => Ok(DataType::Int16),
11365                Keyword::INT32 => Ok(DataType::Int32),
11366                Keyword::INT64 => Ok(DataType::Int64),
11367                Keyword::INT128 => Ok(DataType::Int128),
11368                Keyword::INT256 => Ok(DataType::Int256),
11369                Keyword::INTEGER => {
11370                    let optional_precision = self.parse_optional_precision();
11371                    if self.parse_keyword(Keyword::UNSIGNED) {
11372                        Ok(DataType::IntegerUnsigned(optional_precision?))
11373                    } else {
11374                        if dialect.supports_data_type_signed_suffix() {
11375                            let _ = self.parse_keyword(Keyword::SIGNED);
11376                        }
11377                        Ok(DataType::Integer(optional_precision?))
11378                    }
11379                }
11380                Keyword::BIGINT => {
11381                    let optional_precision = self.parse_optional_precision();
11382                    if self.parse_keyword(Keyword::UNSIGNED) {
11383                        Ok(DataType::BigIntUnsigned(optional_precision?))
11384                    } else {
11385                        if dialect.supports_data_type_signed_suffix() {
11386                            let _ = self.parse_keyword(Keyword::SIGNED);
11387                        }
11388                        Ok(DataType::BigInt(optional_precision?))
11389                    }
11390                }
11391                Keyword::HUGEINT => Ok(DataType::HugeInt),
11392                Keyword::UBIGINT => Ok(DataType::UBigInt),
11393                Keyword::UHUGEINT => Ok(DataType::UHugeInt),
11394                Keyword::USMALLINT => Ok(DataType::USmallInt),
11395                Keyword::UTINYINT => Ok(DataType::UTinyInt),
11396                Keyword::UINT8 => Ok(DataType::UInt8),
11397                Keyword::UINT16 => Ok(DataType::UInt16),
11398                Keyword::UINT32 => Ok(DataType::UInt32),
11399                Keyword::UINT64 => Ok(DataType::UInt64),
11400                Keyword::UINT128 => Ok(DataType::UInt128),
11401                Keyword::UINT256 => Ok(DataType::UInt256),
11402                Keyword::VARCHAR => Ok(DataType::Varchar(self.parse_optional_character_length()?)),
11403                Keyword::NVARCHAR => {
11404                    Ok(DataType::Nvarchar(self.parse_optional_character_length()?))
11405                }
11406                Keyword::CHARACTER => {
11407                    if self.parse_keyword(Keyword::VARYING) {
11408                        Ok(DataType::CharacterVarying(
11409                            self.parse_optional_character_length()?,
11410                        ))
11411                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
11412                        Ok(DataType::CharacterLargeObject(
11413                            self.parse_optional_precision()?,
11414                        ))
11415                    } else {
11416                        Ok(DataType::Character(self.parse_optional_character_length()?))
11417                    }
11418                }
11419                Keyword::CHAR => {
11420                    if self.parse_keyword(Keyword::VARYING) {
11421                        Ok(DataType::CharVarying(
11422                            self.parse_optional_character_length()?,
11423                        ))
11424                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
11425                        Ok(DataType::CharLargeObject(self.parse_optional_precision()?))
11426                    } else {
11427                        Ok(DataType::Char(self.parse_optional_character_length()?))
11428                    }
11429                }
11430                Keyword::CLOB => Ok(DataType::Clob(self.parse_optional_precision()?)),
11431                Keyword::BINARY => Ok(DataType::Binary(self.parse_optional_precision()?)),
11432                Keyword::VARBINARY => Ok(DataType::Varbinary(self.parse_optional_binary_length()?)),
11433                Keyword::BLOB => Ok(DataType::Blob(self.parse_optional_precision()?)),
11434                Keyword::TINYBLOB => Ok(DataType::TinyBlob),
11435                Keyword::MEDIUMBLOB => Ok(DataType::MediumBlob),
11436                Keyword::LONGBLOB => Ok(DataType::LongBlob),
11437                Keyword::BYTES => Ok(DataType::Bytes(self.parse_optional_precision()?)),
11438                Keyword::BIT => {
11439                    if self.parse_keyword(Keyword::VARYING) {
11440                        Ok(DataType::BitVarying(self.parse_optional_precision()?))
11441                    } else {
11442                        Ok(DataType::Bit(self.parse_optional_precision()?))
11443                    }
11444                }
11445                Keyword::VARBIT => Ok(DataType::VarBit(self.parse_optional_precision()?)),
11446                Keyword::UUID => Ok(DataType::Uuid),
11447                Keyword::DATE => Ok(DataType::Date),
11448                Keyword::DATE32 => Ok(DataType::Date32),
11449                Keyword::DATETIME => Ok(DataType::Datetime(self.parse_optional_precision()?)),
11450                Keyword::DATETIME64 => {
11451                    self.prev_token();
11452                    let (precision, time_zone) = self.parse_datetime_64()?;
11453                    Ok(DataType::Datetime64(precision, time_zone))
11454                }
11455                Keyword::TIMESTAMP => {
11456                    let precision = self.parse_optional_precision()?;
11457                    let tz = if self.parse_keyword(Keyword::WITH) {
11458                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
11459                        TimezoneInfo::WithTimeZone
11460                    } else if self.parse_keyword(Keyword::WITHOUT) {
11461                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
11462                        TimezoneInfo::WithoutTimeZone
11463                    } else {
11464                        TimezoneInfo::None
11465                    };
11466                    Ok(DataType::Timestamp(precision, tz))
11467                }
11468                Keyword::TIMESTAMPTZ => Ok(DataType::Timestamp(
11469                    self.parse_optional_precision()?,
11470                    TimezoneInfo::Tz,
11471                )),
11472                Keyword::TIMESTAMP_NTZ => {
11473                    Ok(DataType::TimestampNtz(self.parse_optional_precision()?))
11474                }
11475                Keyword::TIME => {
11476                    let precision = self.parse_optional_precision()?;
11477                    let tz = if self.parse_keyword(Keyword::WITH) {
11478                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
11479                        TimezoneInfo::WithTimeZone
11480                    } else if self.parse_keyword(Keyword::WITHOUT) {
11481                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
11482                        TimezoneInfo::WithoutTimeZone
11483                    } else {
11484                        TimezoneInfo::None
11485                    };
11486                    Ok(DataType::Time(precision, tz))
11487                }
11488                Keyword::TIMETZ => Ok(DataType::Time(
11489                    self.parse_optional_precision()?,
11490                    TimezoneInfo::Tz,
11491                )),
11492                Keyword::INTERVAL => {
11493                    if self.dialect.supports_interval_options() {
11494                        let fields = self.maybe_parse_optional_interval_fields()?;
11495                        let precision = self.parse_optional_precision()?;
11496                        Ok(DataType::Interval { fields, precision })
11497                    } else {
11498                        Ok(DataType::Interval {
11499                            fields: None,
11500                            precision: None,
11501                        })
11502                    }
11503                }
11504                Keyword::JSON => Ok(DataType::JSON),
11505                Keyword::JSONB => Ok(DataType::JSONB),
11506                Keyword::REGCLASS => Ok(DataType::Regclass),
11507                Keyword::STRING => Ok(DataType::String(self.parse_optional_precision()?)),
11508                Keyword::FIXEDSTRING => {
11509                    self.expect_token(&Token::LParen)?;
11510                    let character_length = self.parse_literal_uint()?;
11511                    self.expect_token(&Token::RParen)?;
11512                    Ok(DataType::FixedString(character_length))
11513                }
11514                Keyword::TEXT => Ok(DataType::Text),
11515                Keyword::TINYTEXT => Ok(DataType::TinyText),
11516                Keyword::MEDIUMTEXT => Ok(DataType::MediumText),
11517                Keyword::LONGTEXT => Ok(DataType::LongText),
11518                Keyword::BYTEA => Ok(DataType::Bytea),
11519                Keyword::NUMERIC => Ok(DataType::Numeric(
11520                    self.parse_exact_number_optional_precision_scale()?,
11521                )),
11522                Keyword::DECIMAL => {
11523                    let precision = self.parse_exact_number_optional_precision_scale()?;
11524
11525                    if self.parse_keyword(Keyword::UNSIGNED) {
11526                        Ok(DataType::DecimalUnsigned(precision))
11527                    } else {
11528                        Ok(DataType::Decimal(precision))
11529                    }
11530                }
11531                Keyword::DEC => {
11532                    let precision = self.parse_exact_number_optional_precision_scale()?;
11533
11534                    if self.parse_keyword(Keyword::UNSIGNED) {
11535                        Ok(DataType::DecUnsigned(precision))
11536                    } else {
11537                        Ok(DataType::Dec(precision))
11538                    }
11539                }
11540                Keyword::BIGNUMERIC => Ok(DataType::BigNumeric(
11541                    self.parse_exact_number_optional_precision_scale()?,
11542                )),
11543                Keyword::BIGDECIMAL => Ok(DataType::BigDecimal(
11544                    self.parse_exact_number_optional_precision_scale()?,
11545                )),
11546                Keyword::ENUM => Ok(DataType::Enum(self.parse_enum_values()?, None)),
11547                Keyword::ENUM8 => Ok(DataType::Enum(self.parse_enum_values()?, Some(8))),
11548                Keyword::ENUM16 => Ok(DataType::Enum(self.parse_enum_values()?, Some(16))),
11549                Keyword::SET => Ok(DataType::Set(self.parse_string_values()?)),
11550                Keyword::ARRAY => {
11551                    if dialect_of!(self is SnowflakeDialect) {
11552                        Ok(DataType::Array(ArrayElemTypeDef::None))
11553                    } else if dialect_of!(self is ClickHouseDialect) {
11554                        Ok(self.parse_sub_type(|internal_type| {
11555                            DataType::Array(ArrayElemTypeDef::Parenthesis(internal_type))
11556                        })?)
11557                    } else {
11558                        self.expect_token(&Token::Lt)?;
11559                        let (inside_type, _trailing_bracket) = self.parse_data_type_helper()?;
11560                        trailing_bracket = self.expect_closing_angle_bracket(_trailing_bracket)?;
11561                        Ok(DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
11562                            inside_type,
11563                        ))))
11564                    }
11565                }
11566                Keyword::STRUCT if dialect_is!(dialect is DuckDbDialect) => {
11567                    self.prev_token();
11568                    let field_defs = self.parse_duckdb_struct_type_def()?;
11569                    Ok(DataType::Struct(field_defs, StructBracketKind::Parentheses))
11570                }
11571                Keyword::STRUCT if dialect_is!(dialect is BigQueryDialect | GenericDialect) => {
11572                    self.prev_token();
11573                    let (field_defs, _trailing_bracket) =
11574                        self.parse_struct_type_def(Self::parse_struct_field_def)?;
11575                    trailing_bracket = _trailing_bracket;
11576                    Ok(DataType::Struct(
11577                        field_defs,
11578                        StructBracketKind::AngleBrackets,
11579                    ))
11580                }
11581                Keyword::UNION if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
11582                    self.prev_token();
11583                    let fields = self.parse_union_type_def()?;
11584                    Ok(DataType::Union(fields))
11585                }
11586                Keyword::NULLABLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11587                    Ok(self.parse_sub_type(DataType::Nullable)?)
11588                }
11589                Keyword::LOWCARDINALITY if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11590                    Ok(self.parse_sub_type(DataType::LowCardinality)?)
11591                }
11592                Keyword::MAP if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11593                    self.prev_token();
11594                    let (key_data_type, value_data_type) = self.parse_click_house_map_def()?;
11595                    Ok(DataType::Map(
11596                        Box::new(key_data_type),
11597                        Box::new(value_data_type),
11598                    ))
11599                }
11600                Keyword::NESTED if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11601                    self.expect_token(&Token::LParen)?;
11602                    let field_defs = self.parse_comma_separated(Parser::parse_column_def)?;
11603                    self.expect_token(&Token::RParen)?;
11604                    Ok(DataType::Nested(field_defs))
11605                }
11606                Keyword::TUPLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11607                    self.prev_token();
11608                    let field_defs = self.parse_click_house_tuple_def()?;
11609                    Ok(DataType::Tuple(field_defs))
11610                }
11611                Keyword::TRIGGER => Ok(DataType::Trigger),
11612                Keyword::ANY if self.peek_keyword(Keyword::TYPE) => {
11613                    let _ = self.parse_keyword(Keyword::TYPE);
11614                    Ok(DataType::AnyType)
11615                }
11616                Keyword::TABLE => {
11617                    // an LParen after the TABLE keyword indicates that table columns are being defined
11618                    // whereas no LParen indicates an anonymous table expression will be returned
11619                    if self.peek_token() == Token::LParen {
11620                        let columns = self.parse_returns_table_columns()?;
11621                        Ok(DataType::Table(Some(columns)))
11622                    } else {
11623                        Ok(DataType::Table(None))
11624                    }
11625                }
11626                Keyword::SIGNED => {
11627                    if self.parse_keyword(Keyword::INTEGER) {
11628                        Ok(DataType::SignedInteger)
11629                    } else {
11630                        Ok(DataType::Signed)
11631                    }
11632                }
11633                Keyword::UNSIGNED => {
11634                    if self.parse_keyword(Keyword::INTEGER) {
11635                        Ok(DataType::UnsignedInteger)
11636                    } else {
11637                        Ok(DataType::Unsigned)
11638                    }
11639                }
11640                Keyword::TSVECTOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
11641                    Ok(DataType::TsVector)
11642                }
11643                Keyword::TSQUERY if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
11644                    Ok(DataType::TsQuery)
11645                }
11646                _ => {
11647                    self.prev_token();
11648                    let type_name = self.parse_object_name(false)?;
11649                    if let Some(modifiers) = self.parse_optional_type_modifiers()? {
11650                        Ok(DataType::Custom(type_name, modifiers))
11651                    } else {
11652                        Ok(DataType::Custom(type_name, vec![]))
11653                    }
11654                }
11655            },
11656            _ => self.expected_at("a data type name", next_token_index),
11657        }?;
11658
11659        if self.dialect.supports_array_typedef_with_brackets() {
11660            while self.consume_token(&Token::LBracket) {
11661                // Parse optional array data type size
11662                let size = self.maybe_parse(|p| p.parse_literal_uint())?;
11663                self.expect_token(&Token::RBracket)?;
11664                data = DataType::Array(ArrayElemTypeDef::SquareBracket(Box::new(data), size))
11665            }
11666        }
11667        Ok((data, trailing_bracket))
11668    }
11669
11670    fn parse_returns_table_column(&mut self) -> Result<ColumnDef, ParserError> {
11671        self.parse_column_def()
11672    }
11673
11674    fn parse_returns_table_columns(&mut self) -> Result<Vec<ColumnDef>, ParserError> {
11675        self.expect_token(&Token::LParen)?;
11676        let columns = self.parse_comma_separated(Parser::parse_returns_table_column)?;
11677        self.expect_token(&Token::RParen)?;
11678        Ok(columns)
11679    }
11680
11681    pub fn parse_string_values(&mut self) -> Result<Vec<String>, ParserError> {
11682        self.expect_token(&Token::LParen)?;
11683        let mut values = Vec::new();
11684        loop {
11685            let next_token = self.next_token();
11686            match next_token.token {
11687                Token::SingleQuotedString(value) => values.push(value),
11688                _ => self.expected("a string", next_token)?,
11689            }
11690            let next_token = self.next_token();
11691            match next_token.token {
11692                Token::Comma => (),
11693                Token::RParen => break,
11694                _ => self.expected(", or }", next_token)?,
11695            }
11696        }
11697        Ok(values)
11698    }
11699
11700    /// Strictly parse `identifier AS identifier`
11701    pub fn parse_identifier_with_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
11702        let ident = self.parse_identifier()?;
11703        self.expect_keyword_is(Keyword::AS)?;
11704        let alias = self.parse_identifier()?;
11705        Ok(IdentWithAlias { ident, alias })
11706    }
11707
11708    /// Parse `identifier [AS] identifier` where the AS keyword is optional
11709    fn parse_identifier_with_optional_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
11710        let ident = self.parse_identifier()?;
11711        let _after_as = self.parse_keyword(Keyword::AS);
11712        let alias = self.parse_identifier()?;
11713        Ok(IdentWithAlias { ident, alias })
11714    }
11715
11716    /// Parse comma-separated list of parenthesized queries for pipe operators
11717    fn parse_pipe_operator_queries(&mut self) -> Result<Vec<Query>, ParserError> {
11718        self.parse_comma_separated(|parser| {
11719            parser.expect_token(&Token::LParen)?;
11720            let query = parser.parse_query()?;
11721            parser.expect_token(&Token::RParen)?;
11722            Ok(*query)
11723        })
11724    }
11725
11726    /// Parse the set quantifier for pipe operators that require `DISTINCT`, e.g. `INTERSECT` and `EXCEPT`.
11727    fn parse_distinct_required_set_quantifier(
11728        &mut self,
11729        operator_name: &str,
11730    ) -> Result<SetQuantifier, ParserError> {
11731        let quantifier = self.parse_set_quantifier(&Some(SetOperator::Intersect));
11732        match quantifier {
11733            SetQuantifier::Distinct | SetQuantifier::DistinctByName => Ok(quantifier),
11734            _ => Err(ParserError::ParserError(format!(
11735                "{operator_name} pipe operator requires DISTINCT modifier",
11736            ))),
11737        }
11738    }
11739
11740    /// Parse optional identifier alias (with or without AS keyword)
11741    fn parse_identifier_optional_alias(&mut self) -> Result<Option<Ident>, ParserError> {
11742        if self.parse_keyword(Keyword::AS) {
11743            Ok(Some(self.parse_identifier()?))
11744        } else {
11745            // Check if the next token is an identifier (implicit alias)
11746            self.maybe_parse(|parser| parser.parse_identifier())
11747        }
11748    }
11749
11750    /// Optionally parses an alias for a select list item
11751    fn maybe_parse_select_item_alias(&mut self) -> Result<Option<Ident>, ParserError> {
11752        fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
11753            parser.dialect.is_select_item_alias(explicit, kw, parser)
11754        }
11755        self.parse_optional_alias_inner(None, validator)
11756    }
11757
11758    /// Optionally parses an alias for a table, as in `... FROM generate_series(1, 10) AS t (col)`.
11759    /// In this case, the alias may also name the columns in the table, in
11760    /// addition to the table itself.
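    ///
    /// For example (a minimal sketch; only the alias portion of a query is given as input):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// // illustrative input: the alias part of `... FROM generate_series(1, 10) AS t (col)`
    /// let mut parser = Parser::new(&dialect).try_with_sql("AS t (col)").unwrap();
    /// let alias = parser.maybe_parse_table_alias().unwrap().unwrap();
    /// assert_eq!(alias.name.value, "t");
    /// assert_eq!(alias.columns.len(), 1);
    /// ```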
11761    pub fn maybe_parse_table_alias(&mut self) -> Result<Option<TableAlias>, ParserError> {
11762        fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
11763            parser.dialect.is_table_factor_alias(explicit, kw, parser)
11764        }
11765        let explicit = self.peek_keyword(Keyword::AS);
11766        match self.parse_optional_alias_inner(None, validator)? {
11767            Some(name) => {
11768                let columns = self.parse_table_alias_column_defs()?;
11769                Ok(Some(TableAlias {
11770                    explicit,
11771                    name,
11772                    columns,
11773                }))
11774            }
11775            None => Ok(None),
11776        }
11777    }
11778
11779    fn parse_table_index_hints(&mut self) -> Result<Vec<TableIndexHints>, ParserError> {
11780        let mut hints = vec![];
11781        while let Some(hint_type) =
11782            self.parse_one_of_keywords(&[Keyword::USE, Keyword::IGNORE, Keyword::FORCE])
11783        {
11784            let hint_type = match hint_type {
11785                Keyword::USE => TableIndexHintType::Use,
11786                Keyword::IGNORE => TableIndexHintType::Ignore,
11787                Keyword::FORCE => TableIndexHintType::Force,
11788                _ => {
11789                    return self.expected(
11790                        "expected to match USE/IGNORE/FORCE keyword",
11791                        self.peek_token(),
11792                    )
11793                }
11794            };
11795            let index_type = match self.parse_one_of_keywords(&[Keyword::INDEX, Keyword::KEY]) {
11796                Some(Keyword::INDEX) => TableIndexType::Index,
11797                Some(Keyword::KEY) => TableIndexType::Key,
11798                _ => {
11799                    return self.expected("expected to match INDEX/KEY keyword", self.peek_token())
11800                }
11801            };
11802            let for_clause = if self.parse_keyword(Keyword::FOR) {
11803                let clause = if self.parse_keyword(Keyword::JOIN) {
11804                    TableIndexHintForClause::Join
11805                } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
11806                    TableIndexHintForClause::OrderBy
11807                } else if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
11808                    TableIndexHintForClause::GroupBy
11809                } else {
11810                    return self.expected(
11811                        "expected to match FOR/ORDER BY/GROUP BY table hint in for clause",
11812                        self.peek_token(),
11813                    );
11814                };
11815                Some(clause)
11816            } else {
11817                None
11818            };
11819
11820            self.expect_token(&Token::LParen)?;
11821            let index_names = if self.peek_token().token != Token::RParen {
11822                self.parse_comma_separated(Parser::parse_identifier)?
11823            } else {
11824                vec![]
11825            };
11826            self.expect_token(&Token::RParen)?;
11827            hints.push(TableIndexHints {
11828                hint_type,
11829                index_type,
11830                for_clause,
11831                index_names,
11832            });
11833        }
11834        Ok(hints)
11835    }
11836
11837    /// Wrapper for `parse_optional_alias_inner`, kept for backwards compatibility;
11838    /// new flows should use the context-specific methods such as `maybe_parse_select_item_alias`
11839    /// and `maybe_parse_table_alias`.
11840    pub fn parse_optional_alias(
11841        &mut self,
11842        reserved_kwds: &[Keyword],
11843    ) -> Result<Option<Ident>, ParserError> {
11844        fn validator(_explicit: bool, _kw: &Keyword, _parser: &mut Parser) -> bool {
11845            false
11846        }
11847        self.parse_optional_alias_inner(Some(reserved_kwds), validator)
11848    }
11849
11850    /// Parses an optional alias after a SQL element such as a select list item
11851    /// or a table name.
11852    ///
11853    /// This method accepts an optional list of reserved keywords and a validator
11854    /// callback that decides whether a keyword should be parsed as an alias, allowing
11855    /// callers to customize the parsing logic based on their context.
11856    fn parse_optional_alias_inner<F>(
11857        &mut self,
11858        reserved_kwds: Option<&[Keyword]>,
11859        validator: F,
11860    ) -> Result<Option<Ident>, ParserError>
11861    where
11862        F: Fn(bool, &Keyword, &mut Parser) -> bool,
11863    {
11864        let after_as = self.parse_keyword(Keyword::AS);
11865
11866        let next_token = self.next_token();
11867        match next_token.token {
11868            // Accepts a keyword as an alias if the AS keyword explicitly indicates an alias or if the
11869            // caller provided a list of reserved keywords and the keyword is not on that list.
11870            Token::Word(w)
11871                if reserved_kwds.is_some()
11872                    && (after_as || reserved_kwds.is_some_and(|x| !x.contains(&w.keyword))) =>
11873            {
11874                Ok(Some(w.into_ident(next_token.span)))
11875            }
11876            // Accepts a keyword as an alias based on the caller's context, i.e. which SQL element
11877            // this word could be an alias of, using the validator callback. This allows for
11878            // dialect-specific logic.
11879            Token::Word(w) if validator(after_as, &w.keyword, self) => {
11880                Ok(Some(w.into_ident(next_token.span)))
11881            }
11882            // For backwards-compatibility, we accept quoted strings as aliases regardless of the context.
11883            Token::SingleQuotedString(s) => Ok(Some(Ident::with_quote('\'', s))),
11884            Token::DoubleQuotedString(s) => Ok(Some(Ident::with_quote('\"', s))),
11885            _ => {
11886                if after_as {
11887                    return self.expected("an identifier after AS", next_token);
11888                }
11889                self.prev_token();
11890                Ok(None) // no alias found
11891            }
11892        }
11893    }
11894
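    /// Parses an optional `GROUP BY` clause, returning `None` if the
    /// `GROUP BY` keywords are not present.
    ///
    /// For example (a minimal sketch with an illustrative input):
    ///
    /// ```rust
    /// use sqlparser::ast::GroupByExpr;
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// // illustrative input
    /// let mut parser = Parser::new(&dialect).try_with_sql("GROUP BY a, b").unwrap();
    /// let group_by = parser.parse_optional_group_by().unwrap();
    /// assert!(matches!(group_by, Some(GroupByExpr::Expressions(_, _))));
    /// ```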
11895    pub fn parse_optional_group_by(&mut self) -> Result<Option<GroupByExpr>, ParserError> {
11896        if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
11897            let expressions = if self.parse_keyword(Keyword::ALL) {
11898                None
11899            } else {
11900                Some(self.parse_comma_separated(Parser::parse_group_by_expr)?)
11901            };
11902
11903            let mut modifiers = vec![];
11904            if self.dialect.supports_group_by_with_modifier() {
11905                loop {
11906                    if !self.parse_keyword(Keyword::WITH) {
11907                        break;
11908                    }
11909                    let keyword = self.expect_one_of_keywords(&[
11910                        Keyword::ROLLUP,
11911                        Keyword::CUBE,
11912                        Keyword::TOTALS,
11913                    ])?;
11914                    modifiers.push(match keyword {
11915                        Keyword::ROLLUP => GroupByWithModifier::Rollup,
11916                        Keyword::CUBE => GroupByWithModifier::Cube,
11917                        Keyword::TOTALS => GroupByWithModifier::Totals,
11918                        _ => {
11919                            return parser_err!(
11920                                "BUG: expected to match GroupBy modifier keyword",
11921                                self.peek_token().span.start
11922                            )
11923                        }
11924                    });
11925                }
11926            }
11927            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
11928                self.expect_token(&Token::LParen)?;
11929                let result = self.parse_comma_separated(|p| {
11930                    if p.peek_token_ref().token == Token::LParen {
11931                        p.parse_tuple(true, true)
11932                    } else {
11933                        Ok(vec![p.parse_expr()?])
11934                    }
11935                })?;
11936                self.expect_token(&Token::RParen)?;
11937                modifiers.push(GroupByWithModifier::GroupingSets(Expr::GroupingSets(
11938                    result,
11939                )));
11940            };
11941            let group_by = match expressions {
11942                None => GroupByExpr::All(modifiers),
11943                Some(exprs) => GroupByExpr::Expressions(exprs, modifiers),
11944            };
11945            Ok(Some(group_by))
11946        } else {
11947            Ok(None)
11948        }
11949    }
11950
11951    pub fn parse_optional_order_by(&mut self) -> Result<Option<OrderBy>, ParserError> {
11952        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
11953            let order_by =
11954                if self.dialect.supports_order_by_all() && self.parse_keyword(Keyword::ALL) {
11955                    let order_by_options = self.parse_order_by_options()?;
11956                    OrderBy {
11957                        kind: OrderByKind::All(order_by_options),
11958                        interpolate: None,
11959                    }
11960                } else {
11961                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
11962                    let interpolate = if dialect_of!(self is ClickHouseDialect | GenericDialect) {
11963                        self.parse_interpolations()?
11964                    } else {
11965                        None
11966                    };
11967                    OrderBy {
11968                        kind: OrderByKind::Expressions(exprs),
11969                        interpolate,
11970                    }
11971                };
11972            Ok(Some(order_by))
11973        } else {
11974            Ok(None)
11975        }
11976    }
11977
11978    fn parse_optional_limit_clause(&mut self) -> Result<Option<LimitClause>, ParserError> {
11979        let mut offset = if self.parse_keyword(Keyword::OFFSET) {
11980            Some(self.parse_offset()?)
11981        } else {
11982            None
11983        };
11984
11985        let (limit, limit_by) = if self.parse_keyword(Keyword::LIMIT) {
11986            let expr = self.parse_limit()?;
11987
11988            if self.dialect.supports_limit_comma()
11989                && offset.is_none()
11990                && expr.is_some() // ALL not supported with comma
11991                && self.consume_token(&Token::Comma)
11992            {
11993                let offset = expr.ok_or_else(|| {
11994                    ParserError::ParserError(
11995                        "Missing offset for LIMIT <offset>, <limit>".to_string(),
11996                    )
11997                })?;
11998                return Ok(Some(LimitClause::OffsetCommaLimit {
11999                    offset,
12000                    limit: self.parse_expr()?,
12001                }));
12002            }
12003
12004            let limit_by = if dialect_of!(self is ClickHouseDialect | GenericDialect)
12005                && self.parse_keyword(Keyword::BY)
12006            {
12007                Some(self.parse_comma_separated(Parser::parse_expr)?)
12008            } else {
12009                None
12010            };
12011
12012            (Some(expr), limit_by)
12013        } else {
12014            (None, None)
12015        };
12016
12017        if offset.is_none() && limit.is_some() && self.parse_keyword(Keyword::OFFSET) {
12018            offset = Some(self.parse_offset()?);
12019        }
12020
12021        if offset.is_some() || (limit.is_some() && limit != Some(None)) || limit_by.is_some() {
12022            Ok(Some(LimitClause::LimitOffset {
12023                limit: limit.unwrap_or_default(),
12024                offset,
12025                limit_by: limit_by.unwrap_or_default(),
12026            }))
12027        } else {
12028            Ok(None)
12029        }
12030    }
12031
12032    /// Parse a table object for insertion,
12033    /// e.g. `some_database.some_table` or `FUNCTION some_table_func(...)`.
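    ///
    /// For example (a minimal sketch with an illustrative input):
    ///
    /// ```rust
    /// use sqlparser::ast::TableObject;
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// // illustrative input
    /// let mut parser = Parser::new(&dialect)
    ///     .try_with_sql("some_database.some_table")
    ///     .unwrap();
    /// match parser.parse_table_object().unwrap() {
    ///     TableObject::TableName(name) => assert_eq!(name.to_string(), "some_database.some_table"),
    ///     _ => unreachable!("expected a plain table name"),
    /// }
    /// ```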
12034    pub fn parse_table_object(&mut self) -> Result<TableObject, ParserError> {
12035        if self.dialect.supports_insert_table_function() && self.parse_keyword(Keyword::FUNCTION) {
12036            let fn_name = self.parse_object_name(false)?;
12037            self.parse_function_call(fn_name)
12038                .map(TableObject::TableFunction)
12039        } else {
12040            self.parse_object_name(false).map(TableObject::TableName)
12041        }
12042    }
12043
12044    /// Parse a possibly qualified, possibly quoted identifier, e.g.
12045    /// `foo` or `myschema."table"`
12046    ///
12047    /// The `in_table_clause` parameter indicates whether the object name is a table in a FROM, JOIN,
12048    /// or similar table clause. Currently, this is used only to support unquoted hyphenated identifiers
12049    /// in this context on BigQuery.
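    ///
    /// For example (a minimal sketch with an illustrative input):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// // illustrative input
    /// let mut parser = Parser::new(&dialect).try_with_sql(r#"myschema."table""#).unwrap();
    /// let name = parser.parse_object_name(false).unwrap();
    /// assert_eq!(name.0.len(), 2);
    /// assert_eq!(name.to_string(), r#"myschema."table""#);
    /// ```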
12050    pub fn parse_object_name(&mut self, in_table_clause: bool) -> Result<ObjectName, ParserError> {
12051        self.parse_object_name_inner(in_table_clause, false)
12052    }
12053
12054    /// Parse a possibly qualified, possibly quoted identifier, e.g.
12055    /// `foo` or `myschema."table"`
12056    ///
12057    /// The `in_table_clause` parameter indicates whether the object name is a table in a FROM, JOIN,
12058    /// or similar table clause. Currently, this is used only to support unquoted hyphenated identifiers
12059    /// in this context on BigQuery.
12060    ///
12061    /// The `allow_wildcards` parameter indicates whether to allow for wildcards in the object name
12062    /// e.g. *, *.*, `foo`.*, or "foo"."bar"
12063    fn parse_object_name_inner(
12064        &mut self,
12065        in_table_clause: bool,
12066        allow_wildcards: bool,
12067    ) -> Result<ObjectName, ParserError> {
12068        let mut parts = vec![];
12069        if dialect_of!(self is BigQueryDialect) && in_table_clause {
12070            loop {
12071                let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
12072                parts.push(ObjectNamePart::Identifier(ident));
12073                if !self.consume_token(&Token::Period) && !end_with_period {
12074                    break;
12075                }
12076            }
12077        } else {
12078            loop {
12079                if allow_wildcards && self.peek_token().token == Token::Mul {
12080                    let span = self.next_token().span;
12081                    parts.push(ObjectNamePart::Identifier(Ident {
12082                        value: Token::Mul.to_string(),
12083                        quote_style: None,
12084                        span,
12085                    }));
12086                } else if dialect_of!(self is BigQueryDialect) && in_table_clause {
12087                    let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
12088                    parts.push(ObjectNamePart::Identifier(ident));
12089                    if !self.consume_token(&Token::Period) && !end_with_period {
12090                        break;
12091                    }
12092                } else if self.dialect.supports_object_name_double_dot_notation()
12093                    && parts.len() == 1
12094                    && matches!(self.peek_token().token, Token::Period)
12095                {
12096                    // Empty string here means default schema
12097                    parts.push(ObjectNamePart::Identifier(Ident::new("")));
12098                } else {
12099                    let ident = self.parse_identifier()?;
12100                    let part = if self
12101                        .dialect
12102                        .is_identifier_generating_function_name(&ident, &parts)
12103                    {
12104                        self.expect_token(&Token::LParen)?;
12105                        let args: Vec<FunctionArg> =
12106                            self.parse_comma_separated0(Self::parse_function_args, Token::RParen)?;
12107                        self.expect_token(&Token::RParen)?;
12108                        ObjectNamePart::Function(ObjectNamePartFunction { name: ident, args })
12109                    } else {
12110                        ObjectNamePart::Identifier(ident)
12111                    };
12112                    parts.push(part);
12113                }
12114
12115                if !self.consume_token(&Token::Period) {
12116                    break;
12117                }
12118            }
12119        }
12120
12121        // BigQuery allows a table name to be written as any number of quoted identifiers, which may themselves contain dots.
12122        // https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#quoted_identifiers
12123        if dialect_of!(self is BigQueryDialect)
12124            && parts.iter().any(|part| {
12125                part.as_ident()
12126                    .is_some_and(|ident| ident.value.contains('.'))
12127            })
12128        {
12129            parts = parts
12130                .into_iter()
12131                .flat_map(|part| match part.as_ident() {
12132                    Some(ident) => ident
12133                        .value
12134                        .split('.')
12135                        .map(|value| {
12136                            ObjectNamePart::Identifier(Ident {
12137                                value: value.into(),
12138                                quote_style: ident.quote_style,
12139                                span: ident.span,
12140                            })
12141                        })
12142                        .collect::<Vec<_>>(),
12143                    None => vec![part],
12144                })
12145                .collect()
12146        }
12147
12148        Ok(ObjectName(parts))
12149    }
12150
12151    /// Parse identifiers
12152    pub fn parse_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
12153        let mut idents = vec![];
12154        loop {
12155            match &self.peek_token_ref().token {
12156                Token::Word(w) => {
12157                    idents.push(w.clone().into_ident(self.peek_token_ref().span));
12158                }
12159                Token::EOF | Token::Eq | Token::SemiColon => break,
12160                _ => {}
12161            }
12162            self.advance_token();
12163        }
12164        Ok(idents)
12165    }
12166
12167    /// Parse identifiers of the form `ident1[.identN]*`
12168    ///
12169    /// Similar in functionality to [parse_identifiers], with the difference
12170    /// that this function is much stricter: it only accepts a valid multipart identifier
12171    /// and fails if any extraneous tokens are encountered.
12172    ///
12173    /// For example:
12174    ///
12175    /// ```rust
12176    /// use sqlparser::ast::Ident;
12177    /// use sqlparser::dialect::GenericDialect;
12178    /// use sqlparser::parser::Parser;
12179    ///
12180    /// let dialect = GenericDialect {};
12181    /// let expected = vec![Ident::new("one"), Ident::new("two")];
12182    ///
12183    /// // expected usage
12184    /// let sql = "one.two";
12185    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
12186    /// let actual = parser.parse_multipart_identifier().unwrap();
12187    /// assert_eq!(&actual, &expected);
12188    ///
12189    /// // parse_identifiers is more loose on what it allows, parsing successfully
12190    /// let sql = "one + two";
12191    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
12192    /// let actual = parser.parse_identifiers().unwrap();
12193    /// assert_eq!(&actual, &expected);
12194    ///
12195    /// // expected to strictly fail due to + separator
12196    /// let sql = "one + two";
12197    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
12198    /// let actual = parser.parse_multipart_identifier().unwrap_err();
12199    /// assert_eq!(
12200    ///     actual.to_string(),
12201    ///     "sql parser error: Unexpected token in identifier: +"
12202    /// );
12203    /// ```
12204    ///
12205    /// [parse_identifiers]: Parser::parse_identifiers
12206    pub fn parse_multipart_identifier(&mut self) -> Result<Vec<Ident>, ParserError> {
12207        let mut idents = vec![];
12208
12209        // expecting at least one word for identifier
12210        let next_token = self.next_token();
12211        match next_token.token {
12212            Token::Word(w) => idents.push(w.into_ident(next_token.span)),
12213            Token::EOF => {
12214                return Err(ParserError::ParserError(
12215                    "Empty input when parsing identifier".to_string(),
12216                ))?
12217            }
12218            token => {
12219                return Err(ParserError::ParserError(format!(
12220                    "Unexpected token in identifier: {token}"
12221                )))?
12222            }
12223        };
12224
12225        // parse optional additional parts, if any exist
12226        loop {
12227            match self.next_token().token {
12228                // ensure that optional period is succeeded by another identifier
12229                Token::Period => {
12230                    let next_token = self.next_token();
12231                    match next_token.token {
12232                        Token::Word(w) => idents.push(w.into_ident(next_token.span)),
12233                        Token::EOF => {
12234                            return Err(ParserError::ParserError(
12235                                "Trailing period in identifier".to_string(),
12236                            ))?
12237                        }
12238                        token => {
12239                            return Err(ParserError::ParserError(format!(
12240                                "Unexpected token following period in identifier: {token}"
12241                            )))?
12242                        }
12243                    }
12244                }
12245                Token::EOF => break,
12246                token => {
12247                    return Err(ParserError::ParserError(format!(
12248                        "Unexpected token in identifier: {token}"
12249                    )))?;
12250                }
12251            }
12252        }
12253
12254        Ok(idents)
12255    }
12256
12257    /// Parse a simple one-word identifier (possibly quoted, possibly a keyword)
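    ///
    /// For example (a minimal sketch with an illustrative input):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// // illustrative input: a quoted identifier
    /// let mut parser = Parser::new(&dialect).try_with_sql(r#""my col""#).unwrap();
    /// let ident = parser.parse_identifier().unwrap();
    /// assert_eq!(ident.value, "my col");
    /// assert_eq!(ident.quote_style, Some('"'));
    /// ```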
12258    pub fn parse_identifier(&mut self) -> Result<Ident, ParserError> {
12259        let next_token = self.next_token();
12260        match next_token.token {
12261            Token::Word(w) => Ok(w.into_ident(next_token.span)),
12262            Token::SingleQuotedString(s) => Ok(Ident::with_quote('\'', s)),
12263            Token::DoubleQuotedString(s) => Ok(Ident::with_quote('\"', s)),
12264            _ => self.expected("identifier", next_token),
12265        }
12266    }
12267
12268    /// On BigQuery, hyphens are permitted in unquoted identifiers inside of a FROM or
12269    /// TABLE clause.
12270    ///
12271    /// The first segment must be an ordinary unquoted identifier, e.g. it must not start
12272    /// with a digit. Subsequent segments must be either valid identifiers or
12273    /// integers, e.g. foo-123 is allowed, but foo-123a is not.
12274    ///
12275    /// [BigQuery-lexical](https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical)
12276    ///
12277    /// Return a tuple of the identifier and a boolean indicating it ends with a period.
12278    fn parse_unquoted_hyphenated_identifier(&mut self) -> Result<(Ident, bool), ParserError> {
12279        match self.peek_token().token {
12280            Token::Word(w) => {
12281                let quote_style_is_none = w.quote_style.is_none();
12282                let mut requires_whitespace = false;
12283                let mut ident = w.into_ident(self.next_token().span);
12284                if quote_style_is_none {
12285                    while matches!(self.peek_token_no_skip().token, Token::Minus) {
12286                        self.next_token();
12287                        ident.value.push('-');
12288
12289                        let token = self
12290                            .next_token_no_skip()
12291                            .cloned()
12292                            .unwrap_or(TokenWithSpan::wrap(Token::EOF));
12293                        requires_whitespace = match token.token {
12294                            Token::Word(next_word) if next_word.quote_style.is_none() => {
12295                                ident.value.push_str(&next_word.value);
12296                                false
12297                            }
12298                            Token::Number(s, false) => {
12299                                // A number token can represent a decimal value ending with a period, e.g., `Number('123.')`.
12300                                // However, for an [ObjectName], it is part of a hyphenated identifier, e.g., `foo-123.bar`.
12301                                //
12302                                // If a number token is followed by a period, it is part of an [ObjectName].
12303                                // Return the identifier with `true` if the number token is followed by a period, indicating that
12304                                // parsing should continue for the next part of the hyphenated identifier.
12305                                if s.ends_with('.') {
12306                                    let Some(s) = s.split('.').next().filter(|s| {
12307                                        !s.is_empty() && s.chars().all(|c| c.is_ascii_digit())
12308                                    }) else {
12309                                        return self.expected(
12310                                            "continuation of hyphenated identifier",
12311                                            TokenWithSpan::new(Token::Number(s, false), token.span),
12312                                        );
12313                                    };
12314                                    ident.value.push_str(s);
12315                                    return Ok((ident, true));
12316                                } else {
12317                                    ident.value.push_str(&s);
12318                                }
12319                                // If next token is period, then it is part of an ObjectName and we don't expect whitespace
12320                                // after the number.
12321                                !matches!(self.peek_token().token, Token::Period)
12322                            }
12323                            _ => {
12324                                return self
12325                                    .expected("continuation of hyphenated identifier", token);
12326                            }
12327                        }
12328                    }
12329
12330                    // If the last segment was a number, we must check that it's followed by whitespace,
12331                    // otherwise foo-123a will be parsed as `foo-123` with the alias `a`.
12332                    if requires_whitespace {
12333                        let token = self.next_token();
12334                        if !matches!(token.token, Token::EOF | Token::Whitespace(_)) {
12335                            return self
12336                                .expected("whitespace following hyphenated identifier", token);
12337                        }
12338                    }
12339                }
12340                Ok((ident, false))
12341            }
12342            _ => Ok((self.parse_identifier()?, false)),
12343        }
12344    }
12345
12346    /// Parses a parenthesized, comma-separated list of column definitions within a view.
12347    fn parse_view_columns(&mut self) -> Result<Vec<ViewColumnDef>, ParserError> {
12348        if self.consume_token(&Token::LParen) {
12349            if self.peek_token().token == Token::RParen {
12350                self.next_token();
12351                Ok(vec![])
12352            } else {
12353                let cols = self.parse_comma_separated_with_trailing_commas(
12354                    Parser::parse_view_column,
12355                    self.dialect.supports_column_definition_trailing_commas(),
12356                    Self::is_reserved_for_column_alias,
12357                )?;
12358                self.expect_token(&Token::RParen)?;
12359                Ok(cols)
12360            }
12361        } else {
12362            Ok(vec![])
12363        }
12364    }
12365
12366    /// Parses a column definition within a view.
12367    fn parse_view_column(&mut self) -> Result<ViewColumnDef, ParserError> {
12368        let name = self.parse_identifier()?;
12369        let options = self.parse_view_column_options()?;
12370        let data_type = if dialect_of!(self is ClickHouseDialect) {
12371            Some(self.parse_data_type()?)
12372        } else {
12373            None
12374        };
12375        Ok(ViewColumnDef {
12376            name,
12377            data_type,
12378            options,
12379        })
12380    }
12381
12382    fn parse_view_column_options(&mut self) -> Result<Option<ColumnOptions>, ParserError> {
12383        let mut options = Vec::new();
12384        loop {
12385            let option = self.parse_optional_column_option()?;
12386            if let Some(option) = option {
12387                options.push(option);
12388            } else {
12389                break;
12390            }
12391        }
12392        if options.is_empty() {
12393            Ok(None)
12394        } else if self.dialect.supports_space_separated_column_options() {
12395            Ok(Some(ColumnOptions::SpaceSeparated(options)))
12396        } else {
12397            Ok(Some(ColumnOptions::CommaSeparated(options)))
12398        }
12399    }
12400
12401    /// Parses a parenthesized comma-separated list of unqualified, possibly quoted identifiers.
12402    /// For example: `(col1, "col 2", ...)`
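    ///
    /// For example (a minimal sketch with an illustrative input):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::{IsOptional, Parser};
    ///
    /// let dialect = GenericDialect {};
    /// // illustrative input
    /// let mut parser = Parser::new(&dialect).try_with_sql(r#"(col1, "col 2")"#).unwrap();
    /// let columns = parser
    ///     .parse_parenthesized_column_list(IsOptional::Mandatory, false)
    ///     .unwrap();
    /// assert_eq!(columns.len(), 2);
    /// assert_eq!(columns[1].value, "col 2");
    /// ```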
12403    pub fn parse_parenthesized_column_list(
12404        &mut self,
12405        optional: IsOptional,
12406        allow_empty: bool,
12407    ) -> Result<Vec<Ident>, ParserError> {
12408        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| p.parse_identifier())
12409    }
12410
12411    pub fn parse_parenthesized_compound_identifier_list(
12412        &mut self,
12413        optional: IsOptional,
12414        allow_empty: bool,
12415    ) -> Result<Vec<Expr>, ParserError> {
12416        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
12417            Ok(Expr::CompoundIdentifier(
12418                p.parse_period_separated(|p| p.parse_identifier())?,
12419            ))
12420        })
12421    }
12422
12423    /// Parses a parenthesized comma-separated list of index columns, which can be arbitrary
12424    /// expressions with ordering information (and an opclass in some dialects).
12425    fn parse_parenthesized_index_column_list(&mut self) -> Result<Vec<IndexColumn>, ParserError> {
12426        self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
12427            p.parse_create_index_expr()
12428        })
12429    }
12430
12431    /// Parses a parenthesized comma-separated list of qualified, possibly quoted identifiers.
12432    /// For example: `(db1.sc1.tbl1.col1, db1.sc1.tbl1."col 2", ...)`
12433    pub fn parse_parenthesized_qualified_column_list(
12434        &mut self,
12435        optional: IsOptional,
12436        allow_empty: bool,
12437    ) -> Result<Vec<ObjectName>, ParserError> {
12438        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
12439            p.parse_object_name(true)
12440        })
12441    }
12442
12443    /// Parses a parenthesized comma-separated list of columns using
12444    /// the provided function to parse each element.
12445    fn parse_parenthesized_column_list_inner<F, T>(
12446        &mut self,
12447        optional: IsOptional,
12448        allow_empty: bool,
12449        mut f: F,
12450    ) -> Result<Vec<T>, ParserError>
12451    where
12452        F: FnMut(&mut Parser) -> Result<T, ParserError>,
12453    {
12454        if self.consume_token(&Token::LParen) {
12455            if allow_empty && self.peek_token().token == Token::RParen {
12456                self.next_token();
12457                Ok(vec![])
12458            } else {
12459                let cols = self.parse_comma_separated(|p| f(p))?;
12460                self.expect_token(&Token::RParen)?;
12461                Ok(cols)
12462            }
12463        } else if optional == Optional {
12464            Ok(vec![])
12465        } else {
12466            self.expected("a list of columns in parentheses", self.peek_token())
12467        }
12468    }
12469
12470    /// Parses a parenthesized comma-separated list of table alias column definitions.
12471    fn parse_table_alias_column_defs(&mut self) -> Result<Vec<TableAliasColumnDef>, ParserError> {
12472        if self.consume_token(&Token::LParen) {
12473            let cols = self.parse_comma_separated(|p| {
12474                let name = p.parse_identifier()?;
12475                let data_type = p.maybe_parse(|p| p.parse_data_type())?;
12476                Ok(TableAliasColumnDef { name, data_type })
12477            })?;
12478            self.expect_token(&Token::RParen)?;
12479            Ok(cols)
12480        } else {
12481            Ok(vec![])
12482        }
12483    }
12484
12485    pub fn parse_precision(&mut self) -> Result<u64, ParserError> {
12486        self.expect_token(&Token::LParen)?;
12487        let n = self.parse_literal_uint()?;
12488        self.expect_token(&Token::RParen)?;
12489        Ok(n)
12490    }
12491
12492    pub fn parse_optional_precision(&mut self) -> Result<Option<u64>, ParserError> {
12493        if self.consume_token(&Token::LParen) {
12494            let n = self.parse_literal_uint()?;
12495            self.expect_token(&Token::RParen)?;
12496            Ok(Some(n))
12497        } else {
12498            Ok(None)
12499        }
12500    }
12501
12502    fn maybe_parse_optional_interval_fields(
12503        &mut self,
12504    ) -> Result<Option<IntervalFields>, ParserError> {
12505        match self.parse_one_of_keywords(&[
12506            // Can be followed by `TO` option
12507            Keyword::YEAR,
12508            Keyword::DAY,
12509            Keyword::HOUR,
12510            Keyword::MINUTE,
12511            // No `TO` option
12512            Keyword::MONTH,
12513            Keyword::SECOND,
12514        ]) {
12515            Some(Keyword::YEAR) => {
12516                if self.peek_keyword(Keyword::TO) {
12517                    self.expect_keyword(Keyword::TO)?;
12518                    self.expect_keyword(Keyword::MONTH)?;
12519                    Ok(Some(IntervalFields::YearToMonth))
12520                } else {
12521                    Ok(Some(IntervalFields::Year))
12522                }
12523            }
12524            Some(Keyword::DAY) => {
12525                if self.peek_keyword(Keyword::TO) {
12526                    self.expect_keyword(Keyword::TO)?;
12527                    match self.expect_one_of_keywords(&[
12528                        Keyword::HOUR,
12529                        Keyword::MINUTE,
12530                        Keyword::SECOND,
12531                    ])? {
12532                        Keyword::HOUR => Ok(Some(IntervalFields::DayToHour)),
12533                        Keyword::MINUTE => Ok(Some(IntervalFields::DayToMinute)),
12534                        Keyword::SECOND => Ok(Some(IntervalFields::DayToSecond)),
12535                        _ => {
12536                            self.prev_token();
12537                            self.expected("HOUR, MINUTE, or SECOND", self.peek_token())
12538                        }
12539                    }
12540                } else {
12541                    Ok(Some(IntervalFields::Day))
12542                }
12543            }
12544            Some(Keyword::HOUR) => {
12545                if self.peek_keyword(Keyword::TO) {
12546                    self.expect_keyword(Keyword::TO)?;
12547                    match self.expect_one_of_keywords(&[Keyword::MINUTE, Keyword::SECOND])? {
12548                        Keyword::MINUTE => Ok(Some(IntervalFields::HourToMinute)),
12549                        Keyword::SECOND => Ok(Some(IntervalFields::HourToSecond)),
12550                        _ => {
12551                            self.prev_token();
12552                            self.expected("MINUTE or SECOND", self.peek_token())
12553                        }
12554                    }
12555                } else {
12556                    Ok(Some(IntervalFields::Hour))
12557                }
12558            }
12559            Some(Keyword::MINUTE) => {
12560                if self.peek_keyword(Keyword::TO) {
12561                    self.expect_keyword(Keyword::TO)?;
12562                    self.expect_keyword(Keyword::SECOND)?;
12563                    Ok(Some(IntervalFields::MinuteToSecond))
12564                } else {
12565                    Ok(Some(IntervalFields::Minute))
12566                }
12567            }
12568            Some(Keyword::MONTH) => Ok(Some(IntervalFields::Month)),
12569            Some(Keyword::SECOND) => Ok(Some(IntervalFields::Second)),
12570            Some(_) => {
12571                self.prev_token();
12572                self.expected(
12573                    "YEAR, MONTH, DAY, HOUR, MINUTE, or SECOND",
12574                    self.peek_token(),
12575                )
12576            }
12577            None => Ok(None),
12578        }
12579    }
12580
12581    /// Parse a ClickHouse `DateTime64` type specification [1].
12582    /// Syntax:
12583    /// ```sql
12584    /// DateTime64(precision[, timezone])
12585    /// ```
12586    ///
12587    /// [1]: https://clickhouse.com/docs/en/sql-reference/data-types/datetime64
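    ///
    /// For example (a minimal sketch with an illustrative input):
    ///
    /// ```rust
    /// use sqlparser::dialect::ClickHouseDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = ClickHouseDialect {};
    /// // illustrative input
    /// let mut parser = Parser::new(&dialect).try_with_sql("DateTime64(3, 'UTC')").unwrap();
    /// let (precision, timezone) = parser.parse_datetime_64().unwrap();
    /// assert_eq!(precision, 3);
    /// assert_eq!(timezone, Some("UTC".to_string()));
    /// ```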
12588    pub fn parse_datetime_64(&mut self) -> Result<(u64, Option<String>), ParserError> {
12589        self.expect_keyword_is(Keyword::DATETIME64)?;
12590        self.expect_token(&Token::LParen)?;
12591        let precision = self.parse_literal_uint()?;
12592        let time_zone = if self.consume_token(&Token::Comma) {
12593            Some(self.parse_literal_string()?)
12594        } else {
12595            None
12596        };
12597        self.expect_token(&Token::RParen)?;
12598        Ok((precision, time_zone))
12599    }
12600
12601    pub fn parse_optional_character_length(
12602        &mut self,
12603    ) -> Result<Option<CharacterLength>, ParserError> {
12604        if self.consume_token(&Token::LParen) {
12605            let character_length = self.parse_character_length()?;
12606            self.expect_token(&Token::RParen)?;
12607            Ok(Some(character_length))
12608        } else {
12609            Ok(None)
12610        }
12611    }
12612
12613    pub fn parse_optional_binary_length(&mut self) -> Result<Option<BinaryLength>, ParserError> {
12614        if self.consume_token(&Token::LParen) {
12615            let binary_length = self.parse_binary_length()?;
12616            self.expect_token(&Token::RParen)?;
12617            Ok(Some(binary_length))
12618        } else {
12619            Ok(None)
12620        }
12621    }
12622
12623    pub fn parse_character_length(&mut self) -> Result<CharacterLength, ParserError> {
12624        if self.parse_keyword(Keyword::MAX) {
12625            return Ok(CharacterLength::Max);
12626        }
12627        let length = self.parse_literal_uint()?;
12628        let unit = if self.parse_keyword(Keyword::CHARACTERS) {
12629            Some(CharLengthUnits::Characters)
12630        } else if self.parse_keyword(Keyword::OCTETS) {
12631            Some(CharLengthUnits::Octets)
12632        } else {
12633            None
12634        };
12635        Ok(CharacterLength::IntegerLength { length, unit })
12636    }
12637
12638    pub fn parse_binary_length(&mut self) -> Result<BinaryLength, ParserError> {
12639        if self.parse_keyword(Keyword::MAX) {
12640            return Ok(BinaryLength::Max);
12641        }
12642        let length = self.parse_literal_uint()?;
12643        Ok(BinaryLength::IntegerLength { length })
12644    }
12645
12646    pub fn parse_optional_precision_scale(
12647        &mut self,
12648    ) -> Result<(Option<u64>, Option<u64>), ParserError> {
12649        if self.consume_token(&Token::LParen) {
12650            let n = self.parse_literal_uint()?;
12651            let scale = if self.consume_token(&Token::Comma) {
12652                Some(self.parse_literal_uint()?)
12653            } else {
12654                None
12655            };
12656            self.expect_token(&Token::RParen)?;
12657            Ok((Some(n), scale))
12658        } else {
12659            Ok((None, None))
12660        }
12661    }
12662
12663    pub fn parse_exact_number_optional_precision_scale(
12664        &mut self,
12665    ) -> Result<ExactNumberInfo, ParserError> {
12666        if self.consume_token(&Token::LParen) {
12667            let precision = self.parse_literal_uint()?;
12668            let scale = if self.consume_token(&Token::Comma) {
12669                Some(self.parse_signed_integer()?)
12670            } else {
12671                None
12672            };
12673
12674            self.expect_token(&Token::RParen)?;
12675
12676            match scale {
12677                None => Ok(ExactNumberInfo::Precision(precision)),
12678                Some(scale) => Ok(ExactNumberInfo::PrecisionAndScale(precision, scale)),
12679            }
12680        } else {
12681            Ok(ExactNumberInfo::None)
12682        }
12683    }
12684
12685    /// Parse an optionally signed integer literal.
12686    fn parse_signed_integer(&mut self) -> Result<i64, ParserError> {
12687        let is_negative = self.consume_token(&Token::Minus);
12688
12689        if !is_negative {
12690            let _ = self.consume_token(&Token::Plus);
12691        }
12692
12693        let current_token = self.peek_token_ref();
12694        match &current_token.token {
12695            Token::Number(s, _) => {
12696                let s = s.clone();
12697                let span_start = current_token.span.start;
12698                self.advance_token();
12699                let value = Self::parse::<i64>(s, span_start)?;
12700                Ok(if is_negative { -value } else { value })
12701            }
12702            _ => self.expected_ref("number", current_token),
12703        }
12704    }
12705
12706    pub fn parse_optional_type_modifiers(&mut self) -> Result<Option<Vec<String>>, ParserError> {
12707        if self.consume_token(&Token::LParen) {
12708            let mut modifiers = Vec::new();
12709            loop {
12710                let next_token = self.next_token();
12711                match next_token.token {
12712                    Token::Word(w) => modifiers.push(w.to_string()),
12713                    Token::Number(n, _) => modifiers.push(n),
12714                    Token::SingleQuotedString(s) => modifiers.push(s),
12715
12716                    Token::Comma => {
12717                        continue;
12718                    }
12719                    Token::RParen => {
12720                        break;
12721                    }
12722                    _ => self.expected("type modifiers", next_token)?,
12723                }
12724            }
12725
12726            Ok(Some(modifiers))
12727        } else {
12728            Ok(None)
12729        }
12730    }
12731
12732    /// Parse a parenthesized sub data type
12733    fn parse_sub_type<F>(&mut self, parent_type: F) -> Result<DataType, ParserError>
12734    where
12735        F: FnOnce(Box<DataType>) -> DataType,
12736    {
12737        self.expect_token(&Token::LParen)?;
12738        let inside_type = self.parse_data_type()?;
12739        self.expect_token(&Token::RParen)?;
12740        Ok(parent_type(inside_type.into()))
12741    }
12742
12743    /// Parse a DELETE statement, returning a `Box`ed SetExpr
12744    ///
12745    /// This is used to reduce the size of the stack frames in debug builds
12746    fn parse_delete_setexpr_boxed(
12747        &mut self,
12748        delete_token: TokenWithSpan,
12749    ) -> Result<Box<SetExpr>, ParserError> {
12750        Ok(Box::new(SetExpr::Delete(self.parse_delete(delete_token)?)))
12751    }
12752
12753    pub fn parse_delete(&mut self, delete_token: TokenWithSpan) -> Result<Statement, ParserError> {
12754        let (tables, with_from_keyword) = if !self.parse_keyword(Keyword::FROM) {
12755            // `FROM` keyword is optional in BigQuery SQL.
12756            // https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax#delete_statement
12757            if dialect_of!(self is BigQueryDialect | OracleDialect | GenericDialect) {
12758                (vec![], false)
12759            } else {
12760                let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
12761                self.expect_keyword_is(Keyword::FROM)?;
12762                (tables, true)
12763            }
12764        } else {
12765            (vec![], true)
12766        };
12767
12768        let from = self.parse_comma_separated(Parser::parse_table_and_joins)?;
12769        let using = if self.parse_keyword(Keyword::USING) {
12770            Some(self.parse_comma_separated(Parser::parse_table_and_joins)?)
12771        } else {
12772            None
12773        };
12774        let selection = if self.parse_keyword(Keyword::WHERE) {
12775            Some(self.parse_expr()?)
12776        } else {
12777            None
12778        };
12779        let returning = if self.parse_keyword(Keyword::RETURNING) {
12780            Some(self.parse_comma_separated(Parser::parse_select_item)?)
12781        } else {
12782            None
12783        };
12784        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
12785            self.parse_comma_separated(Parser::parse_order_by_expr)?
12786        } else {
12787            vec![]
12788        };
12789        let limit = if self.parse_keyword(Keyword::LIMIT) {
12790            self.parse_limit()?
12791        } else {
12792            None
12793        };
12794
12795        Ok(Statement::Delete(Delete {
12796            delete_token: delete_token.into(),
12797            tables,
12798            from: if with_from_keyword {
12799                FromTable::WithFromKeyword(from)
12800            } else {
12801                FromTable::WithoutKeyword(from)
12802            },
12803            using,
12804            selection,
12805            returning,
12806            order_by,
12807            limit,
12808        }))
12809    }
12810
12811    /// Parse a `KILL` statement: `KILL [CONNECTION | QUERY | MUTATION] processlist_id`
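    ///
    /// For example (a minimal sketch; the `KILL` keyword itself is expected to
    /// have been consumed by the caller, and the id below is illustrative):
    ///
    /// ```rust
    /// use sqlparser::ast::{KillType, Statement};
    /// use sqlparser::dialect::MySqlDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = MySqlDialect {};
    /// // illustrative input (everything after the already-consumed `KILL` keyword)
    /// let mut parser = Parser::new(&dialect).try_with_sql("CONNECTION 123").unwrap();
    /// let stmt = parser.parse_kill().unwrap();
    /// assert_eq!(
    ///     stmt,
    ///     Statement::Kill {
    ///         modifier: Some(KillType::Connection),
    ///         id: 123,
    ///     }
    /// );
    /// ```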
12812    pub fn parse_kill(&mut self) -> Result<Statement, ParserError> {
12813        let modifier_keyword =
12814            self.parse_one_of_keywords(&[Keyword::CONNECTION, Keyword::QUERY, Keyword::MUTATION]);
12815
12816        let id = self.parse_literal_uint()?;
12817
12818        let modifier = match modifier_keyword {
12819            Some(Keyword::CONNECTION) => Some(KillType::Connection),
12820            Some(Keyword::QUERY) => Some(KillType::Query),
12821            Some(Keyword::MUTATION) => {
12822                if dialect_of!(self is ClickHouseDialect | GenericDialect) {
12823                    Some(KillType::Mutation)
12824                } else {
12825                    self.expected(
12826                        "Unsupported type for KILL, allowed: CONNECTION | QUERY",
12827                        self.peek_token(),
12828                    )?
12829                }
12830            }
12831            _ => None,
12832        };
12833
12834        Ok(Statement::Kill { modifier, id })
12835    }
12836
12837    pub fn parse_explain(
12838        &mut self,
12839        describe_alias: DescribeAlias,
12840    ) -> Result<Statement, ParserError> {
12841        let mut analyze = false;
12842        let mut verbose = false;
12843        let mut query_plan = false;
12844        let mut estimate = false;
12845        let mut format = None;
12846        let mut options = None;
12847
12848        // Note: DuckDB is compatible with PostgreSQL syntax for this statement,
12849        // although not all features may be implemented.
12850        if describe_alias == DescribeAlias::Explain
12851            && self.dialect.supports_explain_with_utility_options()
12852            && self.peek_token().token == Token::LParen
12853        {
12854            options = Some(self.parse_utility_options()?)
12855        } else if self.parse_keywords(&[Keyword::QUERY, Keyword::PLAN]) {
12856            query_plan = true;
12857        } else if self.parse_keyword(Keyword::ESTIMATE) {
12858            estimate = true;
12859        } else {
12860            analyze = self.parse_keyword(Keyword::ANALYZE);
12861            verbose = self.parse_keyword(Keyword::VERBOSE);
12862            if self.parse_keyword(Keyword::FORMAT) {
12863                format = Some(self.parse_analyze_format_kind()?);
12864            }
12865        }
12866
12867        match self.maybe_parse(|parser| parser.parse_statement())? {
12868            Some(Statement::Explain { .. }) | Some(Statement::ExplainTable { .. }) => Err(
12869                ParserError::ParserError("Explain must be root of the plan".to_string()),
12870            ),
12871            Some(statement) => Ok(Statement::Explain {
12872                describe_alias,
12873                analyze,
12874                verbose,
12875                query_plan,
12876                estimate,
12877                statement: Box::new(statement),
12878                format,
12879                options,
12880            }),
12881            _ => {
12882                let hive_format =
12883                    match self.parse_one_of_keywords(&[Keyword::EXTENDED, Keyword::FORMATTED]) {
12884                        Some(Keyword::EXTENDED) => Some(HiveDescribeFormat::Extended),
12885                        Some(Keyword::FORMATTED) => Some(HiveDescribeFormat::Formatted),
12886                        _ => None,
12887                    };
12888
12889                let has_table_keyword = if self.dialect.describe_requires_table_keyword() {
12890                    // only allow the TABLE keyword for DESC|DESCRIBE statements
12891                    self.parse_keyword(Keyword::TABLE)
12892                } else {
12893                    false
12894                };
12895
12896                let table_name = self.parse_object_name(false)?;
12897                Ok(Statement::ExplainTable {
12898                    describe_alias,
12899                    hive_format,
12900                    has_table_keyword,
12901                    table_name,
12902                })
12903            }
12904        }
12905    }
12906
12907    /// Parse a query expression, i.e. a `SELECT` statement optionally
12908    /// preceded by some `WITH` CTE declarations and optionally followed
12909    /// by `ORDER BY`. Unlike some other `parse_...` methods, this one doesn't
12910    /// expect the initial keyword to have been consumed already.
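    ///
    /// For example (a minimal sketch; the illustrative query below round-trips
    /// through the AST's `Display` implementation):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// // illustrative input
    /// let sql = "SELECT a FROM t WHERE a > 1 ORDER BY a";
    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
    /// let query = parser.parse_query().unwrap();
    /// assert_eq!(query.to_string(), sql);
    /// ```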
12911    pub fn parse_query(&mut self) -> Result<Box<Query>, ParserError> {
12912        let _guard = self.recursion_counter.try_decrease()?;
12913        let with = if self.parse_keyword(Keyword::WITH) {
12914            let with_token = self.get_current_token();
12915            Some(With {
12916                with_token: with_token.clone().into(),
12917                recursive: self.parse_keyword(Keyword::RECURSIVE),
12918                cte_tables: self.parse_comma_separated(Parser::parse_cte)?,
12919            })
12920        } else {
12921            None
12922        };
12923        if self.parse_keyword(Keyword::INSERT) {
12924            Ok(Query {
12925                with,
12926                body: self.parse_insert_setexpr_boxed(self.get_current_token().clone())?,
12927                order_by: None,
12928                limit_clause: None,
12929                fetch: None,
12930                locks: vec![],
12931                for_clause: None,
12932                settings: None,
12933                format_clause: None,
12934                pipe_operators: vec![],
12935            }
12936            .into())
12937        } else if self.parse_keyword(Keyword::UPDATE) {
12938            Ok(Query {
12939                with,
12940                body: self.parse_update_setexpr_boxed(self.get_current_token().clone())?,
12941                order_by: None,
12942                limit_clause: None,
12943                fetch: None,
12944                locks: vec![],
12945                for_clause: None,
12946                settings: None,
12947                format_clause: None,
12948                pipe_operators: vec![],
12949            }
12950            .into())
12951        } else if self.parse_keyword(Keyword::DELETE) {
12952            Ok(Query {
12953                with,
12954                body: self.parse_delete_setexpr_boxed(self.get_current_token().clone())?,
12955                limit_clause: None,
12956                order_by: None,
12957                fetch: None,
12958                locks: vec![],
12959                for_clause: None,
12960                settings: None,
12961                format_clause: None,
12962                pipe_operators: vec![],
12963            }
12964            .into())
12965        } else if self.parse_keyword(Keyword::MERGE) {
12966            Ok(Query {
12967                with,
12968                body: self.parse_merge_setexpr_boxed(self.get_current_token().clone())?,
12969                limit_clause: None,
12970                order_by: None,
12971                fetch: None,
12972                locks: vec![],
12973                for_clause: None,
12974                settings: None,
12975                format_clause: None,
12976                pipe_operators: vec![],
12977            }
12978            .into())
12979        } else {
12980            let body = self.parse_query_body(self.dialect.prec_unknown())?;
12981
12982            let order_by = self.parse_optional_order_by()?;
12983
12984            let limit_clause = self.parse_optional_limit_clause()?;
12985
12986            let settings = self.parse_settings()?;
12987
12988            let fetch = if self.parse_keyword(Keyword::FETCH) {
12989                Some(self.parse_fetch()?)
12990            } else {
12991                None
12992            };
12993
12994            let mut for_clause = None;
12995            let mut locks = Vec::new();
12996            while self.parse_keyword(Keyword::FOR) {
12997                if let Some(parsed_for_clause) = self.parse_for_clause()? {
12998                    for_clause = Some(parsed_for_clause);
12999                    break;
13000                } else {
13001                    locks.push(self.parse_lock()?);
13002                }
13003            }
13004            let format_clause = if dialect_of!(self is ClickHouseDialect | GenericDialect)
13005                && self.parse_keyword(Keyword::FORMAT)
13006            {
13007                if self.parse_keyword(Keyword::NULL) {
13008                    Some(FormatClause::Null)
13009                } else {
13010                    let ident = self.parse_identifier()?;
13011                    Some(FormatClause::Identifier(ident))
13012                }
13013            } else {
13014                None
13015            };
13016
13017            let pipe_operators = if self.dialect.supports_pipe_operator() {
13018                self.parse_pipe_operators()?
13019            } else {
13020                Vec::new()
13021            };
13022
13023            Ok(Query {
13024                with,
13025                body,
13026                order_by,
13027                limit_clause,
13028                fetch,
13029                locks,
13030                for_clause,
13031                settings,
13032                format_clause,
13033                pipe_operators,
13034            }
13035            .into())
13036        }
13037    }
13038
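    /// Parse a chain of GoogleSQL-style pipe operators (`|> SELECT ...`,
    /// `|> WHERE ...`, ...), producing one [`PipeOperator`] per `|>` segment.
    /// Reached from [`Parser::parse_query`] when the dialect reports
    /// [`Dialect::supports_pipe_operator`].
    ///
    /// A representative input (a sketch of the pipe syntax):
    /// ```sql
    /// FROM orders |> WHERE amount > 0 |> AGGREGATE SUM(amount) GROUP BY customer_id
    /// ```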
13039    fn parse_pipe_operators(&mut self) -> Result<Vec<PipeOperator>, ParserError> {
13040        let mut pipe_operators = Vec::new();
13041
13042        while self.consume_token(&Token::VerticalBarRightAngleBracket) {
13043            let kw = self.expect_one_of_keywords(&[
13044                Keyword::SELECT,
13045                Keyword::EXTEND,
13046                Keyword::SET,
13047                Keyword::DROP,
13048                Keyword::AS,
13049                Keyword::WHERE,
13050                Keyword::LIMIT,
13051                Keyword::AGGREGATE,
13052                Keyword::ORDER,
13053                Keyword::TABLESAMPLE,
13054                Keyword::RENAME,
13055                Keyword::UNION,
13056                Keyword::INTERSECT,
13057                Keyword::EXCEPT,
13058                Keyword::CALL,
13059                Keyword::PIVOT,
13060                Keyword::UNPIVOT,
13061                Keyword::JOIN,
13062                Keyword::INNER,
13063                Keyword::LEFT,
13064                Keyword::RIGHT,
13065                Keyword::FULL,
13066                Keyword::CROSS,
13067            ])?;
13068            match kw {
13069                Keyword::SELECT => {
13070                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
13071                    pipe_operators.push(PipeOperator::Select { exprs })
13072                }
13073                Keyword::EXTEND => {
13074                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
13075                    pipe_operators.push(PipeOperator::Extend { exprs })
13076                }
13077                Keyword::SET => {
13078                    let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
13079                    pipe_operators.push(PipeOperator::Set { assignments })
13080                }
13081                Keyword::DROP => {
13082                    let columns = self.parse_identifiers()?;
13083                    pipe_operators.push(PipeOperator::Drop { columns })
13084                }
13085                Keyword::AS => {
13086                    let alias = self.parse_identifier()?;
13087                    pipe_operators.push(PipeOperator::As { alias })
13088                }
13089                Keyword::WHERE => {
13090                    let expr = self.parse_expr()?;
13091                    pipe_operators.push(PipeOperator::Where { expr })
13092                }
13093                Keyword::LIMIT => {
13094                    let expr = self.parse_expr()?;
13095                    let offset = if self.parse_keyword(Keyword::OFFSET) {
13096                        Some(self.parse_expr()?)
13097                    } else {
13098                        None
13099                    };
13100                    pipe_operators.push(PipeOperator::Limit { expr, offset })
13101                }
13102                Keyword::AGGREGATE => {
13103                    let full_table_exprs = if self.peek_keyword(Keyword::GROUP) {
13104                        vec![]
13105                    } else {
13106                        self.parse_comma_separated(|parser| {
13107                            parser.parse_expr_with_alias_and_order_by()
13108                        })?
13109                    };
13110
13111                    let group_by_expr = if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
13112                        self.parse_comma_separated(|parser| {
13113                            parser.parse_expr_with_alias_and_order_by()
13114                        })?
13115                    } else {
13116                        vec![]
13117                    };
13118
13119                    pipe_operators.push(PipeOperator::Aggregate {
13120                        full_table_exprs,
13121                        group_by_expr,
13122                    })
13123                }
13124                Keyword::ORDER => {
13125                    self.expect_one_of_keywords(&[Keyword::BY])?;
13126                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
13127                    pipe_operators.push(PipeOperator::OrderBy { exprs })
13128                }
13129                Keyword::TABLESAMPLE => {
13130                    let sample = self.parse_table_sample(TableSampleModifier::TableSample)?;
13131                    pipe_operators.push(PipeOperator::TableSample { sample });
13132                }
13133                Keyword::RENAME => {
13134                    let mappings =
13135                        self.parse_comma_separated(Parser::parse_identifier_with_optional_alias)?;
13136                    pipe_operators.push(PipeOperator::Rename { mappings });
13137                }
13138                Keyword::UNION => {
13139                    let set_quantifier = self.parse_set_quantifier(&Some(SetOperator::Union));
13140                    let queries = self.parse_pipe_operator_queries()?;
13141                    pipe_operators.push(PipeOperator::Union {
13142                        set_quantifier,
13143                        queries,
13144                    });
13145                }
13146                Keyword::INTERSECT => {
13147                    let set_quantifier =
13148                        self.parse_distinct_required_set_quantifier("INTERSECT")?;
13149                    let queries = self.parse_pipe_operator_queries()?;
13150                    pipe_operators.push(PipeOperator::Intersect {
13151                        set_quantifier,
13152                        queries,
13153                    });
13154                }
13155                Keyword::EXCEPT => {
13156                    let set_quantifier = self.parse_distinct_required_set_quantifier("EXCEPT")?;
13157                    let queries = self.parse_pipe_operator_queries()?;
13158                    pipe_operators.push(PipeOperator::Except {
13159                        set_quantifier,
13160                        queries,
13161                    });
13162                }
13163                Keyword::CALL => {
13164                    let function_name = self.parse_object_name(false)?;
13165                    let function_expr = self.parse_function(function_name)?;
13166                    if let Expr::Function(function) = function_expr {
13167                        let alias = self.parse_identifier_optional_alias()?;
13168                        pipe_operators.push(PipeOperator::Call { function, alias });
13169                    } else {
13170                        return Err(ParserError::ParserError(
13171                            "Expected function call after CALL".to_string(),
13172                        ));
13173                    }
13174                }
13175                Keyword::PIVOT => {
13176                    self.expect_token(&Token::LParen)?;
13177                    let aggregate_functions =
13178                        self.parse_comma_separated(Self::parse_aliased_function_call)?;
13179                    self.expect_keyword_is(Keyword::FOR)?;
13180                    let value_column = self.parse_period_separated(|p| p.parse_identifier())?;
13181                    self.expect_keyword_is(Keyword::IN)?;
13182
13183                    self.expect_token(&Token::LParen)?;
13184                    let value_source = if self.parse_keyword(Keyword::ANY) {
13185                        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
13186                            self.parse_comma_separated(Parser::parse_order_by_expr)?
13187                        } else {
13188                            vec![]
13189                        };
13190                        PivotValueSource::Any(order_by)
13191                    } else if self.peek_sub_query() {
13192                        PivotValueSource::Subquery(self.parse_query()?)
13193                    } else {
13194                        PivotValueSource::List(
13195                            self.parse_comma_separated(Self::parse_expr_with_alias)?,
13196                        )
13197                    };
13198                    self.expect_token(&Token::RParen)?;
13199                    self.expect_token(&Token::RParen)?;
13200
13201                    let alias = self.parse_identifier_optional_alias()?;
13202
13203                    pipe_operators.push(PipeOperator::Pivot {
13204                        aggregate_functions,
13205                        value_column,
13206                        value_source,
13207                        alias,
13208                    });
13209                }
13210                Keyword::UNPIVOT => {
13211                    self.expect_token(&Token::LParen)?;
13212                    let value_column = self.parse_identifier()?;
13213                    self.expect_keyword(Keyword::FOR)?;
13214                    let name_column = self.parse_identifier()?;
13215                    self.expect_keyword(Keyword::IN)?;
13216
13217                    self.expect_token(&Token::LParen)?;
13218                    let unpivot_columns = self.parse_comma_separated(Parser::parse_identifier)?;
13219                    self.expect_token(&Token::RParen)?;
13220
13221                    self.expect_token(&Token::RParen)?;
13222
13223                    let alias = self.parse_identifier_optional_alias()?;
13224
13225                    pipe_operators.push(PipeOperator::Unpivot {
13226                        value_column,
13227                        name_column,
13228                        unpivot_columns,
13229                        alias,
13230                    });
13231                }
13232                Keyword::JOIN
13233                | Keyword::INNER
13234                | Keyword::LEFT
13235                | Keyword::RIGHT
13236                | Keyword::FULL
13237                | Keyword::CROSS => {
13238                    self.prev_token();
13239                    let mut joins = self.parse_joins()?;
13240                    if joins.len() != 1 {
13241                        return Err(ParserError::ParserError(
13242                            "Join pipe operator must have a single join".to_string(),
13243                        ));
13244                    }
13245                    let join = joins.swap_remove(0);
13246                    pipe_operators.push(PipeOperator::Join(join))
13247                }
13248                unhandled => {
13249                    return Err(ParserError::ParserError(format!(
13250                    "`expect_one_of_keywords` further up allowed unhandled keyword: {unhandled:?}"
13251                )))
13252                }
13253            }
13254        }
13255        Ok(pipe_operators)
13256    }
13257
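    /// Parse a ClickHouse-style `SETTINGS key = value [, ...]` clause, if one is
    /// present (recognized for the ClickHouse and generic dialects only).
    ///
    /// A representative input (a sketch; the setting name is illustrative):
    /// ```sql
    /// SELECT * FROM t SETTINGS max_threads = 4
    /// ```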
13258    fn parse_settings(&mut self) -> Result<Option<Vec<Setting>>, ParserError> {
13259        let settings = if dialect_of!(self is ClickHouseDialect|GenericDialect)
13260            && self.parse_keyword(Keyword::SETTINGS)
13261        {
13262            let key_values = self.parse_comma_separated(|p| {
13263                let key = p.parse_identifier()?;
13264                p.expect_token(&Token::Eq)?;
13265                let value = p.parse_expr()?;
13266                Ok(Setting { key, value })
13267            })?;
13268            Some(key_values)
13269        } else {
13270            None
13271        };
13272        Ok(settings)
13273    }
13274
13275    /// Parse an MSSQL `FOR [XML | JSON | BROWSE]` clause
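    ///
    /// Representative inputs (a sketch of what the parser accepts; not every
    /// combination is meaningful T-SQL):
    /// ```sql
    /// SELECT * FROM t FOR XML PATH('row'), ROOT('rows'), ELEMENTS
    /// SELECT * FROM t FOR JSON PATH, INCLUDE_NULL_VALUES
    /// SELECT * FROM t FOR BROWSE
    /// ```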
13276    pub fn parse_for_clause(&mut self) -> Result<Option<ForClause>, ParserError> {
13277        if self.parse_keyword(Keyword::XML) {
13278            Ok(Some(self.parse_for_xml()?))
13279        } else if self.parse_keyword(Keyword::JSON) {
13280            Ok(Some(self.parse_for_json()?))
13281        } else if self.parse_keyword(Keyword::BROWSE) {
13282            Ok(Some(ForClause::Browse))
13283        } else {
13284            Ok(None)
13285        }
13286    }
13287
13288    /// Parse an MSSQL `FOR XML` clause
13289    pub fn parse_for_xml(&mut self) -> Result<ForClause, ParserError> {
13290        let for_xml = if self.parse_keyword(Keyword::RAW) {
13291            let mut element_name = None;
13292            if self.peek_token().token == Token::LParen {
13293                self.expect_token(&Token::LParen)?;
13294                element_name = Some(self.parse_literal_string()?);
13295                self.expect_token(&Token::RParen)?;
13296            }
13297            ForXml::Raw(element_name)
13298        } else if self.parse_keyword(Keyword::AUTO) {
13299            ForXml::Auto
13300        } else if self.parse_keyword(Keyword::EXPLICIT) {
13301            ForXml::Explicit
13302        } else if self.parse_keyword(Keyword::PATH) {
13303            let mut element_name = None;
13304            if self.peek_token().token == Token::LParen {
13305                self.expect_token(&Token::LParen)?;
13306                element_name = Some(self.parse_literal_string()?);
13307                self.expect_token(&Token::RParen)?;
13308            }
13309            ForXml::Path(element_name)
13310        } else {
13311            return Err(ParserError::ParserError(
13312                "Expected FOR XML [RAW | AUTO | EXPLICIT | PATH ]".to_string(),
13313            ));
13314        };
13315        let mut elements = false;
13316        let mut binary_base64 = false;
13317        let mut root = None;
13318        let mut r#type = false;
13319        while self.peek_token().token == Token::Comma {
13320            self.next_token();
13321            if self.parse_keyword(Keyword::ELEMENTS) {
13322                elements = true;
13323            } else if self.parse_keyword(Keyword::BINARY) {
13324                self.expect_keyword_is(Keyword::BASE64)?;
13325                binary_base64 = true;
13326            } else if self.parse_keyword(Keyword::ROOT) {
13327                self.expect_token(&Token::LParen)?;
13328                root = Some(self.parse_literal_string()?);
13329                self.expect_token(&Token::RParen)?;
13330            } else if self.parse_keyword(Keyword::TYPE) {
13331                r#type = true;
13332            }
13333        }
13334        Ok(ForClause::Xml {
13335            for_xml,
13336            elements,
13337            binary_base64,
13338            root,
13339            r#type,
13340        })
13341    }
13342
13343    /// Parse an MSSQL `FOR JSON` clause
13344    pub fn parse_for_json(&mut self) -> Result<ForClause, ParserError> {
13345        let for_json = if self.parse_keyword(Keyword::AUTO) {
13346            ForJson::Auto
13347        } else if self.parse_keyword(Keyword::PATH) {
13348            ForJson::Path
13349        } else {
13350            return Err(ParserError::ParserError(
13351                "Expected FOR JSON [AUTO | PATH ]".to_string(),
13352            ));
13353        };
13354        let mut root = None;
13355        let mut include_null_values = false;
13356        let mut without_array_wrapper = false;
13357        while self.peek_token().token == Token::Comma {
13358            self.next_token();
13359            if self.parse_keyword(Keyword::ROOT) {
13360                self.expect_token(&Token::LParen)?;
13361                root = Some(self.parse_literal_string()?);
13362                self.expect_token(&Token::RParen)?;
13363            } else if self.parse_keyword(Keyword::INCLUDE_NULL_VALUES) {
13364                include_null_values = true;
13365            } else if self.parse_keyword(Keyword::WITHOUT_ARRAY_WRAPPER) {
13366                without_array_wrapper = true;
13367            }
13368        }
13369        Ok(ForClause::Json {
13370            for_json,
13371            root,
13372            include_null_values,
13373            without_array_wrapper,
13374        })
13375    }
13376
13377    /// Parse a CTE (`alias [( col1, col2, ... )] AS (subquery)`)
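    ///
    /// Representative inputs (a sketch; `MATERIALIZED` / `NOT MATERIALIZED`
    /// is only recognized for the PostgreSQL dialect):
    /// ```sql
    /// cte AS (SELECT 1 AS a)
    /// cte (a, b) AS (SELECT 1, 2)
    /// cte AS MATERIALIZED (SELECT 1)
    /// ```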
13378    pub fn parse_cte(&mut self) -> Result<Cte, ParserError> {
13379        let name = self.parse_identifier()?;
13380
13381        let mut cte = if self.parse_keyword(Keyword::AS) {
13382            let mut is_materialized = None;
13383            if dialect_of!(self is PostgreSqlDialect) {
13384                if self.parse_keyword(Keyword::MATERIALIZED) {
13385                    is_materialized = Some(CteAsMaterialized::Materialized);
13386                } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
13387                    is_materialized = Some(CteAsMaterialized::NotMaterialized);
13388                }
13389            }
13390            self.expect_token(&Token::LParen)?;
13391
13392            let query = self.parse_query()?;
13393            let closing_paren_token = self.expect_token(&Token::RParen)?;
13394
13395            let alias = TableAlias {
13396                explicit: false,
13397                name,
13398                columns: vec![],
13399            };
13400            Cte {
13401                alias,
13402                query,
13403                from: None,
13404                materialized: is_materialized,
13405                closing_paren_token: closing_paren_token.into(),
13406            }
13407        } else {
13408            let columns = self.parse_table_alias_column_defs()?;
13409            self.expect_keyword_is(Keyword::AS)?;
13410            let mut is_materialized = None;
13411            if dialect_of!(self is PostgreSqlDialect) {
13412                if self.parse_keyword(Keyword::MATERIALIZED) {
13413                    is_materialized = Some(CteAsMaterialized::Materialized);
13414                } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
13415                    is_materialized = Some(CteAsMaterialized::NotMaterialized);
13416                }
13417            }
13418            self.expect_token(&Token::LParen)?;
13419
13420            let query = self.parse_query()?;
13421            let closing_paren_token = self.expect_token(&Token::RParen)?;
13422
13423            let alias = TableAlias {
13424                explicit: false,
13425                name,
13426                columns,
13427            };
13428            Cte {
13429                alias,
13430                query,
13431                from: None,
13432                materialized: is_materialized,
13433                closing_paren_token: closing_paren_token.into(),
13434            }
13435        };
13436        if self.parse_keyword(Keyword::FROM) {
13437            cte.from = Some(self.parse_identifier()?);
13438        }
13439        Ok(cte)
13440    }
13441
13442    /// Parse a "query body", which is an expression with roughly the
13443    /// following grammar:
13444    /// ```sql
13445    ///   query_body ::= restricted_select | '(' subquery ')' | set_operation
13446    ///   restricted_select ::= 'SELECT' [expr_list] [ from ] [ where ] [ groupby_having ]
13447    ///   subquery ::= query_body [ order_by_limit ]
13448    ///   set_operation ::= query_body { 'UNION' | 'EXCEPT' | 'INTERSECT' } [ 'ALL' ] query_body
13449    /// ```
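    ///
    /// For example, with the precedences used below (`UNION`/`EXCEPT` = 10,
    /// `INTERSECT` = 20), `INTERSECT` binds tighter:
    /// ```sql
    /// SELECT 1 UNION SELECT 2 INTERSECT SELECT 3
    /// -- parsed as: SELECT 1 UNION (SELECT 2 INTERSECT SELECT 3)
    /// ```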
13450    pub fn parse_query_body(&mut self, precedence: u8) -> Result<Box<SetExpr>, ParserError> {
13451        // We parse the expression using a Pratt parser, as in `parse_expr()`.
13452        // Start by parsing a restricted SELECT or a `(subquery)`:
13453        let expr = if self.peek_keyword(Keyword::SELECT)
13454            || (self.peek_keyword(Keyword::FROM) && self.dialect.supports_from_first_select())
13455        {
13456            SetExpr::Select(self.parse_select().map(Box::new)?)
13457        } else if self.consume_token(&Token::LParen) {
13458            // CTEs are not allowed here, but the parser currently accepts them
13459            let subquery = self.parse_query()?;
13460            self.expect_token(&Token::RParen)?;
13461            SetExpr::Query(subquery)
13462        } else if self.parse_keyword(Keyword::VALUES) {
13463            let is_mysql = dialect_of!(self is MySqlDialect);
13464            SetExpr::Values(self.parse_values(is_mysql, false)?)
13465        } else if self.parse_keyword(Keyword::VALUE) {
13466            let is_mysql = dialect_of!(self is MySqlDialect);
13467            SetExpr::Values(self.parse_values(is_mysql, true)?)
13468        } else if self.parse_keyword(Keyword::TABLE) {
13469            SetExpr::Table(Box::new(self.parse_as_table()?))
13470        } else {
13471            return self.expected(
13472                "SELECT, VALUES, or a subquery in the query body",
13473                self.peek_token(),
13474            );
13475        };
13476
13477        self.parse_remaining_set_exprs(expr, precedence)
13478    }
13479
13480    /// Parse any extra set expressions that may be present in a query body
13481    ///
13482    /// (this is its own function to reduce required stack size in debug builds)
13483    fn parse_remaining_set_exprs(
13484        &mut self,
13485        mut expr: SetExpr,
13486        precedence: u8,
13487    ) -> Result<Box<SetExpr>, ParserError> {
13488        loop {
13489            // The query can be optionally followed by a set operator:
13490            let op = self.parse_set_operator(&self.peek_token().token);
13491            let next_precedence = match op {
13492                // UNION and EXCEPT have the same binding power and evaluate left-to-right
13493                Some(SetOperator::Union) | Some(SetOperator::Except) | Some(SetOperator::Minus) => {
13494                    10
13495                }
13496                // INTERSECT has higher precedence than UNION/EXCEPT
13497                Some(SetOperator::Intersect) => 20,
13498                // Unexpected token or EOF => stop parsing the query body
13499                None => break,
13500            };
13501            if precedence >= next_precedence {
13502                break;
13503            }
13504            self.next_token(); // skip past the set operator
13505            let set_quantifier = self.parse_set_quantifier(&op);
13506            expr = SetExpr::SetOperation {
13507                left: Box::new(expr),
13508                op: op.unwrap(),
13509                set_quantifier,
13510                right: self.parse_query_body(next_precedence)?,
13511            };
13512        }
13513
13514        Ok(expr.into())
13515    }
13516
13517    pub fn parse_set_operator(&mut self, token: &Token) -> Option<SetOperator> {
13518        match token {
13519            Token::Word(w) if w.keyword == Keyword::UNION => Some(SetOperator::Union),
13520            Token::Word(w) if w.keyword == Keyword::EXCEPT => Some(SetOperator::Except),
13521            Token::Word(w) if w.keyword == Keyword::INTERSECT => Some(SetOperator::Intersect),
13522            Token::Word(w) if w.keyword == Keyword::MINUS => Some(SetOperator::Minus),
13523            _ => None,
13524        }
13525    }
13526
13527    pub fn parse_set_quantifier(&mut self, op: &Option<SetOperator>) -> SetQuantifier {
13528        match op {
13529            Some(
13530                SetOperator::Except
13531                | SetOperator::Intersect
13532                | SetOperator::Union
13533                | SetOperator::Minus,
13534            ) => {
13535                if self.parse_keywords(&[Keyword::DISTINCT, Keyword::BY, Keyword::NAME]) {
13536                    SetQuantifier::DistinctByName
13537                } else if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
13538                    SetQuantifier::ByName
13539                } else if self.parse_keyword(Keyword::ALL) {
13540                    if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
13541                        SetQuantifier::AllByName
13542                    } else {
13543                        SetQuantifier::All
13544                    }
13545                } else if self.parse_keyword(Keyword::DISTINCT) {
13546                    SetQuantifier::Distinct
13547                } else {
13548                    SetQuantifier::None
13549                }
13550            }
13551            _ => SetQuantifier::None,
13552        }
13553    }
13554
13555    /// Parse a restricted `SELECT` statement (no CTEs / `UNION` / `ORDER BY`)
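    ///
    /// The `WITH` prefix and any trailing `ORDER BY` / `LIMIT` / set operators
    /// are handled by the callers, [`Parser::parse_query`] and
    /// [`Parser::parse_query_body`].
    ///
    /// A representative input (a sketch):
    /// ```sql
    /// SELECT DISTINCT a, COUNT(*) FROM t WHERE a > 1 GROUP BY a HAVING COUNT(*) > 1
    /// ```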
13556    pub fn parse_select(&mut self) -> Result<Select, ParserError> {
13557        let mut from_first = None;
13558
13559        if self.dialect.supports_from_first_select() && self.peek_keyword(Keyword::FROM) {
13560            let from_token = self.expect_keyword(Keyword::FROM)?;
13561            let from = self.parse_table_with_joins()?;
13562            if !self.peek_keyword(Keyword::SELECT) {
13563                return Ok(Select {
13564                    select_token: AttachedToken(from_token),
13565                    distinct: None,
13566                    top: None,
13567                    top_before_distinct: false,
13568                    projection: vec![],
13569                    exclude: None,
13570                    into: None,
13571                    from,
13572                    lateral_views: vec![],
13573                    prewhere: None,
13574                    selection: None,
13575                    group_by: GroupByExpr::Expressions(vec![], vec![]),
13576                    cluster_by: vec![],
13577                    distribute_by: vec![],
13578                    sort_by: vec![],
13579                    having: None,
13580                    named_window: vec![],
13581                    window_before_qualify: false,
13582                    qualify: None,
13583                    value_table_mode: None,
13584                    connect_by: None,
13585                    flavor: SelectFlavor::FromFirstNoSelect,
13586                });
13587            }
13588            from_first = Some(from);
13589        }
13590
13591        let select_token = self.expect_keyword(Keyword::SELECT)?;
13592        let value_table_mode = self.parse_value_table_mode()?;
13593
13594        let mut top_before_distinct = false;
13595        let mut top = None;
13596        if self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
13597            top = Some(self.parse_top()?);
13598            top_before_distinct = true;
13599        }
13600        let distinct = self.parse_all_or_distinct()?;
13601        if !self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
13602            top = Some(self.parse_top()?);
13603        }
13604
13605        let projection =
13606            if self.dialect.supports_empty_projections() && self.peek_keyword(Keyword::FROM) {
13607                vec![]
13608            } else {
13609                self.parse_projection()?
13610            };
13611
13612        let exclude = if self.dialect.supports_select_exclude() {
13613            self.parse_optional_select_item_exclude()?
13614        } else {
13615            None
13616        };
13617
13618        let into = if self.parse_keyword(Keyword::INTO) {
13619            Some(self.parse_select_into()?)
13620        } else {
13621            None
13622        };
13623
13624        // Note that for keywords to be properly handled here, they need to be
13625        // added to `RESERVED_FOR_COLUMN_ALIAS` / `RESERVED_FOR_TABLE_ALIAS`,
13626        // otherwise they may be parsed as an alias as part of the `projection`
13627        // or `from`.
13628
13629        let (from, from_first) = if let Some(from) = from_first.take() {
13630            (from, true)
13631        } else if self.parse_keyword(Keyword::FROM) {
13632            (self.parse_table_with_joins()?, false)
13633        } else {
13634            (vec![], false)
13635        };
13636
13637        let mut lateral_views = vec![];
13638        loop {
13639            if self.parse_keywords(&[Keyword::LATERAL, Keyword::VIEW]) {
13640                let outer = self.parse_keyword(Keyword::OUTER);
13641                let lateral_view = self.parse_expr()?;
13642                let lateral_view_name = self.parse_object_name(false)?;
13643                let lateral_col_alias = self
13644                    .parse_comma_separated(|parser| {
13645                        parser.parse_optional_alias(&[
13646                            Keyword::WHERE,
13647                            Keyword::GROUP,
13648                            Keyword::CLUSTER,
13649                            Keyword::HAVING,
13650                            Keyword::LATERAL,
13651                        ]) // keywords that terminate a LATERAL VIEW column alias
13652                    })?
13653                    .into_iter()
13654                    .flatten()
13655                    .collect();
13656
13657                lateral_views.push(LateralView {
13658                    lateral_view,
13659                    lateral_view_name,
13660                    lateral_col_alias,
13661                    outer,
13662                });
13663            } else {
13664                break;
13665            }
13666        }
13667
13668        let prewhere = if dialect_of!(self is ClickHouseDialect|GenericDialect)
13669            && self.parse_keyword(Keyword::PREWHERE)
13670        {
13671            Some(self.parse_expr()?)
13672        } else {
13673            None
13674        };
13675
13676        let selection = if self.parse_keyword(Keyword::WHERE) {
13677            Some(self.parse_expr()?)
13678        } else {
13679            None
13680        };
13681
13682        let group_by = self
13683            .parse_optional_group_by()?
13684            .unwrap_or_else(|| GroupByExpr::Expressions(vec![], vec![]));
13685
13686        let cluster_by = if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
13687            self.parse_comma_separated(Parser::parse_expr)?
13688        } else {
13689            vec![]
13690        };
13691
13692        let distribute_by = if self.parse_keywords(&[Keyword::DISTRIBUTE, Keyword::BY]) {
13693            self.parse_comma_separated(Parser::parse_expr)?
13694        } else {
13695            vec![]
13696        };
13697
13698        let sort_by = if self.parse_keywords(&[Keyword::SORT, Keyword::BY]) {
13699            self.parse_comma_separated(Parser::parse_order_by_expr)?
13700        } else {
13701            vec![]
13702        };
13703
13704        let having = if self.parse_keyword(Keyword::HAVING) {
13705            Some(self.parse_expr()?)
13706        } else {
13707            None
13708        };
13709
13710        // Accept QUALIFY and WINDOW in any order and flag accordingly.
13711        let (named_windows, qualify, window_before_qualify) = if self.parse_keyword(Keyword::WINDOW)
13712        {
13713            let named_windows = self.parse_comma_separated(Parser::parse_named_window)?;
13714            if self.parse_keyword(Keyword::QUALIFY) {
13715                (named_windows, Some(self.parse_expr()?), true)
13716            } else {
13717                (named_windows, None, true)
13718            }
13719        } else if self.parse_keyword(Keyword::QUALIFY) {
13720            let qualify = Some(self.parse_expr()?);
13721            if self.parse_keyword(Keyword::WINDOW) {
13722                (
13723                    self.parse_comma_separated(Parser::parse_named_window)?,
13724                    qualify,
13725                    false,
13726                )
13727            } else {
13728                (Default::default(), qualify, false)
13729            }
13730        } else {
13731            Default::default()
13732        };
13733
13734        let connect_by = if self.dialect.supports_connect_by()
13735            && self
13736                .parse_one_of_keywords(&[Keyword::START, Keyword::CONNECT])
13737                .is_some()
13738        {
13739            self.prev_token();
13740            Some(self.parse_connect_by()?)
13741        } else {
13742            None
13743        };
13744
13745        Ok(Select {
13746            select_token: AttachedToken(select_token),
13747            distinct,
13748            top,
13749            top_before_distinct,
13750            projection,
13751            exclude,
13752            into,
13753            from,
13754            lateral_views,
13755            prewhere,
13756            selection,
13757            group_by,
13758            cluster_by,
13759            distribute_by,
13760            sort_by,
13761            having,
13762            named_window: named_windows,
13763            window_before_qualify,
13764            qualify,
13765            value_table_mode,
13766            connect_by,
13767            flavor: if from_first {
13768                SelectFlavor::FromFirst
13769            } else {
13770                SelectFlavor::Standard
13771            },
13772        })
13773    }
13774
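    /// Parse BigQuery's value-table modifier immediately after `SELECT`
    /// (e.g. `SELECT AS STRUCT ...`, `SELECT DISTINCT AS VALUE ...`);
    /// returns `Ok(None)` for all other dialects.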
13775    fn parse_value_table_mode(&mut self) -> Result<Option<ValueTableMode>, ParserError> {
13776        if !dialect_of!(self is BigQueryDialect) {
13777            return Ok(None);
13778        }
13779
13780        let mode = if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::VALUE]) {
13781            Some(ValueTableMode::DistinctAsValue)
13782        } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::STRUCT]) {
13783            Some(ValueTableMode::DistinctAsStruct)
13784        } else if self.parse_keywords(&[Keyword::AS, Keyword::VALUE])
13785            || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::VALUE])
13786        {
13787            Some(ValueTableMode::AsValue)
13788        } else if self.parse_keywords(&[Keyword::AS, Keyword::STRUCT])
13789            || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::STRUCT])
13790        {
13791            Some(ValueTableMode::AsStruct)
13792        } else if self.parse_keyword(Keyword::AS) {
13793            self.expected("VALUE or STRUCT", self.peek_token())?
13794        } else {
13795            None
13796        };
13797
13798        Ok(mode)
13799    }
13800
13801    /// Invoke `f` after first setting the parser's `ParserState` to `state`.
13802    ///
13803    /// Upon return, restores the parser's state to what it started at.
13804    fn with_state<T, F>(&mut self, state: ParserState, mut f: F) -> Result<T, ParserError>
13805    where
13806        F: FnMut(&mut Parser) -> Result<T, ParserError>,
13807    {
13808        let current_state = self.state;
13809        self.state = state;
13810        let res = f(self);
13811        self.state = current_state;
13812        res
13813    }
13814
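    /// Parse a hierarchical-query clause, accepting `CONNECT BY` and
    /// `START WITH` in either order.
    ///
    /// A representative input (a sketch of Oracle/Snowflake-style syntax):
    /// ```sql
    /// SELECT * FROM employees
    /// START WITH manager_id IS NULL
    /// CONNECT BY PRIOR employee_id = manager_id
    /// ```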
13815    pub fn parse_connect_by(&mut self) -> Result<ConnectBy, ParserError> {
13816        let (condition, relationships) = if self.parse_keywords(&[Keyword::CONNECT, Keyword::BY]) {
13817            let relationships = self.with_state(ParserState::ConnectBy, |parser| {
13818                parser.parse_comma_separated(Parser::parse_expr)
13819            })?;
13820            self.expect_keywords(&[Keyword::START, Keyword::WITH])?;
13821            let condition = self.parse_expr()?;
13822            (condition, relationships)
13823        } else {
13824            self.expect_keywords(&[Keyword::START, Keyword::WITH])?;
13825            let condition = self.parse_expr()?;
13826            self.expect_keywords(&[Keyword::CONNECT, Keyword::BY])?;
13827            let relationships = self.with_state(ParserState::ConnectBy, |parser| {
13828                parser.parse_comma_separated(Parser::parse_expr)
13829            })?;
13830            (condition, relationships)
13831        };
13832        Ok(ConnectBy {
13833            condition,
13834            relationships,
13835        })
13836    }
13837
13838    /// Parse `CREATE TABLE x AS TABLE y`
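    ///
    /// Only the trailing `[schema.]table` reference is parsed here; the `TABLE`
    /// keyword itself has already been consumed by the caller.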
13839    pub fn parse_as_table(&mut self) -> Result<Table, ParserError> {
13840        let token1 = self.next_token();
13841        let token2 = self.next_token();
13842        let token3 = self.next_token();
13843
13844        let table_name;
13845        let schema_name;
13846        if token2 == Token::Period {
13847            match token1.token {
13848                Token::Word(w) => {
13849                    schema_name = w.value;
13850                }
13851                _ => {
13852                    return self.expected("Schema name", token1);
13853                }
13854            }
13855            match token3.token {
13856                Token::Word(w) => {
13857                    table_name = w.value;
13858                }
13859                _ => {
13860                    return self.expected("Table name", token3);
13861                }
13862            }
13863            Ok(Table {
13864                table_name: Some(table_name),
13865                schema_name: Some(schema_name),
13866            })
13867        } else {
13868            match token1.token {
13869                Token::Word(w) => {
13870                    table_name = w.value;
13871                }
13872                _ => {
13873                    return self.expected("Table name", token1);
13874                }
13875            }
13876            Ok(Table {
13877                table_name: Some(table_name),
13878                schema_name: None,
13879            })
13880        }
13881    }
13882
13883    /// Parse a `SET ROLE` statement. Expects SET to be consumed already.
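    /// Representative inputs (a sketch): `SET ROLE reporting`, `SET ROLE NONE`.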
13884    fn parse_set_role(
13885        &mut self,
13886        modifier: Option<ContextModifier>,
13887    ) -> Result<Statement, ParserError> {
13888        self.expect_keyword_is(Keyword::ROLE)?;
13889
13890        let role_name = if self.parse_keyword(Keyword::NONE) {
13891            None
13892        } else {
13893            Some(self.parse_identifier()?)
13894        };
13895        Ok(Statement::Set(Set::SetRole {
13896            context_modifier: modifier,
13897            role_name,
13898        }))
13899    }
13900
13901    fn parse_set_values(
13902        &mut self,
13903        parenthesized_assignment: bool,
13904    ) -> Result<Vec<Expr>, ParserError> {
13905        let mut values = vec![];
13906
13907        if parenthesized_assignment {
13908            self.expect_token(&Token::LParen)?;
13909        }
13910
13911        loop {
13912            let value = if let Some(expr) = self.try_parse_expr_sub_query()? {
13913                expr
13914            } else if let Ok(expr) = self.parse_expr() {
13915                expr
13916            } else {
13917                self.expected("variable value", self.peek_token())?
13918            };
13919
13920            values.push(value);
13921            if self.consume_token(&Token::Comma) {
13922                continue;
13923            }
13924
13925            if parenthesized_assignment {
13926                self.expect_token(&Token::RParen)?;
13927            }
13928            return Ok(values);
13929        }
13930    }
13931
13932    fn parse_context_modifier(&mut self) -> Option<ContextModifier> {
13933        let modifier =
13934            self.parse_one_of_keywords(&[Keyword::SESSION, Keyword::LOCAL, Keyword::GLOBAL])?;
13935
13936        Self::keyword_to_modifier(modifier)
13937    }
13938
13939    /// Parse a single SET statement assignment `var = expr`.
13940    fn parse_set_assignment(&mut self) -> Result<SetAssignment, ParserError> {
13941        let scope = self.parse_context_modifier();
13942
13943        let name = if self.dialect.supports_parenthesized_set_variables()
13944            && self.consume_token(&Token::LParen)
13945        {
13946            // Parenthesized assignments are handled in the `parse_set` function after
13947            // trying to parse a list of assignments using this function.
13948            // If a dialect supports both and we find an LParen, we exit early from this function.
13949            self.expected("Unparenthesized assignment", self.peek_token())?
13950        } else {
13951            self.parse_object_name(false)?
13952        };
13953
13954        if !(self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO)) {
13955            return self.expected("assignment operator", self.peek_token());
13956        }
13957
13958        let value = self.parse_expr()?;
13959
13960        Ok(SetAssignment { scope, name, value })
13961    }
13962
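    /// Parse the remainder of a `SET` statement (the `SET` keyword has already
    /// been consumed by the caller).
    ///
    /// Representative inputs (a sketch; exact support is dialect dependent):
    /// ```sql
    /// SET search_path = 'public'
    /// SET TIME ZONE 'UTC'
    /// SET NAMES utf8mb4 COLLATE 'utf8mb4_general_ci'
    /// SET SESSION CHARACTERISTICS AS TRANSACTION READ ONLY
    /// ```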
13963    fn parse_set(&mut self) -> Result<Statement, ParserError> {
13964        let hivevar = self.parse_keyword(Keyword::HIVEVAR);
13965
13966        // Modifier is either HIVEVAR: or a ContextModifier (LOCAL, SESSION, etc), not both
13967        let scope = if !hivevar {
13968            self.parse_context_modifier()
13969        } else {
13970            None
13971        };
13972
13973        if hivevar {
13974            self.expect_token(&Token::Colon)?;
13975        }
13976
13977        if let Some(set_role_stmt) = self.maybe_parse(|parser| parser.parse_set_role(scope))? {
13978            return Ok(set_role_stmt);
13979        }
13980
13981        // Handle special cases first
13982        if self.parse_keywords(&[Keyword::TIME, Keyword::ZONE])
13983            || self.parse_keyword(Keyword::TIMEZONE)
13984        {
13985            if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
13986                return Ok(Set::SingleAssignment {
13987                    scope,
13988                    hivevar,
13989                    variable: ObjectName::from(vec!["TIMEZONE".into()]),
13990                    values: self.parse_set_values(false)?,
13991                }
13992                .into());
13993            } else {
13994                // A shorthand alias for SET TIME ZONE that doesn't require
13995                // the assignment operator. It is originally PostgreSQL-specific,
13996                // but we allow it for all dialects.
13997                return Ok(Set::SetTimeZone {
13998                    local: scope == Some(ContextModifier::Local),
13999                    value: self.parse_expr()?,
14000                }
14001                .into());
14002            }
14003        } else if self.dialect.supports_set_names() && self.parse_keyword(Keyword::NAMES) {
14004            if self.parse_keyword(Keyword::DEFAULT) {
14005                return Ok(Set::SetNamesDefault {}.into());
14006            }
14007            let charset_name = self.parse_identifier()?;
14008            let collation_name = if self.parse_one_of_keywords(&[Keyword::COLLATE]).is_some() {
14009                Some(self.parse_literal_string()?)
14010            } else {
14011                None
14012            };
14013
14014            return Ok(Set::SetNames {
14015                charset_name,
14016                collation_name,
14017            }
14018            .into());
14019        } else if self.parse_keyword(Keyword::CHARACTERISTICS) {
14020            self.expect_keywords(&[Keyword::AS, Keyword::TRANSACTION])?;
14021            return Ok(Set::SetTransaction {
14022                modes: self.parse_transaction_modes()?,
14023                snapshot: None,
14024                session: true,
14025            }
14026            .into());
14027        } else if self.parse_keyword(Keyword::TRANSACTION) {
14028            if self.parse_keyword(Keyword::SNAPSHOT) {
14029                let snapshot_id = self.parse_value()?.value;
14030                return Ok(Set::SetTransaction {
14031                    modes: vec![],
14032                    snapshot: Some(snapshot_id),
14033                    session: false,
14034                }
14035                .into());
14036            }
14037            return Ok(Set::SetTransaction {
14038                modes: self.parse_transaction_modes()?,
14039                snapshot: None,
14040                session: false,
14041            }
14042            .into());
14043        } else if self.parse_keyword(Keyword::AUTHORIZATION) {
14044            let auth_value = if self.parse_keyword(Keyword::DEFAULT) {
14045                SetSessionAuthorizationParamKind::Default
14046            } else {
14047                let value = self.parse_identifier()?;
14048                SetSessionAuthorizationParamKind::User(value)
14049            };
14050            return Ok(Set::SetSessionAuthorization(SetSessionAuthorizationParam {
14051                scope: scope.expect("SET ... AUTHORIZATION must have a scope"),
14052                kind: auth_value,
14053            })
14054            .into());
14055        }
14056
14057        if self.dialect.supports_comma_separated_set_assignments() {
14058            if scope.is_some() {
14059                self.prev_token();
14060            }
14061
14062            if let Some(assignments) = self
14063                .maybe_parse(|parser| parser.parse_comma_separated(Parser::parse_set_assignment))?
14064            {
14065                return if assignments.len() > 1 {
14066                    Ok(Set::MultipleAssignments { assignments }.into())
14067                } else {
14068                    let SetAssignment { scope, name, value } =
14069                        assignments.into_iter().next().ok_or_else(|| {
14070                            ParserError::ParserError("Expected at least one assignment".to_string())
14071                        })?;
14072
14073                    Ok(Set::SingleAssignment {
14074                        scope,
14075                        hivevar,
14076                        variable: name,
14077                        values: vec![value],
14078                    }
14079                    .into())
14080                };
14081            }
14082        }
14083
14084        let variables = if self.dialect.supports_parenthesized_set_variables()
14085            && self.consume_token(&Token::LParen)
14086        {
14087            let vars = OneOrManyWithParens::Many(
14088                self.parse_comma_separated(|parser: &mut Parser<'a>| parser.parse_identifier())?
14089                    .into_iter()
14090                    .map(|ident| ObjectName::from(vec![ident]))
14091                    .collect(),
14092            );
14093            self.expect_token(&Token::RParen)?;
14094            vars
14095        } else {
14096            OneOrManyWithParens::One(self.parse_object_name(false)?)
14097        };
14098
14099        if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
14100            let stmt = match variables {
14101                OneOrManyWithParens::One(var) => Set::SingleAssignment {
14102                    scope,
14103                    hivevar,
14104                    variable: var,
14105                    values: self.parse_set_values(false)?,
14106                },
14107                OneOrManyWithParens::Many(vars) => Set::ParenthesizedAssignments {
14108                    variables: vars,
14109                    values: self.parse_set_values(true)?,
14110                },
14111            };
14112
14113            return Ok(stmt.into());
14114        }
14115
14116        if self.dialect.supports_set_stmt_without_operator() {
14117            self.prev_token();
14118            return self.parse_set_session_params();
14119        };
14120
14121        self.expected("equals sign or TO", self.peek_token())
14122    }
14123
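    /// Parse T-SQL style session parameters that are toggled without an
    /// assignment operator. Representative inputs (a sketch):
    /// ```sql
    /// SET STATISTICS IO ON
    /// SET IDENTITY_INSERT dbo.t ON
    /// ```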
14124    pub fn parse_set_session_params(&mut self) -> Result<Statement, ParserError> {
14125        if self.parse_keyword(Keyword::STATISTICS) {
14126            let topic = match self.parse_one_of_keywords(&[
14127                Keyword::IO,
14128                Keyword::PROFILE,
14129                Keyword::TIME,
14130                Keyword::XML,
14131            ]) {
14132                Some(Keyword::IO) => SessionParamStatsTopic::IO,
14133                Some(Keyword::PROFILE) => SessionParamStatsTopic::Profile,
14134                Some(Keyword::TIME) => SessionParamStatsTopic::Time,
14135                Some(Keyword::XML) => SessionParamStatsTopic::Xml,
14136                _ => return self.expected("IO, PROFILE, TIME or XML", self.peek_token()),
14137            };
14138            let value = self.parse_session_param_value()?;
14139            Ok(
14140                Set::SetSessionParam(SetSessionParamKind::Statistics(SetSessionParamStatistics {
14141                    topic,
14142                    value,
14143                }))
14144                .into(),
14145            )
14146        } else if self.parse_keyword(Keyword::IDENTITY_INSERT) {
14147            let obj = self.parse_object_name(false)?;
14148            let value = self.parse_session_param_value()?;
14149            Ok(Set::SetSessionParam(SetSessionParamKind::IdentityInsert(
14150                SetSessionParamIdentityInsert { obj, value },
14151            ))
14152            .into())
14153        } else if self.parse_keyword(Keyword::OFFSETS) {
14154            let keywords = self.parse_comma_separated(|parser| {
14155                let next_token = parser.next_token();
14156                match &next_token.token {
14157                    Token::Word(w) => Ok(w.to_string()),
14158                    _ => parser.expected("SQL keyword", next_token),
14159                }
14160            })?;
14161            let value = self.parse_session_param_value()?;
14162            Ok(
14163                Set::SetSessionParam(SetSessionParamKind::Offsets(SetSessionParamOffsets {
14164                    keywords,
14165                    value,
14166                }))
14167                .into(),
14168            )
14169        } else {
14170            let names = self.parse_comma_separated(|parser| {
14171                let next_token = parser.next_token();
14172                match next_token.token {
14173                    Token::Word(w) => Ok(w.to_string()),
14174                    _ => parser.expected("Session param name", next_token),
14175                }
14176            })?;
14177            let value = self.parse_expr()?.to_string();
14178            Ok(
14179                Set::SetSessionParam(SetSessionParamKind::Generic(SetSessionParamGeneric {
14180                    names,
14181                    value,
14182                }))
14183                .into(),
14184            )
14185        }
14186    }
14187
14188    fn parse_session_param_value(&mut self) -> Result<SessionParamValue, ParserError> {
14189        if self.parse_keyword(Keyword::ON) {
14190            Ok(SessionParamValue::On)
14191        } else if self.parse_keyword(Keyword::OFF) {
14192            Ok(SessionParamValue::Off)
14193        } else {
14194            self.expected("ON or OFF", self.peek_token())
14195        }
14196    }
14197
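    /// Parse the remainder of a `SHOW` statement (the `SHOW` keyword has already
    /// been consumed by the caller).
    ///
    /// Representative inputs (a sketch; exact support is dialect dependent):
    /// ```sql
    /// SHOW TABLES IN db1 LIKE '%orders%'
    /// SHOW FULL COLUMNS FROM t
    /// SHOW VARIABLES LIKE 'max_connections'
    /// ```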
14198    pub fn parse_show(&mut self) -> Result<Statement, ParserError> {
14199        let terse = self.parse_keyword(Keyword::TERSE);
14200        let extended = self.parse_keyword(Keyword::EXTENDED);
14201        let full = self.parse_keyword(Keyword::FULL);
14202        let session = self.parse_keyword(Keyword::SESSION);
14203        let global = self.parse_keyword(Keyword::GLOBAL);
14204        let external = self.parse_keyword(Keyword::EXTERNAL);
14205        if self
14206            .parse_one_of_keywords(&[Keyword::COLUMNS, Keyword::FIELDS])
14207            .is_some()
14208        {
14209            Ok(self.parse_show_columns(extended, full)?)
14210        } else if self.parse_keyword(Keyword::TABLES) {
14211            Ok(self.parse_show_tables(terse, extended, full, external)?)
14212        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEWS]) {
14213            Ok(self.parse_show_views(terse, true)?)
14214        } else if self.parse_keyword(Keyword::VIEWS) {
14215            Ok(self.parse_show_views(terse, false)?)
14216        } else if self.parse_keyword(Keyword::FUNCTIONS) {
14217            Ok(self.parse_show_functions()?)
14218        } else if extended || full {
14219            Err(ParserError::ParserError(
14220                "EXTENDED/FULL are not supported with this type of SHOW query".to_string(),
14221            ))
14222        } else if self.parse_one_of_keywords(&[Keyword::CREATE]).is_some() {
14223            Ok(self.parse_show_create()?)
14224        } else if self.parse_keyword(Keyword::COLLATION) {
14225            Ok(self.parse_show_collation()?)
14226        } else if self.parse_keyword(Keyword::VARIABLES)
14227            && dialect_of!(self is MySqlDialect | GenericDialect)
14228        {
14229            Ok(Statement::ShowVariables {
14230                filter: self.parse_show_statement_filter()?,
14231                session,
14232                global,
14233            })
14234        } else if self.parse_keyword(Keyword::STATUS)
14235            && dialect_of!(self is MySqlDialect | GenericDialect)
14236        {
14237            Ok(Statement::ShowStatus {
14238                filter: self.parse_show_statement_filter()?,
14239                session,
14240                global,
14241            })
14242        } else if self.parse_keyword(Keyword::DATABASES) {
14243            self.parse_show_databases(terse)
14244        } else if self.parse_keyword(Keyword::SCHEMAS) {
14245            self.parse_show_schemas(terse)
14246        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
14247            self.parse_show_charset(false)
14248        } else if self.parse_keyword(Keyword::CHARSET) {
14249            self.parse_show_charset(true)
14250        } else {
14251            Ok(Statement::ShowVariable {
14252                variable: self.parse_identifiers()?,
14253            })
14254        }
14255    }
14256
14257    fn parse_show_charset(&mut self, is_shorthand: bool) -> Result<Statement, ParserError> {
14258        // `CHARACTER SET` / `CHARSET` has already been consumed; only an optional filter remains
14259        Ok(Statement::ShowCharset(ShowCharset {
14260            is_shorthand,
14261            filter: self.parse_show_statement_filter()?,
14262        }))
14263    }
14264
14265    fn parse_show_databases(&mut self, terse: bool) -> Result<Statement, ParserError> {
14266        let history = self.parse_keyword(Keyword::HISTORY);
14267        let show_options = self.parse_show_stmt_options()?;
14268        Ok(Statement::ShowDatabases {
14269            terse,
14270            history,
14271            show_options,
14272        })
14273    }
14274
14275    fn parse_show_schemas(&mut self, terse: bool) -> Result<Statement, ParserError> {
14276        let history = self.parse_keyword(Keyword::HISTORY);
14277        let show_options = self.parse_show_stmt_options()?;
14278        Ok(Statement::ShowSchemas {
14279            terse,
14280            history,
14281            show_options,
14282        })
14283    }
14284
14285    pub fn parse_show_create(&mut self) -> Result<Statement, ParserError> {
14286        let obj_type = match self.expect_one_of_keywords(&[
14287            Keyword::TABLE,
14288            Keyword::TRIGGER,
14289            Keyword::FUNCTION,
14290            Keyword::PROCEDURE,
14291            Keyword::EVENT,
14292            Keyword::VIEW,
14293        ])? {
14294            Keyword::TABLE => Ok(ShowCreateObject::Table),
14295            Keyword::TRIGGER => Ok(ShowCreateObject::Trigger),
14296            Keyword::FUNCTION => Ok(ShowCreateObject::Function),
14297            Keyword::PROCEDURE => Ok(ShowCreateObject::Procedure),
14298            Keyword::EVENT => Ok(ShowCreateObject::Event),
14299            Keyword::VIEW => Ok(ShowCreateObject::View),
14300            keyword => Err(ParserError::ParserError(format!(
14301                "Unable to map keyword to ShowCreateObject: {keyword:?}"
14302            ))),
14303        }?;
14304
14305        let obj_name = self.parse_object_name(false)?;
14306
14307        Ok(Statement::ShowCreate { obj_type, obj_name })
14308    }
14309
14310    pub fn parse_show_columns(
14311        &mut self,
14312        extended: bool,
14313        full: bool,
14314    ) -> Result<Statement, ParserError> {
14315        let show_options = self.parse_show_stmt_options()?;
14316        Ok(Statement::ShowColumns {
14317            extended,
14318            full,
14319            show_options,
14320        })
14321    }
14322
14323    fn parse_show_tables(
14324        &mut self,
14325        terse: bool,
14326        extended: bool,
14327        full: bool,
14328        external: bool,
14329    ) -> Result<Statement, ParserError> {
14330        let history = !external && self.parse_keyword(Keyword::HISTORY);
14331        let show_options = self.parse_show_stmt_options()?;
14332        Ok(Statement::ShowTables {
14333            terse,
14334            history,
14335            extended,
14336            full,
14337            external,
14338            show_options,
14339        })
14340    }
14341
14342    fn parse_show_views(
14343        &mut self,
14344        terse: bool,
14345        materialized: bool,
14346    ) -> Result<Statement, ParserError> {
14347        let show_options = self.parse_show_stmt_options()?;
14348        Ok(Statement::ShowViews {
14349            materialized,
14350            terse,
14351            show_options,
14352        })
14353    }
14354
14355    pub fn parse_show_functions(&mut self) -> Result<Statement, ParserError> {
14356        let filter = self.parse_show_statement_filter()?;
14357        Ok(Statement::ShowFunctions { filter })
14358    }
14359
14360    pub fn parse_show_collation(&mut self) -> Result<Statement, ParserError> {
14361        let filter = self.parse_show_statement_filter()?;
14362        Ok(Statement::ShowCollation { filter })
14363    }
14364
14365    pub fn parse_show_statement_filter(
14366        &mut self,
14367    ) -> Result<Option<ShowStatementFilter>, ParserError> {
14368        if self.parse_keyword(Keyword::LIKE) {
14369            Ok(Some(ShowStatementFilter::Like(
14370                self.parse_literal_string()?,
14371            )))
14372        } else if self.parse_keyword(Keyword::ILIKE) {
14373            Ok(Some(ShowStatementFilter::ILike(
14374                self.parse_literal_string()?,
14375            )))
14376        } else if self.parse_keyword(Keyword::WHERE) {
14377            Ok(Some(ShowStatementFilter::Where(self.parse_expr()?)))
14378        } else {
14379            self.maybe_parse(|parser| -> Result<String, ParserError> {
14380                parser.parse_literal_string()
14381            })?
14382            .map_or(Ok(None), |filter| {
14383                Ok(Some(ShowStatementFilter::NoKeyword(filter)))
14384            })
14385        }
14386    }
14387
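    /// Parses a `USE ...` statement; the `USE` keyword is assumed to have
    /// already been consumed by the caller. Which object keywords (CATALOG,
    /// DATABASE, SCHEMA, WAREHOUSE, ROLE, ...) are accepted depends on the
    /// dialect, as handled below.
    ///
    /// A minimal sketch via the public entry point, assuming the
    /// `GenericDialect`; `my_db` is an illustrative name:
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// // For the generic dialect this becomes `Statement::Use(Use::Object(..))`.
    /// let stmts = Parser::parse_sql(&GenericDialect {}, "USE my_db").unwrap();
    /// assert_eq!(stmts.len(), 1);
    /// ```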
14388    pub fn parse_use(&mut self) -> Result<Statement, ParserError> {
14389        // Determine which keywords are recognized by the current dialect
14390        let parsed_keyword = if dialect_of!(self is HiveDialect) {
14391            // HiveDialect accepts a `USE DEFAULT;` statement without any database specified
14392            if self.parse_keyword(Keyword::DEFAULT) {
14393                return Ok(Statement::Use(Use::Default));
14394            }
14395            None // HiveDialect doesn't expect any other specific keyword after `USE`
14396        } else if dialect_of!(self is DatabricksDialect) {
14397            self.parse_one_of_keywords(&[Keyword::CATALOG, Keyword::DATABASE, Keyword::SCHEMA])
14398        } else if dialect_of!(self is SnowflakeDialect) {
14399            self.parse_one_of_keywords(&[
14400                Keyword::DATABASE,
14401                Keyword::SCHEMA,
14402                Keyword::WAREHOUSE,
14403                Keyword::ROLE,
14404                Keyword::SECONDARY,
14405            ])
14406        } else {
14407            None // No specific keywords for other dialects, including GenericDialect
14408        };
14409
14410        let result = if matches!(parsed_keyword, Some(Keyword::SECONDARY)) {
14411            self.parse_secondary_roles()?
14412        } else {
14413            let obj_name = self.parse_object_name(false)?;
14414            match parsed_keyword {
14415                Some(Keyword::CATALOG) => Use::Catalog(obj_name),
14416                Some(Keyword::DATABASE) => Use::Database(obj_name),
14417                Some(Keyword::SCHEMA) => Use::Schema(obj_name),
14418                Some(Keyword::WAREHOUSE) => Use::Warehouse(obj_name),
14419                Some(Keyword::ROLE) => Use::Role(obj_name),
14420                _ => Use::Object(obj_name),
14421            }
14422        };
14423
14424        Ok(Statement::Use(result))
14425    }
14426
14427    fn parse_secondary_roles(&mut self) -> Result<Use, ParserError> {
14428        self.expect_one_of_keywords(&[Keyword::ROLES, Keyword::ROLE])?;
14429        if self.parse_keyword(Keyword::NONE) {
14430            Ok(Use::SecondaryRoles(SecondaryRoles::None))
14431        } else if self.parse_keyword(Keyword::ALL) {
14432            Ok(Use::SecondaryRoles(SecondaryRoles::All))
14433        } else {
14434            let roles = self.parse_comma_separated(|parser| parser.parse_identifier())?;
14435            Ok(Use::SecondaryRoles(SecondaryRoles::List(roles)))
14436        }
14437    }
14438
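    /// Parses a single `FROM` item: a leading table factor followed by zero
    /// or more joins.
    ///
    /// A hedged end-to-end sketch, assuming the `GenericDialect`; the `FROM`
    /// clause of the query below goes through this method (table and column
    /// names are illustrative):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "SELECT * FROM t1 LEFT JOIN t2 ON t1.id = t2.id";
    /// let stmts = Parser::parse_sql(&GenericDialect {}, sql).unwrap();
    /// assert_eq!(stmts.len(), 1);
    /// ```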
14439    pub fn parse_table_and_joins(&mut self) -> Result<TableWithJoins, ParserError> {
14440        let relation = self.parse_table_factor()?;
14441        // Note that for keywords to be properly handled here, they need to be
14442        // added to `RESERVED_FOR_TABLE_ALIAS`, otherwise they may be parsed as
14443        // a table alias.
14444        let joins = self.parse_joins()?;
14445        Ok(TableWithJoins { relation, joins })
14446    }
14447
14448    fn parse_joins(&mut self) -> Result<Vec<Join>, ParserError> {
14449        let mut joins = vec![];
14450        loop {
14451            let global = self.parse_keyword(Keyword::GLOBAL);
14452            let join = if self.parse_keyword(Keyword::CROSS) {
14453                let join_operator = if self.parse_keyword(Keyword::JOIN) {
14454                    JoinOperator::CrossJoin(JoinConstraint::None)
14455                } else if self.parse_keyword(Keyword::APPLY) {
14456                    // MSSQL extension, similar to CROSS JOIN LATERAL
14457                    JoinOperator::CrossApply
14458                } else {
14459                    return self.expected("JOIN or APPLY after CROSS", self.peek_token());
14460                };
14461                let relation = self.parse_table_factor()?;
14462                let join_operator = if matches!(join_operator, JoinOperator::CrossJoin(_))
14463                    && self.dialect.supports_cross_join_constraint()
14464                {
14465                    let constraint = self.parse_join_constraint(false)?;
14466                    JoinOperator::CrossJoin(constraint)
14467                } else {
14468                    join_operator
14469                };
14470                Join {
14471                    relation,
14472                    global,
14473                    join_operator,
14474                }
14475            } else if self.parse_keyword(Keyword::OUTER) {
14476                // MSSQL extension, similar to LEFT JOIN LATERAL .. ON 1=1
14477                self.expect_keyword_is(Keyword::APPLY)?;
14478                Join {
14479                    relation: self.parse_table_factor()?,
14480                    global,
14481                    join_operator: JoinOperator::OuterApply,
14482                }
14483            } else if self.parse_keyword(Keyword::ASOF) {
14484                self.expect_keyword_is(Keyword::JOIN)?;
14485                let relation = self.parse_table_factor()?;
14486                self.expect_keyword_is(Keyword::MATCH_CONDITION)?;
14487                let match_condition = self.parse_parenthesized(Self::parse_expr)?;
14488                Join {
14489                    relation,
14490                    global,
14491                    join_operator: JoinOperator::AsOf {
14492                        match_condition,
14493                        constraint: self.parse_join_constraint(false)?,
14494                    },
14495                }
14496            } else {
14497                let natural = self.parse_keyword(Keyword::NATURAL);
14498                let peek_keyword = if let Token::Word(w) = self.peek_token().token {
14499                    w.keyword
14500                } else {
14501                    Keyword::NoKeyword
14502                };
14503
14504                let join_operator_type = match peek_keyword {
14505                    Keyword::INNER | Keyword::JOIN => {
14506                        let inner = self.parse_keyword(Keyword::INNER); // [ INNER ]
14507                        self.expect_keyword_is(Keyword::JOIN)?;
14508                        if inner {
14509                            JoinOperator::Inner
14510                        } else {
14511                            JoinOperator::Join
14512                        }
14513                    }
14514                    kw @ Keyword::LEFT | kw @ Keyword::RIGHT => {
14515                        let _ = self.next_token(); // consume LEFT/RIGHT
14516                        let is_left = kw == Keyword::LEFT;
14517                        let join_type = self.parse_one_of_keywords(&[
14518                            Keyword::OUTER,
14519                            Keyword::SEMI,
14520                            Keyword::ANTI,
14521                            Keyword::JOIN,
14522                        ]);
14523                        match join_type {
14524                            Some(Keyword::OUTER) => {
14525                                self.expect_keyword_is(Keyword::JOIN)?;
14526                                if is_left {
14527                                    JoinOperator::LeftOuter
14528                                } else {
14529                                    JoinOperator::RightOuter
14530                                }
14531                            }
14532                            Some(Keyword::SEMI) => {
14533                                self.expect_keyword_is(Keyword::JOIN)?;
14534                                if is_left {
14535                                    JoinOperator::LeftSemi
14536                                } else {
14537                                    JoinOperator::RightSemi
14538                                }
14539                            }
14540                            Some(Keyword::ANTI) => {
14541                                self.expect_keyword_is(Keyword::JOIN)?;
14542                                if is_left {
14543                                    JoinOperator::LeftAnti
14544                                } else {
14545                                    JoinOperator::RightAnti
14546                                }
14547                            }
14548                            Some(Keyword::JOIN) => {
14549                                if is_left {
14550                                    JoinOperator::Left
14551                                } else {
14552                                    JoinOperator::Right
14553                                }
14554                            }
14555                            _ => {
14556                                return Err(ParserError::ParserError(format!(
14557                                    "expected OUTER, SEMI, ANTI or JOIN after {kw:?}"
14558                                )))
14559                            }
14560                        }
14561                    }
14562                    Keyword::ANTI => {
14563                        let _ = self.next_token(); // consume ANTI
14564                        self.expect_keyword_is(Keyword::JOIN)?;
14565                        JoinOperator::Anti
14566                    }
14567                    Keyword::SEMI => {
14568                        let _ = self.next_token(); // consume SEMI
14569                        self.expect_keyword_is(Keyword::JOIN)?;
14570                        JoinOperator::Semi
14571                    }
14572                    Keyword::FULL => {
14573                        let _ = self.next_token(); // consume FULL
14574                        let _ = self.parse_keyword(Keyword::OUTER); // [ OUTER ]
14575                        self.expect_keyword_is(Keyword::JOIN)?;
14576                        JoinOperator::FullOuter
14577                    }
14578                    Keyword::OUTER => {
14579                        return self.expected("LEFT, RIGHT, or FULL", self.peek_token());
14580                    }
14581                    Keyword::STRAIGHT_JOIN => {
14582                        let _ = self.next_token(); // consume STRAIGHT_JOIN
14583                        JoinOperator::StraightJoin
14584                    }
14585                    _ if natural => {
14586                        return self.expected("a join type after NATURAL", self.peek_token());
14587                    }
14588                    _ => break,
14589                };
14590                let mut relation = self.parse_table_factor()?;
14591
14592                if !self
14593                    .dialect
14594                    .supports_left_associative_joins_without_parens()
14595                    && self.peek_parens_less_nested_join()
14596                {
14597                    let joins = self.parse_joins()?;
14598                    relation = TableFactor::NestedJoin {
14599                        table_with_joins: Box::new(TableWithJoins { relation, joins }),
14600                        alias: None,
14601                    };
14602                }
14603
14604                let join_constraint = self.parse_join_constraint(natural)?;
14605                Join {
14606                    relation,
14607                    global,
14608                    join_operator: join_operator_type(join_constraint),
14609                }
14610            };
14611            joins.push(join);
14612        }
14613        Ok(joins)
14614    }
14615
14616    fn peek_parens_less_nested_join(&self) -> bool {
14617        matches!(
14618            self.peek_token_ref().token,
14619            Token::Word(Word {
14620                keyword: Keyword::JOIN
14621                    | Keyword::INNER
14622                    | Keyword::LEFT
14623                    | Keyword::RIGHT
14624                    | Keyword::FULL,
14625                ..
14626            })
14627        )
14628    }
14629
14630    /// A table name or a parenthesized subquery, followed by optional `[AS] alias`
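    ///
    /// A minimal sketch via the public entry point, assuming the
    /// `GenericDialect`; the parenthesized subquery in `FROM` below is
    /// handled by this method:
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "SELECT * FROM (SELECT 1) AS sub";
    /// let stmts = Parser::parse_sql(&GenericDialect {}, sql).unwrap();
    /// assert_eq!(stmts.len(), 1);
    /// ```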
14631    pub fn parse_table_factor(&mut self) -> Result<TableFactor, ParserError> {
14632        if self.parse_keyword(Keyword::LATERAL) {
14633            // LATERAL must always be followed by a subquery or table function.
14634            if self.consume_token(&Token::LParen) {
14635                self.parse_derived_table_factor(Lateral)
14636            } else {
14637                let name = self.parse_object_name(false)?;
14638                self.expect_token(&Token::LParen)?;
14639                let args = self.parse_optional_args()?;
14640                let alias = self.maybe_parse_table_alias()?;
14641                Ok(TableFactor::Function {
14642                    lateral: true,
14643                    name,
14644                    args,
14645                    alias,
14646                })
14647            }
14648        } else if self.parse_keyword(Keyword::TABLE) {
14649            // parse table function (SELECT * FROM TABLE (<expr>) [ AS <alias> ])
14650            self.expect_token(&Token::LParen)?;
14651            let expr = self.parse_expr()?;
14652            self.expect_token(&Token::RParen)?;
14653            let alias = self.maybe_parse_table_alias()?;
14654            Ok(TableFactor::TableFunction { expr, alias })
14655        } else if self.consume_token(&Token::LParen) {
14656            // A left paren introduces either a derived table (i.e., a subquery)
14657            // or a nested join. It's nearly impossible to determine ahead of
14658            // time which it is... so we just try to parse both.
14659            //
14660            // Here's an example that demonstrates the complexity:
14661            //                     /-------------------------------------------------------\
14662            //                     | /-----------------------------------\                 |
14663            //     SELECT * FROM ( ( ( (SELECT 1) UNION (SELECT 2) ) AS t1 NATURAL JOIN t2 ) )
14664            //                   ^ ^ ^ ^
14665            //                   | | | |
14666            //                   | | | |
14667            //                   | | | (4) belongs to a SetExpr::Query inside the subquery
14668            //                   | | (3) starts a derived table (subquery)
14669            //                   | (2) starts a nested join
14670            //                   (1) an additional set of parens around a nested join
14671            //
14672
14673            // If the recently consumed '(' starts a derived table, the call to
14674            // `parse_derived_table_factor` below will return success after parsing the
14675            // subquery, followed by the closing ')', and the alias of the derived table.
14676            // In the example above this is case (3).
14677            if let Some(mut table) =
14678                self.maybe_parse(|parser| parser.parse_derived_table_factor(NotLateral))?
14679            {
14680                while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT])
14681                {
14682                    table = match kw {
14683                        Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
14684                        Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
14685                        unexpected_keyword => return Err(ParserError::ParserError(
14686                            format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in pivot/unpivot"),
14687                        )),
14688                    }
14689                }
14690                return Ok(table);
14691            }
14692
14693            // A parsing error from `parse_derived_table_factor` indicates that the '(' we've
14694            // recently consumed does not start a derived table (cases 1, 2, or 4).
14695            // `maybe_parse` will ignore such an error and rewind to be after the opening '('.
14696
14697            // Inside the parentheses we expect to find an (A) table factor
14698            // followed by some joins or (B) another level of nesting.
14699            let mut table_and_joins = self.parse_table_and_joins()?;
14700
14701            #[allow(clippy::if_same_then_else)]
14702            if !table_and_joins.joins.is_empty() {
14703                self.expect_token(&Token::RParen)?;
14704                let alias = self.maybe_parse_table_alias()?;
14705                Ok(TableFactor::NestedJoin {
14706                    table_with_joins: Box::new(table_and_joins),
14707                    alias,
14708                }) // (A)
14709            } else if let TableFactor::NestedJoin {
14710                table_with_joins: _,
14711                alias: _,
14712            } = &table_and_joins.relation
14713            {
14714                // (B): `table_and_joins` (what we found inside the parentheses)
14715                // is a nested join `(foo JOIN bar)`, not followed by other joins.
14716                self.expect_token(&Token::RParen)?;
14717                let alias = self.maybe_parse_table_alias()?;
14718                Ok(TableFactor::NestedJoin {
14719                    table_with_joins: Box::new(table_and_joins),
14720                    alias,
14721                })
14722            } else if dialect_of!(self is SnowflakeDialect | GenericDialect) {
14723                // Dialect-specific behavior: Snowflake diverges from the
14724                // standard and from most of the other implementations by
14725                // allowing extra parentheses not only around a join (B), but
14726                // around lone table names (e.g. `FROM (mytable [AS alias])`)
14727                // and around derived tables (e.g. `FROM ((SELECT ...)
14728                // [AS alias])`) as well.
14729                self.expect_token(&Token::RParen)?;
14730
14731                if let Some(outer_alias) = self.maybe_parse_table_alias()? {
14732                    // Snowflake also allows specifying an alias *after* parens
14733                    // e.g. `FROM (mytable) AS alias`
14734                    match &mut table_and_joins.relation {
14735                        TableFactor::Derived { alias, .. }
14736                        | TableFactor::Table { alias, .. }
14737                        | TableFactor::Function { alias, .. }
14738                        | TableFactor::UNNEST { alias, .. }
14739                        | TableFactor::JsonTable { alias, .. }
14740                        | TableFactor::XmlTable { alias, .. }
14741                        | TableFactor::OpenJsonTable { alias, .. }
14742                        | TableFactor::TableFunction { alias, .. }
14743                        | TableFactor::Pivot { alias, .. }
14744                        | TableFactor::Unpivot { alias, .. }
14745                        | TableFactor::MatchRecognize { alias, .. }
14746                        | TableFactor::SemanticView { alias, .. }
14747                        | TableFactor::NestedJoin { alias, .. } => {
14748                            // but not `FROM (mytable AS alias1) AS alias2`.
14749                            if let Some(inner_alias) = alias {
14750                                return Err(ParserError::ParserError(format!(
14751                                    "duplicate alias {inner_alias}"
14752                                )));
14753                            }
14754                            // Act as if the alias was specified normally next
14755                            // to the table name: `(mytable) AS alias` ->
14756                            // `(mytable AS alias)`
14757                            alias.replace(outer_alias);
14758                        }
14759                    };
14760                }
14761                // Do not store the extra set of parens in the AST
14762                Ok(table_and_joins.relation)
14763            } else {
14764                // The SQL spec prohibits derived tables and bare tables from
14765                // appearing alone in parentheses (e.g. `FROM (mytable)`)
14766                self.expected("joined table", self.peek_token())
14767            }
14768        } else if dialect_of!(self is SnowflakeDialect | DatabricksDialect | GenericDialect)
14769            && matches!(
14770                self.peek_tokens(),
14771                [
14772                    Token::Word(Word {
14773                        keyword: Keyword::VALUES,
14774                        ..
14775                    }),
14776                    Token::LParen
14777                ]
14778            )
14779        {
14780            self.expect_keyword_is(Keyword::VALUES)?;
14781
14782            // Snowflake and Databricks allow syntax like below:
14783            // SELECT * FROM VALUES (1, 'a'), (2, 'b') AS t (col1, col2)
14784            // where there are no parentheses around the VALUES clause.
14785            let values = SetExpr::Values(self.parse_values(false, false)?);
14786            let alias = self.maybe_parse_table_alias()?;
14787            Ok(TableFactor::Derived {
14788                lateral: false,
14789                subquery: Box::new(Query {
14790                    with: None,
14791                    body: Box::new(values),
14792                    order_by: None,
14793                    limit_clause: None,
14794                    fetch: None,
14795                    locks: vec![],
14796                    for_clause: None,
14797                    settings: None,
14798                    format_clause: None,
14799                    pipe_operators: vec![],
14800                }),
14801                alias,
14802            })
14803        } else if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
14804            && self.parse_keyword(Keyword::UNNEST)
14805        {
14806            self.expect_token(&Token::LParen)?;
14807            let array_exprs = self.parse_comma_separated(Parser::parse_expr)?;
14808            self.expect_token(&Token::RParen)?;
14809
14810            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
14811            let alias = match self.maybe_parse_table_alias() {
14812                Ok(Some(alias)) => Some(alias),
14813                Ok(None) => None,
14814                Err(e) => return Err(e),
14815            };
14816
14817            let with_offset = match self.expect_keywords(&[Keyword::WITH, Keyword::OFFSET]) {
14818                Ok(()) => true,
14819                Err(_) => false,
14820            };
14821
14822            let with_offset_alias = if with_offset {
14823                match self.parse_optional_alias(keywords::RESERVED_FOR_COLUMN_ALIAS) {
14824                    Ok(Some(alias)) => Some(alias),
14825                    Ok(None) => None,
14826                    Err(e) => return Err(e),
14827                }
14828            } else {
14829                None
14830            };
14831
14832            Ok(TableFactor::UNNEST {
14833                alias,
14834                array_exprs,
14835                with_offset,
14836                with_offset_alias,
14837                with_ordinality,
14838            })
14839        } else if self.parse_keyword_with_tokens(Keyword::JSON_TABLE, &[Token::LParen]) {
14840            let json_expr = self.parse_expr()?;
14841            self.expect_token(&Token::Comma)?;
14842            let json_path = self.parse_value()?.value;
14843            self.expect_keyword_is(Keyword::COLUMNS)?;
14844            self.expect_token(&Token::LParen)?;
14845            let columns = self.parse_comma_separated(Parser::parse_json_table_column_def)?;
14846            self.expect_token(&Token::RParen)?;
14847            self.expect_token(&Token::RParen)?;
14848            let alias = self.maybe_parse_table_alias()?;
14849            Ok(TableFactor::JsonTable {
14850                json_expr,
14851                json_path,
14852                columns,
14853                alias,
14854            })
14855        } else if self.parse_keyword_with_tokens(Keyword::OPENJSON, &[Token::LParen]) {
14856            self.prev_token();
14857            self.parse_open_json_table_factor()
14858        } else if self.parse_keyword_with_tokens(Keyword::XMLTABLE, &[Token::LParen]) {
14859            self.prev_token();
14860            self.parse_xml_table_factor()
14861        } else if self.dialect.supports_semantic_view_table_factor()
14862            && self.peek_keyword_with_tokens(Keyword::SEMANTIC_VIEW, &[Token::LParen])
14863        {
14864            self.parse_semantic_view_table_factor()
14865        } else {
14866            let name = self.parse_object_name(true)?;
14867
14868            let json_path = match self.peek_token().token {
14869                Token::LBracket if self.dialect.supports_partiql() => Some(self.parse_json_path()?),
14870                _ => None,
14871            };
14872
14873            let partitions: Vec<Ident> = if dialect_of!(self is MySqlDialect | GenericDialect)
14874                && self.parse_keyword(Keyword::PARTITION)
14875            {
14876                self.parse_parenthesized_identifiers()?
14877            } else {
14878                vec![]
14879            };
14880
14881            // Parse potential version qualifier
14882            let version = self.maybe_parse_table_version()?;
14883
14884            // Postgres, MSSQL, ClickHouse: table-valued functions:
14885            let args = if self.consume_token(&Token::LParen) {
14886                Some(self.parse_table_function_args()?)
14887            } else {
14888                None
14889            };
14890
14891            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
14892
14893            let mut sample = None;
14894            if self.dialect.supports_table_sample_before_alias() {
14895                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
14896                    sample = Some(TableSampleKind::BeforeTableAlias(parsed_sample));
14897                }
14898            }
14899
14900            let alias = self.maybe_parse_table_alias()?;
14901
14902            // MySQL-specific table hints:
14903            let index_hints = if self.dialect.supports_table_hints() {
14904                self.maybe_parse(|p| p.parse_table_index_hints())?
14905                    .unwrap_or(vec![])
14906            } else {
14907                vec![]
14908            };
14909
14910            // MSSQL-specific table hints:
14911            let mut with_hints = vec![];
14912            if self.parse_keyword(Keyword::WITH) {
14913                if self.consume_token(&Token::LParen) {
14914                    with_hints = self.parse_comma_separated(Parser::parse_expr)?;
14915                    self.expect_token(&Token::RParen)?;
14916                } else {
14917                    // rewind, as WITH may belong to the next statement's CTE
14918                    self.prev_token();
14919                }
14920            };
14921
14922            if !self.dialect.supports_table_sample_before_alias() {
14923                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
14924                    sample = Some(TableSampleKind::AfterTableAlias(parsed_sample));
14925                }
14926            }
14927
14928            let mut table = TableFactor::Table {
14929                name,
14930                alias,
14931                args,
14932                with_hints,
14933                version,
14934                partitions,
14935                with_ordinality,
14936                json_path,
14937                sample,
14938                index_hints,
14939            };
14940
14941            while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT]) {
14942                table = match kw {
14943                    Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
14944                    Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
14945                    unexpected_keyword => return Err(ParserError::ParserError(
14946                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in pivot/unpivot"),
14947                    )),
14948                }
14949            }
14950
14951            if self.dialect.supports_match_recognize()
14952                && self.parse_keyword(Keyword::MATCH_RECOGNIZE)
14953            {
14954                table = self.parse_match_recognize(table)?;
14955            }
14956
14957            Ok(table)
14958        }
14959    }
14960
14961    fn maybe_parse_table_sample(&mut self) -> Result<Option<Box<TableSample>>, ParserError> {
14962        let modifier = if self.parse_keyword(Keyword::TABLESAMPLE) {
14963            TableSampleModifier::TableSample
14964        } else if self.parse_keyword(Keyword::SAMPLE) {
14965            TableSampleModifier::Sample
14966        } else {
14967            return Ok(None);
14968        };
14969        self.parse_table_sample(modifier).map(Some)
14970    }
14971
14972    fn parse_table_sample(
14973        &mut self,
14974        modifier: TableSampleModifier,
14975    ) -> Result<Box<TableSample>, ParserError> {
14976        let name = match self.parse_one_of_keywords(&[
14977            Keyword::BERNOULLI,
14978            Keyword::ROW,
14979            Keyword::SYSTEM,
14980            Keyword::BLOCK,
14981        ]) {
14982            Some(Keyword::BERNOULLI) => Some(TableSampleMethod::Bernoulli),
14983            Some(Keyword::ROW) => Some(TableSampleMethod::Row),
14984            Some(Keyword::SYSTEM) => Some(TableSampleMethod::System),
14985            Some(Keyword::BLOCK) => Some(TableSampleMethod::Block),
14986            _ => None,
14987        };
14988
14989        let parenthesized = self.consume_token(&Token::LParen);
14990
14991        let (quantity, bucket) = if parenthesized && self.parse_keyword(Keyword::BUCKET) {
14992            let selected_bucket = self.parse_number_value()?.value;
14993            self.expect_keywords(&[Keyword::OUT, Keyword::OF])?;
14994            let total = self.parse_number_value()?.value;
14995            let on = if self.parse_keyword(Keyword::ON) {
14996                Some(self.parse_expr()?)
14997            } else {
14998                None
14999            };
15000            (
15001                None,
15002                Some(TableSampleBucket {
15003                    bucket: selected_bucket,
15004                    total,
15005                    on,
15006                }),
15007            )
15008        } else {
15009            let value = match self.maybe_parse(|p| p.parse_expr())? {
15010                Some(num) => num,
15011                None => {
15012                    let next_token = self.next_token();
15013                    if let Token::Word(w) = next_token.token {
15014                        Expr::Value(Value::Placeholder(w.value).with_span(next_token.span))
15015                    } else {
15016                        return parser_err!(
15017                            "Expecting number or byte length e.g. 100M",
15018                            self.peek_token().span.start
15019                        );
15020                    }
15021                }
15022            };
15023            let unit = if self.parse_keyword(Keyword::ROWS) {
15024                Some(TableSampleUnit::Rows)
15025            } else if self.parse_keyword(Keyword::PERCENT) {
15026                Some(TableSampleUnit::Percent)
15027            } else {
15028                None
15029            };
15030            (
15031                Some(TableSampleQuantity {
15032                    parenthesized,
15033                    value,
15034                    unit,
15035                }),
15036                None,
15037            )
15038        };
15039        if parenthesized {
15040            self.expect_token(&Token::RParen)?;
15041        }
15042
15043        let seed = if self.parse_keyword(Keyword::REPEATABLE) {
15044            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Repeatable)?)
15045        } else if self.parse_keyword(Keyword::SEED) {
15046            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Seed)?)
15047        } else {
15048            None
15049        };
15050
15051        let offset = if self.parse_keyword(Keyword::OFFSET) {
15052            Some(self.parse_expr()?)
15053        } else {
15054            None
15055        };
15056
15057        Ok(Box::new(TableSample {
15058            modifier,
15059            name,
15060            quantity,
15061            seed,
15062            bucket,
15063            offset,
15064        }))
15065    }
15066
15067    fn parse_table_sample_seed(
15068        &mut self,
15069        modifier: TableSampleSeedModifier,
15070    ) -> Result<TableSampleSeed, ParserError> {
15071        self.expect_token(&Token::LParen)?;
15072        let value = self.parse_number_value()?.value;
15073        self.expect_token(&Token::RParen)?;
15074        Ok(TableSampleSeed { modifier, value })
15075    }
15076
15077    /// Parses `OPENJSON( jsonExpression [ , path ] )  [ <with_clause> ]` clause,
15078    /// assuming the `OPENJSON` keyword was already consumed.
15079    fn parse_open_json_table_factor(&mut self) -> Result<TableFactor, ParserError> {
15080        self.expect_token(&Token::LParen)?;
15081        let json_expr = self.parse_expr()?;
15082        let json_path = if self.consume_token(&Token::Comma) {
15083            Some(self.parse_value()?.value)
15084        } else {
15085            None
15086        };
15087        self.expect_token(&Token::RParen)?;
15088        let columns = if self.parse_keyword(Keyword::WITH) {
15089            self.expect_token(&Token::LParen)?;
15090            let columns = self.parse_comma_separated(Parser::parse_openjson_table_column_def)?;
15091            self.expect_token(&Token::RParen)?;
15092            columns
15093        } else {
15094            Vec::new()
15095        };
15096        let alias = self.maybe_parse_table_alias()?;
15097        Ok(TableFactor::OpenJsonTable {
15098            json_expr,
15099            json_path,
15100            columns,
15101            alias,
15102        })
15103    }
15104
15105    fn parse_xml_table_factor(&mut self) -> Result<TableFactor, ParserError> {
15106        self.expect_token(&Token::LParen)?;
15107        let namespaces = if self.parse_keyword(Keyword::XMLNAMESPACES) {
15108            self.expect_token(&Token::LParen)?;
15109            let namespaces = self.parse_comma_separated(Parser::parse_xml_namespace_definition)?;
15110            self.expect_token(&Token::RParen)?;
15111            self.expect_token(&Token::Comma)?;
15112            namespaces
15113        } else {
15114            vec![]
15115        };
15116        let row_expression = self.parse_expr()?;
15117        let passing = self.parse_xml_passing_clause()?;
15118        self.expect_keyword_is(Keyword::COLUMNS)?;
15119        let columns = self.parse_comma_separated(Parser::parse_xml_table_column)?;
15120        self.expect_token(&Token::RParen)?;
15121        let alias = self.maybe_parse_table_alias()?;
15122        Ok(TableFactor::XmlTable {
15123            namespaces,
15124            row_expression,
15125            passing,
15126            columns,
15127            alias,
15128        })
15129    }
15130
15131    fn parse_xml_namespace_definition(&mut self) -> Result<XmlNamespaceDefinition, ParserError> {
15132        let uri = self.parse_expr()?;
15133        self.expect_keyword_is(Keyword::AS)?;
15134        let name = self.parse_identifier()?;
15135        Ok(XmlNamespaceDefinition { uri, name })
15136    }
15137
15138    fn parse_xml_table_column(&mut self) -> Result<XmlTableColumn, ParserError> {
15139        let name = self.parse_identifier()?;
15140
15141        let option = if self.parse_keyword(Keyword::FOR) {
15142            self.expect_keyword(Keyword::ORDINALITY)?;
15143            XmlTableColumnOption::ForOrdinality
15144        } else {
15145            let r#type = self.parse_data_type()?;
15146            let mut path = None;
15147            let mut default = None;
15148
15149            if self.parse_keyword(Keyword::PATH) {
15150                path = Some(self.parse_expr()?);
15151            }
15152
15153            if self.parse_keyword(Keyword::DEFAULT) {
15154                default = Some(self.parse_expr()?);
15155            }
15156
15157            let not_null = self.parse_keywords(&[Keyword::NOT, Keyword::NULL]);
15158            if !not_null {
15159                // NULL is the default but can be specified explicitly
15160                let _ = self.parse_keyword(Keyword::NULL);
15161            }
15162
15163            XmlTableColumnOption::NamedInfo {
15164                r#type,
15165                path,
15166                default,
15167                nullable: !not_null,
15168            }
15169        };
15170        Ok(XmlTableColumn { name, option })
15171    }
15172
15173    fn parse_xml_passing_clause(&mut self) -> Result<XmlPassingClause, ParserError> {
15174        let mut arguments = vec![];
15175        if self.parse_keyword(Keyword::PASSING) {
15176            loop {
15177                let by_value =
15178                    self.parse_keyword(Keyword::BY) && self.expect_keyword(Keyword::VALUE).is_ok();
15179                let expr = self.parse_expr()?;
15180                let alias = if self.parse_keyword(Keyword::AS) {
15181                    Some(self.parse_identifier()?)
15182                } else {
15183                    None
15184                };
15185                arguments.push(XmlPassingArgument {
15186                    expr,
15187                    alias,
15188                    by_value,
15189                });
15190                if !self.consume_token(&Token::Comma) {
15191                    break;
15192                }
15193            }
15194        }
15195        Ok(XmlPassingClause { arguments })
15196    }
15197
15198    /// Parses a [TableFactor::SemanticView] table factor.
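    ///
    /// A hedged sketch of the shape this accepts, assuming a dialect where
    /// `supports_semantic_view_table_factor()` is true (e.g. Snowflake); the
    /// view, metric and dimension names are illustrative:
    /// `SELECT * FROM SEMANTIC_VIEW(sv METRICS m.total DIMENSIONS d.region) AS t`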
15199    fn parse_semantic_view_table_factor(&mut self) -> Result<TableFactor, ParserError> {
15200        self.expect_keyword(Keyword::SEMANTIC_VIEW)?;
15201        self.expect_token(&Token::LParen)?;
15202
15203        let name = self.parse_object_name(true)?;
15204
15205        // Parse DIMENSIONS, METRICS, FACTS and WHERE clauses in flexible order
15206        let mut dimensions = Vec::new();
15207        let mut metrics = Vec::new();
15208        let mut facts = Vec::new();
15209        let mut where_clause = None;
15210
15211        while self.peek_token().token != Token::RParen {
15212            if self.parse_keyword(Keyword::DIMENSIONS) {
15213                if !dimensions.is_empty() {
15214                    return Err(ParserError::ParserError(
15215                        "DIMENSIONS clause can only be specified once".to_string(),
15216                    ));
15217                }
15218                dimensions = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
15219            } else if self.parse_keyword(Keyword::METRICS) {
15220                if !metrics.is_empty() {
15221                    return Err(ParserError::ParserError(
15222                        "METRICS clause can only be specified once".to_string(),
15223                    ));
15224                }
15225                metrics = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
15226            } else if self.parse_keyword(Keyword::FACTS) {
15227                if !facts.is_empty() {
15228                    return Err(ParserError::ParserError(
15229                        "FACTS clause can only be specified once".to_string(),
15230                    ));
15231                }
15232                facts = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
15233            } else if self.parse_keyword(Keyword::WHERE) {
15234                if where_clause.is_some() {
15235                    return Err(ParserError::ParserError(
15236                        "WHERE clause can only be specified once".to_string(),
15237                    ));
15238                }
15239                where_clause = Some(self.parse_expr()?);
15240            } else {
15241                return parser_err!(
15242                    format!(
15243                        "Expected one of DIMENSIONS, METRICS, FACTS or WHERE, got {}",
15244                        self.peek_token().token
15245                    ),
15246                    self.peek_token().span.start
15247                )?;
15248            }
15249        }
15250
15251        self.expect_token(&Token::RParen)?;
15252
15253        let alias = self.maybe_parse_table_alias()?;
15254
15255        Ok(TableFactor::SemanticView {
15256            name,
15257            dimensions,
15258            metrics,
15259            facts,
15260            where_clause,
15261            alias,
15262        })
15263    }
15264
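    /// Parses the body of a `MATCH_RECOGNIZE (...)` table factor; the
    /// `MATCH_RECOGNIZE` keyword has already been consumed by
    /// `parse_table_factor`. The `PARTITION BY`, `ORDER BY`, `MEASURES`,
    /// rows-per-match and after-match-skip clauses are optional, while
    /// `PATTERN` and `DEFINE` are required. A hedged sketch of a minimal
    /// accepted input (symbol and column names are illustrative):
    /// `t MATCH_RECOGNIZE (PATTERN (a+) DEFINE a AS price > 10)`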
15265    fn parse_match_recognize(&mut self, table: TableFactor) -> Result<TableFactor, ParserError> {
15266        self.expect_token(&Token::LParen)?;
15267
15268        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
15269            self.parse_comma_separated(Parser::parse_expr)?
15270        } else {
15271            vec![]
15272        };
15273
15274        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
15275            self.parse_comma_separated(Parser::parse_order_by_expr)?
15276        } else {
15277            vec![]
15278        };
15279
15280        let measures = if self.parse_keyword(Keyword::MEASURES) {
15281            self.parse_comma_separated(|p| {
15282                let expr = p.parse_expr()?;
15283                let _ = p.parse_keyword(Keyword::AS);
15284                let alias = p.parse_identifier()?;
15285                Ok(Measure { expr, alias })
15286            })?
15287        } else {
15288            vec![]
15289        };
15290
15291        let rows_per_match =
15292            if self.parse_keywords(&[Keyword::ONE, Keyword::ROW, Keyword::PER, Keyword::MATCH]) {
15293                Some(RowsPerMatch::OneRow)
15294            } else if self.parse_keywords(&[
15295                Keyword::ALL,
15296                Keyword::ROWS,
15297                Keyword::PER,
15298                Keyword::MATCH,
15299            ]) {
15300                Some(RowsPerMatch::AllRows(
15301                    if self.parse_keywords(&[Keyword::SHOW, Keyword::EMPTY, Keyword::MATCHES]) {
15302                        Some(EmptyMatchesMode::Show)
15303                    } else if self.parse_keywords(&[
15304                        Keyword::OMIT,
15305                        Keyword::EMPTY,
15306                        Keyword::MATCHES,
15307                    ]) {
15308                        Some(EmptyMatchesMode::Omit)
15309                    } else if self.parse_keywords(&[
15310                        Keyword::WITH,
15311                        Keyword::UNMATCHED,
15312                        Keyword::ROWS,
15313                    ]) {
15314                        Some(EmptyMatchesMode::WithUnmatched)
15315                    } else {
15316                        None
15317                    },
15318                ))
15319            } else {
15320                None
15321            };
15322
15323        let after_match_skip =
15324            if self.parse_keywords(&[Keyword::AFTER, Keyword::MATCH, Keyword::SKIP]) {
15325                if self.parse_keywords(&[Keyword::PAST, Keyword::LAST, Keyword::ROW]) {
15326                    Some(AfterMatchSkip::PastLastRow)
15327                } else if self.parse_keywords(&[Keyword::TO, Keyword::NEXT, Keyword::ROW]) {
15328                    Some(AfterMatchSkip::ToNextRow)
15329                } else if self.parse_keywords(&[Keyword::TO, Keyword::FIRST]) {
15330                    Some(AfterMatchSkip::ToFirst(self.parse_identifier()?))
15331                } else if self.parse_keywords(&[Keyword::TO, Keyword::LAST]) {
15332                    Some(AfterMatchSkip::ToLast(self.parse_identifier()?))
15333                } else {
15334                    let found = self.next_token();
15335                    return self.expected("after match skip option", found);
15336                }
15337            } else {
15338                None
15339            };
15340
15341        self.expect_keyword_is(Keyword::PATTERN)?;
15342        let pattern = self.parse_parenthesized(Self::parse_pattern)?;
15343
15344        self.expect_keyword_is(Keyword::DEFINE)?;
15345
15346        let symbols = self.parse_comma_separated(|p| {
15347            let symbol = p.parse_identifier()?;
15348            p.expect_keyword_is(Keyword::AS)?;
15349            let definition = p.parse_expr()?;
15350            Ok(SymbolDefinition { symbol, definition })
15351        })?;
15352
15353        self.expect_token(&Token::RParen)?;
15354
15355        let alias = self.maybe_parse_table_alias()?;
15356
15357        Ok(TableFactor::MatchRecognize {
15358            table: Box::new(table),
15359            partition_by,
15360            order_by,
15361            measures,
15362            rows_per_match,
15363            after_match_skip,
15364            pattern,
15365            symbols,
15366            alias,
15367        })
15368    }
15369
15370    fn parse_base_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
15371        match self.next_token().token {
15372            Token::Caret => Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::Start)),
15373            Token::Placeholder(s) if s == "$" => {
15374                Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::End))
15375            }
15376            Token::LBrace => {
15377                self.expect_token(&Token::Minus)?;
15378                let symbol = self.parse_identifier().map(MatchRecognizeSymbol::Named)?;
15379                self.expect_token(&Token::Minus)?;
15380                self.expect_token(&Token::RBrace)?;
15381                Ok(MatchRecognizePattern::Exclude(symbol))
15382            }
15383            Token::Word(Word {
15384                value,
15385                quote_style: None,
15386                ..
15387            }) if value == "PERMUTE" => {
15388                self.expect_token(&Token::LParen)?;
15389                let symbols = self.parse_comma_separated(|p| {
15390                    p.parse_identifier().map(MatchRecognizeSymbol::Named)
15391                })?;
15392                self.expect_token(&Token::RParen)?;
15393                Ok(MatchRecognizePattern::Permute(symbols))
15394            }
15395            Token::LParen => {
15396                let pattern = self.parse_pattern()?;
15397                self.expect_token(&Token::RParen)?;
15398                Ok(MatchRecognizePattern::Group(Box::new(pattern)))
15399            }
15400            _ => {
15401                self.prev_token();
15402                self.parse_identifier()
15403                    .map(MatchRecognizeSymbol::Named)
15404                    .map(MatchRecognizePattern::Symbol)
15405            }
15406        }
15407    }
15408
15409    fn parse_repetition_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
15410        let mut pattern = self.parse_base_pattern()?;
15411        loop {
15412            let token = self.next_token();
15413            let quantifier = match token.token {
15414                Token::Mul => RepetitionQuantifier::ZeroOrMore,
15415                Token::Plus => RepetitionQuantifier::OneOrMore,
15416                Token::Placeholder(s) if s == "?" => RepetitionQuantifier::AtMostOne,
15417                Token::LBrace => {
15418                    // quantifier is a range like {n} or {n,} or {,m} or {n,m}
15419                    let token = self.next_token();
15420                    match token.token {
15421                        Token::Comma => {
15422                            let next_token = self.next_token();
15423                            let Token::Number(n, _) = next_token.token else {
15424                                return self.expected("literal number", next_token);
15425                            };
15426                            self.expect_token(&Token::RBrace)?;
15427                            RepetitionQuantifier::AtMost(Self::parse(n, token.span.start)?)
15428                        }
15429                        Token::Number(n, _) if self.consume_token(&Token::Comma) => {
15430                            let next_token = self.next_token();
15431                            match next_token.token {
15432                                Token::Number(m, _) => {
15433                                    self.expect_token(&Token::RBrace)?;
15434                                    RepetitionQuantifier::Range(
15435                                        Self::parse(n, token.span.start)?,
15436                                        Self::parse(m, token.span.start)?,
15437                                    )
15438                                }
15439                                Token::RBrace => {
15440                                    RepetitionQuantifier::AtLeast(Self::parse(n, token.span.start)?)
15441                                }
15442                                _ => {
15443                                    return self.expected("} or upper bound", next_token);
15444                                }
15445                            }
15446                        }
15447                        Token::Number(n, _) => {
15448                            self.expect_token(&Token::RBrace)?;
15449                            RepetitionQuantifier::Exactly(Self::parse(n, token.span.start)?)
15450                        }
15451                        _ => return self.expected("quantifier range", token),
15452                    }
15453                }
15454                _ => {
15455                    self.prev_token();
15456                    break;
15457                }
15458            };
15459            pattern = MatchRecognizePattern::Repetition(Box::new(pattern), quantifier);
15460        }
15461        Ok(pattern)
15462    }
15463
15464    fn parse_concat_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
15465        let mut patterns = vec![self.parse_repetition_pattern()?];
15466        while !matches!(self.peek_token().token, Token::RParen | Token::Pipe) {
15467            patterns.push(self.parse_repetition_pattern()?);
15468        }
15469        match <[MatchRecognizePattern; 1]>::try_from(patterns) {
15470            Ok([pattern]) => Ok(pattern),
15471            Err(patterns) => Ok(MatchRecognizePattern::Concat(patterns)),
15472        }
15473    }
15474
15475    fn parse_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
15476        let pattern = self.parse_concat_pattern()?;
15477        if self.consume_token(&Token::Pipe) {
15478            match self.parse_pattern()? {
15479                // flatten nested alternations
15480                MatchRecognizePattern::Alternation(mut patterns) => {
15481                    patterns.insert(0, pattern);
15482                    Ok(MatchRecognizePattern::Alternation(patterns))
15483                }
15484                next => Ok(MatchRecognizePattern::Alternation(vec![pattern, next])),
15485            }
15486        } else {
15487            Ok(pattern)
15488        }
15489    }
15490
15491    /// Parses the timestamp version specifier (i.e. for querying historical data),
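    /// available in dialects where `supports_timestamp_versioning()` is true.
    ///
    /// A hedged sketch of the two accepted forms (the timestamp value is
    /// illustrative): `FOR SYSTEM_TIME AS OF '2019-01-29 00:00:00'`, or a
    /// versioning function call introduced by `AT` or `BEFORE`.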
15492    pub fn maybe_parse_table_version(&mut self) -> Result<Option<TableVersion>, ParserError> {
15493        if self.dialect.supports_timestamp_versioning() {
15494            if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
15495            {
15496                let expr = self.parse_expr()?;
15497                return Ok(Some(TableVersion::ForSystemTimeAsOf(expr)));
15498            } else if self.peek_keyword(Keyword::AT) || self.peek_keyword(Keyword::BEFORE) {
15499                let func_name = self.parse_object_name(true)?;
15500                let func = self.parse_function(func_name)?;
15501                return Ok(Some(TableVersion::Function(func)));
15502            }
15503        }
15504        Ok(None)
15505    }
15506
15507    /// Parses MySQL's JSON_TABLE column definition.
15508    /// For example: `id INT EXISTS PATH '$' DEFAULT '0' ON EMPTY ERROR ON ERROR`
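    ///
    /// A hedged sketch of calling this directly on a single column definition,
    /// assuming the MySQL dialect and the hypothetical path `'$.id'`:
    ///
    /// ```
    /// # use sqlparser::parser::{Parser, ParserError};
    /// # use sqlparser::dialect::MySqlDialect;
    /// # use sqlparser::ast::JsonTableColumn;
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = MySqlDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("id INT PATH '$.id'")?;
    /// let column = parser.parse_json_table_column_def()?;
    /// assert!(matches!(column, JsonTableColumn::Named(_)));
    /// # Ok(())
    /// # }
    /// ```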
15509    pub fn parse_json_table_column_def(&mut self) -> Result<JsonTableColumn, ParserError> {
15510        if self.parse_keyword(Keyword::NESTED) {
15511            let _has_path_keyword = self.parse_keyword(Keyword::PATH);
15512            let path = self.parse_value()?.value;
15513            self.expect_keyword_is(Keyword::COLUMNS)?;
15514            let columns = self.parse_parenthesized(|p| {
15515                p.parse_comma_separated(Self::parse_json_table_column_def)
15516            })?;
15517            return Ok(JsonTableColumn::Nested(JsonTableNestedColumn {
15518                path,
15519                columns,
15520            }));
15521        }
15522        let name = self.parse_identifier()?;
15523        if self.parse_keyword(Keyword::FOR) {
15524            self.expect_keyword_is(Keyword::ORDINALITY)?;
15525            return Ok(JsonTableColumn::ForOrdinality(name));
15526        }
15527        let r#type = self.parse_data_type()?;
15528        let exists = self.parse_keyword(Keyword::EXISTS);
15529        self.expect_keyword_is(Keyword::PATH)?;
15530        let path = self.parse_value()?.value;
15531        let mut on_empty = None;
15532        let mut on_error = None;
15533        while let Some(error_handling) = self.parse_json_table_column_error_handling()? {
15534            if self.parse_keyword(Keyword::EMPTY) {
15535                on_empty = Some(error_handling);
15536            } else {
15537                self.expect_keyword_is(Keyword::ERROR)?;
15538                on_error = Some(error_handling);
15539            }
15540        }
15541        Ok(JsonTableColumn::Named(JsonTableNamedColumn {
15542            name,
15543            r#type,
15544            path,
15545            exists,
15546            on_empty,
15547            on_error,
15548        }))
15549    }
15550
15551    /// Parses MSSQL's `OPENJSON WITH` column definition.
15552    ///
15553    /// ```sql
15554    /// colName type [ column_path ] [ AS JSON ]
15555    /// ```
15556    ///
15557    /// Reference: <https://learn.microsoft.com/en-us/sql/t-sql/functions/openjson-transact-sql?view=sql-server-ver16#syntax>
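    ///
    /// A hedged sketch of calling this directly on one column definition,
    /// assuming the MSSQL dialect and the hypothetical column path `'$.a'`:
    ///
    /// ```
    /// # use sqlparser::parser::{Parser, ParserError};
    /// # use sqlparser::dialect::MsSqlDialect;
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = MsSqlDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("a INT '$.a'")?;
    /// let column = parser.parse_openjson_table_column_def()?;
    /// assert_eq!(column.path.as_deref(), Some("$.a"));
    /// # Ok(())
    /// # }
    /// ```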
15558    pub fn parse_openjson_table_column_def(&mut self) -> Result<OpenJsonTableColumn, ParserError> {
15559        let name = self.parse_identifier()?;
15560        let r#type = self.parse_data_type()?;
15561        let path = if let Token::SingleQuotedString(path) = self.peek_token().token {
15562            self.next_token();
15563            Some(path)
15564        } else {
15565            None
15566        };
15567        let as_json = self.parse_keyword(Keyword::AS);
15568        if as_json {
15569            self.expect_keyword_is(Keyword::JSON)?;
15570        }
15571        Ok(OpenJsonTableColumn {
15572            name,
15573            r#type,
15574            path,
15575            as_json,
15576        })
15577    }
15578
15579    fn parse_json_table_column_error_handling(
15580        &mut self,
15581    ) -> Result<Option<JsonTableColumnErrorHandling>, ParserError> {
15582        let res = if self.parse_keyword(Keyword::NULL) {
15583            JsonTableColumnErrorHandling::Null
15584        } else if self.parse_keyword(Keyword::ERROR) {
15585            JsonTableColumnErrorHandling::Error
15586        } else if self.parse_keyword(Keyword::DEFAULT) {
15587            JsonTableColumnErrorHandling::Default(self.parse_value()?.value)
15588        } else {
15589            return Ok(None);
15590        };
15591        self.expect_keyword_is(Keyword::ON)?;
15592        Ok(Some(res))
15593    }
15594
15595    pub fn parse_derived_table_factor(
15596        &mut self,
15597        lateral: IsLateral,
15598    ) -> Result<TableFactor, ParserError> {
15599        let subquery = self.parse_query()?;
15600        self.expect_token(&Token::RParen)?;
15601        let alias = self.maybe_parse_table_alias()?;
15602        Ok(TableFactor::Derived {
15603            lateral: match lateral {
15604                Lateral => true,
15605                NotLateral => false,
15606            },
15607            subquery,
15608            alias,
15609        })
15610    }
15611
15612    fn parse_aliased_function_call(&mut self) -> Result<ExprWithAlias, ParserError> {
15613        let function_name = match self.next_token().token {
15614            Token::Word(w) => Ok(w.value),
15615            _ => self.expected("a function identifier", self.peek_token()),
15616        }?;
15617        let expr = self.parse_function(ObjectName::from(vec![Ident::new(function_name)]))?;
15618        let alias = if self.parse_keyword(Keyword::AS) {
15619            Some(self.parse_identifier()?)
15620        } else {
15621            None
15622        };
15623
15624        Ok(ExprWithAlias { expr, alias })
15625    }
15626    /// Parses an expression with an optional alias
15627    ///
15628    /// Examples:
15629    ///
15630    /// ```sql
15631    /// SUM(price) AS total_price
15632    /// ```
15633    /// ```sql
15634    /// SUM(price)
15635    /// ```
15636    ///
15637    /// Example
15638    /// ```
15639    /// # use sqlparser::parser::{Parser, ParserError};
15640    /// # use sqlparser::dialect::GenericDialect;
15641    /// # fn main() -> Result<(), ParserError> {
15642    /// let sql = r#"SUM("a") as "b""#;
15643    /// let mut parser = Parser::new(&GenericDialect).try_with_sql(sql)?;
15644    /// let expr_with_alias = parser.parse_expr_with_alias()?;
15645    /// assert_eq!(Some("b".to_string()), expr_with_alias.alias.map(|x|x.value));
15646    /// # Ok(())
15647    /// # }
    /// ```
15648    pub fn parse_expr_with_alias(&mut self) -> Result<ExprWithAlias, ParserError> {
15649        let expr = self.parse_expr()?;
15650        let alias = if self.parse_keyword(Keyword::AS) {
15651            Some(self.parse_identifier()?)
15652        } else {
15653            None
15654        };
15655
15656        Ok(ExprWithAlias { expr, alias })
15657    }
15658
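    /// Parses the parenthesized specification of a `PIVOT` table factor; the
    /// `PIVOT` keyword and the source table are consumed by the caller.
    ///
    /// A hedged sketch showing the clause inside a full query, assuming the
    /// generic dialect accepts `PIVOT` and using a hypothetical `sales` table:
    ///
    /// ```
    /// # use sqlparser::parser::{Parser, ParserError};
    /// # use sqlparser::dialect::GenericDialect;
    /// # fn main() -> Result<(), ParserError> {
    /// let sql = "SELECT * FROM sales PIVOT(SUM(amount) FOR quarter IN ('Q1', 'Q2')) AS p";
    /// let statements = Parser::parse_sql(&GenericDialect, sql)?;
    /// assert_eq!(statements.len(), 1);
    /// # Ok(())
    /// # }
    /// ```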
15659    pub fn parse_pivot_table_factor(
15660        &mut self,
15661        table: TableFactor,
15662    ) -> Result<TableFactor, ParserError> {
15663        self.expect_token(&Token::LParen)?;
15664        let aggregate_functions = self.parse_comma_separated(Self::parse_aliased_function_call)?;
15665        self.expect_keyword_is(Keyword::FOR)?;
15666        let value_column = if self.peek_token_ref().token == Token::LParen {
15667            self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
15668                p.parse_subexpr(self.dialect.prec_value(Precedence::Between))
15669            })?
15670        } else {
15671            vec![self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?]
15672        };
15673        self.expect_keyword_is(Keyword::IN)?;
15674
15675        self.expect_token(&Token::LParen)?;
15676        let value_source = if self.parse_keyword(Keyword::ANY) {
15677            let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
15678                self.parse_comma_separated(Parser::parse_order_by_expr)?
15679            } else {
15680                vec![]
15681            };
15682            PivotValueSource::Any(order_by)
15683        } else if self.peek_sub_query() {
15684            PivotValueSource::Subquery(self.parse_query()?)
15685        } else {
15686            PivotValueSource::List(self.parse_comma_separated(Self::parse_expr_with_alias)?)
15687        };
15688        self.expect_token(&Token::RParen)?;
15689
15690        let default_on_null =
15691            if self.parse_keywords(&[Keyword::DEFAULT, Keyword::ON, Keyword::NULL]) {
15692                self.expect_token(&Token::LParen)?;
15693                let expr = self.parse_expr()?;
15694                self.expect_token(&Token::RParen)?;
15695                Some(expr)
15696            } else {
15697                None
15698            };
15699
15700        self.expect_token(&Token::RParen)?;
15701        let alias = self.maybe_parse_table_alias()?;
15702        Ok(TableFactor::Pivot {
15703            table: Box::new(table),
15704            aggregate_functions,
15705            value_column,
15706            value_source,
15707            default_on_null,
15708            alias,
15709        })
15710    }
15711
15712    pub fn parse_unpivot_table_factor(
15713        &mut self,
15714        table: TableFactor,
15715    ) -> Result<TableFactor, ParserError> {
15716        let null_inclusion = if self.parse_keyword(Keyword::INCLUDE) {
15717            self.expect_keyword_is(Keyword::NULLS)?;
15718            Some(NullInclusion::IncludeNulls)
15719        } else if self.parse_keyword(Keyword::EXCLUDE) {
15720            self.expect_keyword_is(Keyword::NULLS)?;
15721            Some(NullInclusion::ExcludeNulls)
15722        } else {
15723            None
15724        };
15725        self.expect_token(&Token::LParen)?;
15726        let value = self.parse_expr()?;
15727        self.expect_keyword_is(Keyword::FOR)?;
15728        let name = self.parse_identifier()?;
15729        self.expect_keyword_is(Keyword::IN)?;
15730        let columns = self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
15731            p.parse_expr_with_alias()
15732        })?;
15733        self.expect_token(&Token::RParen)?;
15734        let alias = self.maybe_parse_table_alias()?;
15735        Ok(TableFactor::Unpivot {
15736            table: Box::new(table),
15737            value,
15738            null_inclusion,
15739            name,
15740            columns,
15741            alias,
15742        })
15743    }
15744
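    /// Parses the constraint following a join: `ON <expr>`, `USING (<columns>)`,
    /// or no constraint at all (e.g. for `CROSS JOIN`).
    ///
    /// A hedged sketch showing an `ON` constraint in a full query, assuming
    /// the generic dialect and hypothetical table names:
    ///
    /// ```
    /// # use sqlparser::parser::{Parser, ParserError};
    /// # use sqlparser::dialect::GenericDialect;
    /// # fn main() -> Result<(), ParserError> {
    /// let sql = "SELECT * FROM t1 JOIN t2 ON t1.id = t2.id";
    /// let statements = Parser::parse_sql(&GenericDialect, sql)?;
    /// assert_eq!(statements.len(), 1);
    /// # Ok(())
    /// # }
    /// ```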
15745    pub fn parse_join_constraint(&mut self, natural: bool) -> Result<JoinConstraint, ParserError> {
15746        if natural {
15747            Ok(JoinConstraint::Natural)
15748        } else if self.parse_keyword(Keyword::ON) {
15749            let constraint = self.parse_expr()?;
15750            Ok(JoinConstraint::On(constraint))
15751        } else if self.parse_keyword(Keyword::USING) {
15752            let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
15753            Ok(JoinConstraint::Using(columns))
15754        } else {
15755            Ok(JoinConstraint::None)
15756            //self.expected("ON, or USING after JOIN", self.peek_token())
15757        }
15758    }
15759
15760    /// Parse a GRANT statement.
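    ///
    /// A hedged sketch of the statement form this handles, assuming the
    /// generic dialect and the hypothetical names `my_table` / `my_user`:
    ///
    /// ```
    /// # use sqlparser::parser::{Parser, ParserError};
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::ast::Statement;
    /// # fn main() -> Result<(), ParserError> {
    /// let sql = "GRANT SELECT ON my_table TO my_user";
    /// let statements = Parser::parse_sql(&GenericDialect, sql)?;
    /// assert!(matches!(statements[0], Statement::Grant { .. }));
    /// # Ok(())
    /// # }
    /// ```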
15761    pub fn parse_grant(&mut self) -> Result<Statement, ParserError> {
15762        let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
15763
15764        self.expect_keyword_is(Keyword::TO)?;
15765        let grantees = self.parse_grantees()?;
15766
15767        let with_grant_option =
15768            self.parse_keywords(&[Keyword::WITH, Keyword::GRANT, Keyword::OPTION]);
15769
15770        let current_grants =
15771            if self.parse_keywords(&[Keyword::COPY, Keyword::CURRENT, Keyword::GRANTS]) {
15772                Some(CurrentGrantsKind::CopyCurrentGrants)
15773            } else if self.parse_keywords(&[Keyword::REVOKE, Keyword::CURRENT, Keyword::GRANTS]) {
15774                Some(CurrentGrantsKind::RevokeCurrentGrants)
15775            } else {
15776                None
15777            };
15778
15779        let as_grantor = if self.parse_keywords(&[Keyword::AS]) {
15780            Some(self.parse_identifier()?)
15781        } else {
15782            None
15783        };
15784
15785        let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
15786            Some(self.parse_identifier()?)
15787        } else {
15788            None
15789        };
15790
15791        Ok(Statement::Grant {
15792            privileges,
15793            objects,
15794            grantees,
15795            with_grant_option,
15796            as_grantor,
15797            granted_by,
15798            current_grants,
15799        })
15800    }
15801
15802    fn parse_grantees(&mut self) -> Result<Vec<Grantee>, ParserError> {
15803        let mut values = vec![];
15804        let mut grantee_type = GranteesType::None;
15805        loop {
15806            let new_grantee_type = if self.parse_keyword(Keyword::ROLE) {
15807                GranteesType::Role
15808            } else if self.parse_keyword(Keyword::USER) {
15809                GranteesType::User
15810            } else if self.parse_keyword(Keyword::SHARE) {
15811                GranteesType::Share
15812            } else if self.parse_keyword(Keyword::GROUP) {
15813                GranteesType::Group
15814            } else if self.parse_keyword(Keyword::PUBLIC) {
15815                GranteesType::Public
15816            } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
15817                GranteesType::DatabaseRole
15818            } else if self.parse_keywords(&[Keyword::APPLICATION, Keyword::ROLE]) {
15819                GranteesType::ApplicationRole
15820            } else if self.parse_keyword(Keyword::APPLICATION) {
15821                GranteesType::Application
15822            } else {
15823                grantee_type.clone() // keep from previous iteration, if not specified
15824            };
15825
15826            if self
15827                .dialect
15828                .get_reserved_grantees_types()
15829                .contains(&new_grantee_type)
15830            {
15831                self.prev_token();
15832            } else {
15833                grantee_type = new_grantee_type;
15834            }
15835
15836            let grantee = if grantee_type == GranteesType::Public {
15837                Grantee {
15838                    grantee_type: grantee_type.clone(),
15839                    name: None,
15840                }
15841            } else {
15842                let mut name = self.parse_grantee_name()?;
15843                if self.consume_token(&Token::Colon) {
15844                    // Redshift supports namespace prefix for external users and groups:
15845                    // <Namespace>:<GroupName> or <Namespace>:<UserName>
15846                    // https://docs.aws.amazon.com/redshift/latest/mgmt/redshift-iam-access-control-native-idp.html
15847                    let ident = self.parse_identifier()?;
15848                    if let GranteeName::ObjectName(namespace) = name {
15849                        name = GranteeName::ObjectName(ObjectName::from(vec![Ident::new(
15850                            format!("{namespace}:{ident}"),
15851                        )]));
15852                    };
15853                }
15854                Grantee {
15855                    grantee_type: grantee_type.clone(),
15856                    name: Some(name),
15857                }
15858            };
15859
15860            values.push(grantee);
15861
15862            if !self.consume_token(&Token::Comma) {
15863                break;
15864            }
15865        }
15866
15867        Ok(values)
15868    }
15869
15870    pub fn parse_grant_deny_revoke_privileges_objects(
15871        &mut self,
15872    ) -> Result<(Privileges, Option<GrantObjects>), ParserError> {
15873        let privileges = if self.parse_keyword(Keyword::ALL) {
15874            Privileges::All {
15875                with_privileges_keyword: self.parse_keyword(Keyword::PRIVILEGES),
15876            }
15877        } else {
15878            let actions = self.parse_actions_list()?;
15879            Privileges::Actions(actions)
15880        };
15881
15882        let objects = if self.parse_keyword(Keyword::ON) {
15883            if self.parse_keywords(&[Keyword::ALL, Keyword::TABLES, Keyword::IN, Keyword::SCHEMA]) {
15884                Some(GrantObjects::AllTablesInSchema {
15885                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15886                })
15887            } else if self.parse_keywords(&[
15888                Keyword::ALL,
15889                Keyword::EXTERNAL,
15890                Keyword::TABLES,
15891                Keyword::IN,
15892                Keyword::SCHEMA,
15893            ]) {
15894                Some(GrantObjects::AllExternalTablesInSchema {
15895                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15896                })
15897            } else if self.parse_keywords(&[
15898                Keyword::ALL,
15899                Keyword::VIEWS,
15900                Keyword::IN,
15901                Keyword::SCHEMA,
15902            ]) {
15903                Some(GrantObjects::AllViewsInSchema {
15904                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15905                })
15906            } else if self.parse_keywords(&[
15907                Keyword::ALL,
15908                Keyword::MATERIALIZED,
15909                Keyword::VIEWS,
15910                Keyword::IN,
15911                Keyword::SCHEMA,
15912            ]) {
15913                Some(GrantObjects::AllMaterializedViewsInSchema {
15914                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15915                })
15916            } else if self.parse_keywords(&[
15917                Keyword::ALL,
15918                Keyword::FUNCTIONS,
15919                Keyword::IN,
15920                Keyword::SCHEMA,
15921            ]) {
15922                Some(GrantObjects::AllFunctionsInSchema {
15923                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15924                })
15925            } else if self.parse_keywords(&[
15926                Keyword::FUTURE,
15927                Keyword::SCHEMAS,
15928                Keyword::IN,
15929                Keyword::DATABASE,
15930            ]) {
15931                Some(GrantObjects::FutureSchemasInDatabase {
15932                    databases: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15933                })
15934            } else if self.parse_keywords(&[
15935                Keyword::FUTURE,
15936                Keyword::TABLES,
15937                Keyword::IN,
15938                Keyword::SCHEMA,
15939            ]) {
15940                Some(GrantObjects::FutureTablesInSchema {
15941                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15942                })
15943            } else if self.parse_keywords(&[
15944                Keyword::FUTURE,
15945                Keyword::EXTERNAL,
15946                Keyword::TABLES,
15947                Keyword::IN,
15948                Keyword::SCHEMA,
15949            ]) {
15950                Some(GrantObjects::FutureExternalTablesInSchema {
15951                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15952                })
15953            } else if self.parse_keywords(&[
15954                Keyword::FUTURE,
15955                Keyword::VIEWS,
15956                Keyword::IN,
15957                Keyword::SCHEMA,
15958            ]) {
15959                Some(GrantObjects::FutureViewsInSchema {
15960                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15961                })
15962            } else if self.parse_keywords(&[
15963                Keyword::FUTURE,
15964                Keyword::MATERIALIZED,
15965                Keyword::VIEWS,
15966                Keyword::IN,
15967                Keyword::SCHEMA,
15968            ]) {
15969                Some(GrantObjects::FutureMaterializedViewsInSchema {
15970                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15971                })
15972            } else if self.parse_keywords(&[
15973                Keyword::ALL,
15974                Keyword::SEQUENCES,
15975                Keyword::IN,
15976                Keyword::SCHEMA,
15977            ]) {
15978                Some(GrantObjects::AllSequencesInSchema {
15979                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15980                })
15981            } else if self.parse_keywords(&[
15982                Keyword::FUTURE,
15983                Keyword::SEQUENCES,
15984                Keyword::IN,
15985                Keyword::SCHEMA,
15986            ]) {
15987                Some(GrantObjects::FutureSequencesInSchema {
15988                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15989                })
15990            } else if self.parse_keywords(&[Keyword::RESOURCE, Keyword::MONITOR]) {
15991                Some(GrantObjects::ResourceMonitors(
15992                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
15993                ))
15994            } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
15995                Some(GrantObjects::ComputePools(
15996                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
15997                ))
15998            } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
15999                Some(GrantObjects::FailoverGroup(
16000                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
16001                ))
16002            } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
16003                Some(GrantObjects::ReplicationGroup(
16004                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
16005                ))
16006            } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
16007                Some(GrantObjects::ExternalVolumes(
16008                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
16009                ))
16010            } else {
16011                let object_type = self.parse_one_of_keywords(&[
16012                    Keyword::SEQUENCE,
16013                    Keyword::DATABASE,
16014                    Keyword::SCHEMA,
16015                    Keyword::TABLE,
16016                    Keyword::VIEW,
16017                    Keyword::WAREHOUSE,
16018                    Keyword::INTEGRATION,
16022                    Keyword::USER,
16023                    Keyword::CONNECTION,
16024                    Keyword::PROCEDURE,
16025                    Keyword::FUNCTION,
16026                ]);
16027                let objects =
16028                    self.parse_comma_separated(|p| p.parse_object_name_inner(false, true));
16029                match object_type {
16030                    Some(Keyword::DATABASE) => Some(GrantObjects::Databases(objects?)),
16031                    Some(Keyword::SCHEMA) => Some(GrantObjects::Schemas(objects?)),
16032                    Some(Keyword::SEQUENCE) => Some(GrantObjects::Sequences(objects?)),
16033                    Some(Keyword::WAREHOUSE) => Some(GrantObjects::Warehouses(objects?)),
16034                    Some(Keyword::INTEGRATION) => Some(GrantObjects::Integrations(objects?)),
16035                    Some(Keyword::VIEW) => Some(GrantObjects::Views(objects?)),
16036                    Some(Keyword::USER) => Some(GrantObjects::Users(objects?)),
16037                    Some(Keyword::CONNECTION) => Some(GrantObjects::Connections(objects?)),
16038                    kw @ (Some(Keyword::PROCEDURE) | Some(Keyword::FUNCTION)) => {
16039                        if let Some(name) = objects?.first() {
16040                            self.parse_grant_procedure_or_function(name, &kw)?
16041                        } else {
16042                            self.expected("procedure or function name", self.peek_token())?
16043                        }
16044                    }
16045                    Some(Keyword::TABLE) | None => Some(GrantObjects::Tables(objects?)),
16046                    Some(unexpected_keyword) => return Err(ParserError::ParserError(
16047                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in grant objects"),
16048                    )),
16049                }
16050            }
16051        } else {
16052            None
16053        };
16054
16055        Ok((privileges, objects))
16056    }
16057
16058    fn parse_grant_procedure_or_function(
16059        &mut self,
16060        name: &ObjectName,
16061        kw: &Option<Keyword>,
16062    ) -> Result<Option<GrantObjects>, ParserError> {
16063        let arg_types = if self.consume_token(&Token::LParen) {
16064            let list = self.parse_comma_separated0(Self::parse_data_type, Token::RParen)?;
16065            self.expect_token(&Token::RParen)?;
16066            list
16067        } else {
16068            vec![]
16069        };
16070        match kw {
16071            Some(Keyword::PROCEDURE) => Ok(Some(GrantObjects::Procedure {
16072                name: name.clone(),
16073                arg_types,
16074            })),
16075            Some(Keyword::FUNCTION) => Ok(Some(GrantObjects::Function {
16076                name: name.clone(),
16077                arg_types,
16078            })),
16079            _ => self.expected("procedure or function keywords", self.peek_token())?,
16080        }
16081    }
16082
16083    pub fn parse_grant_permission(&mut self) -> Result<Action, ParserError> {
16084        fn parse_columns(parser: &mut Parser) -> Result<Option<Vec<Ident>>, ParserError> {
16085            let columns = parser.parse_parenthesized_column_list(Optional, false)?;
16086            if columns.is_empty() {
16087                Ok(None)
16088            } else {
16089                Ok(Some(columns))
16090            }
16091        }
16092
16093        // Multi-word privileges
16094        if self.parse_keywords(&[Keyword::IMPORTED, Keyword::PRIVILEGES]) {
16095            Ok(Action::ImportedPrivileges)
16096        } else if self.parse_keywords(&[Keyword::ADD, Keyword::SEARCH, Keyword::OPTIMIZATION]) {
16097            Ok(Action::AddSearchOptimization)
16098        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::LISTING]) {
16099            Ok(Action::AttachListing)
16100        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::POLICY]) {
16101            Ok(Action::AttachPolicy)
16102        } else if self.parse_keywords(&[Keyword::BIND, Keyword::SERVICE, Keyword::ENDPOINT]) {
16103            Ok(Action::BindServiceEndpoint)
16104        } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
16105            let role = self.parse_object_name(false)?;
16106            Ok(Action::DatabaseRole { role })
16107        } else if self.parse_keywords(&[Keyword::EVOLVE, Keyword::SCHEMA]) {
16108            Ok(Action::EvolveSchema)
16109        } else if self.parse_keywords(&[Keyword::IMPORT, Keyword::SHARE]) {
16110            Ok(Action::ImportShare)
16111        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::VERSIONS]) {
16112            Ok(Action::ManageVersions)
16113        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::RELEASES]) {
16114            Ok(Action::ManageReleases)
16115        } else if self.parse_keywords(&[Keyword::OVERRIDE, Keyword::SHARE, Keyword::RESTRICTIONS]) {
16116            Ok(Action::OverrideShareRestrictions)
16117        } else if self.parse_keywords(&[
16118            Keyword::PURCHASE,
16119            Keyword::DATA,
16120            Keyword::EXCHANGE,
16121            Keyword::LISTING,
16122        ]) {
16123            Ok(Action::PurchaseDataExchangeListing)
16124        } else if self.parse_keywords(&[Keyword::RESOLVE, Keyword::ALL]) {
16125            Ok(Action::ResolveAll)
16126        } else if self.parse_keywords(&[Keyword::READ, Keyword::SESSION]) {
16127            Ok(Action::ReadSession)
16128
16129        // Single-word privileges
16130        } else if self.parse_keyword(Keyword::APPLY) {
16131            let apply_type = self.parse_action_apply_type()?;
16132            Ok(Action::Apply { apply_type })
16133        } else if self.parse_keyword(Keyword::APPLYBUDGET) {
16134            Ok(Action::ApplyBudget)
16135        } else if self.parse_keyword(Keyword::AUDIT) {
16136            Ok(Action::Audit)
16137        } else if self.parse_keyword(Keyword::CONNECT) {
16138            Ok(Action::Connect)
16139        } else if self.parse_keyword(Keyword::CREATE) {
16140            let obj_type = self.maybe_parse_action_create_object_type();
16141            Ok(Action::Create { obj_type })
16142        } else if self.parse_keyword(Keyword::DELETE) {
16143            Ok(Action::Delete)
16144        } else if self.parse_keyword(Keyword::EXEC) {
16145            let obj_type = self.maybe_parse_action_execute_obj_type();
16146            Ok(Action::Exec { obj_type })
16147        } else if self.parse_keyword(Keyword::EXECUTE) {
16148            let obj_type = self.maybe_parse_action_execute_obj_type();
16149            Ok(Action::Execute { obj_type })
16150        } else if self.parse_keyword(Keyword::FAILOVER) {
16151            Ok(Action::Failover)
16152        } else if self.parse_keyword(Keyword::INSERT) {
16153            Ok(Action::Insert {
16154                columns: parse_columns(self)?,
16155            })
16156        } else if self.parse_keyword(Keyword::MANAGE) {
16157            let manage_type = self.parse_action_manage_type()?;
16158            Ok(Action::Manage { manage_type })
16159        } else if self.parse_keyword(Keyword::MODIFY) {
16160            let modify_type = self.parse_action_modify_type();
16161            Ok(Action::Modify { modify_type })
16162        } else if self.parse_keyword(Keyword::MONITOR) {
16163            let monitor_type = self.parse_action_monitor_type();
16164            Ok(Action::Monitor { monitor_type })
16165        } else if self.parse_keyword(Keyword::OPERATE) {
16166            Ok(Action::Operate)
16167        } else if self.parse_keyword(Keyword::REFERENCES) {
16168            Ok(Action::References {
16169                columns: parse_columns(self)?,
16170            })
16171        } else if self.parse_keyword(Keyword::READ) {
16172            Ok(Action::Read)
16173        } else if self.parse_keyword(Keyword::REPLICATE) {
16174            Ok(Action::Replicate)
16175        } else if self.parse_keyword(Keyword::ROLE) {
16176            let role = self.parse_object_name(false)?;
16177            Ok(Action::Role { role })
16178        } else if self.parse_keyword(Keyword::SELECT) {
16179            Ok(Action::Select {
16180                columns: parse_columns(self)?,
16181            })
16182        } else if self.parse_keyword(Keyword::TEMPORARY) {
16183            Ok(Action::Temporary)
16184        } else if self.parse_keyword(Keyword::TRIGGER) {
16185            Ok(Action::Trigger)
16186        } else if self.parse_keyword(Keyword::TRUNCATE) {
16187            Ok(Action::Truncate)
16188        } else if self.parse_keyword(Keyword::UPDATE) {
16189            Ok(Action::Update {
16190                columns: parse_columns(self)?,
16191            })
16192        } else if self.parse_keyword(Keyword::USAGE) {
16193            Ok(Action::Usage)
16194        } else if self.parse_keyword(Keyword::OWNERSHIP) {
16195            Ok(Action::Ownership)
16196        } else if self.parse_keyword(Keyword::DROP) {
16197            Ok(Action::Drop)
16198        } else {
16199            self.expected("a privilege keyword", self.peek_token())?
16200        }
16201    }
16202
16203    fn maybe_parse_action_create_object_type(&mut self) -> Option<ActionCreateObjectType> {
16204        // Multi-word object types
16205        if self.parse_keywords(&[Keyword::APPLICATION, Keyword::PACKAGE]) {
16206            Some(ActionCreateObjectType::ApplicationPackage)
16207        } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
16208            Some(ActionCreateObjectType::ComputePool)
16209        } else if self.parse_keywords(&[Keyword::DATA, Keyword::EXCHANGE, Keyword::LISTING]) {
16210            Some(ActionCreateObjectType::DataExchangeListing)
16211        } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
16212            Some(ActionCreateObjectType::ExternalVolume)
16213        } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
16214            Some(ActionCreateObjectType::FailoverGroup)
16215        } else if self.parse_keywords(&[Keyword::NETWORK, Keyword::POLICY]) {
16216            Some(ActionCreateObjectType::NetworkPolicy)
16217        } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::LISTING]) {
16218            Some(ActionCreateObjectType::OrganiationListing)
16219        } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
16220            Some(ActionCreateObjectType::ReplicationGroup)
16221        }
16222        // Single-word object types
16223        else if self.parse_keyword(Keyword::ACCOUNT) {
16224            Some(ActionCreateObjectType::Account)
16225        } else if self.parse_keyword(Keyword::APPLICATION) {
16226            Some(ActionCreateObjectType::Application)
16227        } else if self.parse_keyword(Keyword::DATABASE) {
16228            Some(ActionCreateObjectType::Database)
16229        } else if self.parse_keyword(Keyword::INTEGRATION) {
16230            Some(ActionCreateObjectType::Integration)
16231        } else if self.parse_keyword(Keyword::ROLE) {
16232            Some(ActionCreateObjectType::Role)
16233        } else if self.parse_keyword(Keyword::SCHEMA) {
16234            Some(ActionCreateObjectType::Schema)
16235        } else if self.parse_keyword(Keyword::SHARE) {
16236            Some(ActionCreateObjectType::Share)
16237        } else if self.parse_keyword(Keyword::USER) {
16238            Some(ActionCreateObjectType::User)
16239        } else if self.parse_keyword(Keyword::WAREHOUSE) {
16240            Some(ActionCreateObjectType::Warehouse)
16241        } else {
16242            None
16243        }
16244    }
16245
16246    fn parse_action_apply_type(&mut self) -> Result<ActionApplyType, ParserError> {
16247        if self.parse_keywords(&[Keyword::AGGREGATION, Keyword::POLICY]) {
16248            Ok(ActionApplyType::AggregationPolicy)
16249        } else if self.parse_keywords(&[Keyword::AUTHENTICATION, Keyword::POLICY]) {
16250            Ok(ActionApplyType::AuthenticationPolicy)
16251        } else if self.parse_keywords(&[Keyword::JOIN, Keyword::POLICY]) {
16252            Ok(ActionApplyType::JoinPolicy)
16253        } else if self.parse_keywords(&[Keyword::MASKING, Keyword::POLICY]) {
16254            Ok(ActionApplyType::MaskingPolicy)
16255        } else if self.parse_keywords(&[Keyword::PACKAGES, Keyword::POLICY]) {
16256            Ok(ActionApplyType::PackagesPolicy)
16257        } else if self.parse_keywords(&[Keyword::PASSWORD, Keyword::POLICY]) {
16258            Ok(ActionApplyType::PasswordPolicy)
16259        } else if self.parse_keywords(&[Keyword::PROJECTION, Keyword::POLICY]) {
16260            Ok(ActionApplyType::ProjectionPolicy)
16261        } else if self.parse_keywords(&[Keyword::ROW, Keyword::ACCESS, Keyword::POLICY]) {
16262            Ok(ActionApplyType::RowAccessPolicy)
16263        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::POLICY]) {
16264            Ok(ActionApplyType::SessionPolicy)
16265        } else if self.parse_keyword(Keyword::TAG) {
16266            Ok(ActionApplyType::Tag)
16267        } else {
16268            self.expected("GRANT APPLY type", self.peek_token())
16269        }
16270    }
16271
16272    fn maybe_parse_action_execute_obj_type(&mut self) -> Option<ActionExecuteObjectType> {
16273        if self.parse_keywords(&[Keyword::DATA, Keyword::METRIC, Keyword::FUNCTION]) {
16274            Some(ActionExecuteObjectType::DataMetricFunction)
16275        } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::ALERT]) {
16276            Some(ActionExecuteObjectType::ManagedAlert)
16277        } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::TASK]) {
16278            Some(ActionExecuteObjectType::ManagedTask)
16279        } else if self.parse_keyword(Keyword::ALERT) {
16280            Some(ActionExecuteObjectType::Alert)
16281        } else if self.parse_keyword(Keyword::TASK) {
16282            Some(ActionExecuteObjectType::Task)
16283        } else {
16284            None
16285        }
16286    }
16287
16288    fn parse_action_manage_type(&mut self) -> Result<ActionManageType, ParserError> {
16289        if self.parse_keywords(&[Keyword::ACCOUNT, Keyword::SUPPORT, Keyword::CASES]) {
16290            Ok(ActionManageType::AccountSupportCases)
16291        } else if self.parse_keywords(&[Keyword::EVENT, Keyword::SHARING]) {
16292            Ok(ActionManageType::EventSharing)
16293        } else if self.parse_keywords(&[Keyword::LISTING, Keyword::AUTO, Keyword::FULFILLMENT]) {
16294            Ok(ActionManageType::ListingAutoFulfillment)
16295        } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::SUPPORT, Keyword::CASES]) {
16296            Ok(ActionManageType::OrganizationSupportCases)
16297        } else if self.parse_keywords(&[Keyword::USER, Keyword::SUPPORT, Keyword::CASES]) {
16298            Ok(ActionManageType::UserSupportCases)
16299        } else if self.parse_keyword(Keyword::GRANTS) {
16300            Ok(ActionManageType::Grants)
16301        } else if self.parse_keyword(Keyword::WAREHOUSES) {
16302            Ok(ActionManageType::Warehouses)
16303        } else {
16304            self.expected("GRANT MANAGE type", self.peek_token())
16305        }
16306    }
16307
16308    fn parse_action_modify_type(&mut self) -> Option<ActionModifyType> {
16309        if self.parse_keywords(&[Keyword::LOG, Keyword::LEVEL]) {
16310            Some(ActionModifyType::LogLevel)
16311        } else if self.parse_keywords(&[Keyword::TRACE, Keyword::LEVEL]) {
16312            Some(ActionModifyType::TraceLevel)
16313        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::LOG, Keyword::LEVEL]) {
16314            Some(ActionModifyType::SessionLogLevel)
16315        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::TRACE, Keyword::LEVEL]) {
16316            Some(ActionModifyType::SessionTraceLevel)
16317        } else {
16318            None
16319        }
16320    }
16321
16322    fn parse_action_monitor_type(&mut self) -> Option<ActionMonitorType> {
16323        if self.parse_keyword(Keyword::EXECUTION) {
16324            Some(ActionMonitorType::Execution)
16325        } else if self.parse_keyword(Keyword::SECURITY) {
16326            Some(ActionMonitorType::Security)
16327        } else if self.parse_keyword(Keyword::USAGE) {
16328            Some(ActionMonitorType::Usage)
16329        } else {
16330            None
16331        }
16332    }
16333
16334    pub fn parse_grantee_name(&mut self) -> Result<GranteeName, ParserError> {
16335        let mut name = self.parse_object_name(false)?;
16336        if self.dialect.supports_user_host_grantee()
16337            && name.0.len() == 1
16338            && name.0[0].as_ident().is_some()
16339            && self.consume_token(&Token::AtSign)
16340        {
16341            let user = name.0.pop().unwrap().as_ident().unwrap().clone();
16342            let host = self.parse_identifier()?;
16343            Ok(GranteeName::UserHost { user, host })
16344        } else {
16345            Ok(GranteeName::ObjectName(name))
16346        }
16347    }
16348
16349    /// Parse [`Statement::Deny`]
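    ///
    /// A hedged sketch, assuming the MSSQL dialect accepts this basic form and
    /// using hypothetical object names:
    ///
    /// ```
    /// # use sqlparser::parser::{Parser, ParserError};
    /// # use sqlparser::dialect::MsSqlDialect;
    /// # use sqlparser::ast::Statement;
    /// # fn main() -> Result<(), ParserError> {
    /// let sql = "DENY SELECT ON my_table TO public";
    /// let statements = Parser::parse_sql(&MsSqlDialect {}, sql)?;
    /// assert!(matches!(statements[0], Statement::Deny(_)));
    /// # Ok(())
    /// # }
    /// ```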
16350    pub fn parse_deny(&mut self) -> Result<Statement, ParserError> {
16351        self.expect_keyword(Keyword::DENY)?;
16352
16353        let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
16354        let objects = match objects {
16355            Some(o) => o,
16356            None => {
16357                return parser_err!(
16358                    "DENY statements must specify an object",
16359                    self.peek_token().span.start
16360                )
16361            }
16362        };
16363
16364        self.expect_keyword_is(Keyword::TO)?;
16365        let grantees = self.parse_grantees()?;
16366        let cascade = self.parse_cascade_option();
16367        let granted_by = if self.parse_keywords(&[Keyword::AS]) {
16368            Some(self.parse_identifier()?)
16369        } else {
16370            None
16371        };
16372
16373        Ok(Statement::Deny(DenyStatement {
16374            privileges,
16375            objects,
16376            grantees,
16377            cascade,
16378            granted_by,
16379        }))
16380    }
16381
16382    /// Parse a REVOKE statement
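    ///
    /// A hedged sketch of the statement form this handles, assuming the
    /// generic dialect and hypothetical object/user names:
    ///
    /// ```
    /// # use sqlparser::parser::{Parser, ParserError};
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::ast::Statement;
    /// # fn main() -> Result<(), ParserError> {
    /// let sql = "REVOKE SELECT ON my_table FROM my_user";
    /// let statements = Parser::parse_sql(&GenericDialect, sql)?;
    /// assert!(matches!(statements[0], Statement::Revoke { .. }));
    /// # Ok(())
    /// # }
    /// ```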
16383    pub fn parse_revoke(&mut self) -> Result<Statement, ParserError> {
16384        let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
16385
16386        self.expect_keyword_is(Keyword::FROM)?;
16387        let grantees = self.parse_grantees()?;
16388
16389        let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
16390            Some(self.parse_identifier()?)
16391        } else {
16392            None
16393        };
16394
16395        let cascade = self.parse_cascade_option();
16396
16397        Ok(Statement::Revoke {
16398            privileges,
16399            objects,
16400            grantees,
16401            granted_by,
16402            cascade,
16403        })
16404    }
16405
16406    /// Parse a REPLACE statement
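    ///
    /// A hedged sketch, assuming the MySQL dialect; `REPLACE INTO` is parsed as
    /// an `INSERT` with `replace_into` set:
    ///
    /// ```
    /// # use sqlparser::parser::{Parser, ParserError};
    /// # use sqlparser::dialect::MySqlDialect;
    /// # use sqlparser::ast::Statement;
    /// # fn main() -> Result<(), ParserError> {
    /// let sql = "REPLACE INTO t (a) VALUES (1)";
    /// let statements = Parser::parse_sql(&MySqlDialect {}, sql)?;
    /// assert!(matches!(statements[0], Statement::Insert(_)));
    /// # Ok(())
    /// # }
    /// ```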
16407    pub fn parse_replace(
16408        &mut self,
16409        replace_token: TokenWithSpan,
16410    ) -> Result<Statement, ParserError> {
16411        if !dialect_of!(self is MySqlDialect | GenericDialect) {
16412            return parser_err!(
16413                "Unsupported statement REPLACE",
16414                self.peek_token().span.start
16415            );
16416        }
16417
16418        let mut insert = self.parse_insert(replace_token)?;
16419        if let Statement::Insert(Insert { replace_into, .. }) = &mut insert {
16420            *replace_into = true;
16421        }
16422
16423        Ok(insert)
16424    }
16425
16426    /// Parse an INSERT statement, returning a `Box`ed SetExpr
16427    ///
16428    /// This is used to reduce the size of the stack frames in debug builds
16429    fn parse_insert_setexpr_boxed(
16430        &mut self,
16431        insert_token: TokenWithSpan,
16432    ) -> Result<Box<SetExpr>, ParserError> {
16433        Ok(Box::new(SetExpr::Insert(self.parse_insert(insert_token)?)))
16434    }
16435
16436    /// Parse an INSERT statement
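    ///
    /// A hedged sketch of the basic form, assuming the generic dialect and a
    /// hypothetical `my_table`:
    ///
    /// ```
    /// # use sqlparser::parser::{Parser, ParserError};
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::ast::Statement;
    /// # fn main() -> Result<(), ParserError> {
    /// let sql = "INSERT INTO my_table (a, b) VALUES (1, 2)";
    /// let statements = Parser::parse_sql(&GenericDialect, sql)?;
    /// assert!(matches!(statements[0], Statement::Insert(_)));
    /// # Ok(())
    /// # }
    /// ```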
16437    pub fn parse_insert(&mut self, insert_token: TokenWithSpan) -> Result<Statement, ParserError> {
16438        let or = self.parse_conflict_clause();
16439        let priority = if !dialect_of!(self is MySqlDialect | GenericDialect) {
16440            None
16441        } else if self.parse_keyword(Keyword::LOW_PRIORITY) {
16442            Some(MysqlInsertPriority::LowPriority)
16443        } else if self.parse_keyword(Keyword::DELAYED) {
16444            Some(MysqlInsertPriority::Delayed)
16445        } else if self.parse_keyword(Keyword::HIGH_PRIORITY) {
16446            Some(MysqlInsertPriority::HighPriority)
16447        } else {
16448            None
16449        };
16450
16451        let ignore = dialect_of!(self is MySqlDialect | GenericDialect)
16452            && self.parse_keyword(Keyword::IGNORE);
16453
16454        let replace_into = false;
16455
16456        let overwrite = self.parse_keyword(Keyword::OVERWRITE);
16457        let into = self.parse_keyword(Keyword::INTO);
16458
16459        let local = self.parse_keyword(Keyword::LOCAL);
16460
16461        if self.parse_keyword(Keyword::DIRECTORY) {
16462            let path = self.parse_literal_string()?;
16463            let file_format = if self.parse_keywords(&[Keyword::STORED, Keyword::AS]) {
16464                Some(self.parse_file_format()?)
16465            } else {
16466                None
16467            };
16468            let source = self.parse_query()?;
16469            Ok(Statement::Directory {
16470                local,
16471                path,
16472                overwrite,
16473                file_format,
16474                source,
16475            })
16476        } else {
16477            // Hive lets you put the TABLE keyword here regardless
16478            let table = self.parse_keyword(Keyword::TABLE);
16479            let table_object = self.parse_table_object()?;
16480
16481            let table_alias =
16482                if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::AS) {
16483                    Some(self.parse_identifier()?)
16484                } else {
16485                    None
16486                };
16487
16488            let is_mysql = dialect_of!(self is MySqlDialect);
16489
16490            let (columns, partitioned, after_columns, source, assignments) = if self
16491                .parse_keywords(&[Keyword::DEFAULT, Keyword::VALUES])
16492            {
16493                (vec![], None, vec![], None, vec![])
16494            } else {
16495                let (columns, partitioned, after_columns) = if !self.peek_subquery_start() {
16496                    let columns = self.parse_parenthesized_column_list(Optional, is_mysql)?;
16497
16498                    let partitioned = self.parse_insert_partition()?;
16499                    // Hive allows you to specify columns after partitions as well if you want.
16500                    let after_columns = if dialect_of!(self is HiveDialect) {
16501                        self.parse_parenthesized_column_list(Optional, false)?
16502                    } else {
16503                        vec![]
16504                    };
16505                    (columns, partitioned, after_columns)
16506                } else {
16507                    Default::default()
16508                };
16509
16510                let (source, assignments) = if self.peek_keyword(Keyword::FORMAT)
16511                    || self.peek_keyword(Keyword::SETTINGS)
16512                {
16513                    (None, vec![])
16514                } else if self.dialect.supports_insert_set() && self.parse_keyword(Keyword::SET) {
16515                    (None, self.parse_comma_separated(Parser::parse_assignment)?)
16516                } else {
16517                    (Some(self.parse_query()?), vec![])
16518                };
16519
16520                (columns, partitioned, after_columns, source, assignments)
16521            };
16522
16523            let (format_clause, settings) = if self.dialect.supports_insert_format() {
16524                // The SETTINGS clause always comes before `FORMAT` in ClickHouse:
16525                // <https://clickhouse.com/docs/en/sql-reference/statements/insert-into>
16526                let settings = self.parse_settings()?;
16527
16528                let format = if self.parse_keyword(Keyword::FORMAT) {
16529                    Some(self.parse_input_format_clause()?)
16530                } else {
16531                    None
16532                };
16533
16534                (format, settings)
16535            } else {
16536                Default::default()
16537            };
16538
16539            let insert_alias = if dialect_of!(self is MySqlDialect | GenericDialect)
16540                && self.parse_keyword(Keyword::AS)
16541            {
16542                let row_alias = self.parse_object_name(false)?;
16543                let col_aliases = Some(self.parse_parenthesized_column_list(Optional, false)?);
16544                Some(InsertAliases {
16545                    row_alias,
16546                    col_aliases,
16547                })
16548            } else {
16549                None
16550            };
16551
16552            let on = if self.parse_keyword(Keyword::ON) {
16553                if self.parse_keyword(Keyword::CONFLICT) {
16554                    let conflict_target =
16555                        if self.parse_keywords(&[Keyword::ON, Keyword::CONSTRAINT]) {
16556                            Some(ConflictTarget::OnConstraint(self.parse_object_name(false)?))
16557                        } else if self.peek_token() == Token::LParen {
16558                            Some(ConflictTarget::Columns(
16559                                self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
16560                            ))
16561                        } else {
16562                            None
16563                        };
16564
16565                    self.expect_keyword_is(Keyword::DO)?;
16566                    let action = if self.parse_keyword(Keyword::NOTHING) {
16567                        OnConflictAction::DoNothing
16568                    } else {
16569                        self.expect_keyword_is(Keyword::UPDATE)?;
16570                        self.expect_keyword_is(Keyword::SET)?;
16571                        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
16572                        let selection = if self.parse_keyword(Keyword::WHERE) {
16573                            Some(self.parse_expr()?)
16574                        } else {
16575                            None
16576                        };
16577                        OnConflictAction::DoUpdate(DoUpdate {
16578                            assignments,
16579                            selection,
16580                        })
16581                    };
16582
16583                    Some(OnInsert::OnConflict(OnConflict {
16584                        conflict_target,
16585                        action,
16586                    }))
16587                } else {
16588                    self.expect_keyword_is(Keyword::DUPLICATE)?;
16589                    self.expect_keyword_is(Keyword::KEY)?;
16590                    self.expect_keyword_is(Keyword::UPDATE)?;
16591                    let l = self.parse_comma_separated(Parser::parse_assignment)?;
16592
16593                    Some(OnInsert::DuplicateKeyUpdate(l))
16594                }
16595            } else {
16596                None
16597            };
16598
16599            let returning = if self.parse_keyword(Keyword::RETURNING) {
16600                Some(self.parse_comma_separated(Parser::parse_select_item)?)
16601            } else {
16602                None
16603            };
16604
16605            Ok(Statement::Insert(Insert {
16606                insert_token: insert_token.into(),
16607                or,
16608                table: table_object,
16609                table_alias,
16610                ignore,
16611                into,
16612                overwrite,
16613                partitioned,
16614                columns,
16615                after_columns,
16616                source,
16617                assignments,
16618                has_table_keyword: table,
16619                on,
16620                returning,
16621                replace_into,
16622                priority,
16623                insert_alias,
16624                settings,
16625                format_clause,
16626            }))
16627        }
16628    }
16629
16630    // Parses the input format clause used by [ClickHouse].
16631    //
16632    // <https://clickhouse.com/docs/en/interfaces/formats>
16633    pub fn parse_input_format_clause(&mut self) -> Result<InputFormatClause, ParserError> {
16634        let ident = self.parse_identifier()?;
16635        let values = self
16636            .maybe_parse(|p| p.parse_comma_separated(|p| p.parse_expr()))?
16637            .unwrap_or_default();
16638
16639        Ok(InputFormatClause { ident, values })
16640    }
16641
16642    /// Returns true if the immediate tokens look like the
16643    /// beginning of a subquery. `(SELECT ...`
16644    fn peek_subquery_start(&mut self) -> bool {
16645        let [maybe_lparen, maybe_select] = self.peek_tokens();
16646        Token::LParen == maybe_lparen
16647            && matches!(maybe_select, Token::Word(w) if w.keyword == Keyword::SELECT)
16648    }
16649
16650    fn parse_conflict_clause(&mut self) -> Option<SqliteOnConflict> {
16651        if self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]) {
16652            Some(SqliteOnConflict::Replace)
16653        } else if self.parse_keywords(&[Keyword::OR, Keyword::ROLLBACK]) {
16654            Some(SqliteOnConflict::Rollback)
16655        } else if self.parse_keywords(&[Keyword::OR, Keyword::ABORT]) {
16656            Some(SqliteOnConflict::Abort)
16657        } else if self.parse_keywords(&[Keyword::OR, Keyword::FAIL]) {
16658            Some(SqliteOnConflict::Fail)
16659        } else if self.parse_keywords(&[Keyword::OR, Keyword::IGNORE]) {
16660            Some(SqliteOnConflict::Ignore)
16661        } else if self.parse_keyword(Keyword::REPLACE) {
16662            Some(SqliteOnConflict::Replace)
16663        } else {
16664            None
16665        }
16666    }
16667
16668    pub fn parse_insert_partition(&mut self) -> Result<Option<Vec<Expr>>, ParserError> {
16669        if self.parse_keyword(Keyword::PARTITION) {
16670            self.expect_token(&Token::LParen)?;
16671            let partition_cols = Some(self.parse_comma_separated(Parser::parse_expr)?);
16672            self.expect_token(&Token::RParen)?;
16673            Ok(partition_cols)
16674        } else {
16675            Ok(None)
16676        }
16677    }
16678
16679    pub fn parse_load_data_table_format(
16680        &mut self,
16681    ) -> Result<Option<HiveLoadDataFormat>, ParserError> {
16682        if self.parse_keyword(Keyword::INPUTFORMAT) {
16683            let input_format = self.parse_expr()?;
16684            self.expect_keyword_is(Keyword::SERDE)?;
16685            let serde = self.parse_expr()?;
16686            Ok(Some(HiveLoadDataFormat {
16687                input_format,
16688                serde,
16689            }))
16690        } else {
16691            Ok(None)
16692        }
16693    }
16694
16695    /// Parse an UPDATE statement, returning a `Box`ed SetExpr
16696    ///
16697    /// This is used to reduce the size of the stack frames in debug builds
16698    fn parse_update_setexpr_boxed(
16699        &mut self,
16700        update_token: TokenWithSpan,
16701    ) -> Result<Box<SetExpr>, ParserError> {
16702        Ok(Box::new(SetExpr::Update(self.parse_update(update_token)?)))
16703    }
16704
16705    pub fn parse_update(&mut self, update_token: TokenWithSpan) -> Result<Statement, ParserError> {
16706        let or = self.parse_conflict_clause();
16707        let table = self.parse_table_and_joins()?;
16708        let from_before_set = if self.parse_keyword(Keyword::FROM) {
16709            Some(UpdateTableFromKind::BeforeSet(
16710                self.parse_table_with_joins()?,
16711            ))
16712        } else {
16713            None
16714        };
16715        self.expect_keyword(Keyword::SET)?;
16716        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
16717        let from = if from_before_set.is_none() && self.parse_keyword(Keyword::FROM) {
16718            Some(UpdateTableFromKind::AfterSet(
16719                self.parse_table_with_joins()?,
16720            ))
16721        } else {
16722            from_before_set
16723        };
16724        let selection = if self.parse_keyword(Keyword::WHERE) {
16725            Some(self.parse_expr()?)
16726        } else {
16727            None
16728        };
16729        let returning = if self.parse_keyword(Keyword::RETURNING) {
16730            Some(self.parse_comma_separated(Parser::parse_select_item)?)
16731        } else {
16732            None
16733        };
16734        let limit = if self.parse_keyword(Keyword::LIMIT) {
16735            Some(self.parse_expr()?)
16736        } else {
16737            None
16738        };
16739        Ok(Update {
16740            update_token: update_token.into(),
16741            table,
16742            assignments,
16743            from,
16744            selection,
16745            returning,
16746            or,
16747            limit,
16748        }
16749        .into())
16750    }
16751
16752    /// Parse a `var = expr` assignment, used in an UPDATE statement
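    ///
    /// A hedged sketch of parsing one assignment in isolation, assuming the
    /// generic dialect:
    ///
    /// ```
    /// # use sqlparser::parser::{Parser, ParserError};
    /// # use sqlparser::dialect::GenericDialect;
    /// # fn main() -> Result<(), ParserError> {
    /// let mut parser = Parser::new(&GenericDialect).try_with_sql("a = 42")?;
    /// let assignment = parser.parse_assignment()?;
    /// assert_eq!(assignment.to_string(), "a = 42");
    /// # Ok(())
    /// # }
    /// ```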
16753    pub fn parse_assignment(&mut self) -> Result<Assignment, ParserError> {
16754        let target = self.parse_assignment_target()?;
16755        self.expect_token(&Token::Eq)?;
16756        let value = self.parse_expr()?;
16757        Ok(Assignment { target, value })
16758    }
16759
16760    /// Parse the left-hand side of an assignment, used in an UPDATE statement
16761    pub fn parse_assignment_target(&mut self) -> Result<AssignmentTarget, ParserError> {
16762        if self.consume_token(&Token::LParen) {
16763            let columns = self.parse_comma_separated(|p| p.parse_object_name(false))?;
16764            self.expect_token(&Token::RParen)?;
16765            Ok(AssignmentTarget::Tuple(columns))
16766        } else {
16767            let column = self.parse_object_name(false)?;
16768            Ok(AssignmentTarget::ColumnName(column))
16769        }
16770    }
16771
16772    pub fn parse_function_args(&mut self) -> Result<FunctionArg, ParserError> {
16773        let arg = if self.dialect.supports_named_fn_args_with_expr_name() {
16774            self.maybe_parse(|p| {
16775                let name = p.parse_expr()?;
16776                let operator = p.parse_function_named_arg_operator()?;
16777                let arg = p.parse_wildcard_expr()?.into();
16778                Ok(FunctionArg::ExprNamed {
16779                    name,
16780                    arg,
16781                    operator,
16782                })
16783            })?
16784        } else {
16785            self.maybe_parse(|p| {
16786                let name = p.parse_identifier()?;
16787                let operator = p.parse_function_named_arg_operator()?;
16788                let arg = p.parse_wildcard_expr()?.into();
16789                Ok(FunctionArg::Named {
16790                    name,
16791                    arg,
16792                    operator,
16793                })
16794            })?
16795        };
16796        if let Some(arg) = arg {
16797            return Ok(arg);
16798        }
16799        Ok(FunctionArg::Unnamed(self.parse_wildcard_expr()?.into()))
16800    }
16801
16802    fn parse_function_named_arg_operator(&mut self) -> Result<FunctionArgOperator, ParserError> {
16803        if self.parse_keyword(Keyword::VALUE) {
16804            return Ok(FunctionArgOperator::Value);
16805        }
16806        let tok = self.next_token();
16807        match tok.token {
16808            Token::RArrow if self.dialect.supports_named_fn_args_with_rarrow_operator() => {
16809                Ok(FunctionArgOperator::RightArrow)
16810            }
16811            Token::Eq if self.dialect.supports_named_fn_args_with_eq_operator() => {
16812                Ok(FunctionArgOperator::Equals)
16813            }
16814            Token::Assignment
16815                if self
16816                    .dialect
16817                    .supports_named_fn_args_with_assignment_operator() =>
16818            {
16819                Ok(FunctionArgOperator::Assignment)
16820            }
16821            Token::Colon if self.dialect.supports_named_fn_args_with_colon_operator() => {
16822                Ok(FunctionArgOperator::Colon)
16823            }
16824            _ => {
16825                self.prev_token();
16826                self.expected("argument operator", tok)
16827            }
16828        }
16829    }
16830
16831    pub fn parse_optional_args(&mut self) -> Result<Vec<FunctionArg>, ParserError> {
16832        if self.consume_token(&Token::RParen) {
16833            Ok(vec![])
16834        } else {
16835            let args = self.parse_comma_separated(Parser::parse_function_args)?;
16836            self.expect_token(&Token::RParen)?;
16837            Ok(args)
16838        }
16839    }
16840
16841    fn parse_table_function_args(&mut self) -> Result<TableFunctionArgs, ParserError> {
16842        if self.consume_token(&Token::RParen) {
16843            return Ok(TableFunctionArgs {
16844                args: vec![],
16845                settings: None,
16846            });
16847        }
16848        let mut args = vec![];
16849        let settings = loop {
16850            if let Some(settings) = self.parse_settings()? {
16851                break Some(settings);
16852            }
16853            args.push(self.parse_function_args()?);
16854            if self.is_parse_comma_separated_end() {
16855                break None;
16856            }
16857        };
16858        self.expect_token(&Token::RParen)?;
16859        Ok(TableFunctionArgs { args, settings })
16860    }
16861
16862    /// Parses a potentially empty list of arguments to a function
16863    /// (including the closing parenthesis).
16864    ///
16865    /// Examples:
16866    /// ```sql
16867    /// FIRST_VALUE(x ORDER BY 1,2,3);
16868    /// FIRST_VALUE(x IGNORE NULLS);
16869    /// ```
16870    fn parse_function_argument_list(&mut self) -> Result<FunctionArgumentList, ParserError> {
16871        let mut clauses = vec![];
16872
16873        // Handle clauses that may exist with an empty argument list
16874
16875        if let Some(null_clause) = self.parse_json_null_clause() {
16876            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
16877        }
16878
16879        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
16880            clauses.push(FunctionArgumentClause::JsonReturningClause(
16881                json_returning_clause,
16882            ));
16883        }
16884
16885        if self.consume_token(&Token::RParen) {
16886            return Ok(FunctionArgumentList {
16887                duplicate_treatment: None,
16888                args: vec![],
16889                clauses,
16890            });
16891        }
16892
16893        let duplicate_treatment = self.parse_duplicate_treatment()?;
16894        let args = self.parse_comma_separated(Parser::parse_function_args)?;
16895
16896        if self.dialect.supports_window_function_null_treatment_arg() {
16897            if let Some(null_treatment) = self.parse_null_treatment()? {
16898                clauses.push(FunctionArgumentClause::IgnoreOrRespectNulls(null_treatment));
16899            }
16900        }
16901
16902        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
16903            clauses.push(FunctionArgumentClause::OrderBy(
16904                self.parse_comma_separated(Parser::parse_order_by_expr)?,
16905            ));
16906        }
16907
16908        if self.parse_keyword(Keyword::LIMIT) {
16909            clauses.push(FunctionArgumentClause::Limit(self.parse_expr()?));
16910        }
16911
16912        if dialect_of!(self is GenericDialect | BigQueryDialect)
16913            && self.parse_keyword(Keyword::HAVING)
16914        {
16915            let kind = match self.expect_one_of_keywords(&[Keyword::MIN, Keyword::MAX])? {
16916                Keyword::MIN => HavingBoundKind::Min,
16917                Keyword::MAX => HavingBoundKind::Max,
16918                unexpected_keyword => return Err(ParserError::ParserError(
16919                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in having bound"),
16920                )),
16921            };
16922            clauses.push(FunctionArgumentClause::Having(HavingBound(
16923                kind,
16924                self.parse_expr()?,
16925            )))
16926        }
16927
16928        if dialect_of!(self is GenericDialect | MySqlDialect)
16929            && self.parse_keyword(Keyword::SEPARATOR)
16930        {
16931            clauses.push(FunctionArgumentClause::Separator(self.parse_value()?.value));
16932        }
16933
16934        if let Some(on_overflow) = self.parse_listagg_on_overflow()? {
16935            clauses.push(FunctionArgumentClause::OnOverflow(on_overflow));
16936        }
16937
16938        if let Some(null_clause) = self.parse_json_null_clause() {
16939            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
16940        }
16941
16942        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
16943            clauses.push(FunctionArgumentClause::JsonReturningClause(
16944                json_returning_clause,
16945            ));
16946        }
16947
16948        self.expect_token(&Token::RParen)?;
16949        Ok(FunctionArgumentList {
16950            duplicate_treatment,
16951            args,
16952            clauses,
16953        })
16954    }
16955
16956    fn parse_json_null_clause(&mut self) -> Option<JsonNullClause> {
16957        if self.parse_keywords(&[Keyword::ABSENT, Keyword::ON, Keyword::NULL]) {
16958            Some(JsonNullClause::AbsentOnNull)
16959        } else if self.parse_keywords(&[Keyword::NULL, Keyword::ON, Keyword::NULL]) {
16960            Some(JsonNullClause::NullOnNull)
16961        } else {
16962            None
16963        }
16964    }
16965
16966    fn maybe_parse_json_returning_clause(
16967        &mut self,
16968    ) -> Result<Option<JsonReturningClause>, ParserError> {
16969        if self.parse_keyword(Keyword::RETURNING) {
16970            let data_type = self.parse_data_type()?;
16971            Ok(Some(JsonReturningClause { data_type }))
16972        } else {
16973            Ok(None)
16974        }
16975    }
16976
16977    fn parse_duplicate_treatment(&mut self) -> Result<Option<DuplicateTreatment>, ParserError> {
16978        let loc = self.peek_token().span.start;
16979        match (
16980            self.parse_keyword(Keyword::ALL),
16981            self.parse_keyword(Keyword::DISTINCT),
16982        ) {
16983            (true, false) => Ok(Some(DuplicateTreatment::All)),
16984            (false, true) => Ok(Some(DuplicateTreatment::Distinct)),
16985            (false, false) => Ok(None),
16986            (true, true) => parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc),
16987        }
16988    }
16989
16990    /// Parse a single projection (select item) from the comma-delimited list that follows SELECT
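    ///
    /// For example, each of the two projections below is handled by one call to
    /// this method (illustrative; wildcard options and aliases are optional):
    ///
    /// ```sql
    /// SELECT t.*, price * quantity AS total FROM orders
    /// ```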
16991    pub fn parse_select_item(&mut self) -> Result<SelectItem, ParserError> {
16992        let prefix = self
16993            .parse_one_of_keywords(
16994                self.dialect
16995                    .get_reserved_keywords_for_select_item_operator(),
16996            )
16997            .map(|keyword| Ident::new(format!("{keyword:?}")));
16998
16999        match self.parse_wildcard_expr()? {
17000            Expr::QualifiedWildcard(prefix, token) => Ok(SelectItem::QualifiedWildcard(
17001                SelectItemQualifiedWildcardKind::ObjectName(prefix),
17002                self.parse_wildcard_additional_options(token.0)?,
17003            )),
17004            Expr::Wildcard(token) => Ok(SelectItem::Wildcard(
17005                self.parse_wildcard_additional_options(token.0)?,
17006            )),
17007            Expr::Identifier(v) if v.value.to_lowercase() == "from" && v.quote_style.is_none() => {
17008                parser_err!(
17009                    format!("Expected an expression, found: {}", v),
17010                    self.peek_token().span.start
17011                )
17012            }
17013            Expr::BinaryOp {
17014                left,
17015                op: BinaryOperator::Eq,
17016                right,
17017            } if self.dialect.supports_eq_alias_assignment()
17018                && matches!(left.as_ref(), Expr::Identifier(_)) =>
17019            {
17020                let Expr::Identifier(alias) = *left else {
17021                    return parser_err!(
17022                        "BUG: expected identifier expression as alias",
17023                        self.peek_token().span.start
17024                    );
17025                };
17026                Ok(SelectItem::ExprWithAlias {
17027                    expr: *right,
17028                    alias,
17029                })
17030            }
17031            expr if self.dialect.supports_select_expr_star()
17032                && self.consume_tokens(&[Token::Period, Token::Mul]) =>
17033            {
17034                let wildcard_token = self.get_previous_token().clone();
17035                Ok(SelectItem::QualifiedWildcard(
17036                    SelectItemQualifiedWildcardKind::Expr(expr),
17037                    self.parse_wildcard_additional_options(wildcard_token)?,
17038                ))
17039            }
17040            expr => self
17041                .maybe_parse_select_item_alias()
17042                .map(|alias| match alias {
17043                    Some(alias) => SelectItem::ExprWithAlias {
17044                        expr: maybe_prefixed_expr(expr, prefix),
17045                        alias,
17046                    },
17047                    None => SelectItem::UnnamedExpr(maybe_prefixed_expr(expr, prefix)),
17048                }),
17049        }
17050    }
17051
17052    /// Parse the [`WildcardAdditionalOptions`] that may follow a wildcard (`*`) select item.
17053    ///
17054    /// Options that are absent, or not supported by the active dialect, are returned as `None`.
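    ///
    /// For example, each of the following wildcard projections exercises one of
    /// the optional clauses (illustrative; dialect support varies):
    ///
    /// ```sql
    /// SELECT * EXCLUDE (created_at) FROM payments;
    /// SELECT * REPLACE (amount / 100 AS amount) FROM payments;
    /// ```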
17055    pub fn parse_wildcard_additional_options(
17056        &mut self,
17057        wildcard_token: TokenWithSpan,
17058    ) -> Result<WildcardAdditionalOptions, ParserError> {
17059        let opt_ilike = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
17060            self.parse_optional_select_item_ilike()?
17061        } else {
17062            None
17063        };
17064        let opt_exclude = if opt_ilike.is_none() && self.dialect.supports_select_wildcard_exclude()
17065        {
17066            self.parse_optional_select_item_exclude()?
17067        } else {
17068            None
17069        };
17070        let opt_except = if self.dialect.supports_select_wildcard_except() {
17071            self.parse_optional_select_item_except()?
17072        } else {
17073            None
17074        };
17075        let opt_replace = if dialect_of!(self is GenericDialect | BigQueryDialect | ClickHouseDialect | DuckDbDialect | SnowflakeDialect)
17076        {
17077            self.parse_optional_select_item_replace()?
17078        } else {
17079            None
17080        };
17081        let opt_rename = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
17082            self.parse_optional_select_item_rename()?
17083        } else {
17084            None
17085        };
17086
17087        Ok(WildcardAdditionalOptions {
17088            wildcard_token: wildcard_token.into(),
17089            opt_ilike,
17090            opt_exclude,
17091            opt_except,
17092            opt_rename,
17093            opt_replace,
17094        })
17095    }
17096
17097    /// Parse an optional [`Ilike`](IlikeSelectItem) clause for a wildcard select item.
17098    ///
17099    /// Returns `Ok(None)` if no `ILIKE` clause is present.
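    ///
    /// For example (illustrative Snowflake-style syntax):
    ///
    /// ```sql
    /// SELECT * ILIKE '%id%' FROM users
    /// ```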
17100    pub fn parse_optional_select_item_ilike(
17101        &mut self,
17102    ) -> Result<Option<IlikeSelectItem>, ParserError> {
17103        let opt_ilike = if self.parse_keyword(Keyword::ILIKE) {
17104            let next_token = self.next_token();
17105            let pattern = match next_token.token {
17106                Token::SingleQuotedString(s) => s,
17107                _ => return self.expected("ilike pattern", next_token),
17108            };
17109            Some(IlikeSelectItem { pattern })
17110        } else {
17111            None
17112        };
17113        Ok(opt_ilike)
17114    }
17115
17116    /// Parse an optional [`Exclude`](ExcludeSelectItem) clause for a wildcard select item.
17117    ///
17118    /// Returns `Ok(None)` if no `EXCLUDE` clause is present.
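    ///
    /// For example (illustrative; both the single-column and the parenthesized
    /// multi-column forms are accepted):
    ///
    /// ```sql
    /// SELECT * EXCLUDE (password, ssn) FROM users
    /// ```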
17119    pub fn parse_optional_select_item_exclude(
17120        &mut self,
17121    ) -> Result<Option<ExcludeSelectItem>, ParserError> {
17122        let opt_exclude = if self.parse_keyword(Keyword::EXCLUDE) {
17123            if self.consume_token(&Token::LParen) {
17124                let columns = self.parse_comma_separated(|parser| parser.parse_identifier())?;
17125                self.expect_token(&Token::RParen)?;
17126                Some(ExcludeSelectItem::Multiple(columns))
17127            } else {
17128                let column = self.parse_identifier()?;
17129                Some(ExcludeSelectItem::Single(column))
17130            }
17131        } else {
17132            None
17133        };
17134
17135        Ok(opt_exclude)
17136    }
17137
17138    /// Parse an optional [`Except`](ExceptSelectItem) clause for a wildcard select item.
17139    ///
17140    /// Returns `Ok(None)` if no `EXCEPT` clause is present.
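    ///
    /// For example (illustrative; ClickHouse also allows a single unparenthesized column):
    ///
    /// ```sql
    /// SELECT * EXCEPT (department_id) FROM employees
    /// ```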
17141    pub fn parse_optional_select_item_except(
17142        &mut self,
17143    ) -> Result<Option<ExceptSelectItem>, ParserError> {
17144        let opt_except = if self.parse_keyword(Keyword::EXCEPT) {
17145            if self.peek_token().token == Token::LParen {
17146                let idents = self.parse_parenthesized_column_list(Mandatory, false)?;
17147                match &idents[..] {
17148                    [] => {
17149                        return self.expected(
17150                            "at least one column should be parsed by the EXCEPT clause",
17151                            self.peek_token(),
17152                        )?;
17153                    }
17154                    [first, idents @ ..] => Some(ExceptSelectItem {
17155                        first_element: first.clone(),
17156                        additional_elements: idents.to_vec(),
17157                    }),
17158                }
17159            } else {
17160                // Clickhouse allows EXCEPT column_name
17161                let ident = self.parse_identifier()?;
17162                Some(ExceptSelectItem {
17163                    first_element: ident,
17164                    additional_elements: vec![],
17165                })
17166            }
17167        } else {
17168            None
17169        };
17170
17171        Ok(opt_except)
17172    }
17173
17174    /// Parse an optional [`Rename`](RenameSelectItem) clause for a wildcard select item.
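    ///
    /// For example (illustrative Snowflake-style syntax):
    ///
    /// ```sql
    /// SELECT * RENAME (user_id AS id) FROM events
    /// ```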
17175    pub fn parse_optional_select_item_rename(
17176        &mut self,
17177    ) -> Result<Option<RenameSelectItem>, ParserError> {
17178        let opt_rename = if self.parse_keyword(Keyword::RENAME) {
17179            if self.consume_token(&Token::LParen) {
17180                let idents =
17181                    self.parse_comma_separated(|parser| parser.parse_identifier_with_alias())?;
17182                self.expect_token(&Token::RParen)?;
17183                Some(RenameSelectItem::Multiple(idents))
17184            } else {
17185                let ident = self.parse_identifier_with_alias()?;
17186                Some(RenameSelectItem::Single(ident))
17187            }
17188        } else {
17189            None
17190        };
17191
17192        Ok(opt_rename)
17193    }
17194
17195    /// Parse an optional [`Replace`](ReplaceSelectItem) clause for a wildcard select item.
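    ///
    /// For example (illustrative; the parenthesized list is required):
    ///
    /// ```sql
    /// SELECT * REPLACE (amount / 100 AS amount) FROM payments
    /// ```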
17196    pub fn parse_optional_select_item_replace(
17197        &mut self,
17198    ) -> Result<Option<ReplaceSelectItem>, ParserError> {
17199        let opt_replace = if self.parse_keyword(Keyword::REPLACE) {
17200            if self.consume_token(&Token::LParen) {
17201                let items = self.parse_comma_separated(|parser| {
17202                    Ok(Box::new(parser.parse_replace_elements()?))
17203                })?;
17204                self.expect_token(&Token::RParen)?;
17205                Some(ReplaceSelectItem { items })
17206            } else {
17207                let tok = self.next_token();
17208                return self.expected("`(` after REPLACE", tok);
17209            }
17210        } else {
17211            None
17212        };
17213
17214        Ok(opt_replace)
17215    }

17216    pub fn parse_replace_elements(&mut self) -> Result<ReplaceSelectElement, ParserError> {
17217        let expr = self.parse_expr()?;
17218        let as_keyword = self.parse_keyword(Keyword::AS);
17219        let ident = self.parse_identifier()?;
17220        Ok(ReplaceSelectElement {
17221            expr,
17222            column_name: ident,
17223            as_keyword,
17224        })
17225    }
17226
17227    /// Parse ASC or DESC; returns `Some(true)` for ASC, `Some(false)` for DESC,
17228    /// or `None` if neither keyword is present.
17229    pub fn parse_asc_desc(&mut self) -> Option<bool> {
17230        if self.parse_keyword(Keyword::ASC) {
17231            Some(true)
17232        } else if self.parse_keyword(Keyword::DESC) {
17233            Some(false)
17234        } else {
17235            None
17236        }
17237    }
17238
17239    /// Parse an [OrderByExpr] expression.
17240    pub fn parse_order_by_expr(&mut self) -> Result<OrderByExpr, ParserError> {
17241        self.parse_order_by_expr_inner(false)
17242            .map(|(order_by, _)| order_by)
17243    }
17244
17245    /// Parse an [IndexColumn].
17246    pub fn parse_create_index_expr(&mut self) -> Result<IndexColumn, ParserError> {
17247        self.parse_order_by_expr_inner(true)
17248            .map(|(column, operator_class)| IndexColumn {
17249                column,
17250                operator_class,
17251            })
17252    }
17253
17254    fn parse_order_by_expr_inner(
17255        &mut self,
17256        with_operator_class: bool,
17257    ) -> Result<(OrderByExpr, Option<ObjectName>), ParserError> {
17258        let expr = self.parse_expr()?;
17259
17260        let operator_class: Option<ObjectName> = if with_operator_class {
17261            // If none of the following keywords are present, parse an identifier
17262            // as the operator class.
17263            if self
17264                .peek_one_of_keywords(&[Keyword::ASC, Keyword::DESC, Keyword::NULLS, Keyword::WITH])
17265                .is_some()
17266            {
17267                None
17268            } else {
17269                self.maybe_parse(|parser| parser.parse_object_name(false))?
17270            }
17271        } else {
17272            None
17273        };
17274
17275        let options = self.parse_order_by_options()?;
17276
17277        let with_fill = if dialect_of!(self is ClickHouseDialect | GenericDialect)
17278            && self.parse_keywords(&[Keyword::WITH, Keyword::FILL])
17279        {
17280            Some(self.parse_with_fill()?)
17281        } else {
17282            None
17283        };
17284
17285        Ok((
17286            OrderByExpr {
17287                expr,
17288                options,
17289                with_fill,
17290            },
17291            operator_class,
17292        ))
17293    }
17294
17295    fn parse_order_by_options(&mut self) -> Result<OrderByOptions, ParserError> {
17296        let asc = self.parse_asc_desc();
17297
17298        let nulls_first = if self.parse_keywords(&[Keyword::NULLS, Keyword::FIRST]) {
17299            Some(true)
17300        } else if self.parse_keywords(&[Keyword::NULLS, Keyword::LAST]) {
17301            Some(false)
17302        } else {
17303            None
17304        };
17305
17306        Ok(OrderByOptions { asc, nulls_first })
17307    }
17308
17309    // Parse a WITH FILL clause (ClickHouse dialect), i.e. the modifiers
17310    // that follow the WITH FILL keywords in an ORDER BY expression
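    //
    // e.g. the trailing modifiers in (illustrative ClickHouse syntax):
    //
    //   ORDER BY d WITH FILL FROM 1 TO 10 STEP 1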
17311    pub fn parse_with_fill(&mut self) -> Result<WithFill, ParserError> {
17312        let from = if self.parse_keyword(Keyword::FROM) {
17313            Some(self.parse_expr()?)
17314        } else {
17315            None
17316        };
17317
17318        let to = if self.parse_keyword(Keyword::TO) {
17319            Some(self.parse_expr()?)
17320        } else {
17321            None
17322        };
17323
17324        let step = if self.parse_keyword(Keyword::STEP) {
17325            Some(self.parse_expr()?)
17326        } else {
17327            None
17328        };
17329
17330        Ok(WithFill { from, to, step })
17331    }
17332
17333    // Parse a set of comma-separated INTERPOLATE expressions (ClickHouse dialect)
17334    // that follow the INTERPOLATE keyword in an ORDER BY clause with the WITH FILL modifier
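    //
    // e.g. the trailing clause in (illustrative ClickHouse syntax):
    //
    //   ORDER BY n WITH FILL INTERPOLATE (total AS total + 1)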
17335    pub fn parse_interpolations(&mut self) -> Result<Option<Interpolate>, ParserError> {
17336        if !self.parse_keyword(Keyword::INTERPOLATE) {
17337            return Ok(None);
17338        }
17339
17340        if self.consume_token(&Token::LParen) {
17341            let interpolations =
17342                self.parse_comma_separated0(|p| p.parse_interpolation(), Token::RParen)?;
17343            self.expect_token(&Token::RParen)?;
17344            // INTERPOLATE () and INTERPOLATE ( ... ) variants
17345            return Ok(Some(Interpolate {
17346                exprs: Some(interpolations),
17347            }));
17348        }
17349
17350        // INTERPOLATE
17351        Ok(Some(Interpolate { exprs: None }))
17352    }
17353
17354    // Parse an INTERPOLATE expression (ClickHouse dialect)
17355    pub fn parse_interpolation(&mut self) -> Result<InterpolateExpr, ParserError> {
17356        let column = self.parse_identifier()?;
17357        let expr = if self.parse_keyword(Keyword::AS) {
17358            Some(self.parse_expr()?)
17359        } else {
17360            None
17361        };
17362        Ok(InterpolateExpr { column, expr })
17363    }
17364
17365    /// Parse a TOP clause, the MSSQL equivalent of LIMIT,
17366    /// which follows `SELECT [DISTINCT]`.
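    ///
    /// For example (illustrative MSSQL-style syntax; `PERCENT` and `WITH TIES` are optional):
    ///
    /// ```sql
    /// SELECT TOP (10) PERCENT WITH TIES name FROM t ORDER BY score DESC
    /// ```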
17367    pub fn parse_top(&mut self) -> Result<Top, ParserError> {
17368        let quantity = if self.consume_token(&Token::LParen) {
17369            let quantity = self.parse_expr()?;
17370            self.expect_token(&Token::RParen)?;
17371            Some(TopQuantity::Expr(quantity))
17372        } else {
17373            let next_token = self.next_token();
17374            let quantity = match next_token.token {
17375                Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start)?,
17376                _ => self.expected("literal int", next_token)?,
17377            };
17378            Some(TopQuantity::Constant(quantity))
17379        };
17380
17381        let percent = self.parse_keyword(Keyword::PERCENT);
17382
17383        let with_ties = self.parse_keywords(&[Keyword::WITH, Keyword::TIES]);
17384
17385        Ok(Top {
17386            with_ties,
17387            percent,
17388            quantity,
17389        })
17390    }
17391
17392    /// Parse a LIMIT clause
17393    pub fn parse_limit(&mut self) -> Result<Option<Expr>, ParserError> {
17394        if self.parse_keyword(Keyword::ALL) {
17395            Ok(None)
17396        } else {
17397            Ok(Some(self.parse_expr()?))
17398        }
17399    }
17400
17401    /// Parse an OFFSET clause
17402    pub fn parse_offset(&mut self) -> Result<Offset, ParserError> {
17403        let value = self.parse_expr()?;
17404        let rows = if self.parse_keyword(Keyword::ROW) {
17405            OffsetRows::Row
17406        } else if self.parse_keyword(Keyword::ROWS) {
17407            OffsetRows::Rows
17408        } else {
17409            OffsetRows::None
17410        };
17411        Ok(Offset { value, rows })
17412    }
17413
17414    /// Parse a FETCH clause
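    ///
    /// For example (an illustrative clause; the leading `FETCH` keyword is
    /// typically consumed by the caller before this method runs):
    ///
    /// ```sql
    /// FETCH FIRST 10 ROWS ONLY
    /// ```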
17415    pub fn parse_fetch(&mut self) -> Result<Fetch, ParserError> {
17416        let _ = self.parse_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT]);
17417
17418        let (quantity, percent) = if self
17419            .parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])
17420            .is_some()
17421        {
17422            (None, false)
17423        } else {
17424            let quantity = Expr::Value(self.parse_value()?);
17425            let percent = self.parse_keyword(Keyword::PERCENT);
17426            let _ = self.parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS]);
17427            (Some(quantity), percent)
17428        };
17429
17430        let with_ties = if self.parse_keyword(Keyword::ONLY) {
17431            false
17432        } else {
17433            self.parse_keywords(&[Keyword::WITH, Keyword::TIES])
17434        };
17435
17436        Ok(Fetch {
17437            with_ties,
17438            percent,
17439            quantity,
17440        })
17441    }
17442
17443    /// Parse a FOR UPDATE/FOR SHARE clause
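    ///
    /// For example, the locking clause in (illustrative):
    ///
    /// ```sql
    /// SELECT * FROM employees FOR UPDATE OF employees SKIP LOCKED
    /// ```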
17444    pub fn parse_lock(&mut self) -> Result<LockClause, ParserError> {
17445        let lock_type = match self.expect_one_of_keywords(&[Keyword::UPDATE, Keyword::SHARE])? {
17446            Keyword::UPDATE => LockType::Update,
17447            Keyword::SHARE => LockType::Share,
17448            unexpected_keyword => return Err(ParserError::ParserError(
17449                format!("Internal parser error: expected any of {{UPDATE, SHARE}}, got {unexpected_keyword:?}"),
17450            )),
17451        };
17452        let of = if self.parse_keyword(Keyword::OF) {
17453            Some(self.parse_object_name(false)?)
17454        } else {
17455            None
17456        };
17457        let nonblock = if self.parse_keyword(Keyword::NOWAIT) {
17458            Some(NonBlock::Nowait)
17459        } else if self.parse_keywords(&[Keyword::SKIP, Keyword::LOCKED]) {
17460            Some(NonBlock::SkipLocked)
17461        } else {
17462            None
17463        };
17464        Ok(LockClause {
17465            lock_type,
17466            of,
17467            nonblock,
17468        })
17469    }
17470
17471    pub fn parse_values(
17472        &mut self,
17473        allow_empty: bool,
17474        value_keyword: bool,
17475    ) -> Result<Values, ParserError> {
17476        let mut explicit_row = false;
17477
17478        let rows = self.parse_comma_separated(|parser| {
17479            if parser.parse_keyword(Keyword::ROW) {
17480                explicit_row = true;
17481            }
17482
17483            parser.expect_token(&Token::LParen)?;
17484            if allow_empty && parser.peek_token().token == Token::RParen {
17485                parser.next_token();
17486                Ok(vec![])
17487            } else {
17488                let exprs = parser.parse_comma_separated(Parser::parse_expr)?;
17489                parser.expect_token(&Token::RParen)?;
17490                Ok(exprs)
17491            }
17492        })?;
17493        Ok(Values {
17494            explicit_row,
17495            rows,
17496            value_keyword,
17497        })
17498    }
17499
17500    pub fn parse_start_transaction(&mut self) -> Result<Statement, ParserError> {
17501        self.expect_keyword_is(Keyword::TRANSACTION)?;
17502        Ok(Statement::StartTransaction {
17503            modes: self.parse_transaction_modes()?,
17504            begin: false,
17505            transaction: Some(BeginTransactionKind::Transaction),
17506            modifier: None,
17507            statements: vec![],
17508            exception: None,
17509            has_end_keyword: false,
17510        })
17511    }
17512
17513    pub fn parse_begin(&mut self) -> Result<Statement, ParserError> {
17514        let modifier = if !self.dialect.supports_start_transaction_modifier() {
17515            None
17516        } else if self.parse_keyword(Keyword::DEFERRED) {
17517            Some(TransactionModifier::Deferred)
17518        } else if self.parse_keyword(Keyword::IMMEDIATE) {
17519            Some(TransactionModifier::Immediate)
17520        } else if self.parse_keyword(Keyword::EXCLUSIVE) {
17521            Some(TransactionModifier::Exclusive)
17522        } else if self.parse_keyword(Keyword::TRY) {
17523            Some(TransactionModifier::Try)
17524        } else if self.parse_keyword(Keyword::CATCH) {
17525            Some(TransactionModifier::Catch)
17526        } else {
17527            None
17528        };
17529        let transaction = match self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]) {
17530            Some(Keyword::TRANSACTION) => Some(BeginTransactionKind::Transaction),
17531            Some(Keyword::WORK) => Some(BeginTransactionKind::Work),
17532            _ => None,
17533        };
17534        Ok(Statement::StartTransaction {
17535            modes: self.parse_transaction_modes()?,
17536            begin: true,
17537            transaction,
17538            modifier,
17539            statements: vec![],
17540            exception: None,
17541            has_end_keyword: false,
17542        })
17543    }
17544
17545    pub fn parse_begin_exception_end(&mut self) -> Result<Statement, ParserError> {
17546        let statements = self.parse_statement_list(&[Keyword::EXCEPTION, Keyword::END])?;
17547
17548        let exception = if self.parse_keyword(Keyword::EXCEPTION) {
17549            let mut when = Vec::new();
17550
17551            // We can have multiple `WHEN` arms so we consume all cases until `END`
17552            while !self.peek_keyword(Keyword::END) {
17553                self.expect_keyword(Keyword::WHEN)?;
17554
17555                // Each `WHEN` case can have one or more conditions, e.g.
17556                // WHEN EXCEPTION_1 [OR EXCEPTION_2] THEN
17557                // So we parse identifiers until the `THEN` keyword.
17558                let mut idents = Vec::new();
17559
17560                while !self.parse_keyword(Keyword::THEN) {
17561                    let ident = self.parse_identifier()?;
17562                    idents.push(ident);
17563
17564                    self.maybe_parse(|p| p.expect_keyword(Keyword::OR))?;
17565                }
17566
17567                let statements = self.parse_statement_list(&[Keyword::WHEN, Keyword::END])?;
17568
17569                when.push(ExceptionWhen { idents, statements });
17570            }
17571
17572            Some(when)
17573        } else {
17574            None
17575        };
17576
17577        self.expect_keyword(Keyword::END)?;
17578
17579        Ok(Statement::StartTransaction {
17580            begin: true,
17581            statements,
17582            exception,
17583            has_end_keyword: true,
17584            transaction: None,
17585            modifier: None,
17586            modes: Default::default(),
17587        })
17588    }
17589
17590    pub fn parse_end(&mut self) -> Result<Statement, ParserError> {
17591        let modifier = if !self.dialect.supports_end_transaction_modifier() {
17592            None
17593        } else if self.parse_keyword(Keyword::TRY) {
17594            Some(TransactionModifier::Try)
17595        } else if self.parse_keyword(Keyword::CATCH) {
17596            Some(TransactionModifier::Catch)
17597        } else {
17598            None
17599        };
17600        Ok(Statement::Commit {
17601            chain: self.parse_commit_rollback_chain()?,
17602            end: true,
17603            modifier,
17604        })
17605    }
17606
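    /// Parse the list of transaction modes that may follow `START TRANSACTION`
    /// or `BEGIN`, e.g. (illustrative):
    ///
    /// ```sql
    /// START TRANSACTION ISOLATION LEVEL REPEATABLE READ, READ ONLY
    /// ```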
17607    pub fn parse_transaction_modes(&mut self) -> Result<Vec<TransactionMode>, ParserError> {
17608        let mut modes = vec![];
17609        let mut required = false;
17610        loop {
17611            let mode = if self.parse_keywords(&[Keyword::ISOLATION, Keyword::LEVEL]) {
17612                let iso_level = if self.parse_keywords(&[Keyword::READ, Keyword::UNCOMMITTED]) {
17613                    TransactionIsolationLevel::ReadUncommitted
17614                } else if self.parse_keywords(&[Keyword::READ, Keyword::COMMITTED]) {
17615                    TransactionIsolationLevel::ReadCommitted
17616                } else if self.parse_keywords(&[Keyword::REPEATABLE, Keyword::READ]) {
17617                    TransactionIsolationLevel::RepeatableRead
17618                } else if self.parse_keyword(Keyword::SERIALIZABLE) {
17619                    TransactionIsolationLevel::Serializable
17620                } else if self.parse_keyword(Keyword::SNAPSHOT) {
17621                    TransactionIsolationLevel::Snapshot
17622                } else {
17623                    self.expected("isolation level", self.peek_token())?
17624                };
17625                TransactionMode::IsolationLevel(iso_level)
17626            } else if self.parse_keywords(&[Keyword::READ, Keyword::ONLY]) {
17627                TransactionMode::AccessMode(TransactionAccessMode::ReadOnly)
17628            } else if self.parse_keywords(&[Keyword::READ, Keyword::WRITE]) {
17629                TransactionMode::AccessMode(TransactionAccessMode::ReadWrite)
17630            } else if required {
17631                self.expected("transaction mode", self.peek_token())?
17632            } else {
17633                break;
17634            };
17635            modes.push(mode);
17636            // ANSI requires a comma after each transaction mode, but
17637            // PostgreSQL, for historical reasons, does not. We follow
17638            // PostgreSQL in making the comma optional, since that is strictly
17639            // more general.
17640            required = self.consume_token(&Token::Comma);
17641        }
17642        Ok(modes)
17643    }
17644
17645    pub fn parse_commit(&mut self) -> Result<Statement, ParserError> {
17646        Ok(Statement::Commit {
17647            chain: self.parse_commit_rollback_chain()?,
17648            end: false,
17649            modifier: None,
17650        })
17651    }
17652
17653    pub fn parse_rollback(&mut self) -> Result<Statement, ParserError> {
17654        let chain = self.parse_commit_rollback_chain()?;
17655        let savepoint = self.parse_rollback_savepoint()?;
17656
17657        Ok(Statement::Rollback { chain, savepoint })
17658    }
17659
17660    pub fn parse_commit_rollback_chain(&mut self) -> Result<bool, ParserError> {
17661        let _ = self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]);
17662        if self.parse_keyword(Keyword::AND) {
17663            let chain = !self.parse_keyword(Keyword::NO);
17664            self.expect_keyword_is(Keyword::CHAIN)?;
17665            Ok(chain)
17666        } else {
17667            Ok(false)
17668        }
17669    }
17670
17671    pub fn parse_rollback_savepoint(&mut self) -> Result<Option<Ident>, ParserError> {
17672        if self.parse_keyword(Keyword::TO) {
17673            let _ = self.parse_keyword(Keyword::SAVEPOINT);
17674            let savepoint = self.parse_identifier()?;
17675
17676            Ok(Some(savepoint))
17677        } else {
17678            Ok(None)
17679        }
17680    }
17681
17682    /// Parse a 'RAISERROR' statement
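    ///
    /// For example (illustrative T-SQL; additional substitution arguments and
    /// the `WITH` options are optional):
    ///
    /// ```sql
    /// RAISERROR('Job failed', 16, 1) WITH NOWAIT, LOG
    /// ```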
17683    pub fn parse_raiserror(&mut self) -> Result<Statement, ParserError> {
17684        self.expect_token(&Token::LParen)?;
17685        let message = Box::new(self.parse_expr()?);
17686        self.expect_token(&Token::Comma)?;
17687        let severity = Box::new(self.parse_expr()?);
17688        self.expect_token(&Token::Comma)?;
17689        let state = Box::new(self.parse_expr()?);
17690        let arguments = if self.consume_token(&Token::Comma) {
17691            self.parse_comma_separated(Parser::parse_expr)?
17692        } else {
17693            vec![]
17694        };
17695        self.expect_token(&Token::RParen)?;
17696        let options = if self.parse_keyword(Keyword::WITH) {
17697            self.parse_comma_separated(Parser::parse_raiserror_option)?
17698        } else {
17699            vec![]
17700        };
17701        Ok(Statement::RaisError {
17702            message,
17703            severity,
17704            state,
17705            arguments,
17706            options,
17707        })
17708    }
17709
17710    pub fn parse_raiserror_option(&mut self) -> Result<RaisErrorOption, ParserError> {
17711        match self.expect_one_of_keywords(&[Keyword::LOG, Keyword::NOWAIT, Keyword::SETERROR])? {
17712            Keyword::LOG => Ok(RaisErrorOption::Log),
17713            Keyword::NOWAIT => Ok(RaisErrorOption::NoWait),
17714            Keyword::SETERROR => Ok(RaisErrorOption::SetError),
17715            _ => self.expected(
17716                "LOG, NOWAIT OR SETERROR raiserror option",
17717                self.peek_token(),
17718            ),
17719        }
17720    }
17721
17722    pub fn parse_deallocate(&mut self) -> Result<Statement, ParserError> {
17723        let prepare = self.parse_keyword(Keyword::PREPARE);
17724        let name = self.parse_identifier()?;
17725        Ok(Statement::Deallocate { name, prepare })
17726    }
17727
17728    pub fn parse_execute(&mut self) -> Result<Statement, ParserError> {
17729        let name = if self.dialect.supports_execute_immediate()
17730            && self.parse_keyword(Keyword::IMMEDIATE)
17731        {
17732            None
17733        } else {
17734            let has_parentheses = self.consume_token(&Token::LParen);
17735            let name = self.parse_object_name(false)?;
17736            if has_parentheses {
17737                self.expect_token(&Token::RParen)?;
17738            }
17739            Some(name)
17740        };
17741
17742        let has_parentheses = self.consume_token(&Token::LParen);
17743
17744        let end_kws = &[Keyword::USING, Keyword::OUTPUT, Keyword::DEFAULT];
17745        let end_token = match (has_parentheses, self.peek_token().token) {
17746            (true, _) => Token::RParen,
17747            (false, Token::EOF) => Token::EOF,
17748            (false, Token::Word(w)) if end_kws.contains(&w.keyword) => Token::Word(w),
17749            (false, _) => Token::SemiColon,
17750        };
17751
17752        let parameters = self.parse_comma_separated0(Parser::parse_expr, end_token)?;
17753
17754        if has_parentheses {
17755            self.expect_token(&Token::RParen)?;
17756        }
17757
17758        let into = if self.parse_keyword(Keyword::INTO) {
17759            self.parse_comma_separated(Self::parse_identifier)?
17760        } else {
17761            vec![]
17762        };
17763
17764        let using = if self.parse_keyword(Keyword::USING) {
17765            self.parse_comma_separated(Self::parse_expr_with_alias)?
17766        } else {
17767            vec![]
17768        };
17769
17770        let output = self.parse_keyword(Keyword::OUTPUT);
17771
17772        let default = self.parse_keyword(Keyword::DEFAULT);
17773
17774        Ok(Statement::Execute {
17775            immediate: name.is_none(),
17776            name,
17777            parameters,
17778            has_parentheses,
17779            into,
17780            using,
17781            output,
17782            default,
17783        })
17784    }
17785
17786    pub fn parse_prepare(&mut self) -> Result<Statement, ParserError> {
17787        let name = self.parse_identifier()?;
17788
17789        let mut data_types = vec![];
17790        if self.consume_token(&Token::LParen) {
17791            data_types = self.parse_comma_separated(Parser::parse_data_type)?;
17792            self.expect_token(&Token::RParen)?;
17793        }
17794
17795        self.expect_keyword_is(Keyword::AS)?;
17796        let statement = Box::new(self.parse_statement()?);
17797        Ok(Statement::Prepare {
17798            name,
17799            data_types,
17800            statement,
17801        })
17802    }
17803
17804    pub fn parse_unload(&mut self) -> Result<Statement, ParserError> {
17805        self.expect_keyword(Keyword::UNLOAD)?;
17806        self.expect_token(&Token::LParen)?;
17807        let (query, query_text) = if matches!(self.peek_token().token, Token::SingleQuotedString(_))
17808        {
17809            (None, Some(self.parse_literal_string()?))
17810        } else {
17811            (Some(self.parse_query()?), None)
17812        };
17813        self.expect_token(&Token::RParen)?;
17814
17815        self.expect_keyword_is(Keyword::TO)?;
17816        let to = self.parse_identifier()?;
17817        let auth = if self.parse_keyword(Keyword::IAM_ROLE) {
17818            Some(self.parse_iam_role_kind()?)
17819        } else {
17820            None
17821        };
17822        let with = self.parse_options(Keyword::WITH)?;
17823        let mut options = vec![];
17824        while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
17825            options.push(opt);
17826        }
17827        Ok(Statement::Unload {
17828            query,
17829            query_text,
17830            to,
17831            auth,
17832            with,
17833            options,
17834        })
17835    }
17836
17837    fn parse_select_into(&mut self) -> Result<SelectInto, ParserError> {
17838        let temporary = self
17839            .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
17840            .is_some();
17841        let unlogged = self.parse_keyword(Keyword::UNLOGGED);
17842        let table = self.parse_keyword(Keyword::TABLE);
17843        let name = self.parse_object_name(false)?;
17844
17845        Ok(SelectInto {
17846            temporary,
17847            unlogged,
17848            table,
17849            name,
17850        })
17851    }
17852
17853    fn parse_pragma_value(&mut self) -> Result<Value, ParserError> {
17854        match self.parse_value()?.value {
17855            v @ Value::SingleQuotedString(_) => Ok(v),
17856            v @ Value::DoubleQuotedString(_) => Ok(v),
17857            v @ Value::Number(_, _) => Ok(v),
17858            v @ Value::Placeholder(_) => Ok(v),
17859            _ => {
17860                self.prev_token();
17861                self.expected("number or string or ? placeholder", self.peek_token())
17862            }
17863        }
17864    }
17865
17866    // PRAGMA [schema-name '.'] pragma-name [('=' pragma-value) | '(' pragma-value ')']
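    //
    // e.g. (illustrative):
    //
    //   PRAGMA cache_size = 4000
    //   PRAGMA main.journal_mode('WAL')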
17867    pub fn parse_pragma(&mut self) -> Result<Statement, ParserError> {
17868        let name = self.parse_object_name(false)?;
17869        if self.consume_token(&Token::LParen) {
17870            let value = self.parse_pragma_value()?;
17871            self.expect_token(&Token::RParen)?;
17872            Ok(Statement::Pragma {
17873                name,
17874                value: Some(value),
17875                is_eq: false,
17876            })
17877        } else if self.consume_token(&Token::Eq) {
17878            Ok(Statement::Pragma {
17879                name,
17880                value: Some(self.parse_pragma_value()?),
17881                is_eq: true,
17882            })
17883        } else {
17884            Ok(Statement::Pragma {
17885                name,
17886                value: None,
17887                is_eq: false,
17888            })
17889        }
17890    }
17891
17892    /// `INSTALL [extension_name]`
17893    pub fn parse_install(&mut self) -> Result<Statement, ParserError> {
17894        let extension_name = self.parse_identifier()?;
17895
17896        Ok(Statement::Install { extension_name })
17897    }
17898
17899    /// Parse a SQL LOAD statement
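    ///
    /// For example (illustrative; which form is accepted depends on the dialect):
    ///
    /// ```sql
    /// LOAD DATA LOCAL INPATH '/tmp/users.csv' OVERWRITE INTO TABLE users;
    /// LOAD spatial;
    /// ```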
17900    pub fn parse_load(&mut self) -> Result<Statement, ParserError> {
17901        if self.dialect.supports_load_extension() {
17902            let extension_name = self.parse_identifier()?;
17903            Ok(Statement::Load { extension_name })
17904        } else if self.parse_keyword(Keyword::DATA) && self.dialect.supports_load_data() {
17905            let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
17906            self.expect_keyword_is(Keyword::INPATH)?;
17907            let inpath = self.parse_literal_string()?;
17908            let overwrite = self.parse_one_of_keywords(&[Keyword::OVERWRITE]).is_some();
17909            self.expect_keyword_is(Keyword::INTO)?;
17910            self.expect_keyword_is(Keyword::TABLE)?;
17911            let table_name = self.parse_object_name(false)?;
17912            let partitioned = self.parse_insert_partition()?;
17913            let table_format = self.parse_load_data_table_format()?;
17914            Ok(Statement::LoadData {
17915                local,
17916                inpath,
17917                overwrite,
17918                table_name,
17919                partitioned,
17920                table_format,
17921            })
17922        } else {
17923            self.expected(
17924                "`DATA` or an extension name after `LOAD`",
17925                self.peek_token(),
17926            )
17927        }
17928    }
17929
17930    /// ```sql
17931    /// OPTIMIZE TABLE [db.]name [ON CLUSTER cluster] [PARTITION partition | PARTITION ID 'partition_id'] [FINAL] [DEDUPLICATE [BY expression]]
17932    /// ```
17933    /// [ClickHouse](https://clickhouse.com/docs/en/sql-reference/statements/optimize)
17934    pub fn parse_optimize_table(&mut self) -> Result<Statement, ParserError> {
17935        self.expect_keyword_is(Keyword::TABLE)?;
17936        let name = self.parse_object_name(false)?;
17937        let on_cluster = self.parse_optional_on_cluster()?;
17938
17939        let partition = if self.parse_keyword(Keyword::PARTITION) {
17940            if self.parse_keyword(Keyword::ID) {
17941                Some(Partition::Identifier(self.parse_identifier()?))
17942            } else {
17943                Some(Partition::Expr(self.parse_expr()?))
17944            }
17945        } else {
17946            None
17947        };
17948
17949        let include_final = self.parse_keyword(Keyword::FINAL);
17950        let deduplicate = if self.parse_keyword(Keyword::DEDUPLICATE) {
17951            if self.parse_keyword(Keyword::BY) {
17952                Some(Deduplicate::ByExpression(self.parse_expr()?))
17953            } else {
17954                Some(Deduplicate::All)
17955            }
17956        } else {
17957            None
17958        };
17959
17960        Ok(Statement::OptimizeTable {
17961            name,
17962            on_cluster,
17963            partition,
17964            include_final,
17965            deduplicate,
17966        })
17967    }
17968
17969    /// ```sql
17970    /// CREATE [ { TEMPORARY | TEMP } ] SEQUENCE [ IF NOT EXISTS ] <sequence_name>
17971    /// ```
17972    ///
17973    /// See [Postgres docs](https://www.postgresql.org/docs/current/sql-createsequence.html) for more details.
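    ///
    /// For example (an illustrative statement; every option shown is optional):
    ///
    /// ```sql
    /// CREATE TEMPORARY SEQUENCE IF NOT EXISTS seq AS BIGINT
    ///     INCREMENT BY 2 MINVALUE 1 MAXVALUE 1000 START WITH 10 CACHE 5 NO CYCLE
    ///     OWNED BY accounts.id
    /// ```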
17974    pub fn parse_create_sequence(&mut self, temporary: bool) -> Result<Statement, ParserError> {
17975        //[ IF NOT EXISTS ]
17976        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
17977        //name
17978        let name = self.parse_object_name(false)?;
17979        //[ AS data_type ]
17980        let mut data_type: Option<DataType> = None;
17981        if self.parse_keywords(&[Keyword::AS]) {
17982            data_type = Some(self.parse_data_type()?)
17983        }
17984        let sequence_options = self.parse_create_sequence_options()?;
17985        // [ OWNED BY { table_name.column_name | NONE } ]
17986        let owned_by = if self.parse_keywords(&[Keyword::OWNED, Keyword::BY]) {
17987            if self.parse_keywords(&[Keyword::NONE]) {
17988                Some(ObjectName::from(vec![Ident::new("NONE")]))
17989            } else {
17990                Some(self.parse_object_name(false)?)
17991            }
17992        } else {
17993            None
17994        };
17995        Ok(Statement::CreateSequence {
17996            temporary,
17997            if_not_exists,
17998            name,
17999            data_type,
18000            sequence_options,
18001            owned_by,
18002        })
18003    }
18004
18005    fn parse_create_sequence_options(&mut self) -> Result<Vec<SequenceOptions>, ParserError> {
18006        let mut sequence_options = vec![];
18007        //[ INCREMENT [ BY ] increment ]
18008        if self.parse_keywords(&[Keyword::INCREMENT]) {
18009            if self.parse_keywords(&[Keyword::BY]) {
18010                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, true));
18011            } else {
18012                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, false));
18013            }
18014        }
18015        //[ MINVALUE minvalue | NO MINVALUE ]
18016        if self.parse_keyword(Keyword::MINVALUE) {
18017            sequence_options.push(SequenceOptions::MinValue(Some(self.parse_number()?)));
18018        } else if self.parse_keywords(&[Keyword::NO, Keyword::MINVALUE]) {
18019            sequence_options.push(SequenceOptions::MinValue(None));
18020        }
18021        //[ MAXVALUE maxvalue | NO MAXVALUE ]
18022        if self.parse_keywords(&[Keyword::MAXVALUE]) {
18023            sequence_options.push(SequenceOptions::MaxValue(Some(self.parse_number()?)));
18024        } else if self.parse_keywords(&[Keyword::NO, Keyword::MAXVALUE]) {
18025            sequence_options.push(SequenceOptions::MaxValue(None));
18026        }
18027
18028        //[ START [ WITH ] start ]
18029        if self.parse_keywords(&[Keyword::START]) {
18030            if self.parse_keywords(&[Keyword::WITH]) {
18031                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, true));
18032            } else {
18033                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, false));
18034            }
18035        }
18036        //[ CACHE cache ]
18037        if self.parse_keywords(&[Keyword::CACHE]) {
18038            sequence_options.push(SequenceOptions::Cache(self.parse_number()?));
18039        }
18040        // [ [ NO ] CYCLE ]
18041        if self.parse_keywords(&[Keyword::NO, Keyword::CYCLE]) {
18042            sequence_options.push(SequenceOptions::Cycle(true));
18043        } else if self.parse_keywords(&[Keyword::CYCLE]) {
18044            sequence_options.push(SequenceOptions::Cycle(false));
18045        }
18046
18047        Ok(sequence_options)
18048    }
18049
18050    /// Parse a `CREATE SERVER` statement.
18051    ///
18052    /// See [Statement::CreateServer]
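    ///
    /// For example (illustrative PostgreSQL-style syntax):
    ///
    /// ```sql
    /// CREATE SERVER IF NOT EXISTS films TYPE 'postgres' VERSION '16'
    ///     FOREIGN DATA WRAPPER postgres_fdw OPTIONS (host 'localhost', dbname 'films')
    /// ```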
18053    pub fn parse_pg_create_server(&mut self) -> Result<Statement, ParserError> {
18054        let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
18055        let name = self.parse_object_name(false)?;
18056
18057        let server_type = if self.parse_keyword(Keyword::TYPE) {
18058            Some(self.parse_identifier()?)
18059        } else {
18060            None
18061        };
18062
18063        let version = if self.parse_keyword(Keyword::VERSION) {
18064            Some(self.parse_identifier()?)
18065        } else {
18066            None
18067        };
18068
18069        self.expect_keywords(&[Keyword::FOREIGN, Keyword::DATA, Keyword::WRAPPER])?;
18070        let foreign_data_wrapper = self.parse_object_name(false)?;
18071
18072        let mut options = None;
18073        if self.parse_keyword(Keyword::OPTIONS) {
18074            self.expect_token(&Token::LParen)?;
18075            options = Some(self.parse_comma_separated(|p| {
18076                let key = p.parse_identifier()?;
18077                let value = p.parse_identifier()?;
18078                Ok(CreateServerOption { key, value })
18079            })?);
18080            self.expect_token(&Token::RParen)?;
18081        }
18082
18083        Ok(Statement::CreateServer(CreateServerStatement {
18084            name,
18085            if_not_exists: ine,
18086            server_type,
18087            version,
18088            foreign_data_wrapper,
18089            options,
18090        }))
18091    }
18092
18093    /// The index of the first unprocessed token.
18094    pub fn index(&self) -> usize {
18095        self.index
18096    }
18097
18098    pub fn parse_named_window(&mut self) -> Result<NamedWindowDefinition, ParserError> {
18099        let ident = self.parse_identifier()?;
18100        self.expect_keyword_is(Keyword::AS)?;
18101
18102        let window_expr = if self.consume_token(&Token::LParen) {
18103            NamedWindowExpr::WindowSpec(self.parse_window_spec()?)
18104        } else if self.dialect.supports_window_clause_named_window_reference() {
18105            NamedWindowExpr::NamedWindow(self.parse_identifier()?)
18106        } else {
18107            return self.expected("(", self.peek_token());
18108        };
18109
18110        Ok(NamedWindowDefinition(ident, window_expr))
18111    }
18112
18113    pub fn parse_create_procedure(&mut self, or_alter: bool) -> Result<Statement, ParserError> {
18114        let name = self.parse_object_name(false)?;
18115        let params = self.parse_optional_procedure_parameters()?;
18116
18117        let language = if self.parse_keyword(Keyword::LANGUAGE) {
18118            Some(self.parse_identifier()?)
18119        } else {
18120            None
18121        };
18122
18123        self.expect_keyword_is(Keyword::AS)?;
18124
18125        let body = self.parse_conditional_statements(&[Keyword::END])?;
18126
18127        Ok(Statement::CreateProcedure {
18128            name,
18129            or_alter,
18130            params,
18131            language,
18132            body,
18133        })
18134    }
18135
18136    pub fn parse_window_spec(&mut self) -> Result<WindowSpec, ParserError> {
18137        let window_name = match self.peek_token().token {
18138            Token::Word(word) if word.keyword == Keyword::NoKeyword => {
18139                self.parse_optional_ident()?
18140            }
18141            _ => None,
18142        };
18143
18144        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
18145            self.parse_comma_separated(Parser::parse_expr)?
18146        } else {
18147            vec![]
18148        };
18149        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
18150            self.parse_comma_separated(Parser::parse_order_by_expr)?
18151        } else {
18152            vec![]
18153        };
18154
18155        let window_frame = if !self.consume_token(&Token::RParen) {
18156            let window_frame = self.parse_window_frame()?;
18157            self.expect_token(&Token::RParen)?;
18158            Some(window_frame)
18159        } else {
18160            None
18161        };
18162        Ok(WindowSpec {
18163            window_name,
18164            partition_by,
18165            order_by,
18166            window_frame,
18167        })
18168    }
18169
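    /// Parse a `CREATE TYPE` statement, assuming the `CREATE TYPE` keywords
    /// have already been consumed. Handles the forms `CREATE TYPE name`,
    /// `CREATE TYPE name (options)`, `CREATE TYPE name AS ENUM (...)`,
    /// `CREATE TYPE name AS RANGE (...)`, and `CREATE TYPE name AS (attributes)`.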
18170    pub fn parse_create_type(&mut self) -> Result<Statement, ParserError> {
18171        let name = self.parse_object_name(false)?;
18172
18173        // Check if we have AS keyword
18174        let has_as = self.parse_keyword(Keyword::AS);
18175
18176        if !has_as {
18177            // Two cases: CREATE TYPE name; or CREATE TYPE name (options);
18178            if self.consume_token(&Token::LParen) {
18179                // CREATE TYPE name (options) - SQL definition without AS
18180                let options = self.parse_create_type_sql_definition_options()?;
18181                self.expect_token(&Token::RParen)?;
18182                return Ok(Statement::CreateType {
18183                    name,
18184                    representation: Some(UserDefinedTypeRepresentation::SqlDefinition { options }),
18185                });
18186            }
18187
18188            // CREATE TYPE name; - no representation
18189            return Ok(Statement::CreateType {
18190                name,
18191                representation: None,
18192            });
18193        }
18194
18195        // We have AS keyword
18196        if self.parse_keyword(Keyword::ENUM) {
18197            // CREATE TYPE name AS ENUM (labels)
18198            self.parse_create_type_enum(name)
18199        } else if self.parse_keyword(Keyword::RANGE) {
18200            // CREATE TYPE name AS RANGE (options)
18201            self.parse_create_type_range(name)
18202        } else if self.consume_token(&Token::LParen) {
18203            // CREATE TYPE name AS (attributes) - Composite
18204            self.parse_create_type_composite(name)
18205        } else {
18206            self.expected("ENUM, RANGE, or '(' after AS", self.peek_token())
18207        }
18208    }
18209
18210    /// Parse remainder of `CREATE TYPE AS (attributes)` statement (composite type)
18211    ///
18212    /// See [PostgreSQL](https://www.postgresql.org/docs/current/sql-createtype.html)
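    ///
    /// For example: `CREATE TYPE complex AS (r DOUBLE PRECISION, i DOUBLE PRECISION)`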
18213    fn parse_create_type_composite(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
18214        if self.consume_token(&Token::RParen) {
18215            // Empty composite type
18216            return Ok(Statement::CreateType {
18217                name,
18218                representation: Some(UserDefinedTypeRepresentation::Composite {
18219                    attributes: vec![],
18220                }),
18221            });
18222        }
18223
18224        let mut attributes = vec![];
18225        loop {
18226            let attr_name = self.parse_identifier()?;
18227            let attr_data_type = self.parse_data_type()?;
18228            let attr_collation = if self.parse_keyword(Keyword::COLLATE) {
18229                Some(self.parse_object_name(false)?)
18230            } else {
18231                None
18232            };
18233            attributes.push(UserDefinedTypeCompositeAttributeDef {
18234                name: attr_name,
18235                data_type: attr_data_type,
18236                collation: attr_collation,
18237            });
18238
18239            if !self.consume_token(&Token::Comma) {
18240                break;
18241            }
18242        }
18243        self.expect_token(&Token::RParen)?;
18244
18245        Ok(Statement::CreateType {
18246            name,
18247            representation: Some(UserDefinedTypeRepresentation::Composite { attributes }),
18248        })
18249    }
18250
18251    /// Parse remainder of `CREATE TYPE AS ENUM` statement (see [Statement::CreateType] and [Self::parse_create_type])
18252    ///
18253    /// See [PostgreSQL](https://www.postgresql.org/docs/current/sql-createtype.html)
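    ///
    /// For example: `CREATE TYPE mood AS ENUM ('sad', 'ok', 'happy')`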
18254    pub fn parse_create_type_enum(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
18255        self.expect_token(&Token::LParen)?;
18256        let labels = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
18257        self.expect_token(&Token::RParen)?;
18258
18259        Ok(Statement::CreateType {
18260            name,
18261            representation: Some(UserDefinedTypeRepresentation::Enum { labels }),
18262        })
18263    }
18264
18265    /// Parse remainder of `CREATE TYPE AS RANGE` statement
18266    ///
18267    /// See [PostgreSQL](https://www.postgresql.org/docs/current/sql-createtype.html)
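    ///
    /// For example: `CREATE TYPE float8_range AS RANGE (SUBTYPE = float8, SUBTYPE_DIFF = float8mi)`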
18268    fn parse_create_type_range(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
18269        self.expect_token(&Token::LParen)?;
18270        let options = self.parse_comma_separated0(|p| p.parse_range_option(), Token::RParen)?;
18271        self.expect_token(&Token::RParen)?;
18272
18273        Ok(Statement::CreateType {
18274            name,
18275            representation: Some(UserDefinedTypeRepresentation::Range { options }),
18276        })
18277    }
18278
18279    /// Parse a single range option for a `CREATE TYPE AS RANGE` statement
18280    fn parse_range_option(&mut self) -> Result<UserDefinedTypeRangeOption, ParserError> {
18281        let keyword = self.parse_one_of_keywords(&[
18282            Keyword::SUBTYPE,
18283            Keyword::SUBTYPE_OPCLASS,
18284            Keyword::COLLATION,
18285            Keyword::CANONICAL,
18286            Keyword::SUBTYPE_DIFF,
18287            Keyword::MULTIRANGE_TYPE_NAME,
18288        ]);
18289
18290        match keyword {
18291            Some(Keyword::SUBTYPE) => {
18292                self.expect_token(&Token::Eq)?;
18293                let data_type = self.parse_data_type()?;
18294                Ok(UserDefinedTypeRangeOption::Subtype(data_type))
18295            }
18296            Some(Keyword::SUBTYPE_OPCLASS) => {
18297                self.expect_token(&Token::Eq)?;
18298                let name = self.parse_object_name(false)?;
18299                Ok(UserDefinedTypeRangeOption::SubtypeOpClass(name))
18300            }
18301            Some(Keyword::COLLATION) => {
18302                self.expect_token(&Token::Eq)?;
18303                let name = self.parse_object_name(false)?;
18304                Ok(UserDefinedTypeRangeOption::Collation(name))
18305            }
18306            Some(Keyword::CANONICAL) => {
18307                self.expect_token(&Token::Eq)?;
18308                let name = self.parse_object_name(false)?;
18309                Ok(UserDefinedTypeRangeOption::Canonical(name))
18310            }
18311            Some(Keyword::SUBTYPE_DIFF) => {
18312                self.expect_token(&Token::Eq)?;
18313                let name = self.parse_object_name(false)?;
18314                Ok(UserDefinedTypeRangeOption::SubtypeDiff(name))
18315            }
18316            Some(Keyword::MULTIRANGE_TYPE_NAME) => {
18317                self.expect_token(&Token::Eq)?;
18318                let name = self.parse_object_name(false)?;
18319                Ok(UserDefinedTypeRangeOption::MultirangeTypeName(name))
18320            }
18321            _ => self.expected("range option keyword", self.peek_token()),
18322        }
18323    }
18324
18325    /// Parse SQL definition options for CREATE TYPE (options)
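    ///
    /// For example, the option list in
    /// `CREATE TYPE box (INTERNALLENGTH = 16, INPUT = my_box_in_function, OUTPUT = my_box_out_function)`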
18326    fn parse_create_type_sql_definition_options(
18327        &mut self,
18328    ) -> Result<Vec<UserDefinedTypeSqlDefinitionOption>, ParserError> {
18329        self.parse_comma_separated0(|p| p.parse_sql_definition_option(), Token::RParen)
18330    }
18331
18332    /// Parse a single SQL definition option for CREATE TYPE (options)
18333    fn parse_sql_definition_option(
18334        &mut self,
18335    ) -> Result<UserDefinedTypeSqlDefinitionOption, ParserError> {
18336        let keyword = self.parse_one_of_keywords(&[
18337            Keyword::INPUT,
18338            Keyword::OUTPUT,
18339            Keyword::RECEIVE,
18340            Keyword::SEND,
18341            Keyword::TYPMOD_IN,
18342            Keyword::TYPMOD_OUT,
18343            Keyword::ANALYZE,
18344            Keyword::SUBSCRIPT,
18345            Keyword::INTERNALLENGTH,
18346            Keyword::PASSEDBYVALUE,
18347            Keyword::ALIGNMENT,
18348            Keyword::STORAGE,
18349            Keyword::LIKE,
18350            Keyword::CATEGORY,
18351            Keyword::PREFERRED,
18352            Keyword::DEFAULT,
18353            Keyword::ELEMENT,
18354            Keyword::DELIMITER,
18355            Keyword::COLLATABLE,
18356        ]);
18357
18358        match keyword {
18359            Some(Keyword::INPUT) => {
18360                self.expect_token(&Token::Eq)?;
18361                let name = self.parse_object_name(false)?;
18362                Ok(UserDefinedTypeSqlDefinitionOption::Input(name))
18363            }
18364            Some(Keyword::OUTPUT) => {
18365                self.expect_token(&Token::Eq)?;
18366                let name = self.parse_object_name(false)?;
18367                Ok(UserDefinedTypeSqlDefinitionOption::Output(name))
18368            }
18369            Some(Keyword::RECEIVE) => {
18370                self.expect_token(&Token::Eq)?;
18371                let name = self.parse_object_name(false)?;
18372                Ok(UserDefinedTypeSqlDefinitionOption::Receive(name))
18373            }
18374            Some(Keyword::SEND) => {
18375                self.expect_token(&Token::Eq)?;
18376                let name = self.parse_object_name(false)?;
18377                Ok(UserDefinedTypeSqlDefinitionOption::Send(name))
18378            }
18379            Some(Keyword::TYPMOD_IN) => {
18380                self.expect_token(&Token::Eq)?;
18381                let name = self.parse_object_name(false)?;
18382                Ok(UserDefinedTypeSqlDefinitionOption::TypmodIn(name))
18383            }
18384            Some(Keyword::TYPMOD_OUT) => {
18385                self.expect_token(&Token::Eq)?;
18386                let name = self.parse_object_name(false)?;
18387                Ok(UserDefinedTypeSqlDefinitionOption::TypmodOut(name))
18388            }
18389            Some(Keyword::ANALYZE) => {
18390                self.expect_token(&Token::Eq)?;
18391                let name = self.parse_object_name(false)?;
18392                Ok(UserDefinedTypeSqlDefinitionOption::Analyze(name))
18393            }
18394            Some(Keyword::SUBSCRIPT) => {
18395                self.expect_token(&Token::Eq)?;
18396                let name = self.parse_object_name(false)?;
18397                Ok(UserDefinedTypeSqlDefinitionOption::Subscript(name))
18398            }
18399            Some(Keyword::INTERNALLENGTH) => {
18400                self.expect_token(&Token::Eq)?;
18401                if self.parse_keyword(Keyword::VARIABLE) {
18402                    Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
18403                        UserDefinedTypeInternalLength::Variable,
18404                    ))
18405                } else {
18406                    let value = self.parse_literal_uint()?;
18407                    Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
18408                        UserDefinedTypeInternalLength::Fixed(value),
18409                    ))
18410                }
18411            }
18412            Some(Keyword::PASSEDBYVALUE) => Ok(UserDefinedTypeSqlDefinitionOption::PassedByValue),
18413            Some(Keyword::ALIGNMENT) => {
18414                self.expect_token(&Token::Eq)?;
18415                let align_keyword = self.parse_one_of_keywords(&[
18416                    Keyword::CHAR,
18417                    Keyword::INT2,
18418                    Keyword::INT4,
18419                    Keyword::DOUBLE,
18420                ]);
18421                match align_keyword {
18422                    Some(Keyword::CHAR) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
18423                        Alignment::Char,
18424                    )),
18425                    Some(Keyword::INT2) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
18426                        Alignment::Int2,
18427                    )),
18428                    Some(Keyword::INT4) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
18429                        Alignment::Int4,
18430                    )),
18431                    Some(Keyword::DOUBLE) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
18432                        Alignment::Double,
18433                    )),
18434                    _ => self.expected(
18435                        "alignment value (char, int2, int4, or double)",
18436                        self.peek_token(),
18437                    ),
18438                }
18439            }
18440            Some(Keyword::STORAGE) => {
18441                self.expect_token(&Token::Eq)?;
18442                let storage_keyword = self.parse_one_of_keywords(&[
18443                    Keyword::PLAIN,
18444                    Keyword::EXTERNAL,
18445                    Keyword::EXTENDED,
18446                    Keyword::MAIN,
18447                ]);
18448                match storage_keyword {
18449                    Some(Keyword::PLAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
18450                        UserDefinedTypeStorage::Plain,
18451                    )),
18452                    Some(Keyword::EXTERNAL) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
18453                        UserDefinedTypeStorage::External,
18454                    )),
18455                    Some(Keyword::EXTENDED) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
18456                        UserDefinedTypeStorage::Extended,
18457                    )),
18458                    Some(Keyword::MAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
18459                        UserDefinedTypeStorage::Main,
18460                    )),
18461                    _ => self.expected(
18462                        "storage value (plain, external, extended, or main)",
18463                        self.peek_token(),
18464                    ),
18465                }
18466            }
18467            Some(Keyword::LIKE) => {
18468                self.expect_token(&Token::Eq)?;
18469                let name = self.parse_object_name(false)?;
18470                Ok(UserDefinedTypeSqlDefinitionOption::Like(name))
18471            }
18472            Some(Keyword::CATEGORY) => {
18473                self.expect_token(&Token::Eq)?;
18474                let category_str = self.parse_literal_string()?;
18475                let category_char = category_str.chars().next().ok_or_else(|| {
18476                    ParserError::ParserError(
18477                        "CATEGORY value must be a single character".to_string(),
18478                    )
18479                })?;
18480                Ok(UserDefinedTypeSqlDefinitionOption::Category(category_char))
18481            }
18482            Some(Keyword::PREFERRED) => {
18483                self.expect_token(&Token::Eq)?;
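                // Evaluates to false only if the FALSE keyword is present;
                // a bare option (neither TRUE nor FALSE) defaults to true.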
18484                let value =
18485                    self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
18486                Ok(UserDefinedTypeSqlDefinitionOption::Preferred(value))
18487            }
18488            Some(Keyword::DEFAULT) => {
18489                self.expect_token(&Token::Eq)?;
18490                let expr = self.parse_expr()?;
18491                Ok(UserDefinedTypeSqlDefinitionOption::Default(expr))
18492            }
18493            Some(Keyword::ELEMENT) => {
18494                self.expect_token(&Token::Eq)?;
18495                let data_type = self.parse_data_type()?;
18496                Ok(UserDefinedTypeSqlDefinitionOption::Element(data_type))
18497            }
18498            Some(Keyword::DELIMITER) => {
18499                self.expect_token(&Token::Eq)?;
18500                let delimiter = self.parse_literal_string()?;
18501                Ok(UserDefinedTypeSqlDefinitionOption::Delimiter(delimiter))
18502            }
18503            Some(Keyword::COLLATABLE) => {
18504                self.expect_token(&Token::Eq)?;
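                // Evaluates to false only if the FALSE keyword is present;
                // a bare option (neither TRUE nor FALSE) defaults to true.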
18505                let value =
18506                    self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
18507                Ok(UserDefinedTypeSqlDefinitionOption::Collatable(value))
18508            }
18509            _ => self.expected("SQL definition option keyword", self.peek_token()),
18510        }
18511    }
18512
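    /// Parse a parenthesized, possibly empty, comma-separated list of
    /// identifiers, e.g. `(a, b, c)` or `()`.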
18513    fn parse_parenthesized_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
18514        self.expect_token(&Token::LParen)?;
18515        let idents = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
18516        self.expect_token(&Token::RParen)?;
18517        Ok(idents)
18518    }
18519
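    /// Parse an optional MySQL column position clause (`FIRST` or
    /// `AFTER <column>`); returns `Ok(None)` for dialects other than
    /// MySQL and Generic.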
18520    fn parse_column_position(&mut self) -> Result<Option<MySQLColumnPosition>, ParserError> {
18521        if dialect_of!(self is MySqlDialect | GenericDialect) {
18522            if self.parse_keyword(Keyword::FIRST) {
18523                Ok(Some(MySQLColumnPosition::First))
18524            } else if self.parse_keyword(Keyword::AFTER) {
18525                let ident = self.parse_identifier()?;
18526                Ok(Some(MySQLColumnPosition::After(ident)))
18527            } else {
18528                Ok(None)
18529            }
18530        } else {
18531            Ok(None)
18532        }
18533    }
18534
18535    /// Parse [Statement::Print]
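    ///
    /// For example, the MSSQL-style `PRINT 'hello world'`.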
18536    fn parse_print(&mut self) -> Result<Statement, ParserError> {
18537        Ok(Statement::Print(PrintStatement {
18538            message: Box::new(self.parse_expr()?),
18539        }))
18540    }
18541
18542    /// Parse [Statement::Return]
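    ///
    /// For example, `RETURN` or `RETURN 1 + 2`.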
18543    fn parse_return(&mut self) -> Result<Statement, ParserError> {
18544        match self.maybe_parse(|p| p.parse_expr())? {
18545            Some(expr) => Ok(Statement::Return(ReturnStatement {
18546                value: Some(ReturnStatementValue::Expr(expr)),
18547            })),
18548            None => Ok(Statement::Return(ReturnStatement { value: None })),
18549        }
18550    }
18551
18552    /// Parse an `EXPORT DATA` statement.
18553    ///
18554    /// See [Statement::ExportData]
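    ///
    /// A representative (BigQuery-style) example:
    /// `EXPORT DATA OPTIONS (uri = 'gs://bucket/path/*.csv', format = 'CSV') AS SELECT * FROM t`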
18555    fn parse_export_data(&mut self) -> Result<Statement, ParserError> {
18556        self.expect_keywords(&[Keyword::EXPORT, Keyword::DATA])?;
18557
18558        let connection = if self.parse_keywords(&[Keyword::WITH, Keyword::CONNECTION]) {
18559            Some(self.parse_object_name(false)?)
18560        } else {
18561            None
18562        };
18563        self.expect_keyword(Keyword::OPTIONS)?;
18564        self.expect_token(&Token::LParen)?;
18565        let options = self.parse_comma_separated(|p| p.parse_sql_option())?;
18566        self.expect_token(&Token::RParen)?;
18567        self.expect_keyword(Keyword::AS)?;
18568        let query = self.parse_query()?;
18569        Ok(Statement::ExportData(ExportData {
18570            options,
18571            query,
18572            connection,
18573        }))
18574    }
18575
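    /// Parse a Redshift-style `VACUUM` statement, e.g.
    /// `VACUUM FULL my_table TO 75 PERCENT BOOST`.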
18576    fn parse_vacuum(&mut self) -> Result<Statement, ParserError> {
18577        self.expect_keyword(Keyword::VACUUM)?;
18578        let full = self.parse_keyword(Keyword::FULL);
18579        let sort_only = self.parse_keywords(&[Keyword::SORT, Keyword::ONLY]);
18580        let delete_only = self.parse_keywords(&[Keyword::DELETE, Keyword::ONLY]);
18581        let reindex = self.parse_keyword(Keyword::REINDEX);
18582        let recluster = self.parse_keyword(Keyword::RECLUSTER);
18583        let (table_name, threshold, boost) =
18584            match self.maybe_parse(|p| p.parse_object_name(false))? {
18585                Some(table_name) => {
18586                    let threshold = if self.parse_keyword(Keyword::TO) {
18587                        let value = self.parse_value()?;
18588                        self.expect_keyword(Keyword::PERCENT)?;
18589                        Some(value.value)
18590                    } else {
18591                        None
18592                    };
18593                    let boost = self.parse_keyword(Keyword::BOOST);
18594                    (Some(table_name), threshold, boost)
18595                }
18596                _ => (None, None, false),
18597            };
18598        Ok(Statement::Vacuum(VacuumStatement {
18599            full,
18600            sort_only,
18601            delete_only,
18602            reindex,
18603            recluster,
18604            table_name,
18605            threshold,
18606            boost,
18607        }))
18608    }
18609
18610    /// Consume the parser and return its underlying token buffer
18611    pub fn into_tokens(self) -> Vec<TokenWithSpan> {
18612        self.tokens
18613    }
18614
18615    /// Returns true if the next keyword indicates a subquery, i.e. SELECT or WITH
18616    fn peek_sub_query(&mut self) -> bool {
18617        if self
18618            .parse_one_of_keywords(&[Keyword::SELECT, Keyword::WITH])
18619            .is_some()
18620        {
18621            self.prev_token();
18622            return true;
18623        }
18624        false
18625    }
18626
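    /// Parse the optional trailing clauses of a `SHOW` statement: a
    /// `LIKE`-style filter, an `IN`/`FROM` scope, `STARTS WITH`, `LIMIT`,
    /// and a trailing `FROM` value, in a dialect-dependent order.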
18627    pub(crate) fn parse_show_stmt_options(&mut self) -> Result<ShowStatementOptions, ParserError> {
18628        let show_in;
18629        let mut filter_position = None;
18630        if self.dialect.supports_show_like_before_in() {
18631            if let Some(filter) = self.parse_show_statement_filter()? {
18632                filter_position = Some(ShowStatementFilterPosition::Infix(filter));
18633            }
18634            show_in = self.maybe_parse_show_stmt_in()?;
18635        } else {
18636            show_in = self.maybe_parse_show_stmt_in()?;
18637            if let Some(filter) = self.parse_show_statement_filter()? {
18638                filter_position = Some(ShowStatementFilterPosition::Suffix(filter));
18639            }
18640        }
18641        let starts_with = self.maybe_parse_show_stmt_starts_with()?;
18642        let limit = self.maybe_parse_show_stmt_limit()?;
18643        let from = self.maybe_parse_show_stmt_from()?;
18644        Ok(ShowStatementOptions {
18645            filter_position,
18646            show_in,
18647            starts_with,
18648            limit,
18649            limit_from: from,
18650        })
18651    }
18652
18653    fn maybe_parse_show_stmt_in(&mut self) -> Result<Option<ShowStatementIn>, ParserError> {
18654        let clause = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) {
18655            Some(Keyword::FROM) => ShowStatementInClause::FROM,
18656            Some(Keyword::IN) => ShowStatementInClause::IN,
18657            None => return Ok(None),
18658            _ => return self.expected("FROM or IN", self.peek_token()),
18659        };
18660
18661        let (parent_type, parent_name) = match self.parse_one_of_keywords(&[
18662            Keyword::ACCOUNT,
18663            Keyword::DATABASE,
18664            Keyword::SCHEMA,
18665            Keyword::TABLE,
18666            Keyword::VIEW,
18667        ]) {
18668            // If we see one of these keywords next, there is no parent name
18669            Some(Keyword::DATABASE)
18670                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
18671                    | self.peek_keyword(Keyword::LIMIT) =>
18672            {
18673                (Some(ShowStatementInParentType::Database), None)
18674            }
18675            Some(Keyword::SCHEMA)
18676                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
18677                    | self.peek_keyword(Keyword::LIMIT) =>
18678            {
18679                (Some(ShowStatementInParentType::Schema), None)
18680            }
18681            Some(parent_kw) => {
18682                // The parent name here is still optional, for example:
18683                // SHOW TABLES IN ACCOUNT, so parsing the object name
18684                // may fail because the statement ends.
18685                let parent_name = self.maybe_parse(|p| p.parse_object_name(false))?;
18686                match parent_kw {
18687                    Keyword::ACCOUNT => (Some(ShowStatementInParentType::Account), parent_name),
18688                    Keyword::DATABASE => (Some(ShowStatementInParentType::Database), parent_name),
18689                    Keyword::SCHEMA => (Some(ShowStatementInParentType::Schema), parent_name),
18690                    Keyword::TABLE => (Some(ShowStatementInParentType::Table), parent_name),
18691                    Keyword::VIEW => (Some(ShowStatementInParentType::View), parent_name),
18692                    _ => {
18693                        return self.expected(
18694                            "one of ACCOUNT, DATABASE, SCHEMA, TABLE or VIEW",
18695                            self.peek_token(),
18696                        )
18697                    }
18698                }
18699            }
18700            None => {
18701                // Parse the MySQL-style `FROM tbl_name FROM db_name`,
18702                // which is equivalent to `FROM db_name.tbl_name`
18703                let mut parent_name = self.parse_object_name(false)?;
18704                if self
18705                    .parse_one_of_keywords(&[Keyword::FROM, Keyword::IN])
18706                    .is_some()
18707                {
18708                    parent_name
18709                        .0
18710                        .insert(0, ObjectNamePart::Identifier(self.parse_identifier()?));
18711                }
18712                (None, Some(parent_name))
18713            }
18714        };
18715
18716        Ok(Some(ShowStatementIn {
18717            clause,
18718            parent_type,
18719            parent_name,
18720        }))
18721    }
18722
18723    fn maybe_parse_show_stmt_starts_with(&mut self) -> Result<Option<Value>, ParserError> {
18724        if self.parse_keywords(&[Keyword::STARTS, Keyword::WITH]) {
18725            Ok(Some(self.parse_value()?.value))
18726        } else {
18727            Ok(None)
18728        }
18729    }
18730
18731    fn maybe_parse_show_stmt_limit(&mut self) -> Result<Option<Expr>, ParserError> {
18732        if self.parse_keyword(Keyword::LIMIT) {
18733            Ok(self.parse_limit()?)
18734        } else {
18735            Ok(None)
18736        }
18737    }
18738
18739    fn maybe_parse_show_stmt_from(&mut self) -> Result<Option<Value>, ParserError> {
18740        if self.parse_keyword(Keyword::FROM) {
18741            Ok(Some(self.parse_value()?.value))
18742        } else {
18743            Ok(None)
18744        }
18745    }
18746
18747    pub(crate) fn in_column_definition_state(&self) -> bool {
18748        matches!(self.state, ColumnDefinition)
18749    }
18750
18751    /// Parses options provided in key-value format.
18752    ///
18753    /// * `parenthesized` - true if the options are enclosed in parentheses
18754    /// * `end_words` - a list of keywords, any of which marks the end of the options section
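    ///
    /// For example, a Snowflake-style option list such as
    /// `TYPE = CSV COMPRESSION = AUTO` or `(TYPE = 'CSV' FIELD_DELIMITER = ',')`.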
18755    pub(crate) fn parse_key_value_options(
18756        &mut self,
18757        parenthesized: bool,
18758        end_words: &[Keyword],
18759    ) -> Result<KeyValueOptions, ParserError> {
18760        let mut options: Vec<KeyValueOption> = Vec::new();
18761        let mut delimiter = KeyValueOptionsDelimiter::Space;
18762        if parenthesized {
18763            self.expect_token(&Token::LParen)?;
18764        }
18765        loop {
18766            match self.next_token().token {
18767                Token::RParen => {
18768                    if parenthesized {
18769                        break;
18770                    } else {
18771                        return self.expected("another option or EOF", self.peek_token());
18772                    }
18773                }
18774                Token::EOF => break,
18775                Token::Comma => {
18776                    delimiter = KeyValueOptionsDelimiter::Comma;
18777                    continue;
18778                }
18779                Token::Word(w) if !end_words.contains(&w.keyword) => {
18780                    options.push(self.parse_key_value_option(&w)?)
18781                }
18782                Token::Word(w) if end_words.contains(&w.keyword) => {
18783                    self.prev_token();
18784                    break;
18785                }
18786                _ => return self.expected("another option, EOF, Comma or ')'", self.peek_token()),
18787            };
18788        }
18789
18790        Ok(KeyValueOptions { delimiter, options })
18791    }
18792
18793    /// Parses a `KEY = VALUE` construct based on the specified key
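    ///
    /// Examples: `FORMAT = CSV`, `SIZE_LIMIT = 5`, or a nested list such as
    /// `FILE_FORMAT = (TYPE = CSV)`.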
18794    pub(crate) fn parse_key_value_option(
18795        &mut self,
18796        key: &Word,
18797    ) -> Result<KeyValueOption, ParserError> {
18798        self.expect_token(&Token::Eq)?;
18799        match self.peek_token().token {
18800            Token::SingleQuotedString(_) => Ok(KeyValueOption {
18801                option_name: key.value.clone(),
18802                option_value: KeyValueOptionKind::Single(self.parse_value()?.into()),
18803            }),
18804            Token::Word(word)
18805                if word.keyword == Keyword::TRUE || word.keyword == Keyword::FALSE =>
18806            {
18807                Ok(KeyValueOption {
18808                    option_name: key.value.clone(),
18809                    option_value: KeyValueOptionKind::Single(self.parse_value()?.into()),
18810                })
18811            }
18812            Token::Number(..) => Ok(KeyValueOption {
18813                option_name: key.value.clone(),
18814                option_value: KeyValueOptionKind::Single(self.parse_value()?.into()),
18815            }),
18816            Token::Word(word) => {
18817                self.next_token();
18818                Ok(KeyValueOption {
18819                    option_name: key.value.clone(),
18820                    option_value: KeyValueOptionKind::Single(Value::Placeholder(
18821                        word.value.clone(),
18822                    )),
18823                })
18824            }
18825            Token::LParen => {
18826                // Can be a list of values or a list of key-value properties.
18827                // Try to parse a list of values and if that fails, try to parse
18828                // a list of key-value properties.
18829                match self.maybe_parse(|parser| {
18830                    parser.expect_token(&Token::LParen)?;
18831                    let values = parser.parse_comma_separated0(|p| p.parse_value(), Token::RParen);
18832                    parser.expect_token(&Token::RParen)?;
18833                    values
18834                })? {
18835                    Some(values) => {
18836                        let values = values.into_iter().map(|v| v.value).collect();
18837                        Ok(KeyValueOption {
18838                            option_name: key.value.clone(),
18839                            option_value: KeyValueOptionKind::Multi(values),
18840                        })
18841                    }
18842                    None => Ok(KeyValueOption {
18843                        option_name: key.value.clone(),
18844                        option_value: KeyValueOptionKind::KeyValueOptions(Box::new(
18845                            self.parse_key_value_options(true, &[])?,
18846                        )),
18847                    }),
18848                }
18849            }
18850            _ => self.expected("expected option value", self.peek_token()),
18851        }
18852    }
18853
18854    /// Parses a RESET statement
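    ///
    /// Examples: `RESET ALL`, or the PostgreSQL-style `RESET search_path`.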
18855    fn parse_reset(&mut self) -> Result<Statement, ParserError> {
18856        if self.parse_keyword(Keyword::ALL) {
18857            return Ok(Statement::Reset(ResetStatement { reset: Reset::ALL }));
18858        }
18859
18860        let obj = self.parse_object_name(false)?;
18861        Ok(Statement::Reset(ResetStatement {
18862            reset: Reset::ConfigurationParameter(obj),
18863        }))
18864    }
18865}
18866
18867fn maybe_prefixed_expr(expr: Expr, prefix: Option<Ident>) -> Expr {
18868    if let Some(prefix) = prefix {
18869        Expr::Prefixed {
18870            prefix,
18871            value: Box::new(expr),
18872        }
18873    } else {
18874        expr
18875    }
18876}
18877
18878impl Word {
18879    #[deprecated(since = "0.54.0", note = "please use `into_ident` instead")]
18880    pub fn to_ident(&self, span: Span) -> Ident {
18881        Ident {
18882            value: self.value.clone(),
18883            quote_style: self.quote_style,
18884            span,
18885        }
18886    }
18887
18888    /// Convert this word into an [`Ident`] identifier
18889    pub fn into_ident(self, span: Span) -> Ident {
18890        Ident {
18891            value: self.value,
18892            quote_style: self.quote_style,
18893            span,
18894        }
18895    }
18896}
18897
18898#[cfg(test)]
18899mod tests {
18900    use crate::test_utils::{all_dialects, TestedDialects};
18901
18902    use super::*;
18903
18904    #[test]
18905    fn test_prev_index() {
18906        let sql = "SELECT version";
18907        all_dialects().run_parser_method(sql, |parser| {
18908            assert_eq!(parser.peek_token(), Token::make_keyword("SELECT"));
18909            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
18910            parser.prev_token();
18911            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
18912            assert_eq!(parser.next_token(), Token::make_word("version", None));
18913            parser.prev_token();
18914            assert_eq!(parser.peek_token(), Token::make_word("version", None));
18915            assert_eq!(parser.next_token(), Token::make_word("version", None));
18916            assert_eq!(parser.peek_token(), Token::EOF);
18917            parser.prev_token();
18918            assert_eq!(parser.next_token(), Token::make_word("version", None));
18919            assert_eq!(parser.next_token(), Token::EOF);
18920            assert_eq!(parser.next_token(), Token::EOF);
18921            parser.prev_token();
18922        });
18923    }
18924
18925    #[test]
18926    fn test_peek_tokens() {
18927        all_dialects().run_parser_method("SELECT foo AS bar FROM baz", |parser| {
18928            assert!(matches!(
18929                parser.peek_tokens(),
18930                [Token::Word(Word {
18931                    keyword: Keyword::SELECT,
18932                    ..
18933                })]
18934            ));
18935
18936            assert!(matches!(
18937                parser.peek_tokens(),
18938                [
18939                    Token::Word(Word {
18940                        keyword: Keyword::SELECT,
18941                        ..
18942                    }),
18943                    Token::Word(_),
18944                    Token::Word(Word {
18945                        keyword: Keyword::AS,
18946                        ..
18947                    }),
18948                ]
18949            ));
18950
18951            for _ in 0..4 {
18952                parser.next_token();
18953            }
18954
18955            assert!(matches!(
18956                parser.peek_tokens(),
18957                [
18958                    Token::Word(Word {
18959                        keyword: Keyword::FROM,
18960                        ..
18961                    }),
18962                    Token::Word(_),
18963                    Token::EOF,
18964                    Token::EOF,
18965                ]
18966            ))
18967        })
18968    }
18969
18970    #[cfg(test)]
18971    mod test_parse_data_type {
18972        use crate::ast::{
18973            CharLengthUnits, CharacterLength, DataType, ExactNumberInfo, ObjectName, TimezoneInfo,
18974        };
18975        use crate::dialect::{AnsiDialect, GenericDialect, PostgreSqlDialect};
18976        use crate::test_utils::TestedDialects;
18977
18978        macro_rules! test_parse_data_type {
18979            ($dialect:expr, $input:expr, $expected_type:expr $(,)?) => {{
18980                $dialect.run_parser_method(&*$input, |parser| {
18981                    let data_type = parser.parse_data_type().unwrap();
18982                    assert_eq!($expected_type, data_type);
18983                    assert_eq!($input.to_string(), data_type.to_string());
18984                });
18985            }};
18986        }
18987
18988        #[test]
18989        fn test_ansii_character_string_types() {
18990            // Character string types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#character-string-type>
18991            let dialect =
18992                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
18993
18994            test_parse_data_type!(dialect, "CHARACTER", DataType::Character(None));
18995
18996            test_parse_data_type!(
18997                dialect,
18998                "CHARACTER(20)",
18999                DataType::Character(Some(CharacterLength::IntegerLength {
19000                    length: 20,
19001                    unit: None
19002                }))
19003            );
19004
19005            test_parse_data_type!(
19006                dialect,
19007                "CHARACTER(20 CHARACTERS)",
19008                DataType::Character(Some(CharacterLength::IntegerLength {
19009                    length: 20,
19010                    unit: Some(CharLengthUnits::Characters)
19011                }))
19012            );
19013
19014            test_parse_data_type!(
19015                dialect,
19016                "CHARACTER(20 OCTETS)",
19017                DataType::Character(Some(CharacterLength::IntegerLength {
19018                    length: 20,
19019                    unit: Some(CharLengthUnits::Octets)
19020                }))
19021            );
19022
19023            test_parse_data_type!(dialect, "CHAR", DataType::Char(None));
19024
19025            test_parse_data_type!(
19026                dialect,
19027                "CHAR(20)",
19028                DataType::Char(Some(CharacterLength::IntegerLength {
19029                    length: 20,
19030                    unit: None
19031                }))
19032            );
19033
19034            test_parse_data_type!(
19035                dialect,
19036                "CHAR(20 CHARACTERS)",
19037                DataType::Char(Some(CharacterLength::IntegerLength {
19038                    length: 20,
19039                    unit: Some(CharLengthUnits::Characters)
19040                }))
19041            );
19042
19043            test_parse_data_type!(
19044                dialect,
19045                "CHAR(20 OCTETS)",
19046                DataType::Char(Some(CharacterLength::IntegerLength {
19047                    length: 20,
19048                    unit: Some(CharLengthUnits::Octets)
19049                }))
19050            );
19051
19052            test_parse_data_type!(
19053                dialect,
19054                "CHARACTER VARYING(20)",
19055                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
19056                    length: 20,
19057                    unit: None
19058                }))
19059            );
19060
19061            test_parse_data_type!(
19062                dialect,
19063                "CHARACTER VARYING(20 CHARACTERS)",
19064                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
19065                    length: 20,
19066                    unit: Some(CharLengthUnits::Characters)
19067                }))
19068            );
19069
19070            test_parse_data_type!(
19071                dialect,
19072                "CHARACTER VARYING(20 OCTETS)",
19073                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
19074                    length: 20,
19075                    unit: Some(CharLengthUnits::Octets)
19076                }))
19077            );
19078
19079            test_parse_data_type!(
19080                dialect,
19081                "CHAR VARYING(20)",
19082                DataType::CharVarying(Some(CharacterLength::IntegerLength {
19083                    length: 20,
19084                    unit: None
19085                }))
19086            );
19087
19088            test_parse_data_type!(
19089                dialect,
19090                "CHAR VARYING(20 CHARACTERS)",
19091                DataType::CharVarying(Some(CharacterLength::IntegerLength {
19092                    length: 20,
19093                    unit: Some(CharLengthUnits::Characters)
19094                }))
19095            );
19096
19097            test_parse_data_type!(
19098                dialect,
19099                "CHAR VARYING(20 OCTETS)",
19100                DataType::CharVarying(Some(CharacterLength::IntegerLength {
19101                    length: 20,
19102                    unit: Some(CharLengthUnits::Octets)
19103                }))
19104            );
19105
19106            test_parse_data_type!(
19107                dialect,
19108                "VARCHAR(20)",
19109                DataType::Varchar(Some(CharacterLength::IntegerLength {
19110                    length: 20,
19111                    unit: None
19112                }))
19113            );
19114        }
19115
19116        #[test]
19117        fn test_ansii_character_large_object_types() {
19118            // Character large object types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#character-large-object-length>
19119            let dialect =
19120                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
19121
19122            test_parse_data_type!(
19123                dialect,
19124                "CHARACTER LARGE OBJECT",
19125                DataType::CharacterLargeObject(None)
19126            );
19127            test_parse_data_type!(
19128                dialect,
19129                "CHARACTER LARGE OBJECT(20)",
19130                DataType::CharacterLargeObject(Some(20))
19131            );
19132
19133            test_parse_data_type!(
19134                dialect,
19135                "CHAR LARGE OBJECT",
19136                DataType::CharLargeObject(None)
19137            );
19138            test_parse_data_type!(
19139                dialect,
19140                "CHAR LARGE OBJECT(20)",
19141                DataType::CharLargeObject(Some(20))
19142            );
19143
19144            test_parse_data_type!(dialect, "CLOB", DataType::Clob(None));
19145            test_parse_data_type!(dialect, "CLOB(20)", DataType::Clob(Some(20)));
19146        }
19147
19148        #[test]
19149        fn test_parse_custom_types() {
19150            let dialect =
19151                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
19152
19153            test_parse_data_type!(
19154                dialect,
19155                "GEOMETRY",
19156                DataType::Custom(ObjectName::from(vec!["GEOMETRY".into()]), vec![])
19157            );
19158
19159            test_parse_data_type!(
19160                dialect,
19161                "GEOMETRY(POINT)",
19162                DataType::Custom(
19163                    ObjectName::from(vec!["GEOMETRY".into()]),
19164                    vec!["POINT".to_string()]
19165                )
19166            );
19167
19168            test_parse_data_type!(
19169                dialect,
19170                "GEOMETRY(POINT, 4326)",
19171                DataType::Custom(
19172                    ObjectName::from(vec!["GEOMETRY".into()]),
19173                    vec!["POINT".to_string(), "4326".to_string()]
19174                )
19175            );
19176        }
19177
19178        #[test]
19179        fn test_ansii_exact_numeric_types() {
19180            // Exact numeric types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#exact-numeric-type>
19181            let dialect = TestedDialects::new(vec![
19182                Box::new(GenericDialect {}),
19183                Box::new(AnsiDialect {}),
19184                Box::new(PostgreSqlDialect {}),
19185            ]);
19186
19187            test_parse_data_type!(dialect, "NUMERIC", DataType::Numeric(ExactNumberInfo::None));
19188
19189            test_parse_data_type!(
19190                dialect,
19191                "NUMERIC(2)",
19192                DataType::Numeric(ExactNumberInfo::Precision(2))
19193            );
19194
19195            test_parse_data_type!(
19196                dialect,
19197                "NUMERIC(2,10)",
19198                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(2, 10))
19199            );
19200
19201            test_parse_data_type!(dialect, "DECIMAL", DataType::Decimal(ExactNumberInfo::None));
19202
19203            test_parse_data_type!(
19204                dialect,
19205                "DECIMAL(2)",
19206                DataType::Decimal(ExactNumberInfo::Precision(2))
19207            );
19208
19209            test_parse_data_type!(
19210                dialect,
19211                "DECIMAL(2,10)",
19212                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(2, 10))
19213            );
19214
19215            test_parse_data_type!(dialect, "DEC", DataType::Dec(ExactNumberInfo::None));
19216
19217            test_parse_data_type!(
19218                dialect,
19219                "DEC(2)",
19220                DataType::Dec(ExactNumberInfo::Precision(2))
19221            );
19222
19223            test_parse_data_type!(
19224                dialect,
19225                "DEC(2,10)",
19226                DataType::Dec(ExactNumberInfo::PrecisionAndScale(2, 10))
19227            );
19228
19229            // Test negative scale values.
19230            test_parse_data_type!(
19231                dialect,
19232                "NUMERIC(10,-2)",
19233                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -2))
19234            );
19235
19236            test_parse_data_type!(
19237                dialect,
19238                "DECIMAL(1000,-10)",
19239                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(1000, -10))
19240            );
19241
19242            test_parse_data_type!(
19243                dialect,
19244                "DEC(5,-1000)",
19245                DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -1000))
19246            );
19247
19248            test_parse_data_type!(
19249                dialect,
19250                "NUMERIC(10,-5)",
19251                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -5))
19252            );
19253
19254            test_parse_data_type!(
19255                dialect,
19256                "DECIMAL(20,-10)",
19257                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(20, -10))
19258            );
19259
19260            test_parse_data_type!(
19261                dialect,
19262                "DEC(5,-2)",
19263                DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -2))
19264            );
19265
19266            dialect.run_parser_method("NUMERIC(10,+5)", |parser| {
19267                let data_type = parser.parse_data_type().unwrap();
19268                assert_eq!(
19269                    DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, 5)),
19270                    data_type
19271                );
19272                // Note: Explicit '+' sign is not preserved in output, which is correct
19273                assert_eq!("NUMERIC(10,5)", data_type.to_string());
19274            });
19275        }
19276
19277        #[test]
19278        fn test_ansii_date_type() {
19279            // Datetime types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#datetime-type>
19280            let dialect =
19281                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
19282
19283            test_parse_data_type!(dialect, "DATE", DataType::Date);
19284
19285            test_parse_data_type!(dialect, "TIME", DataType::Time(None, TimezoneInfo::None));
19286
19287            test_parse_data_type!(
19288                dialect,
19289                "TIME(6)",
19290                DataType::Time(Some(6), TimezoneInfo::None)
19291            );
19292
19293            test_parse_data_type!(
19294                dialect,
19295                "TIME WITH TIME ZONE",
19296                DataType::Time(None, TimezoneInfo::WithTimeZone)
19297            );
19298
19299            test_parse_data_type!(
19300                dialect,
19301                "TIME(6) WITH TIME ZONE",
19302                DataType::Time(Some(6), TimezoneInfo::WithTimeZone)
19303            );
19304
19305            test_parse_data_type!(
19306                dialect,
19307                "TIME WITHOUT TIME ZONE",
19308                DataType::Time(None, TimezoneInfo::WithoutTimeZone)
19309            );
19310
19311            test_parse_data_type!(
19312                dialect,
19313                "TIME(6) WITHOUT TIME ZONE",
19314                DataType::Time(Some(6), TimezoneInfo::WithoutTimeZone)
19315            );
19316
19317            test_parse_data_type!(
19318                dialect,
19319                "TIMESTAMP",
19320                DataType::Timestamp(None, TimezoneInfo::None)
19321            );
19322
19323            test_parse_data_type!(
19324                dialect,
19325                "TIMESTAMP(22)",
19326                DataType::Timestamp(Some(22), TimezoneInfo::None)
19327            );
19328
19329            test_parse_data_type!(
19330                dialect,
19331                "TIMESTAMP(22) WITH TIME ZONE",
19332                DataType::Timestamp(Some(22), TimezoneInfo::WithTimeZone)
19333            );
19334
19335            test_parse_data_type!(
19336                dialect,
19337                "TIMESTAMP(33) WITHOUT TIME ZONE",
19338                DataType::Timestamp(Some(33), TimezoneInfo::WithoutTimeZone)
19339            );
19340        }
19341    }
19342
19343    #[test]
19344    fn test_parse_schema_name() {
19345        // The expected name should be identical to the input name, which is why only the input is passed in
19346        macro_rules! test_parse_schema_name {
19347            ($input:expr, $expected_name:expr $(,)?) => {{
19348                all_dialects().run_parser_method(&*$input, |parser| {
19349                    let schema_name = parser.parse_schema_name().unwrap();
19350                    // Validate that the structure is the same as expected
19351                    assert_eq!(schema_name, $expected_name);
19352                    // Validate that the input and the expected structure serialization are the same
19353                    assert_eq!(schema_name.to_string(), $input.to_string());
19354                });
19355            }};
19356        }
19357
19358        let dummy_name = ObjectName::from(vec![Ident::new("dummy_name")]);
19359        let dummy_authorization = Ident::new("dummy_authorization");
19360
19361        test_parse_schema_name!(
19362            format!("{dummy_name}"),
19363            SchemaName::Simple(dummy_name.clone())
19364        );
19365
19366        test_parse_schema_name!(
19367            format!("AUTHORIZATION {dummy_authorization}"),
19368            SchemaName::UnnamedAuthorization(dummy_authorization.clone()),
19369        );
19370        test_parse_schema_name!(
19371            format!("{dummy_name} AUTHORIZATION {dummy_authorization}"),
19372            SchemaName::NamedAuthorization(dummy_name.clone(), dummy_authorization.clone()),
19373        );
19374    }
19375
19376    #[test]
19377    fn mysql_parse_index_table_constraint() {
19378        macro_rules! test_parse_table_constraint {
19379            ($dialect:expr, $input:expr, $expected:expr $(,)?) => {{
19380                $dialect.run_parser_method(&*$input, |parser| {
19381                    let constraint = parser.parse_optional_table_constraint().unwrap().unwrap();
19382                    // Validate that the structure is the same as expected
19383                    assert_eq!(constraint, $expected);
19384                    // Validate that the input and the expected structure serialization are the same
19385                    assert_eq!(constraint.to_string(), $input.to_string());
19386                });
19387            }};
19388        }
19389
19390        fn mk_expected_col(name: &str) -> IndexColumn {
19391            IndexColumn {
19392                column: OrderByExpr {
19393                    expr: Expr::Identifier(name.into()),
19394                    options: OrderByOptions {
19395                        asc: None,
19396                        nulls_first: None,
19397                    },
19398                    with_fill: None,
19399                },
19400                operator_class: None,
19401            }
19402        }
19403
19404        let dialect =
19405            TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(MySqlDialect {})]);
19406
19407        test_parse_table_constraint!(
19408            dialect,
19409            "INDEX (c1)",
19410            IndexConstraint {
19411                display_as_key: false,
19412                name: None,
19413                index_type: None,
19414                columns: vec![mk_expected_col("c1")],
19415                index_options: vec![],
19416            }
19417            .into()
19418        );
19419
19420        test_parse_table_constraint!(
19421            dialect,
19422            "KEY (c1)",
19423            IndexConstraint {
19424                display_as_key: true,
19425                name: None,
19426                index_type: None,
19427                columns: vec![mk_expected_col("c1")],
19428                index_options: vec![],
19429            }
19430            .into()
19431        );
19432
19433        test_parse_table_constraint!(
19434            dialect,
19435            "INDEX 'index' (c1, c2)",
19436            TableConstraint::Index(IndexConstraint {
19437                display_as_key: false,
19438                name: Some(Ident::with_quote('\'', "index")),
19439                index_type: None,
19440                columns: vec![mk_expected_col("c1"), mk_expected_col("c2")],
19441                index_options: vec![],
19442            })
19443        );
19444
19445        test_parse_table_constraint!(
19446            dialect,
19447            "INDEX USING BTREE (c1)",
19448            IndexConstraint {
19449                display_as_key: false,
19450                name: None,
19451                index_type: Some(IndexType::BTree),
19452                columns: vec![mk_expected_col("c1")],
19453                index_options: vec![],
19454            }
19455            .into()
19456        );
19457
19458        test_parse_table_constraint!(
19459            dialect,
19460            "INDEX USING HASH (c1)",
19461            IndexConstraint {
19462                display_as_key: false,
19463                name: None,
19464                index_type: Some(IndexType::Hash),
19465                columns: vec![mk_expected_col("c1")],
19466                index_options: vec![],
19467            }
19468            .into()
19469        );
19470
19471        test_parse_table_constraint!(
19472            dialect,
19473            "INDEX idx_name USING BTREE (c1)",
19474            IndexConstraint {
19475                display_as_key: false,
19476                name: Some(Ident::new("idx_name")),
19477                index_type: Some(IndexType::BTree),
19478                columns: vec![mk_expected_col("c1")],
19479                index_options: vec![],
19480            }
19481            .into()
19482        );
19483
19484        test_parse_table_constraint!(
19485            dialect,
19486            "INDEX idx_name USING HASH (c1)",
19487            IndexConstraint {
19488                display_as_key: false,
19489                name: Some(Ident::new("idx_name")),
19490                index_type: Some(IndexType::Hash),
19491                columns: vec![mk_expected_col("c1")],
19492                index_options: vec![],
19493            }
19494            .into()
19495        );
19496    }
19497
    #[test]
    fn test_tokenizer_error_loc() {
        let sql = "foo '";
        let ast = Parser::parse_sql(&GenericDialect, sql);
        assert_eq!(
            ast,
            Err(ParserError::TokenizerError(
                "Unterminated string literal at Line: 1, Column: 5".to_string()
            ))
        );
    }

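    // Parser errors should likewise include the location of the unexpected token.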
    #[test]
    fn test_parser_error_loc() {
        let sql = "SELECT this is a syntax error";
        let ast = Parser::parse_sql(&GenericDialect, sql);
        assert_eq!(
            ast,
            Err(ParserError::ParserError(
                "Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS, found: a at Line: 1, Column: 16"
                    .to_string()
            ))
        );
    }

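    // `EXPLAIN` cannot itself be the statement being explained.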
    #[test]
    fn test_nested_explain_error() {
        let sql = "EXPLAIN EXPLAIN SELECT 1";
        let ast = Parser::parse_sql(&GenericDialect, sql);
        assert_eq!(
            ast,
            Err(ParserError::ParserError(
                "Explain must be root of the plan".to_string()
            ))
        );
    }

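    // `parse_multipart_identifier` should handle quoted parts (including embedded
    // quotes and periods) as well as whitespace around the separating periods.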
    #[test]
    fn test_parse_multipart_identifier_positive() {
        let dialect = TestedDialects::new(vec![Box::new(GenericDialect {})]);

        // parse multipart with quotes
        let expected = vec![
            Ident {
                value: "CATALOG".to_string(),
                quote_style: None,
                span: Span::empty(),
            },
            Ident {
                value: "F(o)o. \"bar".to_string(),
                quote_style: Some('"'),
                span: Span::empty(),
            },
            Ident {
                value: "table".to_string(),
                quote_style: None,
                span: Span::empty(),
            },
        ];
        dialect.run_parser_method(r#"CATALOG."F(o)o. ""bar".table"#, |parser| {
            let actual = parser.parse_multipart_identifier().unwrap();
            assert_eq!(expected, actual);
        });

        // allow whitespace between ident parts
        let expected = vec![
            Ident {
                value: "CATALOG".to_string(),
                quote_style: None,
                span: Span::empty(),
            },
            Ident {
                value: "table".to_string(),
                quote_style: None,
                span: Span::empty(),
            },
        ];
        dialect.run_parser_method("CATALOG . table", |parser| {
            let actual = parser.parse_multipart_identifier().unwrap();
            assert_eq!(expected, actual);
        });
    }

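    // Malformed multipart identifiers should produce descriptive errors rather
    // than a partial result.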
    #[test]
    fn test_parse_multipart_identifier_negative() {
        macro_rules! test_parse_multipart_identifier_error {
            ($input:expr, $expected_err:expr $(,)?) => {{
                all_dialects().run_parser_method(&*$input, |parser| {
                    let actual_err = parser.parse_multipart_identifier().unwrap_err();
                    assert_eq!(actual_err.to_string(), $expected_err);
                });
            }};
        }

        test_parse_multipart_identifier_error!(
            "",
            "sql parser error: Empty input when parsing identifier",
        );

        test_parse_multipart_identifier_error!(
            "*schema.table",
            "sql parser error: Unexpected token in identifier: *",
        );

        test_parse_multipart_identifier_error!(
            "schema.table*",
            "sql parser error: Unexpected token in identifier: *",
        );

        test_parse_multipart_identifier_error!(
            "schema.table.",
            "sql parser error: Trailing period in identifier",
        );

        test_parse_multipart_identifier_error!(
            "schema.*",
            "sql parser error: Unexpected token following period in identifier: *",
        );
    }

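    // MySQL allows reading from specific partitions with `PARTITION (p0, p2)`;
    // the partition names should be captured on the table factor.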
    #[test]
    fn test_mysql_partition_selection() {
        let sql = "SELECT * FROM employees PARTITION (p0, p2)";
        let expected = vec!["p0", "p2"];

        let ast: Vec<Statement> = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
        assert_eq!(ast.len(), 1);
        if let Statement::Query(v) = &ast[0] {
            if let SetExpr::Select(select) = &*v.body {
                assert_eq!(select.from.len(), 1);
                let from: &TableWithJoins = &select.from[0];
                let table_factor = &from.relation;
                if let TableFactor::Table { partitions, .. } = table_factor {
                    let actual: Vec<&str> = partitions
                        .iter()
                        .map(|ident| ident.value.as_str())
                        .collect();
                    assert_eq!(expected, actual);
                }
            }
        } else {
            panic!("failed to parse MySQL partition selection");
        }
    }

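    // `&a` is not a supported placeholder syntax, so parsing should fail.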
    #[test]
    fn test_replace_into_placeholders() {
        let sql = "REPLACE INTO t (a) VALUES (&a)";

        assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
    }

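    // A bare placeholder after `SET` (rather than an assignment list) should be
    // rejected.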
    #[test]
    fn test_replace_into_set_placeholder() {
        let sql = "REPLACE INTO t SET ?";

        assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
    }

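    // A lone `REPLACE` keyword with nothing following it is an error.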
    #[test]
    fn test_replace_incomplete() {
        let sql = r#"REPLACE"#;

        assert!(Parser::parse_sql(&MySqlDialect {}, sql).is_err());
    }

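    // Whitespace or a comment between `:` and the placeholder name makes the
    // placeholder invalid.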
    #[test]
    fn test_placeholder_invalid_whitespace() {
        for w in ["  ", "/*invalid*/"] {
            let sql = format!("\nSELECT\n  :{w}fooBar");
            assert!(Parser::parse_sql(&GenericDialect, &sql).is_err());
        }
    }
}