
sqlparser/parser/mod.rs

1// Licensed under the Apache License, Version 2.0 (the "License");
2// you may not use this file except in compliance with the License.
3// You may obtain a copy of the License at
4//
5// http://www.apache.org/licenses/LICENSE-2.0
6//
7// Unless required by applicable law or agreed to in writing, software
8// distributed under the License is distributed on an "AS IS" BASIS,
9// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
10// See the License for the specific language governing permissions and
11// limitations under the License.
12
13//! SQL Parser
14
15#[cfg(not(feature = "std"))]
16use alloc::{
17    boxed::Box,
18    format,
19    string::{String, ToString},
20    vec,
21    vec::Vec,
22};
23use core::{
24    fmt::{self, Display},
25    str::FromStr,
26};
27use helpers::attached_token::AttachedToken;
28
29use log::debug;
30
31use recursion::RecursionCounter;
32use IsLateral::*;
33use IsOptional::*;
34
35use crate::ast::Statement::CreatePolicy;
36use crate::ast::*;
37use crate::ast::{
38    comments,
39    helpers::{
40        key_value_options::{
41            KeyValueOption, KeyValueOptionKind, KeyValueOptions, KeyValueOptionsDelimiter,
42        },
43        stmt_create_table::{CreateTableBuilder, CreateTableConfiguration},
44    },
45};
46use crate::dialect::*;
47use crate::keywords::{Keyword, ALL_KEYWORDS};
48use crate::tokenizer::*;
49use self::ParserState::ColumnDefinition;
50
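/// The error type returned by [`Parser`] methods.
///
/// A minimal sketch of the error text produced by the `Display` impl further
/// down in this file (every message is prefixed with `sql parser error:`):
///
/// ```
/// # use sqlparser::parser::ParserError;
/// let err = ParserError::ParserError("expected something".to_string());
/// assert_eq!(err.to_string(), "sql parser error: expected something");
/// ```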
51#[derive(Debug, Clone, PartialEq, Eq)]
52pub enum ParserError {
53    TokenizerError(String),
54    ParserError(String),
55    RecursionLimitExceeded,
56}
57
58// Use `Parser::expected` instead, if possible
59macro_rules! parser_err {
60    ($MSG:expr, $loc:expr) => {
61        Err(ParserError::ParserError(format!("{}{}", $MSG, $loc)))
62    };
63}
64
65mod alter;
66mod merge;
67
68#[cfg(feature = "std")]
69/// Implementation of [`RecursionCounter`] when std is available
70mod recursion {
71    use std::cell::Cell;
72    use std::rc::Rc;
73
74    use super::ParserError;
75
76    /// Tracks remaining recursion depth. This value is decremented on
77    /// each call to [`RecursionCounter::try_decrease()`], when it reaches 0 an error will
78    /// be returned.
79    ///
80    /// Note: Uses an [`std::rc::Rc`] and [`std::cell::Cell`] in order to satisfy the Rust
81    /// borrow checker, so that the automatic [`DepthGuard`] can hold a shared
82    /// reference to the counter and increment it again when dropped.
83    ///
84    /// Note: when the "recursive-protection" feature is enabled, this crate uses additional stack overflow protection
85    /// for some of its recursive methods. See [`recursive::recursive`] for more information.
86    pub(crate) struct RecursionCounter {
87        remaining_depth: Rc<Cell<usize>>,
88    }
89
90    impl RecursionCounter {
91        /// Creates a [`RecursionCounter`] with the specified maximum
92        /// depth
93        pub fn new(remaining_depth: usize) -> Self {
94            Self {
95                remaining_depth: Rc::new(remaining_depth.into()),
96            }
97        }
98
99        /// Decreases the remaining depth by 1.
100        ///
101        /// Returns [`Err`] if the remaining depth is already 0.
102        ///
103        /// Returns a [`DepthGuard`] which adds 1 back to the
104        /// remaining depth when it is dropped.
105        pub fn try_decrease(&self) -> Result<DepthGuard, ParserError> {
106            let old_value = self.remaining_depth.get();
107            // ran out of space
108            if old_value == 0 {
109                Err(ParserError::RecursionLimitExceeded)
110            } else {
111                self.remaining_depth.set(old_value - 1);
112                Ok(DepthGuard::new(Rc::clone(&self.remaining_depth)))
113            }
114        }
115    }
116
117    /// Guard that increases the remaining depth by 1 on drop
118    pub struct DepthGuard {
119        remaining_depth: Rc<Cell<usize>>,
120    }
121
122    impl DepthGuard {
123        fn new(remaining_depth: Rc<Cell<usize>>) -> Self {
124            Self { remaining_depth }
125        }
126    }
127    impl Drop for DepthGuard {
128        fn drop(&mut self) {
129            let old_value = self.remaining_depth.get();
130            self.remaining_depth.set(old_value + 1);
131        }
132    }
133}
134
135#[cfg(not(feature = "std"))]
136mod recursion {
137    /// Implementation of [`RecursionCounter`] when std is NOT available (it does not
138    /// guard against stack overflow).
139    ///
140    /// Has the same API as the std [`RecursionCounter`] implementation
141    /// but does not actually limit stack depth.
142    pub(crate) struct RecursionCounter {}
143
144    impl RecursionCounter {
145        pub fn new(_remaining_depth: usize) -> Self {
146            Self {}
147        }
148        pub fn try_decrease(&self) -> Result<DepthGuard, super::ParserError> {
149            Ok(DepthGuard {})
150        }
151    }
152
153    pub struct DepthGuard {}
154}
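// A small illustrative test (an addition for clarity, not part of the upstream file)
// showing how `RecursionCounter` and `DepthGuard` cooperate: each successful call to
// `try_decrease` borrows one unit of depth, and the returned guard gives it back on drop.
#[cfg(all(test, feature = "std"))]
mod recursion_counter_example {
    use super::recursion::RecursionCounter;

    #[test]
    fn depth_is_restored_when_guards_are_dropped() {
        let counter = RecursionCounter::new(2);
        let _g1 = counter.try_decrease().expect("depth 2 -> 1");
        let g2 = counter.try_decrease().expect("depth 1 -> 0");
        // Remaining depth is now 0, so the next attempt fails.
        assert!(counter.try_decrease().is_err());
        // Dropping a guard returns its unit of depth (0 -> 1).
        drop(g2);
        assert!(counter.try_decrease().is_ok());
    }
}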
155
156#[derive(PartialEq, Eq)]
157pub enum IsOptional {
158    Optional,
159    Mandatory,
160}
161
162pub enum IsLateral {
163    Lateral,
164    NotLateral,
165}
166
167pub enum WildcardExpr {
168    Expr(Expr),
169    QualifiedWildcard(ObjectName),
170    Wildcard,
171}
172
173impl From<TokenizerError> for ParserError {
174    fn from(e: TokenizerError) -> Self {
175        ParserError::TokenizerError(e.to_string())
176    }
177}
178
179impl fmt::Display for ParserError {
180    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
181        write!(
182            f,
183            "sql parser error: {}",
184            match self {
185                ParserError::TokenizerError(s) => s,
186                ParserError::ParserError(s) => s,
187                ParserError::RecursionLimitExceeded => "recursion limit exceeded",
188            }
189        )
190    }
191}
192
193#[cfg(feature = "std")]
194impl std::error::Error for ParserError {}
195
196// By default, allow expressions up to this depth before erroring
197const DEFAULT_REMAINING_DEPTH: usize = 50;
198
199// A constant EOF token that can be referenced.
200const EOF_TOKEN: TokenWithSpan = TokenWithSpan {
201    token: Token::EOF,
202    span: Span {
203        start: Location { line: 0, column: 0 },
204        end: Location { line: 0, column: 0 },
205    },
206};
207
208/// Composite type declarations using angle bracket syntax can be arbitrarily
209/// nested, such that the following declaration is possible:
210///      `ARRAY<ARRAY<INT>>`
211/// But the tokenizer recognizes the `>>` as a ShiftRight token.
212/// We work around that limitation when parsing a data type by accepting
213/// either a `>` or `>>` token in such cases, remembering which variant we
214/// matched.
215/// In the latter case, having matched a `>>`, the parent type will not try to
216/// match its own closing `>`, since that has already been consumed while
217/// parsing the child type.
218///
219/// See [Parser::parse_data_type] for details
220struct MatchedTrailingBracket(bool);
221
222impl From<bool> for MatchedTrailingBracket {
223    fn from(value: bool) -> Self {
224        Self(value)
225    }
226}
227
228/// Options that control how the [`Parser`] parses SQL text
229#[derive(Debug, Clone, PartialEq, Eq)]
230pub struct ParserOptions {
231    pub trailing_commas: bool,
232    /// Controls how literal values are unescaped. See
233    /// [`Tokenizer::with_unescape`] for more details.
234    pub unescape: bool,
235    /// Controls if the parser expects a semi-colon token
236    /// between statements. Default is `true`.
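    ///
    /// A minimal sketch of turning the requirement off by constructing the
    /// options struct directly (statement boundaries are then inferred from
    /// the grammar):
    ///
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError, ParserOptions}, dialect::GenericDialect};
    /// # fn main() -> Result<(), ParserError> {
    /// let options = ParserOptions {
    ///     require_semicolon_stmt_delimiter: false,
    ///     ..Default::default()
    /// };
    /// let dialect = GenericDialect{};
    /// let statements = Parser::new(&dialect)
    ///   .with_options(options)
    ///   .try_with_sql("SELECT 1 SELECT 2")?
    ///   .parse_statements()?;
    /// assert_eq!(statements.len(), 2);
    /// # Ok(())
    /// # }
    /// ```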
237    pub require_semicolon_stmt_delimiter: bool,
238}
239
240impl Default for ParserOptions {
241    fn default() -> Self {
242        Self {
243            trailing_commas: false,
244            unescape: true,
245            require_semicolon_stmt_delimiter: true,
246        }
247    }
248}
249
250impl ParserOptions {
251    /// Create a new [`ParserOptions`]
252    pub fn new() -> Self {
253        Default::default()
254    }
255
256    /// Set if trailing commas are allowed.
257    ///
258    /// If this option is `false` (the default), the following SQL will
259    /// not parse. If the option is `true`, the SQL will parse.
260    ///
261    /// ```sql
262    ///  SELECT
263    ///   foo,
264    ///   bar,
265    ///  FROM baz
266    /// ```
267    pub fn with_trailing_commas(mut self, trailing_commas: bool) -> Self {
268        self.trailing_commas = trailing_commas;
269        self
270    }
271
272    /// Set if literal values are unescaped. Defaults to true. See
273    /// [`Tokenizer::with_unescape`] for more details.
274    pub fn with_unescape(mut self, unescape: bool) -> Self {
275        self.unescape = unescape;
276        self
277    }
278}
279
280#[derive(Copy, Clone)]
281enum ParserState {
282    /// The default state of the parser.
283    Normal,
284    /// The state when parsing a CONNECT BY expression. This allows parsing
285    /// PRIOR expressions while still allowing prior as an identifier name
286    /// in other contexts.
287    ConnectBy,
288    /// The state when parsing column definitions. This state prevents `NOT NULL`
289    /// from being parsed as an alias for the operator `IS NOT NULL`. For example:
290    /// ```sql
291    /// CREATE TABLE foo (abc BIGINT NOT NULL);
292    /// ```
293    ColumnDefinition,
294}
295
296/// A SQL Parser
297///
298/// This struct is the main entry point for parsing SQL queries.
299///
300/// # Functionality:
301/// * Parsing SQL: see examples on [`Parser::new`] and [`Parser::parse_sql`]
302/// * Controlling recursion: See [`Parser::with_recursion_limit`]
303/// * Controlling parser options: See [`Parser::with_options`]
304/// * Providing your own tokens: See [`Parser::with_tokens`]
305///
306/// # Internals
307///
308/// The parser uses a [`Tokenizer`] to tokenize the input SQL string into a
309/// `Vec` of [`TokenWithSpan`]s and maintains an `index` pointing at the next
310/// token to be processed. The token vec may contain multiple SQL statements.
311///
312/// * The "current" token is the token at `index - 1`
313/// * The "next" token is the token at `index`
314/// * The "previous" token is the token at `index - 2`
315///
316/// If `index` is equal to the length of the token stream, the 'next' token is
317/// [`Token::EOF`].
318///
319/// For example, the SQL string "SELECT * FROM foo" will be tokenized into
320/// the following tokens:
321/// ```text
322///  [
323///    "SELECT", // token index 0
324///    " ",      // whitespace
325///    "*",
326///    " ",
327///    "FROM",
328///    " ",
329///    "foo"
330///   ]
331/// ```
332///
333///
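/// A short sketch of that bookkeeping using the public token accessors
/// (whitespace tokens are skipped by these methods):
///
/// ```
/// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
/// # use sqlparser::tokenizer::Token;
/// # use sqlparser::keywords::Keyword;
/// # fn main() -> Result<(), ParserError> {
/// let dialect = GenericDialect{};
/// let mut parser = Parser::new(&dialect).try_with_sql("SELECT * FROM foo")?;
/// // The "next" token is SELECT until it is consumed...
/// assert!(matches!(parser.peek_token().token, Token::Word(w) if w.keyword == Keyword::SELECT));
/// let consumed = parser.next_token();
/// assert!(matches!(consumed.token, Token::Word(_)));
/// // ...after which the "next" token is `*`.
/// assert_eq!(parser.peek_token().token, Token::Mul);
/// # Ok(())
/// # }
/// ```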
334pub struct Parser<'a> {
335    /// The tokens
336    tokens: Vec<TokenWithSpan>,
337    /// The index of the first unprocessed token in [`Parser::tokens`].
338    index: usize,
339    /// The current state of the parser.
340    state: ParserState,
341    /// The SQL dialect to use.
342    dialect: &'a dyn Dialect,
343    /// Additional options that allow you to mix & match behavior
344    /// otherwise constrained to certain dialects (e.g. trailing
345    /// commas) and/or the form of the parsed output (e.g. unescaping).
346    options: ParserOptions,
347    /// Ensures the stack does not overflow by limiting recursion depth.
348    recursion_counter: RecursionCounter,
349}
350
351impl<'a> Parser<'a> {
352    /// Create a parser for a [`Dialect`]
353    ///
354    /// See also [`Parser::parse_sql`]
355    ///
356    /// Example:
357    /// ```
358    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
359    /// # fn main() -> Result<(), ParserError> {
360    /// let dialect = GenericDialect{};
361    /// let statements = Parser::new(&dialect)
362    ///   .try_with_sql("SELECT * FROM foo")?
363    ///   .parse_statements()?;
364    /// # Ok(())
365    /// # }
366    /// ```
367    pub fn new(dialect: &'a dyn Dialect) -> Self {
368        Self {
369            tokens: vec![],
370            index: 0,
371            state: ParserState::Normal,
372            dialect,
373            recursion_counter: RecursionCounter::new(DEFAULT_REMAINING_DEPTH),
374            options: ParserOptions::new().with_trailing_commas(dialect.supports_trailing_commas()),
375        }
376    }
377
378    /// Specify the maximum recursion limit while parsing.
379    ///
380    /// [`Parser`] prevents stack overflows by returning
381    /// [`ParserError::RecursionLimitExceeded`] if the parser exceeds
382    /// this depth while processing the query.
383    ///
384    /// Example:
385    /// ```
386    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
387    /// # fn main() -> Result<(), ParserError> {
388    /// let dialect = GenericDialect{};
389    /// let result = Parser::new(&dialect)
390    ///   .with_recursion_limit(1)
391    ///   .try_with_sql("SELECT * FROM foo WHERE (a OR (b OR (c OR d)))")?
392    ///   .parse_statements();
393    ///   assert_eq!(result, Err(ParserError::RecursionLimitExceeded));
394    /// # Ok(())
395    /// # }
396    /// ```
397    ///
398    /// Note: when the "recursive-protection" feature is enabled, this crate uses additional stack overflow protection
399    /// for some of its recursive methods. See [`recursive::recursive`] for more information.
400    pub fn with_recursion_limit(mut self, recursion_limit: usize) -> Self {
401        self.recursion_counter = RecursionCounter::new(recursion_limit);
402        self
403    }
404
405    /// Specify additional parser options
406    ///
407    /// [`Parser`] supports additional options ([`ParserOptions`])
408    /// that allow you to mix & match behavior otherwise constrained
409    /// to certain dialects (e.g. trailing commas).
410    ///
411    /// Example:
412    /// ```
413    /// # use sqlparser::{parser::{Parser, ParserError, ParserOptions}, dialect::GenericDialect};
414    /// # fn main() -> Result<(), ParserError> {
415    /// let dialect = GenericDialect{};
416    /// let options = ParserOptions::new()
417    ///    .with_trailing_commas(true)
418    ///    .with_unescape(false);
419    /// let result = Parser::new(&dialect)
420    ///   .with_options(options)
421    ///   .try_with_sql("SELECT a, b, COUNT(*), FROM foo GROUP BY a, b,")?
422    ///   .parse_statements();
423    ///   assert!(matches!(result, Ok(_)));
424    /// # Ok(())
425    /// # }
426    /// ```
427    pub fn with_options(mut self, options: ParserOptions) -> Self {
428        self.options = options;
429        self
430    }
431
432    /// Reset this parser to parse the specified token stream
433    pub fn with_tokens_with_locations(mut self, tokens: Vec<TokenWithSpan>) -> Self {
434        self.tokens = tokens;
435        self.index = 0;
436        self
437    }
438
439    /// Reset this parser state to parse the specified tokens
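    ///
    /// A minimal sketch of supplying a pre-tokenized statement; whitespace
    /// tokens may be omitted since the parser skips them anyway:
    ///
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
    /// # use sqlparser::tokenizer::{Token, Word};
    /// # use sqlparser::keywords::Keyword;
    /// # fn main() -> Result<(), ParserError> {
    /// let tokens = vec![
    ///     Token::Word(Word {
    ///         value: "SELECT".to_string(),
    ///         quote_style: None,
    ///         keyword: Keyword::SELECT,
    ///     }),
    ///     Token::Number("1".to_string(), false),
    /// ];
    /// let dialect = GenericDialect{};
    /// let statements = Parser::new(&dialect)
    ///   .with_tokens(tokens)
    ///   .parse_statements()?;
    /// assert_eq!(statements.len(), 1);
    /// # Ok(())
    /// # }
    /// ```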
440    pub fn with_tokens(self, tokens: Vec<Token>) -> Self {
441        // Put in dummy locations
442        let tokens_with_locations: Vec<TokenWithSpan> = tokens
443            .into_iter()
444            .map(|token| TokenWithSpan {
445                token,
446                span: Span::empty(),
447            })
448            .collect();
449        self.with_tokens_with_locations(tokens_with_locations)
450    }
451
452    /// Tokenizes the sql string and sets this [`Parser`]'s state to
453    /// parse the resulting tokens.
454    ///
455    /// Returns an error if there was an error tokenizing the SQL string.
456    ///
457    /// See example on [`Parser::new()`] for an example
458    pub fn try_with_sql(self, sql: &str) -> Result<Self, ParserError> {
459        debug!("Parsing sql '{sql}'...");
460        let tokens = Tokenizer::new(self.dialect, sql)
461            .with_unescape(self.options.unescape)
462            .tokenize_with_location()?;
463        Ok(self.with_tokens_with_locations(tokens))
464    }
465
466    /// Parse potentially multiple statements
467    ///
468    /// Example
469    /// ```
470    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
471    /// # fn main() -> Result<(), ParserError> {
472    /// let dialect = GenericDialect{};
473    /// let statements = Parser::new(&dialect)
474    ///   // Parse a SQL string with 2 separate statements
475    ///   .try_with_sql("SELECT * FROM foo; SELECT * FROM bar;")?
476    ///   .parse_statements()?;
477    /// assert_eq!(statements.len(), 2);
478    /// # Ok(())
479    /// # }
480    /// ```
481    pub fn parse_statements(&mut self) -> Result<Vec<Statement>, ParserError> {
482        let mut stmts = Vec::new();
483        let mut expecting_statement_delimiter = false;
484        loop {
485            // ignore empty statements (between successive statement delimiters)
486            while self.consume_token(&Token::SemiColon) {
487                expecting_statement_delimiter = false;
488            }
489
490            if !self.options.require_semicolon_stmt_delimiter {
491                expecting_statement_delimiter = false;
492            }
493
494            match self.peek_token().token {
495                Token::EOF => break,
496
497                // an END keyword where a delimiter was expected ends the statement list
498                Token::Word(word) => {
499                    if expecting_statement_delimiter && word.keyword == Keyword::END {
500                        break;
501                    }
502                }
503                _ => {}
504            }
505
506            if expecting_statement_delimiter {
507                return self.expected("end of statement", self.peek_token());
508            }
509
510            let statement = self.parse_statement()?;
511            stmts.push(statement);
512            expecting_statement_delimiter = true;
513        }
514        Ok(stmts)
515    }
516
517    /// Convenience method to parse a string with one or more SQL
518    /// statements into an Abstract Syntax Tree (AST).
519    ///
520    /// Example
521    /// ```
522    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
523    /// # fn main() -> Result<(), ParserError> {
524    /// let dialect = GenericDialect{};
525    /// let statements = Parser::parse_sql(
526    ///   &dialect, "SELECT * FROM foo"
527    /// )?;
528    /// assert_eq!(statements.len(), 1);
529    /// # Ok(())
530    /// # }
531    /// ```
532    pub fn parse_sql(dialect: &dyn Dialect, sql: &str) -> Result<Vec<Statement>, ParserError> {
533        Parser::new(dialect).try_with_sql(sql)?.parse_statements()
534    }
535
536    /// Parses the given `sql` into an Abstract Syntax Tree (AST), returning
537    /// also any source code comments that were encountered.
538    ///
539    /// See [Parser::parse_sql].
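    ///
    /// A minimal sketch (the comments are collected from the token stream and
    /// returned alongside the parsed statements):
    ///
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let (statements, _comments) = Parser::parse_sql_with_comments(
    ///   &dialect, "SELECT 1 -- a trailing note"
    /// )?;
    /// assert_eq!(statements.len(), 1);
    /// # Ok(())
    /// # }
    /// ```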
540    pub fn parse_sql_with_comments(
541        dialect: &'a dyn Dialect,
542        sql: &str,
543    ) -> Result<(Vec<Statement>, comments::Comments), ParserError> {
544        let mut p = Parser::new(dialect).try_with_sql(sql)?;
545        p.parse_statements().map(|stmts| (stmts, p.into_comments()))
546    }
547
548    /// Consumes this parser returning comments from the parsed token stream.
549    fn into_comments(self) -> comments::Comments {
550        let mut comments = comments::Comments::default();
551        for t in self.tokens.into_iter() {
552            match t.token {
553                Token::Whitespace(Whitespace::SingleLineComment { comment, prefix }) => {
554                    comments.offer(comments::CommentWithSpan {
555                        comment: comments::Comment::SingleLine {
556                            content: comment,
557                            prefix,
558                        },
559                        span: t.span,
560                    });
561                }
562                Token::Whitespace(Whitespace::MultiLineComment(comment)) => {
563                    comments.offer(comments::CommentWithSpan {
564                        comment: comments::Comment::MultiLine(comment),
565                        span: t.span,
566                    });
567                }
568                _ => {}
569            }
570        }
571        comments
572    }
573
574    /// Parse a single top-level statement (such as SELECT, INSERT, CREATE, etc.),
575    /// stopping before the statement separator, if any.
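    ///
    /// A minimal sketch; note that only the first statement is consumed:
    ///
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
    /// # use sqlparser::ast::Statement;
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let mut parser = Parser::new(&dialect).try_with_sql("SELECT 1; SELECT 2")?;
    /// let statement = parser.parse_statement()?; // parses `SELECT 1` only
    /// assert!(matches!(statement, Statement::Query(_)));
    /// # Ok(())
    /// # }
    /// ```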
576    pub fn parse_statement(&mut self) -> Result<Statement, ParserError> {
577        let _guard = self.recursion_counter.try_decrease()?;
578
579        // allow the dialect to override statement parsing
580        if let Some(statement) = self.dialect.parse_statement(self) {
581            return statement;
582        }
583
584        let next_token = self.next_token();
585        match &next_token.token {
586            Token::Word(w) => match w.keyword {
587                Keyword::KILL => self.parse_kill(),
588                Keyword::FLUSH => self.parse_flush(),
589                Keyword::DESC => self.parse_explain(DescribeAlias::Desc),
590                Keyword::DESCRIBE => self.parse_explain(DescribeAlias::Describe),
591                Keyword::EXPLAIN => self.parse_explain(DescribeAlias::Explain),
592                Keyword::ANALYZE => self.parse_analyze(),
593                Keyword::CASE => {
594                    self.prev_token();
595                    self.parse_case_stmt()
596                }
597                Keyword::IF => {
598                    self.prev_token();
599                    self.parse_if_stmt()
600                }
601                Keyword::WHILE => {
602                    self.prev_token();
603                    self.parse_while()
604                }
605                Keyword::RAISE => {
606                    self.prev_token();
607                    self.parse_raise_stmt()
608                }
609                Keyword::SELECT | Keyword::WITH | Keyword::VALUES | Keyword::FROM => {
610                    self.prev_token();
611                    self.parse_query().map(Statement::Query)
612                }
613                Keyword::TRUNCATE => self.parse_truncate(),
614                Keyword::ATTACH => {
615                    if dialect_of!(self is DuckDbDialect) {
616                        self.parse_attach_duckdb_database()
617                    } else {
618                        self.parse_attach_database()
619                    }
620                }
621                Keyword::DETACH if dialect_of!(self is DuckDbDialect | GenericDialect) => {
622                    self.parse_detach_duckdb_database()
623                }
624                Keyword::MSCK => self.parse_msck(),
625                Keyword::CREATE => self.parse_create(),
626                Keyword::CACHE => self.parse_cache_table(),
627                Keyword::DROP => self.parse_drop(),
628                Keyword::DISCARD => self.parse_discard(),
629                Keyword::DECLARE => self.parse_declare(),
630                Keyword::FETCH => self.parse_fetch_statement(),
631                Keyword::DELETE => self.parse_delete(next_token),
632                Keyword::INSERT => self.parse_insert(next_token),
633                Keyword::REPLACE => self.parse_replace(next_token),
634                Keyword::UNCACHE => self.parse_uncache_table(),
635                Keyword::UPDATE => self.parse_update(next_token),
636                Keyword::ALTER => self.parse_alter(),
637                Keyword::CALL => self.parse_call(),
638                Keyword::COPY => self.parse_copy(),
639                Keyword::OPEN => {
640                    self.prev_token();
641                    self.parse_open()
642                }
643                Keyword::CLOSE => self.parse_close(),
644                Keyword::SET => self.parse_set(),
645                Keyword::SHOW => self.parse_show(),
646                Keyword::USE => self.parse_use(),
647                Keyword::GRANT => self.parse_grant(),
648                Keyword::DENY => {
649                    self.prev_token();
650                    self.parse_deny()
651                }
652                Keyword::REVOKE => self.parse_revoke(),
653                Keyword::START => self.parse_start_transaction(),
654                Keyword::BEGIN => self.parse_begin(),
655                Keyword::END => self.parse_end(),
656                Keyword::SAVEPOINT => self.parse_savepoint(),
657                Keyword::RELEASE => self.parse_release(),
658                Keyword::COMMIT => self.parse_commit(),
659                Keyword::RAISERROR => Ok(self.parse_raiserror()?),
660                Keyword::ROLLBACK => self.parse_rollback(),
661                Keyword::ASSERT => self.parse_assert(),
662                // `PREPARE`, `EXECUTE` and `DEALLOCATE` are Postgres-specific
663                // syntaxes. They are used for Postgres prepared statements.
664                Keyword::DEALLOCATE => self.parse_deallocate(),
665                Keyword::EXECUTE | Keyword::EXEC => self.parse_execute(),
666                Keyword::PREPARE => self.parse_prepare(),
667                Keyword::MERGE => self.parse_merge(next_token),
668                // `LISTEN`, `UNLISTEN` and `NOTIFY` are Postgres-specific
669                // syntaxes. They are used for Postgres asynchronous notifications.
670                Keyword::LISTEN if self.dialect.supports_listen_notify() => self.parse_listen(),
671                Keyword::UNLISTEN if self.dialect.supports_listen_notify() => self.parse_unlisten(),
672                Keyword::NOTIFY if self.dialect.supports_listen_notify() => self.parse_notify(),
673                // `PRAGMA` is sqlite specific https://www.sqlite.org/pragma.html
674                Keyword::PRAGMA => self.parse_pragma(),
675                Keyword::UNLOAD => {
676                    self.prev_token();
677                    self.parse_unload()
678                }
679                Keyword::RENAME => self.parse_rename(),
680                // `INSTALL` is duckdb specific https://duckdb.org/docs/extensions/overview
681                Keyword::INSTALL if dialect_of!(self is DuckDbDialect | GenericDialect) => {
682                    self.parse_install()
683                }
684                Keyword::LOAD => self.parse_load(),
685                // `OPTIMIZE` is clickhouse specific https://clickhouse.tech/docs/en/sql-reference/statements/optimize/
686                Keyword::OPTIMIZE if dialect_of!(self is ClickHouseDialect | GenericDialect) => {
687                    self.parse_optimize_table()
688                }
689                // `COMMENT` is snowflake specific https://docs.snowflake.com/en/sql-reference/sql/comment
690                Keyword::COMMENT if self.dialect.supports_comment_on() => self.parse_comment(),
691                Keyword::PRINT => self.parse_print(),
692                Keyword::RETURN => self.parse_return(),
693                Keyword::EXPORT => {
694                    self.prev_token();
695                    self.parse_export_data()
696                }
697                Keyword::VACUUM => {
698                    self.prev_token();
699                    self.parse_vacuum()
700                }
701                Keyword::RESET => self.parse_reset(),
702                _ => self.expected("an SQL statement", next_token),
703            },
704            Token::LParen => {
705                self.prev_token();
706                self.parse_query().map(Statement::Query)
707            }
708            _ => self.expected("an SQL statement", next_token),
709        }
710    }
711
712    /// Parse a `CASE` statement.
713    ///
714    /// See [Statement::Case]
715    pub fn parse_case_stmt(&mut self) -> Result<Statement, ParserError> {
716        let case_token = self.expect_keyword(Keyword::CASE)?;
717
718        let match_expr = if self.peek_keyword(Keyword::WHEN) {
719            None
720        } else {
721            Some(self.parse_expr()?)
722        };
723
724        self.expect_keyword_is(Keyword::WHEN)?;
725        let when_blocks = self.parse_keyword_separated(Keyword::WHEN, |parser| {
726            parser.parse_conditional_statement_block(&[Keyword::WHEN, Keyword::ELSE, Keyword::END])
727        })?;
728
729        let else_block = if self.parse_keyword(Keyword::ELSE) {
730            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
731        } else {
732            None
733        };
734
735        let mut end_case_token = self.expect_keyword(Keyword::END)?;
736        if self.peek_keyword(Keyword::CASE) {
737            end_case_token = self.expect_keyword(Keyword::CASE)?;
738        }
739
740        Ok(Statement::Case(CaseStatement {
741            case_token: AttachedToken(case_token),
742            match_expr,
743            when_blocks,
744            else_block,
745            end_case_token: AttachedToken(end_case_token),
746        }))
747    }
748
749    /// Parse an `IF` statement.
750    ///
751    /// See [Statement::If]
752    pub fn parse_if_stmt(&mut self) -> Result<Statement, ParserError> {
753        self.expect_keyword_is(Keyword::IF)?;
754        let if_block = self.parse_conditional_statement_block(&[
755            Keyword::ELSE,
756            Keyword::ELSEIF,
757            Keyword::END,
758        ])?;
759
760        let elseif_blocks = if self.parse_keyword(Keyword::ELSEIF) {
761            self.parse_keyword_separated(Keyword::ELSEIF, |parser| {
762                parser.parse_conditional_statement_block(&[
763                    Keyword::ELSEIF,
764                    Keyword::ELSE,
765                    Keyword::END,
766                ])
767            })?
768        } else {
769            vec![]
770        };
771
772        let else_block = if self.parse_keyword(Keyword::ELSE) {
773            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
774        } else {
775            None
776        };
777
778        self.expect_keyword_is(Keyword::END)?;
779        let end_token = self.expect_keyword(Keyword::IF)?;
780
781        Ok(Statement::If(IfStatement {
782            if_block,
783            elseif_blocks,
784            else_block,
785            end_token: Some(AttachedToken(end_token)),
786        }))
787    }
788
789    /// Parse a `WHILE` statement.
790    ///
791    /// See [Statement::While]
792    fn parse_while(&mut self) -> Result<Statement, ParserError> {
793        self.expect_keyword_is(Keyword::WHILE)?;
794        let while_block = self.parse_conditional_statement_block(&[Keyword::END])?;
795
796        Ok(Statement::While(WhileStatement { while_block }))
797    }
798
799    /// Parses an expression and associated list of statements
800    /// belonging to a conditional statement like `IF` or `WHEN` or `WHILE`.
801    ///
802    /// Example:
803    /// ```sql
804    /// IF condition THEN statement1; statement2;
805    /// ```
806    fn parse_conditional_statement_block(
807        &mut self,
808        terminal_keywords: &[Keyword],
809    ) -> Result<ConditionalStatementBlock, ParserError> {
810        let start_token = self.get_current_token().clone();
811        let mut then_token = None;
812
813        let condition = match &start_token.token {
814            Token::Word(w) if w.keyword == Keyword::ELSE => None,
815            Token::Word(w) if w.keyword == Keyword::WHILE => {
816                let expr = self.parse_expr()?;
817                Some(expr)
818            }
819            _ => {
820                let expr = self.parse_expr()?;
821                then_token = Some(AttachedToken(self.expect_keyword(Keyword::THEN)?));
822                Some(expr)
823            }
824        };
825
826        let conditional_statements = self.parse_conditional_statements(terminal_keywords)?;
827
828        Ok(ConditionalStatementBlock {
829            start_token: AttachedToken(start_token),
830            condition,
831            then_token,
832            conditional_statements,
833        })
834    }
835
836    /// Parse a BEGIN/END block or a sequence of statements.
837    /// This could be inside a conditional (IF, CASE, WHILE, etc.) or an object body defined by an optional BEGIN/END and one or more statements.
838    pub(crate) fn parse_conditional_statements(
839        &mut self,
840        terminal_keywords: &[Keyword],
841    ) -> Result<ConditionalStatements, ParserError> {
842        let conditional_statements = if self.peek_keyword(Keyword::BEGIN) {
843            let begin_token = self.expect_keyword(Keyword::BEGIN)?;
844            let statements = self.parse_statement_list(terminal_keywords)?;
845            let end_token = self.expect_keyword(Keyword::END)?;
846
847            ConditionalStatements::BeginEnd(BeginEndStatements {
848                begin_token: AttachedToken(begin_token),
849                statements,
850                end_token: AttachedToken(end_token),
851            })
852        } else {
853            ConditionalStatements::Sequence {
854                statements: self.parse_statement_list(terminal_keywords)?,
855            }
856        };
857        Ok(conditional_statements)
858    }
859
860    /// Parse a `RAISE` statement.
861    ///
862    /// See [Statement::Raise]
863    pub fn parse_raise_stmt(&mut self) -> Result<Statement, ParserError> {
864        self.expect_keyword_is(Keyword::RAISE)?;
865
866        let value = if self.parse_keywords(&[Keyword::USING, Keyword::MESSAGE]) {
867            self.expect_token(&Token::Eq)?;
868            Some(RaiseStatementValue::UsingMessage(self.parse_expr()?))
869        } else {
870            self.maybe_parse(|parser| parser.parse_expr().map(RaiseStatementValue::Expr))?
871        };
872
873        Ok(Statement::Raise(RaiseStatement { value }))
874    }
875
876    pub fn parse_comment(&mut self) -> Result<Statement, ParserError> {
877        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
878
879        self.expect_keyword_is(Keyword::ON)?;
880        let token = self.next_token();
881
882        let (object_type, object_name) = match token.token {
883            Token::Word(w) if w.keyword == Keyword::COLUMN => {
884                (CommentObject::Column, self.parse_object_name(false)?)
885            }
886            Token::Word(w) if w.keyword == Keyword::TABLE => {
887                (CommentObject::Table, self.parse_object_name(false)?)
888            }
889            Token::Word(w) if w.keyword == Keyword::EXTENSION => {
890                (CommentObject::Extension, self.parse_object_name(false)?)
891            }
892            Token::Word(w) if w.keyword == Keyword::SCHEMA => {
893                (CommentObject::Schema, self.parse_object_name(false)?)
894            }
895            Token::Word(w) if w.keyword == Keyword::DATABASE => {
896                (CommentObject::Database, self.parse_object_name(false)?)
897            }
898            Token::Word(w) if w.keyword == Keyword::USER => {
899                (CommentObject::User, self.parse_object_name(false)?)
900            }
901            Token::Word(w) if w.keyword == Keyword::ROLE => {
902                (CommentObject::Role, self.parse_object_name(false)?)
903            }
904            _ => self.expected("comment object_type", token)?,
905        };
906
907        self.expect_keyword_is(Keyword::IS)?;
908        let comment = if self.parse_keyword(Keyword::NULL) {
909            None
910        } else {
911            Some(self.parse_literal_string()?)
912        };
913        Ok(Statement::Comment {
914            object_type,
915            object_name,
916            comment,
917            if_exists,
918        })
919    }
920
921    pub fn parse_flush(&mut self) -> Result<Statement, ParserError> {
922        let mut channel = None;
923        let mut tables: Vec<ObjectName> = vec![];
924        let mut read_lock = false;
925        let mut export = false;
926
927        if !dialect_of!(self is MySqlDialect | GenericDialect) {
928            return parser_err!("Unsupported statement FLUSH", self.peek_token().span.start);
929        }
930
931        let location = if self.parse_keyword(Keyword::NO_WRITE_TO_BINLOG) {
932            Some(FlushLocation::NoWriteToBinlog)
933        } else if self.parse_keyword(Keyword::LOCAL) {
934            Some(FlushLocation::Local)
935        } else {
936            None
937        };
938
939        let object_type = if self.parse_keywords(&[Keyword::BINARY, Keyword::LOGS]) {
940            FlushType::BinaryLogs
941        } else if self.parse_keywords(&[Keyword::ENGINE, Keyword::LOGS]) {
942            FlushType::EngineLogs
943        } else if self.parse_keywords(&[Keyword::ERROR, Keyword::LOGS]) {
944            FlushType::ErrorLogs
945        } else if self.parse_keywords(&[Keyword::GENERAL, Keyword::LOGS]) {
946            FlushType::GeneralLogs
947        } else if self.parse_keywords(&[Keyword::HOSTS]) {
948            FlushType::Hosts
949        } else if self.parse_keyword(Keyword::PRIVILEGES) {
950            FlushType::Privileges
951        } else if self.parse_keyword(Keyword::OPTIMIZER_COSTS) {
952            FlushType::OptimizerCosts
953        } else if self.parse_keywords(&[Keyword::RELAY, Keyword::LOGS]) {
954            if self.parse_keywords(&[Keyword::FOR, Keyword::CHANNEL]) {
955                channel = Some(self.parse_object_name(false).unwrap().to_string());
956            }
957            FlushType::RelayLogs
958        } else if self.parse_keywords(&[Keyword::SLOW, Keyword::LOGS]) {
959            FlushType::SlowLogs
960        } else if self.parse_keyword(Keyword::STATUS) {
961            FlushType::Status
962        } else if self.parse_keyword(Keyword::USER_RESOURCES) {
963            FlushType::UserResources
964        } else if self.parse_keywords(&[Keyword::LOGS]) {
965            FlushType::Logs
966        } else if self.parse_keywords(&[Keyword::TABLES]) {
967            loop {
968                let next_token = self.next_token();
969                match &next_token.token {
970                    Token::Word(w) => match w.keyword {
971                        Keyword::WITH => {
972                            read_lock = self.parse_keywords(&[Keyword::READ, Keyword::LOCK]);
973                        }
974                        Keyword::FOR => {
975                            export = self.parse_keyword(Keyword::EXPORT);
976                        }
977                        Keyword::NoKeyword => {
978                            self.prev_token();
979                            tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
980                        }
981                        _ => {}
982                    },
983                    _ => {
984                        break;
985                    }
986                }
987            }
988
989            FlushType::Tables
990        } else {
991            return self.expected(
992                "BINARY LOGS, ENGINE LOGS, ERROR LOGS, GENERAL LOGS, HOSTS, LOGS, PRIVILEGES, OPTIMIZER_COSTS,\
993                 RELAY LOGS [FOR CHANNEL channel], SLOW LOGS, STATUS, USER_RESOURCES",
994                self.peek_token(),
995            );
996        };
997
998        Ok(Statement::Flush {
999            object_type,
1000            location,
1001            channel,
1002            read_lock,
1003            export,
1004            tables,
1005        })
1006    }
1007
1008    pub fn parse_msck(&mut self) -> Result<Statement, ParserError> {
1009        let repair = self.parse_keyword(Keyword::REPAIR);
1010        self.expect_keyword_is(Keyword::TABLE)?;
1011        let table_name = self.parse_object_name(false)?;
1012        let partition_action = self
1013            .maybe_parse(|parser| {
1014                let pa = match parser.parse_one_of_keywords(&[
1015                    Keyword::ADD,
1016                    Keyword::DROP,
1017                    Keyword::SYNC,
1018                ]) {
1019                    Some(Keyword::ADD) => Some(AddDropSync::ADD),
1020                    Some(Keyword::DROP) => Some(AddDropSync::DROP),
1021                    Some(Keyword::SYNC) => Some(AddDropSync::SYNC),
1022                    _ => None,
1023                };
1024                parser.expect_keyword_is(Keyword::PARTITIONS)?;
1025                Ok(pa)
1026            })?
1027            .unwrap_or_default();
1028        Ok(Msck {
1029            repair,
1030            table_name,
1031            partition_action,
1032        }
1033        .into())
1034    }
1035
1036    pub fn parse_truncate(&mut self) -> Result<Statement, ParserError> {
1037        let table = self.parse_keyword(Keyword::TABLE);
1038
1039        let table_names = self
1040            .parse_comma_separated(|p| {
1041                Ok((p.parse_keyword(Keyword::ONLY), p.parse_object_name(false)?))
1042            })?
1043            .into_iter()
1044            .map(|(only, name)| TruncateTableTarget { name, only })
1045            .collect();
1046
1047        let mut partitions = None;
1048        if self.parse_keyword(Keyword::PARTITION) {
1049            self.expect_token(&Token::LParen)?;
1050            partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
1051            self.expect_token(&Token::RParen)?;
1052        }
1053
1054        let mut identity = None;
1055        let mut cascade = None;
1056
1057        if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
1058            identity = if self.parse_keywords(&[Keyword::RESTART, Keyword::IDENTITY]) {
1059                Some(TruncateIdentityOption::Restart)
1060            } else if self.parse_keywords(&[Keyword::CONTINUE, Keyword::IDENTITY]) {
1061                Some(TruncateIdentityOption::Continue)
1062            } else {
1063                None
1064            };
1065
1066            cascade = self.parse_cascade_option();
1067        };
1068
1069        let on_cluster = self.parse_optional_on_cluster()?;
1070
1071        Ok(Truncate {
1072            table_names,
1073            partitions,
1074            table,
1075            identity,
1076            cascade,
1077            on_cluster,
1078        }
1079        .into())
1080    }
1081
1082    fn parse_cascade_option(&mut self) -> Option<CascadeOption> {
1083        if self.parse_keyword(Keyword::CASCADE) {
1084            Some(CascadeOption::Cascade)
1085        } else if self.parse_keyword(Keyword::RESTRICT) {
1086            Some(CascadeOption::Restrict)
1087        } else {
1088            None
1089        }
1090    }
1091
1092    pub fn parse_attach_duckdb_database_options(
1093        &mut self,
1094    ) -> Result<Vec<AttachDuckDBDatabaseOption>, ParserError> {
1095        if !self.consume_token(&Token::LParen) {
1096            return Ok(vec![]);
1097        }
1098
1099        let mut options = vec![];
1100        loop {
1101            if self.parse_keyword(Keyword::READ_ONLY) {
1102                let boolean = if self.parse_keyword(Keyword::TRUE) {
1103                    Some(true)
1104                } else if self.parse_keyword(Keyword::FALSE) {
1105                    Some(false)
1106                } else {
1107                    None
1108                };
1109                options.push(AttachDuckDBDatabaseOption::ReadOnly(boolean));
1110            } else if self.parse_keyword(Keyword::TYPE) {
1111                let ident = self.parse_identifier()?;
1112                options.push(AttachDuckDBDatabaseOption::Type(ident));
1113            } else {
1114                return self.expected("expected one of: ), READ_ONLY, TYPE", self.peek_token());
1115            };
1116
1117            if self.consume_token(&Token::RParen) {
1118                return Ok(options);
1119            } else if self.consume_token(&Token::Comma) {
1120                continue;
1121            } else {
1122                return self.expected("expected one of: ')', ','", self.peek_token());
1123            }
1124        }
1125    }
1126
1127    pub fn parse_attach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
1128        let database = self.parse_keyword(Keyword::DATABASE);
1129        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
1130        let database_path = self.parse_identifier()?;
1131        let database_alias = if self.parse_keyword(Keyword::AS) {
1132            Some(self.parse_identifier()?)
1133        } else {
1134            None
1135        };
1136
1137        let attach_options = self.parse_attach_duckdb_database_options()?;
1138        Ok(Statement::AttachDuckDBDatabase {
1139            if_not_exists,
1140            database,
1141            database_path,
1142            database_alias,
1143            attach_options,
1144        })
1145    }
1146
1147    pub fn parse_detach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
1148        let database = self.parse_keyword(Keyword::DATABASE);
1149        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
1150        let database_alias = self.parse_identifier()?;
1151        Ok(Statement::DetachDuckDBDatabase {
1152            if_exists,
1153            database,
1154            database_alias,
1155        })
1156    }
1157
1158    pub fn parse_attach_database(&mut self) -> Result<Statement, ParserError> {
1159        let database = self.parse_keyword(Keyword::DATABASE);
1160        let database_file_name = self.parse_expr()?;
1161        self.expect_keyword_is(Keyword::AS)?;
1162        let schema_name = self.parse_identifier()?;
1163        Ok(Statement::AttachDatabase {
1164            database,
1165            schema_name,
1166            database_file_name,
1167        })
1168    }
1169
1170    pub fn parse_analyze(&mut self) -> Result<Statement, ParserError> {
1171        let has_table_keyword = self.parse_keyword(Keyword::TABLE);
1172        let table_name = self.parse_object_name(false)?;
1173        let mut for_columns = false;
1174        let mut cache_metadata = false;
1175        let mut noscan = false;
1176        let mut partitions = None;
1177        let mut compute_statistics = false;
1178        let mut columns = vec![];
1179        loop {
1180            match self.parse_one_of_keywords(&[
1181                Keyword::PARTITION,
1182                Keyword::FOR,
1183                Keyword::CACHE,
1184                Keyword::NOSCAN,
1185                Keyword::COMPUTE,
1186            ]) {
1187                Some(Keyword::PARTITION) => {
1188                    self.expect_token(&Token::LParen)?;
1189                    partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
1190                    self.expect_token(&Token::RParen)?;
1191                }
1192                Some(Keyword::NOSCAN) => noscan = true,
1193                Some(Keyword::FOR) => {
1194                    self.expect_keyword_is(Keyword::COLUMNS)?;
1195
1196                    columns = self
1197                        .maybe_parse(|parser| {
1198                            parser.parse_comma_separated(|p| p.parse_identifier())
1199                        })?
1200                        .unwrap_or_default();
1201                    for_columns = true
1202                }
1203                Some(Keyword::CACHE) => {
1204                    self.expect_keyword_is(Keyword::METADATA)?;
1205                    cache_metadata = true
1206                }
1207                Some(Keyword::COMPUTE) => {
1208                    self.expect_keyword_is(Keyword::STATISTICS)?;
1209                    compute_statistics = true
1210                }
1211                _ => break,
1212            }
1213        }
1214
1215        Ok(Analyze {
1216            has_table_keyword,
1217            table_name,
1218            for_columns,
1219            columns,
1220            partitions,
1221            cache_metadata,
1222            noscan,
1223            compute_statistics,
1224        }
1225        .into())
1226    }
1227
1228    /// Parse a new expression including wildcard & qualified wildcard.
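    ///
    /// A minimal sketch of the qualified-wildcard case:
    ///
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
    /// # use sqlparser::ast::Expr;
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let expr = Parser::new(&dialect)
    ///   .try_with_sql("tbl.*")?
    ///   .parse_wildcard_expr()?;
    /// assert!(matches!(expr, Expr::QualifiedWildcard(..)));
    /// # Ok(())
    /// # }
    /// ```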
1229    pub fn parse_wildcard_expr(&mut self) -> Result<Expr, ParserError> {
1230        let index = self.index;
1231
1232        let next_token = self.next_token();
1233        match next_token.token {
1234            t @ (Token::Word(_) | Token::SingleQuotedString(_)) => {
1235                if self.peek_token().token == Token::Period {
1236                    let mut id_parts: Vec<Ident> = vec![match t {
1237                        Token::Word(w) => w.into_ident(next_token.span),
1238                        Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
1239                        _ => {
1240                            return Err(ParserError::ParserError(
1241                                "Internal parser error: unexpected token type".to_string(),
1242                            ))
1243                        }
1244                    }];
1245
1246                    while self.consume_token(&Token::Period) {
1247                        let next_token = self.next_token();
1248                        match next_token.token {
1249                            Token::Word(w) => id_parts.push(w.into_ident(next_token.span)),
1250                            Token::SingleQuotedString(s) => {
1251                                // SQLite has single-quoted identifiers
1252                                id_parts.push(Ident::with_quote('\'', s))
1253                            }
1254                            Token::Mul => {
1255                                return Ok(Expr::QualifiedWildcard(
1256                                    ObjectName::from(id_parts),
1257                                    AttachedToken(next_token),
1258                                ));
1259                            }
1260                            _ => {
1261                                return self
1262                                    .expected("an identifier or a '*' after '.'", next_token);
1263                            }
1264                        }
1265                    }
1266                }
1267            }
1268            Token::Mul => {
1269                return Ok(Expr::Wildcard(AttachedToken(next_token)));
1270            }
1271            // Handle parenthesized wildcard: (*)
1272            Token::LParen => {
1273                let [maybe_mul, maybe_rparen] = self.peek_tokens_ref();
1274                if maybe_mul.token == Token::Mul && maybe_rparen.token == Token::RParen {
1275                    let mul_token = self.next_token(); // consume Mul
1276                    self.next_token(); // consume RParen
1277                    return Ok(Expr::Wildcard(AttachedToken(mul_token)));
1278                }
1279            }
1280            _ => (),
1281        };
1282
1283        self.index = index;
1284        self.parse_expr()
1285    }
1286
1287    /// Parse a new expression.
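    ///
    /// A minimal sketch illustrating operator precedence: `*` binds tighter
    /// than `+`, so the top-level node is the addition.
    ///
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
    /// # use sqlparser::ast::{BinaryOperator, Expr};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let expr = Parser::new(&dialect)
    ///   .try_with_sql("1 + 2 * 3")?
    ///   .parse_expr()?;
    /// assert!(matches!(expr, Expr::BinaryOp { op: BinaryOperator::Plus, .. }));
    /// # Ok(())
    /// # }
    /// ```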
1288    pub fn parse_expr(&mut self) -> Result<Expr, ParserError> {
1289        self.parse_subexpr(self.dialect.prec_unknown())
1290    }
1291
1292    pub fn parse_expr_with_alias_and_order_by(
1293        &mut self,
1294    ) -> Result<ExprWithAliasAndOrderBy, ParserError> {
1295        let expr = self.parse_expr()?;
1296
1297        fn validator(explicit: bool, kw: &Keyword, _parser: &mut Parser) -> bool {
1298            explicit || !&[Keyword::ASC, Keyword::DESC, Keyword::GROUP].contains(kw)
1299        }
1300        let alias = self.parse_optional_alias_inner(None, validator)?;
1301        let order_by = OrderByOptions {
1302            asc: self.parse_asc_desc(),
1303            nulls_first: None,
1304        };
1305        Ok(ExprWithAliasAndOrderBy {
1306            expr: ExprWithAlias { expr, alias },
1307            order_by,
1308        })
1309    }
1310
1311    /// Parse tokens until the precedence changes.
1312    pub fn parse_subexpr(&mut self, precedence: u8) -> Result<Expr, ParserError> {
1313        let _guard = self.recursion_counter.try_decrease()?;
1314        debug!("parsing expr");
1315        let mut expr = self.parse_prefix()?;
1316
1317        expr = self.parse_compound_expr(expr, vec![])?;
1318
1319        debug!("prefix: {expr:?}");
1320        loop {
1321            let next_precedence = self.get_next_precedence()?;
1322            debug!("next precedence: {next_precedence:?}");
1323
1324            if precedence >= next_precedence {
1325                break;
1326            }
1327
1328            // The period operator is handled exclusively by the
1329            // compound field access parsing.
1330            if Token::Period == self.peek_token_ref().token {
1331                break;
1332            }
1333
1334            expr = self.parse_infix(expr, next_precedence)?;
1335        }
1336        Ok(expr)
1337    }
1338
1339    pub fn parse_assert(&mut self) -> Result<Statement, ParserError> {
1340        let condition = self.parse_expr()?;
1341        let message = if self.parse_keyword(Keyword::AS) {
1342            Some(self.parse_expr()?)
1343        } else {
1344            None
1345        };
1346
1347        Ok(Statement::Assert { condition, message })
1348    }
1349
1350    pub fn parse_savepoint(&mut self) -> Result<Statement, ParserError> {
1351        let name = self.parse_identifier()?;
1352        Ok(Statement::Savepoint { name })
1353    }
1354
1355    pub fn parse_release(&mut self) -> Result<Statement, ParserError> {
1356        let _ = self.parse_keyword(Keyword::SAVEPOINT);
1357        let name = self.parse_identifier()?;
1358
1359        Ok(Statement::ReleaseSavepoint { name })
1360    }
1361
1362    pub fn parse_listen(&mut self) -> Result<Statement, ParserError> {
1363        let channel = self.parse_identifier()?;
1364        Ok(Statement::LISTEN { channel })
1365    }
1366
1367    pub fn parse_unlisten(&mut self) -> Result<Statement, ParserError> {
1368        let channel = if self.consume_token(&Token::Mul) {
1369            Ident::new(Expr::Wildcard(AttachedToken::empty()).to_string())
1370        } else {
1371            match self.parse_identifier() {
1372                Ok(expr) => expr,
1373                _ => {
1374                    self.prev_token();
1375                    return self.expected("wildcard or identifier", self.peek_token());
1376                }
1377            }
1378        };
1379        Ok(Statement::UNLISTEN { channel })
1380    }
1381
1382    pub fn parse_notify(&mut self) -> Result<Statement, ParserError> {
1383        let channel = self.parse_identifier()?;
1384        let payload = if self.consume_token(&Token::Comma) {
1385            Some(self.parse_literal_string()?)
1386        } else {
1387            None
1388        };
1389        Ok(Statement::NOTIFY { channel, payload })
1390    }
1391
1392    /// Parses a `RENAME TABLE` statement. See [Statement::RenameTable]
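    ///
    /// A minimal sketch of the accepted form:
    ///
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
    /// # use sqlparser::ast::Statement;
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let statements = Parser::parse_sql(&dialect, "RENAME TABLE old_name TO new_name")?;
    /// assert!(matches!(statements[0], Statement::RenameTable(_)));
    /// # Ok(())
    /// # }
    /// ```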
1393    pub fn parse_rename(&mut self) -> Result<Statement, ParserError> {
1394        if self.peek_keyword(Keyword::TABLE) {
1395            self.expect_keyword(Keyword::TABLE)?;
1396            let rename_tables = self.parse_comma_separated(|parser| {
1397                let old_name = parser.parse_object_name(false)?;
1398                parser.expect_keyword(Keyword::TO)?;
1399                let new_name = parser.parse_object_name(false)?;
1400
1401                Ok(RenameTable { old_name, new_name })
1402            })?;
1403            Ok(Statement::RenameTable(rename_tables))
1404        } else {
1405            self.expected("KEYWORD `TABLE` after RENAME", self.peek_token())
1406        }
1407    }
1408
1409    /// Tries to parse an expression by matching the specified word to known keywords that have a special meaning in the dialect.
1410    /// Returns `None` if no match is found.
1411    fn parse_expr_prefix_by_reserved_word(
1412        &mut self,
1413        w: &Word,
1414        w_span: Span,
1415    ) -> Result<Option<Expr>, ParserError> {
1416        match w.keyword {
1417            Keyword::TRUE | Keyword::FALSE if self.dialect.supports_boolean_literals() => {
1418                self.prev_token();
1419                Ok(Some(Expr::Value(self.parse_value()?)))
1420            }
1421            Keyword::NULL => {
1422                self.prev_token();
1423                Ok(Some(Expr::Value(self.parse_value()?)))
1424            }
1425            Keyword::CURRENT_CATALOG
1426            | Keyword::CURRENT_USER
1427            | Keyword::SESSION_USER
1428            | Keyword::USER
1429            if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
1430                {
1431                    Ok(Some(Expr::Function(Function {
1432                        name: ObjectName::from(vec![w.clone().into_ident(w_span)]),
1433                        uses_odbc_syntax: false,
1434                        parameters: FunctionArguments::None,
1435                        args: FunctionArguments::None,
1436                        null_treatment: None,
1437                        filter: None,
1438                        over: None,
1439                        within_group: vec![],
1440                    })))
1441                }
1442            Keyword::CURRENT_TIMESTAMP
1443            | Keyword::CURRENT_TIME
1444            | Keyword::CURRENT_DATE
1445            | Keyword::LOCALTIME
1446            | Keyword::LOCALTIMESTAMP => {
1447                Ok(Some(self.parse_time_functions(ObjectName::from(vec![w.clone().into_ident(w_span)]))?))
1448            }
1449            Keyword::CASE => Ok(Some(self.parse_case_expr()?)),
1450            Keyword::CONVERT => Ok(Some(self.parse_convert_expr(false)?)),
1451            Keyword::TRY_CONVERT if self.dialect.supports_try_convert() => Ok(Some(self.parse_convert_expr(true)?)),
1452            Keyword::CAST => Ok(Some(self.parse_cast_expr(CastKind::Cast)?)),
1453            Keyword::TRY_CAST => Ok(Some(self.parse_cast_expr(CastKind::TryCast)?)),
1454            Keyword::SAFE_CAST => Ok(Some(self.parse_cast_expr(CastKind::SafeCast)?)),
1455            Keyword::EXISTS
1456            // Databricks has a function named `exists`; for that dialect, only treat EXISTS as the subquery operator when it is followed by SELECT or WITH.
1457            if !dialect_of!(self is DatabricksDialect)
1458                || matches!(
1459                        self.peek_nth_token_ref(1).token,
1460                        Token::Word(Word {
1461                            keyword: Keyword::SELECT | Keyword::WITH,
1462                            ..
1463                        })
1464                    ) =>
1465                {
1466                    Ok(Some(self.parse_exists_expr(false)?))
1467                }
1468            Keyword::EXTRACT => Ok(Some(self.parse_extract_expr()?)),
1469            Keyword::CEIL => Ok(Some(self.parse_ceil_floor_expr(true)?)),
1470            Keyword::FLOOR => Ok(Some(self.parse_ceil_floor_expr(false)?)),
1471            Keyword::POSITION if self.peek_token_ref().token == Token::LParen => {
1472                Ok(Some(self.parse_position_expr(w.clone().into_ident(w_span))?))
1473            }
1474            Keyword::SUBSTR | Keyword::SUBSTRING => {
1475                self.prev_token();
1476                Ok(Some(self.parse_substring()?))
1477            }
1478            Keyword::OVERLAY => Ok(Some(self.parse_overlay_expr()?)),
1479            Keyword::TRIM => Ok(Some(self.parse_trim_expr()?)),
1480            Keyword::INTERVAL => Ok(Some(self.parse_interval()?)),
1481            // Treat ARRAY[1,2,3] as an array [1,2,3]; otherwise try to parse it as a subquery or a function call
1482            Keyword::ARRAY if *self.peek_token_ref() == Token::LBracket => {
1483                self.expect_token(&Token::LBracket)?;
1484                Ok(Some(self.parse_array_expr(true)?))
1485            }
1486            Keyword::ARRAY
1487            if self.peek_token() == Token::LParen
1488                && !dialect_of!(self is ClickHouseDialect | DatabricksDialect) =>
1489                {
1490                    self.expect_token(&Token::LParen)?;
1491                    let query = self.parse_query()?;
1492                    self.expect_token(&Token::RParen)?;
1493                    Ok(Some(Expr::Function(Function {
1494                        name: ObjectName::from(vec![w.clone().into_ident(w_span)]),
1495                        uses_odbc_syntax: false,
1496                        parameters: FunctionArguments::None,
1497                        args: FunctionArguments::Subquery(query),
1498                        filter: None,
1499                        null_treatment: None,
1500                        over: None,
1501                        within_group: vec![],
1502                    })))
1503                }
1504            Keyword::NOT => Ok(Some(self.parse_not()?)),
1505            Keyword::MATCH if self.dialect.supports_match_against() => {
1506                Ok(Some(self.parse_match_against()?))
1507            }
1508            Keyword::STRUCT if self.dialect.supports_struct_literal() => {
1509                let struct_expr = self.parse_struct_literal()?;
1510                Ok(Some(struct_expr))
1511            }
1512            Keyword::PRIOR if matches!(self.state, ParserState::ConnectBy) => {
1513                let expr = self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?;
1514                Ok(Some(Expr::Prior(Box::new(expr))))
1515            }
1516            Keyword::MAP if *self.peek_token_ref() == Token::LBrace && self.dialect.support_map_literal_syntax() => {
1517                Ok(Some(self.parse_duckdb_map_literal()?))
1518            }
1519            _ if self.dialect.supports_geometric_types() => match w.keyword {
1520                Keyword::CIRCLE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Circle)?)),
1521                Keyword::BOX => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricBox)?)),
1522                Keyword::PATH => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricPath)?)),
1523                Keyword::LINE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Line)?)),
1524                Keyword::LSEG => Ok(Some(self.parse_geometric_type(GeometricTypeKind::LineSegment)?)),
1525                Keyword::POINT => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Point)?)),
1526                Keyword::POLYGON => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Polygon)?)),
1527                _ => Ok(None),
1528            },
1529            _ => Ok(None),
1530        }
1531    }
1532
1533    /// Tries to parse an expression by a word that is not known to have a special meaning in the dialect.
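    ///
    /// Depending on the token that follows, the word may start a function call
    /// (`foo(1, 2)`), a string introducer (`_utf8mb4'abc'`), a lambda body in
    /// dialects that support lambdas (`x -> x + 1`), or simply denote an
    /// identifier (illustrative cases drawn from the match arms below).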
1534    fn parse_expr_prefix_by_unreserved_word(
1535        &mut self,
1536        w: &Word,
1537        w_span: Span,
1538    ) -> Result<Expr, ParserError> {
1539        match self.peek_token().token {
1540            Token::LParen if !self.peek_outer_join_operator() => {
1541                let id_parts = vec![w.clone().into_ident(w_span)];
1542                self.parse_function(ObjectName::from(id_parts))
1543            }
1544            // string introducer https://dev.mysql.com/doc/refman/8.0/en/charset-introducer.html
1545            Token::SingleQuotedString(_)
1546            | Token::DoubleQuotedString(_)
1547            | Token::HexStringLiteral(_)
1548                if w.value.starts_with('_') =>
1549            {
1550                Ok(Expr::Prefixed {
1551                    prefix: w.clone().into_ident(w_span),
1552                    value: self.parse_introduced_string_expr()?.into(),
1553                })
1554            }
1566            Token::Arrow if self.dialect.supports_lambda_functions() => {
1567                self.expect_token(&Token::Arrow)?;
1568                Ok(Expr::Lambda(LambdaFunction {
1569                    params: OneOrManyWithParens::One(w.clone().into_ident(w_span)),
1570                    body: Box::new(self.parse_expr()?),
1571                }))
1572            }
1573            _ => Ok(Expr::Identifier(w.clone().into_ident(w_span))),
1574        }
1575    }
1576
1577    /// Parse an expression prefix.
1578    pub fn parse_prefix(&mut self) -> Result<Expr, ParserError> {
1579        // allow the dialect to override prefix parsing
1580        if let Some(prefix) = self.dialect.parse_prefix(self) {
1581            return prefix;
1582        }
1583
1584        // PostgreSQL allows any string literal to be preceded by a type name, indicating that the
1585        // string literal represents a literal of that type. Some examples:
1586        //
1587        //      DATE '2020-05-20'
1588        //      TIMESTAMP WITH TIME ZONE '2020-05-20 7:43:54'
1589        //      BOOL 'true'
1590        //
1591        // The first two are standard SQL, while the latter is a PostgreSQL extension. Complicating
1592        // matters is the fact that INTERVAL string literals may optionally be followed by special
1593        // keywords, e.g.:
1594        //
1595        //      INTERVAL '7' DAY
1596        //
1597        // Note also that naively `SELECT date` looks like a syntax error because the `date` type
1598        // name is not followed by a string literal, but in fact in PostgreSQL it is a valid
1599        // expression that should parse as the column name "date".
1600        let loc = self.peek_token_ref().span.start;
1601        let opt_expr = self.maybe_parse(|parser| {
1602            match parser.parse_data_type()? {
1603                DataType::Interval { .. } => parser.parse_interval(),
1604                // PostgreSQL allows almost any identifier to be used as custom data type name,
1605                // and we support that in `parse_data_type()`. But unlike Postgres we don't
1606                // have a list of globally reserved keywords (since they vary across dialects),
1607                // so given `NOT 'a' LIKE 'b'`, we'd accept `NOT` as a possible custom data type
1608                // name, resulting in `NOT 'a'` being recognized as a `TypedString` instead of
1609                // a unary negation `NOT ('a' LIKE 'b')`. To solve this, we don't accept the
1610                // `type 'string'` syntax for custom data types at all.
1611                DataType::Custom(..) => parser_err!("dummy", loc),
1612                data_type => Ok(Expr::TypedString(TypedString {
1613                    data_type,
1614                    value: parser.parse_value()?,
1615                    uses_odbc_syntax: false,
1616                })),
1617            }
1618        })?;
1619
1620        if let Some(expr) = opt_expr {
1621            return Ok(expr);
1622        }
1623
1624        // Cache some dialect properties to avoid lifetime issues with the
1625        // next_token reference.
1626
1627        let dialect = self.dialect;
1628
1629        self.advance_token();
1630        let next_token_index = self.get_current_index();
1631        let next_token = self.get_current_token();
1632        let span = next_token.span;
1633        let expr = match &next_token.token {
1634            Token::Word(w) => {
1635                // The word we consumed may fall into one of two cases: it has a special meaning, or not.
1636                // For example, in Snowflake, the word `interval` may have two meanings depending on the context:
1637                // `SELECT CURRENT_DATE() + INTERVAL '1 DAY', MAX(interval) FROM tbl;`
1638                //                          ^^^^^^^^^^^^^^^^      ^^^^^^^^
1639                //                         interval expression   identifier
1640                //
1641                // We first try to parse the word and following tokens as a special expression, and if that fails,
1642                // we rollback and try to parse it as an identifier.
1643                let w = w.clone();
1644                match self.try_parse(|parser| parser.parse_expr_prefix_by_reserved_word(&w, span)) {
1645                    // This word indicated an expression prefix and parsing was successful
1646                    Ok(Some(expr)) => Ok(expr),
1647
1648                    // No expression prefix associated with this word
1649                    Ok(None) => Ok(self.parse_expr_prefix_by_unreserved_word(&w, span)?),
1650
1651                    // If parsing of the word as a special expression failed, we are facing two options:
1652                    // 1. The statement is malformed, e.g. `SELECT INTERVAL '1 DAI` (`DAI` instead of `DAY`)
1653                    // 2. The word is used as an identifier, e.g. `SELECT MAX(interval) FROM tbl`
1654                    // We first try to parse the word as an identifier and if that fails
1655                    // we rollback and return the parsing error we got from trying to parse a
1656                    // special expression (to maintain backwards compatibility of parsing errors).
1657                    Err(e) => {
1658                        if !self.dialect.is_reserved_for_identifier(w.keyword) {
1659                            if let Ok(Some(expr)) = self.maybe_parse(|parser| {
1660                                parser.parse_expr_prefix_by_unreserved_word(&w, span)
1661                            }) {
1662                                return Ok(expr);
1663                            }
1664                        }
1665                        return Err(e);
1666                    }
1667                }
1668            } // End of Token::Word
1669            // array `[1, 2, 3]`
1670            Token::LBracket => self.parse_array_expr(false),
1671            tok @ Token::Minus | tok @ Token::Plus => {
1672                let op = if *tok == Token::Plus {
1673                    UnaryOperator::Plus
1674                } else {
1675                    UnaryOperator::Minus
1676                };
1677                Ok(Expr::UnaryOp {
1678                    op,
1679                    expr: Box::new(
1680                        self.parse_subexpr(self.dialect.prec_value(Precedence::MulDivModOp))?,
1681                    ),
1682                })
1683            }
1684            Token::ExclamationMark if dialect.supports_bang_not_operator() => Ok(Expr::UnaryOp {
1685                op: UnaryOperator::BangNot,
1686                expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
1687            }),
1688            tok @ Token::DoubleExclamationMark
1689            | tok @ Token::PGSquareRoot
1690            | tok @ Token::PGCubeRoot
1691            | tok @ Token::AtSign
1692                if dialect_is!(dialect is PostgreSqlDialect) =>
1693            {
1694                let op = match tok {
1695                    Token::DoubleExclamationMark => UnaryOperator::PGPrefixFactorial,
1696                    Token::PGSquareRoot => UnaryOperator::PGSquareRoot,
1697                    Token::PGCubeRoot => UnaryOperator::PGCubeRoot,
1698                    Token::AtSign => UnaryOperator::PGAbs,
1699                    _ => {
1700                        return Err(ParserError::ParserError(
1701                            "Internal parser error: unexpected unary operator token".to_string(),
1702                        ))
1703                    }
1704                };
1705                Ok(Expr::UnaryOp {
1706                    op,
1707                    expr: Box::new(
1708                        self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1709                    ),
1710                })
1711            }
1712            Token::Tilde => Ok(Expr::UnaryOp {
1713                op: UnaryOperator::BitwiseNot,
1714                expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?),
1715            }),
1716            tok @ Token::Sharp
1717            | tok @ Token::AtDashAt
1718            | tok @ Token::AtAt
1719            | tok @ Token::QuestionMarkDash
1720            | tok @ Token::QuestionPipe
1721                if self.dialect.supports_geometric_types() =>
1722            {
1723                let op = match tok {
1724                    Token::Sharp => UnaryOperator::Hash,
1725                    Token::AtDashAt => UnaryOperator::AtDashAt,
1726                    Token::AtAt => UnaryOperator::DoubleAt,
1727                    Token::QuestionMarkDash => UnaryOperator::QuestionDash,
1728                    Token::QuestionPipe => UnaryOperator::QuestionPipe,
1729                    _ => {
1730                        return Err(ParserError::ParserError(format!(
1731                            "Unexpected token in unary operator parsing: {tok:?}"
1732                        )))
1733                    }
1734                };
1735                Ok(Expr::UnaryOp {
1736                    op,
1737                    expr: Box::new(
1738                        self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1739                    ),
1740                })
1741            }
1742            Token::EscapedStringLiteral(_) if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) =>
1743            {
1744                self.prev_token();
1745                Ok(Expr::Value(self.parse_value()?))
1746            }
1747            Token::UnicodeStringLiteral(_) => {
1748                self.prev_token();
1749                Ok(Expr::Value(self.parse_value()?))
1750            }
1751            Token::Number(_, _)
1752            | Token::SingleQuotedString(_)
1753            | Token::DoubleQuotedString(_)
1754            | Token::TripleSingleQuotedString(_)
1755            | Token::TripleDoubleQuotedString(_)
1756            | Token::DollarQuotedString(_)
1757            | Token::SingleQuotedByteStringLiteral(_)
1758            | Token::DoubleQuotedByteStringLiteral(_)
1759            | Token::TripleSingleQuotedByteStringLiteral(_)
1760            | Token::TripleDoubleQuotedByteStringLiteral(_)
1761            | Token::SingleQuotedRawStringLiteral(_)
1762            | Token::DoubleQuotedRawStringLiteral(_)
1763            | Token::TripleSingleQuotedRawStringLiteral(_)
1764            | Token::TripleDoubleQuotedRawStringLiteral(_)
1765            | Token::NationalStringLiteral(_)
1766            | Token::QuoteDelimitedStringLiteral(_)
1767            | Token::NationalQuoteDelimitedStringLiteral(_)
1768            | Token::HexStringLiteral(_) => {
1769                self.prev_token();
1770                Ok(Expr::Value(self.parse_value()?))
1771            }
1772            Token::LParen => {
1773                let expr =
1774                    if let Some(expr) = self.try_parse_expr_sub_query()? {
1775                        expr
1776                    } else if let Some(lambda) = self.try_parse_lambda()? {
1777                        return Ok(lambda);
1778                    } else {
1779                        let exprs = self.parse_comma_separated(Parser::parse_expr)?;
1780                        match exprs.len() {
1781                            0 => return Err(ParserError::ParserError(
1782                                "Internal parser error: parse_comma_separated returned empty list"
1783                                    .to_string(),
1784                            )),
1785                            1 => Expr::Nested(Box::new(exprs.into_iter().next().unwrap())),
1786                            _ => Expr::Tuple(exprs),
1787                        }
1788                    };
1789                self.expect_token(&Token::RParen)?;
1790                Ok(expr)
1791            }
1792            Token::Placeholder(_) | Token::Colon | Token::AtSign => {
1793                self.prev_token();
1794                Ok(Expr::Value(self.parse_value()?))
1795            }
1796            Token::LBrace => {
1797                self.prev_token();
1798                self.parse_lbrace_expr()
1799            }
1800            _ => self.expected_at("an expression", next_token_index),
1801        }?;
1802
1803        if !self.in_column_definition_state() && self.parse_keyword(Keyword::COLLATE) {
1804            Ok(Expr::Collate {
1805                expr: Box::new(expr),
1806                collation: self.parse_object_name(false)?,
1807            })
1808        } else {
1809            Ok(expr)
1810        }
1811    }
1812
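    /// Parses the string value of a geometric typed string; the type keyword
    /// (e.g. `POINT`) has already been consumed by the caller. A minimal,
    /// illustrative PostgreSQL-style example:
    ///
    /// ```sql
    /// POINT '(1, 2)'
    /// ```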
1813    fn parse_geometric_type(&mut self, kind: GeometricTypeKind) -> Result<Expr, ParserError> {
1814        Ok(Expr::TypedString(TypedString {
1815            data_type: DataType::GeometricType(kind),
1816            value: self.parse_value()?,
1817            uses_odbc_syntax: false,
1818        }))
1819    }
1820
1821    /// Try to parse an [Expr::CompoundFieldAccess] like `a.b.c` or `a.b[1].c`.
1822    /// If all the fields are `Expr::Identifier`s, return an [Expr::CompoundIdentifier] instead.
1823    /// If only the root exists, return the root.
1824    /// Parses compound expressions which may be delimited by period
1825    /// or bracket notation.
1826    /// For example: `a.b.c`, `a.b[1]`.
1827    pub fn parse_compound_expr(
1828        &mut self,
1829        root: Expr,
1830        mut chain: Vec<AccessExpr>,
1831    ) -> Result<Expr, ParserError> {
1832        let mut ending_wildcard: Option<TokenWithSpan> = None;
1833        loop {
1834            if self.consume_token(&Token::Period) {
1835                let next_token = self.peek_token_ref();
1836                match &next_token.token {
1837                    Token::Mul => {
1838                        // Postgres explicitly allows funcnm(tablenm.*); for example,
1839                        // calls to the array_agg function take this code path.
1840                        if dialect_of!(self is PostgreSqlDialect) {
1841                            ending_wildcard = Some(self.next_token());
1842                        } else {
1843                            // Put back the consumed `.` tokens before exiting.
1844                            // If this expression is being parsed in the
1845                            // context of a projection, then the `.*` could imply
1846                            // a wildcard expansion. For example:
1847                            // `SELECT STRUCT('foo').* FROM T`
1848                            self.prev_token(); // .
1849                        }
1850
1851                        break;
1852                    }
1853                    Token::SingleQuotedString(s) => {
1854                        let expr =
1855                            Expr::Identifier(Ident::with_quote_and_span('\'', next_token.span, s));
1856                        chain.push(AccessExpr::Dot(expr));
1857                        self.advance_token(); // The consumed string
1858                    }
1859                    // Fallback to parsing an arbitrary expression.
1860                    _ => match self.parse_subexpr(self.dialect.prec_value(Precedence::Period))? {
1861                        // If we get back a compound field access or identifier,
1862                        // we flatten the nested expression.
1863                        // For example if the current root is `foo`
1864                        // and we get back a compound identifier expression `bar.baz`
1865                        // The full expression should be `foo.bar.baz` (i.e.
1866                        // a root with an access chain with 2 entries) and not
1867                        // `foo.(bar.baz)` (i.e. a root with an access chain with
1868                        // 1 entry).
1869                        Expr::CompoundFieldAccess { root, access_chain } => {
1870                            chain.push(AccessExpr::Dot(*root));
1871                            chain.extend(access_chain);
1872                        }
1873                        Expr::CompoundIdentifier(parts) => chain
1874                            .extend(parts.into_iter().map(Expr::Identifier).map(AccessExpr::Dot)),
1875                        expr => {
1876                            chain.push(AccessExpr::Dot(expr));
1877                        }
1878                    },
1879                }
1880            } else if !self.dialect.supports_partiql()
1881                && self.peek_token_ref().token == Token::LBracket
1882            {
1883                self.parse_multi_dim_subscript(&mut chain)?;
1884            } else {
1885                break;
1886            }
1887        }
1888
1889        let tok_index = self.get_current_index();
1890        if let Some(wildcard_token) = ending_wildcard {
1891            if !Self::is_all_ident(&root, &chain) {
1892                return self.expected("an identifier or a '*' after '.'", self.peek_token());
1893            };
1894            Ok(Expr::QualifiedWildcard(
1895                ObjectName::from(Self::exprs_to_idents(root, chain)?),
1896                AttachedToken(wildcard_token),
1897            ))
1898        } else if self.maybe_parse_outer_join_operator() {
1899            if !Self::is_all_ident(&root, &chain) {
1900                return self.expected_at("column identifier before (+)", tok_index);
1901            };
1902            let expr = if chain.is_empty() {
1903                root
1904            } else {
1905                Expr::CompoundIdentifier(Self::exprs_to_idents(root, chain)?)
1906            };
1907            Ok(Expr::OuterJoin(expr.into()))
1908        } else {
1909            Self::build_compound_expr(root, chain)
1910        }
1911    }
1912
1913    /// Combines a root expression and an access chain into a compound
1914    /// expression, which may be an [Expr::CompoundFieldAccess] or one of the
1915    /// special-cased expressions [Expr::CompoundIdentifier] or
1916    /// [Expr::OuterJoin].
1917    fn build_compound_expr(
1918        root: Expr,
1919        mut access_chain: Vec<AccessExpr>,
1920    ) -> Result<Expr, ParserError> {
1921        if access_chain.is_empty() {
1922            return Ok(root);
1923        }
1924
1925        if Self::is_all_ident(&root, &access_chain) {
1926            return Ok(Expr::CompoundIdentifier(Self::exprs_to_idents(
1927                root,
1928                access_chain,
1929            )?));
1930        }
1931
1932        // Flatten qualified function calls.
1933        // For example, the expression `a.b.c.foo(1,2,3)` should
1934        // represent a function called `a.b.c.foo`, rather than
1935        // a composite expression.
1936        if matches!(root, Expr::Identifier(_))
1937            && matches!(
1938                access_chain.last(),
1939                Some(AccessExpr::Dot(Expr::Function(_)))
1940            )
1941            && access_chain
1942                .iter()
1943                .rev()
1944                .skip(1) // All except the Function
1945                .all(|access| matches!(access, AccessExpr::Dot(Expr::Identifier(_))))
1946        {
1947            let Some(AccessExpr::Dot(Expr::Function(mut func))) = access_chain.pop() else {
1948                return parser_err!("expected function expression", root.span().start);
1949            };
1950
1951            let compound_func_name = [root]
1952                .into_iter()
1953                .chain(access_chain.into_iter().flat_map(|access| match access {
1954                    AccessExpr::Dot(expr) => Some(expr),
1955                    _ => None,
1956                }))
1957                .flat_map(|expr| match expr {
1958                    Expr::Identifier(ident) => Some(ident),
1959                    _ => None,
1960                })
1961                .map(ObjectNamePart::Identifier)
1962                .chain(func.name.0)
1963                .collect::<Vec<_>>();
1964            func.name = ObjectName(compound_func_name);
1965
1966            return Ok(Expr::Function(func));
1967        }
1968
1969        // Flatten qualified outer join expressions.
1970        // For example, the expression `T.foo(+)` should
1971        // represent an outer join on the column name `T.foo`
1972        // rather than a composite expression.
1973        if access_chain.len() == 1
1974            && matches!(
1975                access_chain.last(),
1976                Some(AccessExpr::Dot(Expr::OuterJoin(_)))
1977            )
1978        {
1979            let Some(AccessExpr::Dot(Expr::OuterJoin(inner_expr))) = access_chain.pop() else {
1980                return parser_err!("expected (+) expression", root.span().start);
1981            };
1982
1983            if !Self::is_all_ident(&root, &[]) {
1984                return parser_err!("column identifier before (+)", root.span().start);
1985            };
1986
1987            let token_start = root.span().start;
1988            let mut idents = Self::exprs_to_idents(root, vec![])?;
1989            match *inner_expr {
1990                Expr::CompoundIdentifier(suffix) => idents.extend(suffix),
1991                Expr::Identifier(suffix) => idents.push(suffix),
1992                _ => {
1993                    return parser_err!("column identifier before (+)", token_start);
1994                }
1995            }
1996
1997            return Ok(Expr::OuterJoin(Expr::CompoundIdentifier(idents).into()));
1998        }
1999
2000        Ok(Expr::CompoundFieldAccess {
2001            root: Box::new(root),
2002            access_chain,
2003        })
2004    }
2005
2006    fn keyword_to_modifier(k: Keyword) -> Option<ContextModifier> {
2007        match k {
2008            Keyword::LOCAL => Some(ContextModifier::Local),
2009            Keyword::GLOBAL => Some(ContextModifier::Global),
2010            Keyword::SESSION => Some(ContextModifier::Session),
2011            _ => None,
2012        }
2013    }
2014
2015    /// Check if the root is an identifier and all fields are identifiers.
2016    fn is_all_ident(root: &Expr, fields: &[AccessExpr]) -> bool {
2017        if !matches!(root, Expr::Identifier(_)) {
2018            return false;
2019        }
2020        fields
2021            .iter()
2022            .all(|x| matches!(x, AccessExpr::Dot(Expr::Identifier(_))))
2023    }
2024
2025    /// Convert a root and a list of fields to a list of identifiers.
2026    fn exprs_to_idents(root: Expr, fields: Vec<AccessExpr>) -> Result<Vec<Ident>, ParserError> {
2027        let mut idents = vec![];
2028        if let Expr::Identifier(root) = root {
2029            idents.push(root);
2030            for x in fields {
2031                if let AccessExpr::Dot(Expr::Identifier(ident)) = x {
2032                    idents.push(ident);
2033                } else {
2034                    return parser_err!(
2035                        format!("Expected identifier, found: {}", x),
2036                        x.span().start
2037                    );
2038                }
2039            }
2040            Ok(idents)
2041        } else {
2042            parser_err!(
2043                format!("Expected identifier, found: {}", root),
2044                root.span().start
2045            )
2046        }
2047    }
2048
2049    /// Returns true if the next tokens indicate the outer join operator `(+)`.
2050    fn peek_outer_join_operator(&mut self) -> bool {
2051        if !self.dialect.supports_outer_join_operator() {
2052            return false;
2053        }
2054
2055        let [maybe_lparen, maybe_plus, maybe_rparen] = self.peek_tokens_ref();
2056        Token::LParen == maybe_lparen.token
2057            && Token::Plus == maybe_plus.token
2058            && Token::RParen == maybe_rparen.token
2059    }
2060
2061    /// If the next tokens indicate the outer join operator `(+)`, consume
2062    /// the tokens and return true.
2063    fn maybe_parse_outer_join_operator(&mut self) -> bool {
2064        self.dialect.supports_outer_join_operator()
2065            && self.consume_tokens(&[Token::LParen, Token::Plus, Token::RParen])
2066    }
2067
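    /// Parses a parenthesized, comma-separated list of utility options, where
    /// each option is a name with an optional argument. A minimal illustrative
    /// example (option names and values are placeholders):
    ///
    /// ```sql
    /// (ANALYZE, VERBOSE true, FORMAT JSON)
    /// ```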
2068    pub fn parse_utility_options(&mut self) -> Result<Vec<UtilityOption>, ParserError> {
2069        self.expect_token(&Token::LParen)?;
2070        let options = self.parse_comma_separated(Self::parse_utility_option)?;
2071        self.expect_token(&Token::RParen)?;
2072
2073        Ok(options)
2074    }
2075
2076    fn parse_utility_option(&mut self) -> Result<UtilityOption, ParserError> {
2077        let name = self.parse_identifier()?;
2078
2079        let next_token = self.peek_token();
2080        if next_token == Token::Comma || next_token == Token::RParen {
2081            return Ok(UtilityOption { name, arg: None });
2082        }
2083        let arg = self.parse_expr()?;
2084
2085        Ok(UtilityOption {
2086            name,
2087            arg: Some(arg),
2088        })
2089    }
2090
2091    fn try_parse_expr_sub_query(&mut self) -> Result<Option<Expr>, ParserError> {
2092        if !self.peek_sub_query() {
2093            return Ok(None);
2094        }
2095
2096        Ok(Some(Expr::Subquery(self.parse_query()?)))
2097    }
2098
2099    fn try_parse_lambda(&mut self) -> Result<Option<Expr>, ParserError> {
2100        if !self.dialect.supports_lambda_functions() {
2101            return Ok(None);
2102        }
2103        self.maybe_parse(|p| {
2104            let params = p.parse_comma_separated(|p| p.parse_identifier())?;
2105            p.expect_token(&Token::RParen)?;
2106            p.expect_token(&Token::Arrow)?;
2107            let expr = p.parse_expr()?;
2108            Ok(Expr::Lambda(LambdaFunction {
2109                params: OneOrManyWithParens::Many(params),
2110                body: Box::new(expr),
2111            }))
2112        })
2113    }
2114
2115    /// Tries to parse the body of an [ODBC escaping sequence],
2116    /// i.e. without the enclosing braces.
2117    ///
2118    /// Currently implemented: scalar function calls; date, time, and timestamp literals.
2119    ///
2120    /// [ODBC escaping sequence]: https://learn.microsoft.com/en-us/sql/odbc/reference/develop-app/escape-sequences-in-odbc?view=sql-server-2017
2121    fn maybe_parse_odbc_body(&mut self) -> Result<Option<Expr>, ParserError> {
2122        // Attempt 1: Try to parse it as a function.
2123        if let Some(expr) = self.maybe_parse_odbc_fn_body()? {
2124            return Ok(Some(expr));
2125        }
2126        // Attempt 2: Try to parse it as a Date, Time or Timestamp Literal
2127        self.maybe_parse_odbc_body_datetime()
2128    }
2129
2130    /// Tries to parse the body of one of the [ODBC Date, Time, and Timestamp Literals].
2131    ///
2132    /// ```sql
2133    /// {d '2025-07-17'}
2134    /// {t '14:12:01'}
2135    /// {ts '2025-07-17 14:12:01'}
2136    /// ```
2137    ///
2138    /// [ODBC Date, Time, and Timestamp Literals]:
2139    /// https://learn.microsoft.com/en-us/sql/odbc/reference/develop-app/date-time-and-timestamp-literals?view=sql-server-2017
2140    fn maybe_parse_odbc_body_datetime(&mut self) -> Result<Option<Expr>, ParserError> {
2141        self.maybe_parse(|p| {
2142            let token = p.next_token().clone();
2143            let word_string = token.token.to_string();
2144            let data_type = match word_string.as_str() {
2145                "t" => DataType::Time(None, TimezoneInfo::None),
2146                "d" => DataType::Date,
2147                "ts" => DataType::Timestamp(None, TimezoneInfo::None),
2148                _ => return p.expected("ODBC datetime keyword (t, d, or ts)", token),
2149            };
2150            let value = p.parse_value()?;
2151            Ok(Expr::TypedString(TypedString {
2152                data_type,
2153                value,
2154                uses_odbc_syntax: true,
2155            }))
2156        })
2157    }
2158
2159    /// Tries to parse the body of an [ODBC function] call,
2160    /// i.e. without the enclosing braces.
2161    ///
2162    /// ```sql
2163    /// fn myfunc(1,2,3)
2164    /// ```
2165    ///
2166    /// [ODBC function]: https://learn.microsoft.com/en-us/sql/odbc/reference/develop-app/scalar-function-calls?view=sql-server-2017
2167    fn maybe_parse_odbc_fn_body(&mut self) -> Result<Option<Expr>, ParserError> {
2168        self.maybe_parse(|p| {
2169            p.expect_keyword(Keyword::FN)?;
2170            let fn_name = p.parse_object_name(false)?;
2171            let mut fn_call = p.parse_function_call(fn_name)?;
2172            fn_call.uses_odbc_syntax = true;
2173            Ok(Expr::Function(fn_call))
2174        })
2175    }
2176
2177    pub fn parse_function(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2178        self.parse_function_call(name).map(Expr::Function)
2179    }
2180
2181    fn parse_function_call(&mut self, name: ObjectName) -> Result<Function, ParserError> {
2182        self.expect_token(&Token::LParen)?;
2183
2184        // Snowflake permits a subquery to be passed as an argument without
2185        // an enclosing set of parens if it's the only argument.
2186        if dialect_of!(self is SnowflakeDialect) && self.peek_sub_query() {
2187            let subquery = self.parse_query()?;
2188            self.expect_token(&Token::RParen)?;
2189            return Ok(Function {
2190                name,
2191                uses_odbc_syntax: false,
2192                parameters: FunctionArguments::None,
2193                args: FunctionArguments::Subquery(subquery),
2194                filter: None,
2195                null_treatment: None,
2196                over: None,
2197                within_group: vec![],
2198            });
2199        }
2200
2201        let mut args = self.parse_function_argument_list()?;
2202        let mut parameters = FunctionArguments::None;
2203        // ClickHouse aggregations support parametric functions like `HISTOGRAM(0.5, 0.6)(x, y)`,
2204        // in which `(0.5, 0.6)` are the parameters to the function.
2205        if dialect_of!(self is ClickHouseDialect | GenericDialect)
2206            && self.consume_token(&Token::LParen)
2207        {
2208            parameters = FunctionArguments::List(args);
2209            args = self.parse_function_argument_list()?;
2210        }
2211
2212        let within_group = if self.parse_keywords(&[Keyword::WITHIN, Keyword::GROUP]) {
2213            self.expect_token(&Token::LParen)?;
2214            self.expect_keywords(&[Keyword::ORDER, Keyword::BY])?;
2215            let order_by = self.parse_comma_separated(Parser::parse_order_by_expr)?;
2216            self.expect_token(&Token::RParen)?;
2217            order_by
2218        } else {
2219            vec![]
2220        };
2221
2222        let filter = if self.dialect.supports_filter_during_aggregation()
2223            && self.parse_keyword(Keyword::FILTER)
2224            && self.consume_token(&Token::LParen)
2225            && self.parse_keyword(Keyword::WHERE)
2226        {
2227            let filter = Some(Box::new(self.parse_expr()?));
2228            self.expect_token(&Token::RParen)?;
2229            filter
2230        } else {
2231            None
2232        };
2233
2234        // Syntax for null treatment shows up either in the args list
2235        // or after the function call, but not both.
2236        let null_treatment = if args
2237            .clauses
2238            .iter()
2239            .all(|clause| !matches!(clause, FunctionArgumentClause::IgnoreOrRespectNulls(_)))
2240        {
2241            self.parse_null_treatment()?
2242        } else {
2243            None
2244        };
2245
2246        let over = if self.parse_keyword(Keyword::OVER) {
2247            if self.consume_token(&Token::LParen) {
2248                let window_spec = self.parse_window_spec()?;
2249                Some(WindowType::WindowSpec(window_spec))
2250            } else {
2251                Some(WindowType::NamedWindow(self.parse_identifier()?))
2252            }
2253        } else {
2254            None
2255        };
2256
2257        Ok(Function {
2258            name,
2259            uses_odbc_syntax: false,
2260            parameters,
2261            args: FunctionArguments::List(args),
2262            null_treatment,
2263            filter,
2264            over,
2265            within_group,
2266        })
2267    }
2268
2269    /// Optionally parses a null treatment clause.
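    ///
    /// For example (illustrative): `IGNORE NULLS` or `RESPECT NULLS`, as in
    /// `LAST_VALUE(x) IGNORE NULLS OVER (ORDER BY ts)`.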
2270    fn parse_null_treatment(&mut self) -> Result<Option<NullTreatment>, ParserError> {
2271        match self.parse_one_of_keywords(&[Keyword::RESPECT, Keyword::IGNORE]) {
2272            Some(keyword) => {
2273                self.expect_keyword_is(Keyword::NULLS)?;
2274
2275                Ok(match keyword {
2276                    Keyword::RESPECT => Some(NullTreatment::RespectNulls),
2277                    Keyword::IGNORE => Some(NullTreatment::IgnoreNulls),
2278                    _ => None,
2279                })
2280            }
2281            None => Ok(None),
2282        }
2283    }
2284
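    /// Parses a datetime function such as `CURRENT_TIMESTAMP`, with or without
    /// a parenthesized argument list. Minimal illustrative examples:
    ///
    /// ```sql
    /// CURRENT_TIMESTAMP
    /// CURRENT_TIME(3)
    /// ```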
2285    pub fn parse_time_functions(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2286        let args = if self.consume_token(&Token::LParen) {
2287            FunctionArguments::List(self.parse_function_argument_list()?)
2288        } else {
2289            FunctionArguments::None
2290        };
2291        Ok(Expr::Function(Function {
2292            name,
2293            uses_odbc_syntax: false,
2294            parameters: FunctionArguments::None,
2295            args,
2296            filter: None,
2297            over: None,
2298            null_treatment: None,
2299            within_group: vec![],
2300        }))
2301    }
2302
2303    pub fn parse_window_frame_units(&mut self) -> Result<WindowFrameUnits, ParserError> {
2304        let next_token = self.next_token();
2305        match &next_token.token {
2306            Token::Word(w) => match w.keyword {
2307                Keyword::ROWS => Ok(WindowFrameUnits::Rows),
2308                Keyword::RANGE => Ok(WindowFrameUnits::Range),
2309                Keyword::GROUPS => Ok(WindowFrameUnits::Groups),
2310                _ => self.expected("ROWS, RANGE, GROUPS", next_token)?,
2311            },
2312            _ => self.expected("ROWS, RANGE, GROUPS", next_token),
2313        }
2314    }
2315
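    /// Parses a window frame clause, i.e. the frame units followed by either a
    /// single bound or a `BETWEEN ... AND ...` pair. A minimal illustrative
    /// example:
    ///
    /// ```sql
    /// ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW
    /// ```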
2316    pub fn parse_window_frame(&mut self) -> Result<WindowFrame, ParserError> {
2317        let units = self.parse_window_frame_units()?;
2318        let (start_bound, end_bound) = if self.parse_keyword(Keyword::BETWEEN) {
2319            let start_bound = self.parse_window_frame_bound()?;
2320            self.expect_keyword_is(Keyword::AND)?;
2321            let end_bound = Some(self.parse_window_frame_bound()?);
2322            (start_bound, end_bound)
2323        } else {
2324            (self.parse_window_frame_bound()?, None)
2325        };
2326        Ok(WindowFrame {
2327            units,
2328            start_bound,
2329            end_bound,
2330        })
2331    }
2332
2333    /// Parse `CURRENT ROW` or `{ <positive number> | UNBOUNDED } { PRECEDING | FOLLOWING }`
2334    pub fn parse_window_frame_bound(&mut self) -> Result<WindowFrameBound, ParserError> {
2335        if self.parse_keywords(&[Keyword::CURRENT, Keyword::ROW]) {
2336            Ok(WindowFrameBound::CurrentRow)
2337        } else {
2338            let rows = if self.parse_keyword(Keyword::UNBOUNDED) {
2339                None
2340            } else {
2341                Some(Box::new(match self.peek_token().token {
2342                    Token::SingleQuotedString(_) => self.parse_interval()?,
2343                    _ => self.parse_expr()?,
2344                }))
2345            };
2346            if self.parse_keyword(Keyword::PRECEDING) {
2347                Ok(WindowFrameBound::Preceding(rows))
2348            } else if self.parse_keyword(Keyword::FOLLOWING) {
2349                Ok(WindowFrameBound::Following(rows))
2350            } else {
2351                self.expected("PRECEDING or FOLLOWING", self.peek_token())
2352            }
2353        }
2354    }
2355
2356    /// Parse a GROUP BY expression, which can be one of: grouping sets, ROLLUP, CUBE, or a simple expression.
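    ///
    /// Minimal illustrative examples, shown in the context of a `GROUP BY`
    /// clause:
    ///
    /// ```sql
    /// GROUP BY GROUPING SETS ((a), (b))
    /// GROUP BY ROLLUP (a, b)
    /// GROUP BY CUBE (a, b)
    /// GROUP BY (), name
    /// ```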
2357    fn parse_group_by_expr(&mut self) -> Result<Expr, ParserError> {
2358        if self.dialect.supports_group_by_expr() {
2359            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
2360                self.expect_token(&Token::LParen)?;
2361                let result = self.parse_comma_separated(|p| p.parse_tuple(false, true))?;
2362                self.expect_token(&Token::RParen)?;
2363                Ok(Expr::GroupingSets(result))
2364            } else if self.parse_keyword(Keyword::CUBE) {
2365                self.expect_token(&Token::LParen)?;
2366                let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2367                self.expect_token(&Token::RParen)?;
2368                Ok(Expr::Cube(result))
2369            } else if self.parse_keyword(Keyword::ROLLUP) {
2370                self.expect_token(&Token::LParen)?;
2371                let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2372                self.expect_token(&Token::RParen)?;
2373                Ok(Expr::Rollup(result))
2374            } else if self.consume_tokens(&[Token::LParen, Token::RParen]) {
2375                // PostgreSQL allows an empty tuple as a group by expression,
2376                // e.g. `GROUP BY (), name`. See the GROUP BY clause section in
2377                // [PostgreSQL](https://www.postgresql.org/docs/16/sql-select.html).
2378                Ok(Expr::Tuple(vec![]))
2379            } else {
2380                self.parse_expr()
2381            }
2382        } else {
2383            // TODO parse rollup for other dialects
2384            self.parse_expr()
2385        }
2386    }
2387
2388    /// Parse a tuple delimited by `(` and `)`.
2389    /// If `lift_singleton` is true, a bare expression without parentheses is also accepted and lifted into a tuple of length 1; otherwise the opening `(` is required.
2390    /// If `allow_empty` is true, an empty tuple `()` is allowed.
2391    fn parse_tuple(
2392        &mut self,
2393        lift_singleton: bool,
2394        allow_empty: bool,
2395    ) -> Result<Vec<Expr>, ParserError> {
2396        if lift_singleton {
2397            if self.consume_token(&Token::LParen) {
2398                let result = if allow_empty && self.consume_token(&Token::RParen) {
2399                    vec![]
2400                } else {
2401                    let result = self.parse_comma_separated(Parser::parse_expr)?;
2402                    self.expect_token(&Token::RParen)?;
2403                    result
2404                };
2405                Ok(result)
2406            } else {
2407                Ok(vec![self.parse_expr()?])
2408            }
2409        } else {
2410            self.expect_token(&Token::LParen)?;
2411            let result = if allow_empty && self.consume_token(&Token::RParen) {
2412                vec![]
2413            } else {
2414                let result = self.parse_comma_separated(Parser::parse_expr)?;
2415                self.expect_token(&Token::RParen)?;
2416                result
2417            };
2418            Ok(result)
2419        }
2420    }
2421
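    /// Parses a `CASE` expression; the `CASE` keyword itself has already been
    /// consumed by the caller. A minimal illustrative example:
    ///
    /// ```sql
    /// CASE WHEN a > 0 THEN 'pos' WHEN a < 0 THEN 'neg' ELSE 'zero' END
    /// ```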
2422    pub fn parse_case_expr(&mut self) -> Result<Expr, ParserError> {
2423        let case_token = AttachedToken(self.get_current_token().clone());
2424        let mut operand = None;
2425        if !self.parse_keyword(Keyword::WHEN) {
2426            operand = Some(Box::new(self.parse_expr()?));
2427            self.expect_keyword_is(Keyword::WHEN)?;
2428        }
2429        let mut conditions = vec![];
2430        loop {
2431            let condition = self.parse_expr()?;
2432            self.expect_keyword_is(Keyword::THEN)?;
2433            let result = self.parse_expr()?;
2434            conditions.push(CaseWhen { condition, result });
2435            if !self.parse_keyword(Keyword::WHEN) {
2436                break;
2437            }
2438        }
2439        let else_result = if self.parse_keyword(Keyword::ELSE) {
2440            Some(Box::new(self.parse_expr()?))
2441        } else {
2442            None
2443        };
2444        let end_token = AttachedToken(self.expect_keyword(Keyword::END)?);
2445        Ok(Expr::Case {
2446            case_token,
2447            end_token,
2448            operand,
2449            conditions,
2450            else_result,
2451        })
2452    }
2453
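    /// Parses an optional `FORMAT` clause following a cast, e.g. (illustrative)
    /// `FORMAT 'YYYY-MM-DD'`, optionally followed by `AT TIME ZONE 'UTC'`.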
2454    pub fn parse_optional_cast_format(&mut self) -> Result<Option<CastFormat>, ParserError> {
2455        if self.parse_keyword(Keyword::FORMAT) {
2456            let value = self.parse_value()?.value;
2457            match self.parse_optional_time_zone()? {
2458                Some(tz) => Ok(Some(CastFormat::ValueAtTimeZone(value, tz))),
2459                None => Ok(Some(CastFormat::Value(value))),
2460            }
2461        } else {
2462            Ok(None)
2463        }
2464    }
2465
2466    pub fn parse_optional_time_zone(&mut self) -> Result<Option<Value>, ParserError> {
2467        if self.parse_keywords(&[Keyword::AT, Keyword::TIME, Keyword::ZONE]) {
2468            self.parse_value().map(|v| Some(v.value))
2469        } else {
2470            Ok(None)
2471        }
2472    }
2473
2474    /// Parse an MSSQL-style `CONVERT` function, where the target type comes first: `CONVERT(data_type, expr [, style ...])`.
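    ///
    /// A minimal illustrative example (the style argument is optional):
    ///
    /// ```sql
    /// CONVERT(VARCHAR(10), created_at, 120)
    /// ```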
2475    fn parse_mssql_convert(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2476        self.expect_token(&Token::LParen)?;
2477        let data_type = self.parse_data_type()?;
2478        self.expect_token(&Token::Comma)?;
2479        let expr = self.parse_expr()?;
2480        let styles = if self.consume_token(&Token::Comma) {
2481            self.parse_comma_separated(Parser::parse_expr)?
2482        } else {
2483            Default::default()
2484        };
2485        self.expect_token(&Token::RParen)?;
2486        Ok(Expr::Convert {
2487            is_try,
2488            expr: Box::new(expr),
2489            data_type: Some(data_type),
2490            charset: None,
2491            target_before_value: true,
2492            styles,
2493        })
2494    }
2495
2496    /// Parse a SQL CONVERT function:
2497    ///  - `CONVERT('héhé' USING utf8mb4)` (MySQL)
2498    ///  - `CONVERT('héhé', CHAR CHARACTER SET utf8mb4)` (MySQL)
2499    ///  - `CONVERT(DECIMAL(10, 5), 42)` (MSSQL) - the type comes first
2500    pub fn parse_convert_expr(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2501        if self.dialect.convert_type_before_value() {
2502            return self.parse_mssql_convert(is_try);
2503        }
2504        self.expect_token(&Token::LParen)?;
2505        let expr = self.parse_expr()?;
2506        if self.parse_keyword(Keyword::USING) {
2507            let charset = self.parse_object_name(false)?;
2508            self.expect_token(&Token::RParen)?;
2509            return Ok(Expr::Convert {
2510                is_try,
2511                expr: Box::new(expr),
2512                data_type: None,
2513                charset: Some(charset),
2514                target_before_value: false,
2515                styles: vec![],
2516            });
2517        }
2518        self.expect_token(&Token::Comma)?;
2519        let data_type = self.parse_data_type()?;
2520        let charset = if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
2521            Some(self.parse_object_name(false)?)
2522        } else {
2523            None
2524        };
2525        self.expect_token(&Token::RParen)?;
2526        Ok(Expr::Convert {
2527            is_try,
2528            expr: Box::new(expr),
2529            data_type: Some(data_type),
2530            charset,
2531            target_before_value: false,
2532            styles: vec![],
2533        })
2534    }
2535
2536    /// Parse a SQL CAST function e.g. `CAST(expr AS FLOAT)`
2537    pub fn parse_cast_expr(&mut self, kind: CastKind) -> Result<Expr, ParserError> {
2538        self.expect_token(&Token::LParen)?;
2539        let expr = self.parse_expr()?;
2540        self.expect_keyword_is(Keyword::AS)?;
2541        let data_type = self.parse_data_type()?;
2542        let format = self.parse_optional_cast_format()?;
2543        self.expect_token(&Token::RParen)?;
2544        Ok(Expr::Cast {
2545            kind,
2546            expr: Box::new(expr),
2547            data_type,
2548            format,
2549        })
2550    }
2551
2552    /// Parse a SQL EXISTS expression e.g. `WHERE EXISTS(SELECT ...)`.
2553    pub fn parse_exists_expr(&mut self, negated: bool) -> Result<Expr, ParserError> {
2554        self.expect_token(&Token::LParen)?;
2555        let exists_node = Expr::Exists {
2556            negated,
2557            subquery: self.parse_query()?,
2558        };
2559        self.expect_token(&Token::RParen)?;
2560        Ok(exists_node)
2561    }
2562
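    /// Parses the parenthesized body of an `EXTRACT` expression; the `EXTRACT`
    /// keyword has already been consumed. A minimal illustrative example:
    ///
    /// ```sql
    /// EXTRACT(YEAR FROM order_date)
    /// ```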
2563    pub fn parse_extract_expr(&mut self) -> Result<Expr, ParserError> {
2564        self.expect_token(&Token::LParen)?;
2565        let field = self.parse_date_time_field()?;
2566
2567        let syntax = if self.parse_keyword(Keyword::FROM) {
2568            ExtractSyntax::From
2569        } else if self.consume_token(&Token::Comma)
2570            && dialect_of!(self is SnowflakeDialect | GenericDialect)
2571        {
2572            ExtractSyntax::Comma
2573        } else {
2574            return Err(ParserError::ParserError(
2575                "Expected 'FROM' or ','".to_string(),
2576            ));
2577        };
2578
2579        let expr = self.parse_expr()?;
2580        self.expect_token(&Token::RParen)?;
2581        Ok(Expr::Extract {
2582            field,
2583            expr: Box::new(expr),
2584            syntax,
2585        })
2586    }
2587
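    /// Parses the parenthesized body of a `CEIL` or `FLOOR` call; the keyword
    /// has already been consumed. Minimal illustrative examples:
    ///
    /// ```sql
    /// CEIL(ts TO DAY)
    /// FLOOR(price, 2)
    /// ```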
2588    pub fn parse_ceil_floor_expr(&mut self, is_ceil: bool) -> Result<Expr, ParserError> {
2589        self.expect_token(&Token::LParen)?;
2590        let expr = self.parse_expr()?;
2591        // Parse `CEIL/FLOOR(expr)`
2592        let field = if self.parse_keyword(Keyword::TO) {
2593            // Parse `CEIL/FLOOR(expr TO DateTimeField)`
2594            CeilFloorKind::DateTimeField(self.parse_date_time_field()?)
2595        } else if self.consume_token(&Token::Comma) {
2596            // Parse `CEIL/FLOOR(expr, scale)`
2597            match self.parse_value()?.value {
2598                Value::Number(n, s) => CeilFloorKind::Scale(Value::Number(n, s)),
2599                _ => {
2600                    return Err(ParserError::ParserError(
2601                        "Scale field can only be of number type".to_string(),
2602                    ))
2603                }
2604            }
2605        } else {
2606            CeilFloorKind::DateTimeField(DateTimeField::NoDateTime)
2607        };
2608        self.expect_token(&Token::RParen)?;
2609        if is_ceil {
2610            Ok(Expr::Ceil {
2611                expr: Box::new(expr),
2612                field,
2613            })
2614        } else {
2615            Ok(Expr::Floor {
2616                expr: Box::new(expr),
2617                field,
2618            })
2619        }
2620    }
2621
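    /// Parses a `POSITION(expr IN expr)` call (the `POSITION` keyword has
    /// already been consumed), falling back to an ordinary function call when
    /// the special `IN` form does not apply. A minimal illustrative example:
    ///
    /// ```sql
    /// POSITION('@' IN email_address)
    /// ```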
2622    pub fn parse_position_expr(&mut self, ident: Ident) -> Result<Expr, ParserError> {
2623        let between_prec = self.dialect.prec_value(Precedence::Between);
2624        let position_expr = self.maybe_parse(|p| {
2625            // Parse the special form, e.g. `SELECT POSITION('@' in field)`
2626            p.expect_token(&Token::LParen)?;
2627
2628            // Parse the subexpr till the IN keyword
2629            let expr = p.parse_subexpr(between_prec)?;
2630            p.expect_keyword_is(Keyword::IN)?;
2631            let from = p.parse_expr()?;
2632            p.expect_token(&Token::RParen)?;
2633            Ok(Expr::Position {
2634                expr: Box::new(expr),
2635                r#in: Box::new(from),
2636            })
2637        })?;
2638        match position_expr {
2639            Some(expr) => Ok(expr),
2640            // Snowflake supports `position` as an ordinary function call
2641            // without the special `IN` syntax.
2642            None => self.parse_function(ObjectName::from(vec![ident])),
2643        }
2644    }
2645
2646    // { SUBSTRING | SUBSTR } (<EXPR> [FROM 1] [FOR 3])
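    // e.g. (illustrative): SUBSTRING(name FROM 1 FOR 3) or SUBSTR(name, 1, 3)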
2647    pub fn parse_substring(&mut self) -> Result<Expr, ParserError> {
2648        let shorthand = match self.expect_one_of_keywords(&[Keyword::SUBSTR, Keyword::SUBSTRING])? {
2649            Keyword::SUBSTR => true,
2650            Keyword::SUBSTRING => false,
2651            _ => {
2652                self.prev_token();
2653                return self.expected("SUBSTR or SUBSTRING", self.peek_token());
2654            }
2655        };
2656        self.expect_token(&Token::LParen)?;
2657        let expr = self.parse_expr()?;
2658        let mut from_expr = None;
2659        let special = self.consume_token(&Token::Comma);
2660        if special || self.parse_keyword(Keyword::FROM) {
2661            from_expr = Some(self.parse_expr()?);
2662        }
2663
2664        let mut to_expr = None;
2665        if self.parse_keyword(Keyword::FOR) || self.consume_token(&Token::Comma) {
2666            to_expr = Some(self.parse_expr()?);
2667        }
2668        self.expect_token(&Token::RParen)?;
2669
2670        Ok(Expr::Substring {
2671            expr: Box::new(expr),
2672            substring_from: from_expr.map(Box::new),
2673            substring_for: to_expr.map(Box::new),
2674            special,
2675            shorthand,
2676        })
2677    }
2678
2679    pub fn parse_overlay_expr(&mut self) -> Result<Expr, ParserError> {
2680        // PARSE OVERLAY (EXPR PLACING EXPR FROM 1 [FOR 3])
2681        self.expect_token(&Token::LParen)?;
2682        let expr = self.parse_expr()?;
2683        self.expect_keyword_is(Keyword::PLACING)?;
2684        let what_expr = self.parse_expr()?;
2685        self.expect_keyword_is(Keyword::FROM)?;
2686        let from_expr = self.parse_expr()?;
2687        let mut for_expr = None;
2688        if self.parse_keyword(Keyword::FOR) {
2689            for_expr = Some(self.parse_expr()?);
2690        }
2691        self.expect_token(&Token::RParen)?;
2692
2693        Ok(Expr::Overlay {
2694            expr: Box::new(expr),
2695            overlay_what: Box::new(what_expr),
2696            overlay_from: Box::new(from_expr),
2697            overlay_for: for_expr.map(Box::new),
2698        })
2699    }
2700
2701    /// ```sql
2702    /// TRIM ([BOTH | LEADING | TRAILING] ['text' FROM] 'text')
2703    /// TRIM ('text')
2704    /// TRIM(<expr> [, characters]) -- Snowflake, BigQuery, or DuckDB only
2705    /// ```
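    ///
    /// For example (illustrative):
    /// ```sql
    /// TRIM(BOTH 'x' FROM 'xxSQLxx')
    /// TRIM('   padded   ')
    /// TRIM(col, 'chars') -- Snowflake/BigQuery/DuckDB characters list
    /// ```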
2706    pub fn parse_trim_expr(&mut self) -> Result<Expr, ParserError> {
2707        self.expect_token(&Token::LParen)?;
2708        let mut trim_where = None;
2709        if let Token::Word(word) = self.peek_token().token {
2710            if [Keyword::BOTH, Keyword::LEADING, Keyword::TRAILING].contains(&word.keyword) {
2711                trim_where = Some(self.parse_trim_where()?);
2712            }
2713        }
2714        let expr = self.parse_expr()?;
2715        if self.parse_keyword(Keyword::FROM) {
2716            let trim_what = Box::new(expr);
2717            let expr = self.parse_expr()?;
2718            self.expect_token(&Token::RParen)?;
2719            Ok(Expr::Trim {
2720                expr: Box::new(expr),
2721                trim_where,
2722                trim_what: Some(trim_what),
2723                trim_characters: None,
2724            })
2725        } else if self.consume_token(&Token::Comma)
2726            && dialect_of!(self is DuckDbDialect | SnowflakeDialect | BigQueryDialect | GenericDialect)
2727        {
2728            let characters = self.parse_comma_separated(Parser::parse_expr)?;
2729            self.expect_token(&Token::RParen)?;
2730            Ok(Expr::Trim {
2731                expr: Box::new(expr),
2732                trim_where: None,
2733                trim_what: None,
2734                trim_characters: Some(characters),
2735            })
2736        } else {
2737            self.expect_token(&Token::RParen)?;
2738            Ok(Expr::Trim {
2739                expr: Box::new(expr),
2740                trim_where,
2741                trim_what: None,
2742                trim_characters: None,
2743            })
2744        }
2745    }
2746
2747    pub fn parse_trim_where(&mut self) -> Result<TrimWhereField, ParserError> {
2748        let next_token = self.next_token();
2749        match &next_token.token {
2750            Token::Word(w) => match w.keyword {
2751                Keyword::BOTH => Ok(TrimWhereField::Both),
2752                Keyword::LEADING => Ok(TrimWhereField::Leading),
2753                Keyword::TRAILING => Ok(TrimWhereField::Trailing),
2754                _ => self.expected("trim_where field", next_token)?,
2755            },
2756            _ => self.expected("trim_where field", next_token),
2757        }
2758    }
2759
2760    /// Parses an array expression `[ex1, ex2, ..]`
2761    /// If `named` is `true`, the array came from an expression like `ARRAY[ex1, ex2]`.
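    /// For example (illustrative): `[1, 2, 3]`, or `ARRAY[1, 2, 3]` when `named` is `true`.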
2762    pub fn parse_array_expr(&mut self, named: bool) -> Result<Expr, ParserError> {
2763        let exprs = self.parse_comma_separated0(Parser::parse_expr, Token::RBracket)?;
2764        self.expect_token(&Token::RBracket)?;
2765        Ok(Expr::Array(Array { elem: exprs, named }))
2766    }
2767
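    /// Parses the `ON OVERFLOW` clause of `LISTAGG`, e.g. (illustrative) the tail of
    /// `LISTAGG(name, ',' ON OVERFLOW TRUNCATE '...' WITH COUNT)`.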
2768    pub fn parse_listagg_on_overflow(&mut self) -> Result<Option<ListAggOnOverflow>, ParserError> {
2769        if self.parse_keywords(&[Keyword::ON, Keyword::OVERFLOW]) {
2770            if self.parse_keyword(Keyword::ERROR) {
2771                Ok(Some(ListAggOnOverflow::Error))
2772            } else {
2773                self.expect_keyword_is(Keyword::TRUNCATE)?;
2774                let filler = match self.peek_token().token {
2775                    Token::Word(w)
2776                        if w.keyword == Keyword::WITH || w.keyword == Keyword::WITHOUT =>
2777                    {
2778                        None
2779                    }
2780                    Token::SingleQuotedString(_)
2781                    | Token::EscapedStringLiteral(_)
2782                    | Token::UnicodeStringLiteral(_)
2783                    | Token::NationalStringLiteral(_)
2784                    | Token::QuoteDelimitedStringLiteral(_)
2785                    | Token::NationalQuoteDelimitedStringLiteral(_)
2786                    | Token::HexStringLiteral(_) => Some(Box::new(self.parse_expr()?)),
2787                    _ => self.expected(
2788                        "either filler, WITH, or WITHOUT in LISTAGG",
2789                        self.peek_token(),
2790                    )?,
2791                };
2792                let with_count = self.parse_keyword(Keyword::WITH);
2793                if !with_count && !self.parse_keyword(Keyword::WITHOUT) {
2794                    self.expected("either WITH or WITHOUT in LISTAGG", self.peek_token())?;
2795                }
2796                self.expect_keyword_is(Keyword::COUNT)?;
2797                Ok(Some(ListAggOnOverflow::Truncate { filler, with_count }))
2798            }
2799        } else {
2800            Ok(None)
2801        }
2802    }
2803
2804    // This function parses date/time fields for the EXTRACT function-like
2805    // operator, interval qualifiers, and the ceil/floor operations.
2806    // EXTRACT supports a wider set of date/time fields than interval qualifiers,
2807    // so this function may need to be split in two.
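    // For example (illustrative), it parses the MONTH in EXTRACT(MONTH FROM order_date)
    // and the DAY in INTERVAL '1' DAY.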
2808    pub fn parse_date_time_field(&mut self) -> Result<DateTimeField, ParserError> {
2809        let next_token = self.next_token();
2810        match &next_token.token {
2811            Token::Word(w) => match w.keyword {
2812                Keyword::YEAR => Ok(DateTimeField::Year),
2813                Keyword::YEARS => Ok(DateTimeField::Years),
2814                Keyword::MONTH => Ok(DateTimeField::Month),
2815                Keyword::MONTHS => Ok(DateTimeField::Months),
2816                Keyword::WEEK => {
2817                    let week_day = if dialect_of!(self is BigQueryDialect | GenericDialect)
2818                        && self.consume_token(&Token::LParen)
2819                    {
2820                        let week_day = self.parse_identifier()?;
2821                        self.expect_token(&Token::RParen)?;
2822                        Some(week_day)
2823                    } else {
2824                        None
2825                    };
2826                    Ok(DateTimeField::Week(week_day))
2827                }
2828                Keyword::WEEKS => Ok(DateTimeField::Weeks),
2829                Keyword::DAY => Ok(DateTimeField::Day),
2830                Keyword::DAYOFWEEK => Ok(DateTimeField::DayOfWeek),
2831                Keyword::DAYOFYEAR => Ok(DateTimeField::DayOfYear),
2832                Keyword::DAYS => Ok(DateTimeField::Days),
2833                Keyword::DATE => Ok(DateTimeField::Date),
2834                Keyword::DATETIME => Ok(DateTimeField::Datetime),
2835                Keyword::HOUR => Ok(DateTimeField::Hour),
2836                Keyword::HOURS => Ok(DateTimeField::Hours),
2837                Keyword::MINUTE => Ok(DateTimeField::Minute),
2838                Keyword::MINUTES => Ok(DateTimeField::Minutes),
2839                Keyword::SECOND => Ok(DateTimeField::Second),
2840                Keyword::SECONDS => Ok(DateTimeField::Seconds),
2841                Keyword::CENTURY => Ok(DateTimeField::Century),
2842                Keyword::DECADE => Ok(DateTimeField::Decade),
2843                Keyword::DOY => Ok(DateTimeField::Doy),
2844                Keyword::DOW => Ok(DateTimeField::Dow),
2845                Keyword::EPOCH => Ok(DateTimeField::Epoch),
2846                Keyword::ISODOW => Ok(DateTimeField::Isodow),
2847                Keyword::ISOYEAR => Ok(DateTimeField::Isoyear),
2848                Keyword::ISOWEEK => Ok(DateTimeField::IsoWeek),
2849                Keyword::JULIAN => Ok(DateTimeField::Julian),
2850                Keyword::MICROSECOND => Ok(DateTimeField::Microsecond),
2851                Keyword::MICROSECONDS => Ok(DateTimeField::Microseconds),
2852                Keyword::MILLENIUM => Ok(DateTimeField::Millenium),
2853                Keyword::MILLENNIUM => Ok(DateTimeField::Millennium),
2854                Keyword::MILLISECOND => Ok(DateTimeField::Millisecond),
2855                Keyword::MILLISECONDS => Ok(DateTimeField::Milliseconds),
2856                Keyword::NANOSECOND => Ok(DateTimeField::Nanosecond),
2857                Keyword::NANOSECONDS => Ok(DateTimeField::Nanoseconds),
2858                Keyword::QUARTER => Ok(DateTimeField::Quarter),
2859                Keyword::TIME => Ok(DateTimeField::Time),
2860                Keyword::TIMEZONE => Ok(DateTimeField::Timezone),
2861                Keyword::TIMEZONE_ABBR => Ok(DateTimeField::TimezoneAbbr),
2862                Keyword::TIMEZONE_HOUR => Ok(DateTimeField::TimezoneHour),
2863                Keyword::TIMEZONE_MINUTE => Ok(DateTimeField::TimezoneMinute),
2864                Keyword::TIMEZONE_REGION => Ok(DateTimeField::TimezoneRegion),
2865                _ if self.dialect.allow_extract_custom() => {
2866                    self.prev_token();
2867                    let custom = self.parse_identifier()?;
2868                    Ok(DateTimeField::Custom(custom))
2869                }
2870                _ => self.expected("date/time field", next_token),
2871            },
2872            Token::SingleQuotedString(_) if self.dialect.allow_extract_single_quotes() => {
2873                self.prev_token();
2874                let custom = self.parse_identifier()?;
2875                Ok(DateTimeField::Custom(custom))
2876            }
2877            _ => self.expected("date/time field", next_token),
2878        }
2879    }
2880
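    /// Parses the operand of a unary `NOT` (the `NOT` keyword itself having already been
    /// consumed), e.g. (illustrative) the rest of `NOT EXISTS (SELECT 1)` or `NOT is_active`.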
2881    pub fn parse_not(&mut self) -> Result<Expr, ParserError> {
2882        match self.peek_token().token {
2883            Token::Word(w) => match w.keyword {
2884                Keyword::EXISTS => {
2885                    let negated = true;
2886                    let _ = self.parse_keyword(Keyword::EXISTS);
2887                    self.parse_exists_expr(negated)
2888                }
2889                _ => Ok(Expr::UnaryOp {
2890                    op: UnaryOperator::Not,
2891                    expr: Box::new(
2892                        self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?,
2893                    ),
2894                }),
2895            },
2896            _ => Ok(Expr::UnaryOp {
2897                op: UnaryOperator::Not,
2898                expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
2899            }),
2900        }
2901    }
2902
2903    /// Parse expression types that start with a left brace '{'.
2904    /// Examples:
2905    /// ```sql
2906    /// -- Dictionary expr.
2907    /// {'key1': 'value1', 'key2': 'value2'}
2908    ///
2909    /// -- Function call using the ODBC syntax.
2910    /// { fn CONCAT('foo', 'bar') }
2911    /// ```
2912    fn parse_lbrace_expr(&mut self) -> Result<Expr, ParserError> {
2913        let token = self.expect_token(&Token::LBrace)?;
2914
2915        if let Some(fn_expr) = self.maybe_parse_odbc_body()? {
2916            self.expect_token(&Token::RBrace)?;
2917            return Ok(fn_expr);
2918        }
2919
2920        if self.dialect.supports_dictionary_syntax() {
2921            self.prev_token(); // Put back the '{'
2922            return self.parse_dictionary();
2923        }
2924
2925        self.expected("an expression", token)
2926    }
2927
2928    /// Parses fulltext expressions [`sqlparser::ast::Expr::MatchAgainst`]
2929    ///
2930    /// # Errors
2931    /// This method will return an error if the column list is empty or contains invalid identifiers,
2932    /// the match expression is not a literal string, or if the search modifier is not valid.
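    ///
    /// For example (illustrative), the MySQL-style fulltext predicate handled here:
    /// ```sql
    /// MATCH (title, body) AGAINST ('search terms' IN NATURAL LANGUAGE MODE)
    /// ```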
2933    pub fn parse_match_against(&mut self) -> Result<Expr, ParserError> {
2934        let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
2935
2936        self.expect_keyword_is(Keyword::AGAINST)?;
2937
2938        self.expect_token(&Token::LParen)?;
2939
2940        // MySQL is very permissive about the value, so we can't fully validate it at the syntax level.
2941        let match_value = self.parse_value()?.value;
2942
2943        let in_natural_language_mode_keywords = &[
2944            Keyword::IN,
2945            Keyword::NATURAL,
2946            Keyword::LANGUAGE,
2947            Keyword::MODE,
2948        ];
2949
2950        let with_query_expansion_keywords = &[Keyword::WITH, Keyword::QUERY, Keyword::EXPANSION];
2951
2952        let in_boolean_mode_keywords = &[Keyword::IN, Keyword::BOOLEAN, Keyword::MODE];
2953
2954        let opt_search_modifier = if self.parse_keywords(in_natural_language_mode_keywords) {
2955            if self.parse_keywords(with_query_expansion_keywords) {
2956                Some(SearchModifier::InNaturalLanguageModeWithQueryExpansion)
2957            } else {
2958                Some(SearchModifier::InNaturalLanguageMode)
2959            }
2960        } else if self.parse_keywords(in_boolean_mode_keywords) {
2961            Some(SearchModifier::InBooleanMode)
2962        } else if self.parse_keywords(with_query_expansion_keywords) {
2963            Some(SearchModifier::WithQueryExpansion)
2964        } else {
2965            None
2966        };
2967
2968        self.expect_token(&Token::RParen)?;
2969
2970        Ok(Expr::MatchAgainst {
2971            columns,
2972            match_value,
2973            opt_search_modifier,
2974        })
2975    }
2976
2977    /// Parse an `INTERVAL` expression.
2978    ///
2979    /// Some syntactically valid intervals:
2980    ///
2981    /// ```sql
2982    ///   1. INTERVAL '1' DAY
2983    ///   2. INTERVAL '1-1' YEAR TO MONTH
2984    ///   3. INTERVAL '1' SECOND
2985    ///   4. INTERVAL '1:1:1.1' HOUR (5) TO SECOND (5)
2986    ///   5. INTERVAL '1.1' SECOND (2, 2)
2987    ///   6. INTERVAL '1:1' HOUR (5) TO MINUTE (5)
2988    ///   7. (MySQL & BigQuery only): INTERVAL 1 DAY
2989    /// ```
2990    ///
2991    /// Note that we do not currently attempt to parse the quoted value.
2992    pub fn parse_interval(&mut self) -> Result<Expr, ParserError> {
2993        // The SQL standard allows an optional sign before the value string, but
2994        // it is not clear if any implementations support that syntax, so we
2995        // don't currently try to parse it. (The sign can instead be included
2996        // inside the value string.)
2997
2998        // to match the different flavours of INTERVAL syntax, we only allow expressions
2999        // if the dialect requires an interval qualifier,
3000        // see https://github.com/sqlparser-rs/sqlparser-rs/pull/1398 for more details
3001        let value = if self.dialect.require_interval_qualifier() {
3002            // parse a whole expression so `INTERVAL 1 + 1 DAY` is valid
3003            self.parse_expr()?
3004        } else {
3005            // parse a prefix expression so `INTERVAL 1 DAY` is valid, but `INTERVAL 1 + 1 DAY` is not
3006            // this also means that `INTERVAL '5 days' > INTERVAL '1 day'` is treated properly
3007            self.parse_prefix()?
3008        };
3009
3010        // Following the string literal is a qualifier which indicates the units
3011        // of the duration specified in the string literal.
3012        //
3013        // Note that PostgreSQL allows omitting the qualifier, so we provide
3014        // this more general implementation.
3015        let leading_field = if self.next_token_is_temporal_unit() {
3016            Some(self.parse_date_time_field()?)
3017        } else if self.dialect.require_interval_qualifier() {
3018            return parser_err!(
3019                "INTERVAL requires a unit after the literal value",
3020                self.peek_token().span.start
3021            );
3022        } else {
3023            None
3024        };
3025
3026        let (leading_precision, last_field, fsec_precision) =
3027            if leading_field == Some(DateTimeField::Second) {
3028                // SQL mandates special syntax for `SECOND TO SECOND` literals.
3029                // Instead of
3030                //     `SECOND [(<leading precision>)] TO SECOND[(<fractional seconds precision>)]`
3031                // one must use the special format:
3032                //     `SECOND [( <leading precision> [ , <fractional seconds precision>] )]`
3033                let last_field = None;
3034                let (leading_precision, fsec_precision) = self.parse_optional_precision_scale()?;
3035                (leading_precision, last_field, fsec_precision)
3036            } else {
3037                let leading_precision = self.parse_optional_precision()?;
3038                if self.parse_keyword(Keyword::TO) {
3039                    let last_field = Some(self.parse_date_time_field()?);
3040                    let fsec_precision = if last_field == Some(DateTimeField::Second) {
3041                        self.parse_optional_precision()?
3042                    } else {
3043                        None
3044                    };
3045                    (leading_precision, last_field, fsec_precision)
3046                } else {
3047                    (leading_precision, None, None)
3048                }
3049            };
3050
3051        Ok(Expr::Interval(Interval {
3052            value: Box::new(value),
3053            leading_field,
3054            leading_precision,
3055            last_field,
3056            fractional_seconds_precision: fsec_precision,
3057        }))
3058    }
3059
3060    /// Peek at the next token and determine if it is a temporal unit
3061    /// like `second`.
3062    pub fn next_token_is_temporal_unit(&mut self) -> bool {
3063        if let Token::Word(word) = self.peek_token().token {
3064            matches!(
3065                word.keyword,
3066                Keyword::YEAR
3067                    | Keyword::YEARS
3068                    | Keyword::MONTH
3069                    | Keyword::MONTHS
3070                    | Keyword::WEEK
3071                    | Keyword::WEEKS
3072                    | Keyword::DAY
3073                    | Keyword::DAYS
3074                    | Keyword::HOUR
3075                    | Keyword::HOURS
3076                    | Keyword::MINUTE
3077                    | Keyword::MINUTES
3078                    | Keyword::SECOND
3079                    | Keyword::SECONDS
3080                    | Keyword::CENTURY
3081                    | Keyword::DECADE
3082                    | Keyword::DOW
3083                    | Keyword::DOY
3084                    | Keyword::EPOCH
3085                    | Keyword::ISODOW
3086                    | Keyword::ISOYEAR
3087                    | Keyword::JULIAN
3088                    | Keyword::MICROSECOND
3089                    | Keyword::MICROSECONDS
3090                    | Keyword::MILLENIUM
3091                    | Keyword::MILLENNIUM
3092                    | Keyword::MILLISECOND
3093                    | Keyword::MILLISECONDS
3094                    | Keyword::NANOSECOND
3095                    | Keyword::NANOSECONDS
3096                    | Keyword::QUARTER
3097                    | Keyword::TIMEZONE
3098                    | Keyword::TIMEZONE_HOUR
3099                    | Keyword::TIMEZONE_MINUTE
3100            )
3101        } else {
3102            false
3103        }
3104    }
3105
3106    /// Syntax
3107    /// ```sql
3108    /// -- typed
3109    /// STRUCT<[field_name] field_type, ...>( expr1 [, ... ])
3110    /// -- typeless
3111    /// STRUCT( expr1 [AS field_name] [, ... ])
3112    /// ```
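    ///
    /// For example (illustrative):
    /// ```sql
    /// STRUCT<x INT64, y STRING>(1, 'foo') -- typed
    /// STRUCT(1 AS x, 'foo' AS y)          -- typeless
    /// ```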
3113    fn parse_struct_literal(&mut self) -> Result<Expr, ParserError> {
3114        // Parse the field definitions if present: `<[field_name] field_type, ...>`
3115        self.prev_token();
3116        let (fields, trailing_bracket) =
3117            self.parse_struct_type_def(Self::parse_struct_field_def)?;
3118        if trailing_bracket.0 {
3119            return parser_err!(
3120                "unmatched > in STRUCT literal",
3121                self.peek_token().span.start
3122            );
3123        }
3124
3125        // Parse the struct values `(expr1 [, ... ])`
3126        self.expect_token(&Token::LParen)?;
3127        let values = self
3128            .parse_comma_separated(|parser| parser.parse_struct_field_expr(!fields.is_empty()))?;
3129        self.expect_token(&Token::RParen)?;
3130
3131        Ok(Expr::Struct { values, fields })
3132    }
3133
3134    /// Parse an expression value for a struct literal
3135    /// Syntax
3136    /// ```sql
3137    /// expr [AS name]
3138    /// ```
3139    ///
3140    /// For BigQuery [1], the parameter `typed_syntax` is set to `true` if the expression
3141    /// is to be parsed as a field expression declared using typed
3142    /// struct syntax [2], and false if using typeless struct syntax [3].
3143    ///
3144    /// [1]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#constructing_a_struct
3145    /// [2]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#typed_struct_syntax
3146    /// [3]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#typeless_struct_syntax
3147    fn parse_struct_field_expr(&mut self, typed_syntax: bool) -> Result<Expr, ParserError> {
3148        let expr = self.parse_expr()?;
3149        if self.parse_keyword(Keyword::AS) {
3150            if typed_syntax {
3151                return parser_err!("Typed syntax does not allow AS", {
3152                    self.prev_token();
3153                    self.peek_token().span.start
3154                });
3155            }
3156            let field_name = self.parse_identifier()?;
3157            Ok(Expr::Named {
3158                expr: expr.into(),
3159                name: field_name,
3160            })
3161        } else {
3162            Ok(expr)
3163        }
3164    }
3165
3166    /// Parse a Struct type definition as a sequence of field-value pairs.
3167    /// The syntax of the Struct elem differs by dialect so it is customised
3168    /// by the `elem_parser` argument.
3169    ///
3170    /// Syntax
3171    /// ```sql
3172    /// Hive:
3173    /// STRUCT<field_name: field_type>
3174    ///
3175    /// BigQuery:
3176    /// STRUCT<[field_name] field_type>
3177    /// ```
3178    fn parse_struct_type_def<F>(
3179        &mut self,
3180        mut elem_parser: F,
3181    ) -> Result<(Vec<StructField>, MatchedTrailingBracket), ParserError>
3182    where
3183        F: FnMut(&mut Parser<'a>) -> Result<(StructField, MatchedTrailingBracket), ParserError>,
3184    {
3185        self.expect_keyword_is(Keyword::STRUCT)?;
3186
3187        // Nothing to do if we have no type information.
3188        if Token::Lt != self.peek_token() {
3189            return Ok((Default::default(), false.into()));
3190        }
3191        self.next_token();
3192
3193        let mut field_defs = vec![];
3194        let trailing_bracket = loop {
3195            let (def, trailing_bracket) = elem_parser(self)?;
3196            field_defs.push(def);
3197            // The struct field definitions are complete when the closing `>`/`>>` is matched or no comma follows.
3198            if trailing_bracket.0 || !self.consume_token(&Token::Comma) {
3199                break trailing_bracket;
3200            }
3201        };
3202
3203        Ok((
3204            field_defs,
3205            self.expect_closing_angle_bracket(trailing_bracket)?,
3206        ))
3207    }
3208
3209    /// Duckdb Struct Data Type <https://duckdb.org/docs/sql/data_types/struct.html#retrieving-from-structs>
3210    fn parse_duckdb_struct_type_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3211        self.expect_keyword_is(Keyword::STRUCT)?;
3212        self.expect_token(&Token::LParen)?;
3213        let struct_body = self.parse_comma_separated(|parser| {
3214            let field_name = parser.parse_identifier()?;
3215            let field_type = parser.parse_data_type()?;
3216
3217            Ok(StructField {
3218                field_name: Some(field_name),
3219                field_type,
3220                options: None,
3221            })
3222        });
3223        self.expect_token(&Token::RParen)?;
3224        struct_body
3225    }
3226
3227    /// Parse a field definition in a [struct] or [tuple].
3228    /// Syntax:
3229    ///
3230    /// ```sql
3231    /// [field_name] field_type
3232    /// ```
3233    ///
3234    /// [struct]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#declaring_a_struct_type
3235    /// [tuple]: https://clickhouse.com/docs/en/sql-reference/data-types/tuple
3236    fn parse_struct_field_def(
3237        &mut self,
3238    ) -> Result<(StructField, MatchedTrailingBracket), ParserError> {
3239        // Look beyond the next item to infer whether both field name
3240        // and type are specified.
3241        let is_anonymous_field = !matches!(
3242            (self.peek_nth_token(0).token, self.peek_nth_token(1).token),
3243            (Token::Word(_), Token::Word(_))
3244        );
3245
3246        let field_name = if is_anonymous_field {
3247            None
3248        } else {
3249            Some(self.parse_identifier()?)
3250        };
3251
3252        let (field_type, trailing_bracket) = self.parse_data_type_helper()?;
3253
3254        let options = self.maybe_parse_options(Keyword::OPTIONS)?;
3255        Ok((
3256            StructField {
3257                field_name,
3258                field_type,
3259                options,
3260            },
3261            trailing_bracket,
3262        ))
3263    }
3264
3265    /// DuckDB specific: Parse a Union type definition [1] as a sequence of field-value pairs.
3266    ///
3267    /// Syntax:
3268    ///
3269    /// ```sql
3270    /// UNION(field_name field_type[,...])
3271    /// ```
3272    ///
3273    /// [1]: https://duckdb.org/docs/sql/data_types/union.html
3274    fn parse_union_type_def(&mut self) -> Result<Vec<UnionField>, ParserError> {
3275        self.expect_keyword_is(Keyword::UNION)?;
3276
3277        self.expect_token(&Token::LParen)?;
3278
3279        let fields = self.parse_comma_separated(|p| {
3280            Ok(UnionField {
3281                field_name: p.parse_identifier()?,
3282                field_type: p.parse_data_type()?,
3283            })
3284        })?;
3285
3286        self.expect_token(&Token::RParen)?;
3287
3288        Ok(fields)
3289    }
3290
3291    /// DuckDB and ClickHouse specific: Parse a duckdb [dictionary] or a clickhouse [map] setting
3292    ///
3293    /// Syntax:
3294    ///
3295    /// ```sql
3296    /// {'field_name': expr1[, ... ]}
3297    /// ```
3298    ///
3299    /// [dictionary]: https://duckdb.org/docs/sql/data_types/struct#creating-structs
3300    /// [map]: https://clickhouse.com/docs/operations/settings/settings#additional_table_filters
3301    fn parse_dictionary(&mut self) -> Result<Expr, ParserError> {
3302        self.expect_token(&Token::LBrace)?;
3303
3304        let fields = self.parse_comma_separated0(Self::parse_dictionary_field, Token::RBrace)?;
3305
3306        self.expect_token(&Token::RBrace)?;
3307
3308        Ok(Expr::Dictionary(fields))
3309    }
3310
3311    /// Parse a field for a duckdb [dictionary] or a clickhouse [map] setting
3312    ///
3313    /// Syntax
3314    ///
3315    /// ```sql
3316    /// 'name': expr
3317    /// ```
3318    ///
3319    /// [dictionary]: https://duckdb.org/docs/sql/data_types/struct#creating-structs
3320    /// [map]: https://clickhouse.com/docs/operations/settings/settings#additional_table_filters
3321    fn parse_dictionary_field(&mut self) -> Result<DictionaryField, ParserError> {
3322        let key = self.parse_identifier()?;
3323
3324        self.expect_token(&Token::Colon)?;
3325
3326        let expr = self.parse_expr()?;
3327
3328        Ok(DictionaryField {
3329            key,
3330            value: Box::new(expr),
3331        })
3332    }
3333
3334    /// DuckDB specific: Parse a duckdb [map]
3335    ///
3336    /// Syntax:
3337    ///
3338    /// ```sql
3339    /// Map {key1: value1[, ... ]}
3340    /// ```
3341    ///
3342    /// [map]: https://duckdb.org/docs/sql/data_types/map.html#creating-maps
3343    fn parse_duckdb_map_literal(&mut self) -> Result<Expr, ParserError> {
3344        self.expect_token(&Token::LBrace)?;
3345        let fields = self.parse_comma_separated0(Self::parse_duckdb_map_field, Token::RBrace)?;
3346        self.expect_token(&Token::RBrace)?;
3347        Ok(Expr::Map(Map { entries: fields }))
3348    }
3349
3350    /// Parse a field for a duckdb [map]
3351    ///
3352    /// Syntax
3353    ///
3354    /// ```sql
3355    /// key: value
3356    /// ```
3357    ///
3358    /// [map]: https://duckdb.org/docs/sql/data_types/map.html#creating-maps
3359    fn parse_duckdb_map_field(&mut self) -> Result<MapEntry, ParserError> {
3360        let key = self.parse_expr()?;
3361
3362        self.expect_token(&Token::Colon)?;
3363
3364        let value = self.parse_expr()?;
3365
3366        Ok(MapEntry {
3367            key: Box::new(key),
3368            value: Box::new(value),
3369        })
3370    }
3371
3372    /// Parse clickhouse [map]
3373    ///
3374    /// Syntax
3375    ///
3376    /// ```sql
3377    /// Map(key_data_type, value_data_type)
3378    /// ```
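    /// For example (illustrative): `Map(String, UInt64)`.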
3379    ///
3380    /// [map]: https://clickhouse.com/docs/en/sql-reference/data-types/map
3381    fn parse_click_house_map_def(&mut self) -> Result<(DataType, DataType), ParserError> {
3382        self.expect_keyword_is(Keyword::MAP)?;
3383        self.expect_token(&Token::LParen)?;
3384        let key_data_type = self.parse_data_type()?;
3385        self.expect_token(&Token::Comma)?;
3386        let value_data_type = self.parse_data_type()?;
3387        self.expect_token(&Token::RParen)?;
3388
3389        Ok((key_data_type, value_data_type))
3390    }
3391
3392    /// Parse clickhouse [tuple]
3393    ///
3394    /// Syntax
3395    ///
3396    /// ```sql
3397    /// Tuple([field_name] field_type, ...)
3398    /// ```
3399    ///
3400    /// [tuple]: https://clickhouse.com/docs/en/sql-reference/data-types/tuple
3401    fn parse_click_house_tuple_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3402        self.expect_keyword_is(Keyword::TUPLE)?;
3403        self.expect_token(&Token::LParen)?;
3404        let mut field_defs = vec![];
3405        loop {
3406            let (def, _) = self.parse_struct_field_def()?;
3407            field_defs.push(def);
3408            if !self.consume_token(&Token::Comma) {
3409                break;
3410            }
3411        }
3412        self.expect_token(&Token::RParen)?;
3413
3414        Ok(field_defs)
3415    }
3416
3417    /// For nested types that use the angle bracket syntax, this matches either
3418    /// `>`, `>>`, or nothing, depending on which variant is expected (specified by the
3419    /// previously matched `trailing_bracket` argument). It returns whether a trailing
3420    /// bracket is still left to be matched, i.e. whether `>>` was matched.
3421    fn expect_closing_angle_bracket(
3422        &mut self,
3423        trailing_bracket: MatchedTrailingBracket,
3424    ) -> Result<MatchedTrailingBracket, ParserError> {
3425        let trailing_bracket = if !trailing_bracket.0 {
3426            match self.peek_token().token {
3427                Token::Gt => {
3428                    self.next_token();
3429                    false.into()
3430                }
3431                Token::ShiftRight => {
3432                    self.next_token();
3433                    true.into()
3434                }
3435                _ => return self.expected(">", self.peek_token()),
3436            }
3437        } else {
3438            false.into()
3439        };
3440
3441        Ok(trailing_bracket)
3442    }
3443
3444    /// Parse an operator following an expression
3445    pub fn parse_infix(&mut self, expr: Expr, precedence: u8) -> Result<Expr, ParserError> {
3446        // allow the dialect to override infix parsing
3447        if let Some(infix) = self.dialect.parse_infix(self, &expr, precedence) {
3448            return infix;
3449        }
3450
3451        let dialect = self.dialect;
3452
3453        self.advance_token();
3454        let tok = self.get_current_token();
3455        debug!("infix: {tok:?}");
3456        let tok_index = self.get_current_index();
3457        let span = tok.span;
3458        let regular_binary_operator = match &tok.token {
3459            Token::Spaceship => Some(BinaryOperator::Spaceship),
3460            Token::DoubleEq => Some(BinaryOperator::Eq),
3461            Token::Assignment => Some(BinaryOperator::Assignment),
3462            Token::Eq => Some(BinaryOperator::Eq),
3463            Token::Neq => Some(BinaryOperator::NotEq),
3464            Token::Gt => Some(BinaryOperator::Gt),
3465            Token::GtEq => Some(BinaryOperator::GtEq),
3466            Token::Lt => Some(BinaryOperator::Lt),
3467            Token::LtEq => Some(BinaryOperator::LtEq),
3468            Token::Plus => Some(BinaryOperator::Plus),
3469            Token::Minus => Some(BinaryOperator::Minus),
3470            Token::Mul => Some(BinaryOperator::Multiply),
3471            Token::Mod => Some(BinaryOperator::Modulo),
3472            Token::StringConcat => Some(BinaryOperator::StringConcat),
3473            Token::Pipe => Some(BinaryOperator::BitwiseOr),
3474            Token::Caret => {
3475                // In PostgreSQL, ^ stands for the exponentiation operation,
3476                // and # stands for XOR. See https://www.postgresql.org/docs/current/functions-math.html
3477                if dialect_is!(dialect is PostgreSqlDialect) {
3478                    Some(BinaryOperator::PGExp)
3479                } else {
3480                    Some(BinaryOperator::BitwiseXor)
3481                }
3482            }
3483            Token::Ampersand => Some(BinaryOperator::BitwiseAnd),
3484            Token::Div => Some(BinaryOperator::Divide),
3485            Token::DuckIntDiv if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
3486                Some(BinaryOperator::DuckIntegerDivide)
3487            }
3488            Token::ShiftLeft if dialect_is!(dialect is PostgreSqlDialect | DuckDbDialect | GenericDialect | RedshiftSqlDialect) => {
3489                Some(BinaryOperator::PGBitwiseShiftLeft)
3490            }
3491            Token::ShiftRight if dialect_is!(dialect is PostgreSqlDialect | DuckDbDialect | GenericDialect | RedshiftSqlDialect) => {
3492                Some(BinaryOperator::PGBitwiseShiftRight)
3493            }
3494            Token::Sharp if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3495                Some(BinaryOperator::PGBitwiseXor)
3496            }
3497            Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3498                Some(BinaryOperator::PGOverlap)
3499            }
3500            Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3501                Some(BinaryOperator::PGOverlap)
3502            }
3503            Token::CaretAt if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3504                Some(BinaryOperator::PGStartsWith)
3505            }
3506            Token::Tilde => Some(BinaryOperator::PGRegexMatch),
3507            Token::TildeAsterisk => Some(BinaryOperator::PGRegexIMatch),
3508            Token::ExclamationMarkTilde => Some(BinaryOperator::PGRegexNotMatch),
3509            Token::ExclamationMarkTildeAsterisk => Some(BinaryOperator::PGRegexNotIMatch),
3510            Token::DoubleTilde => Some(BinaryOperator::PGLikeMatch),
3511            Token::DoubleTildeAsterisk => Some(BinaryOperator::PGILikeMatch),
3512            Token::ExclamationMarkDoubleTilde => Some(BinaryOperator::PGNotLikeMatch),
3513            Token::ExclamationMarkDoubleTildeAsterisk => Some(BinaryOperator::PGNotILikeMatch),
3514            Token::Arrow => Some(BinaryOperator::Arrow),
3515            Token::LongArrow => Some(BinaryOperator::LongArrow),
3516            Token::HashArrow => Some(BinaryOperator::HashArrow),
3517            Token::HashLongArrow => Some(BinaryOperator::HashLongArrow),
3518            Token::AtArrow => Some(BinaryOperator::AtArrow),
3519            Token::ArrowAt => Some(BinaryOperator::ArrowAt),
3520            Token::HashMinus => Some(BinaryOperator::HashMinus),
3521            Token::AtQuestion => Some(BinaryOperator::AtQuestion),
3522            Token::AtAt => Some(BinaryOperator::AtAt),
3523            Token::Question => Some(BinaryOperator::Question),
3524            Token::QuestionAnd => Some(BinaryOperator::QuestionAnd),
3525            Token::QuestionPipe => Some(BinaryOperator::QuestionPipe),
3526            Token::CustomBinaryOperator(s) => Some(BinaryOperator::Custom(s.clone())),
3527            Token::DoubleSharp if self.dialect.supports_geometric_types() => {
3528                Some(BinaryOperator::DoubleHash)
3529            }
3530
3531            Token::AmpersandLeftAngleBracket if self.dialect.supports_geometric_types() => {
3532                Some(BinaryOperator::AndLt)
3533            }
3534            Token::AmpersandRightAngleBracket if self.dialect.supports_geometric_types() => {
3535                Some(BinaryOperator::AndGt)
3536            }
3537            Token::QuestionMarkDash if self.dialect.supports_geometric_types() => {
3538                Some(BinaryOperator::QuestionDash)
3539            }
3540            Token::AmpersandLeftAngleBracketVerticalBar
3541                if self.dialect.supports_geometric_types() =>
3542            {
3543                Some(BinaryOperator::AndLtPipe)
3544            }
3545            Token::VerticalBarAmpersandRightAngleBracket
3546                if self.dialect.supports_geometric_types() =>
3547            {
3548                Some(BinaryOperator::PipeAndGt)
3549            }
3550            Token::TwoWayArrow if self.dialect.supports_geometric_types() => {
3551                Some(BinaryOperator::LtDashGt)
3552            }
3553            Token::LeftAngleBracketCaret if self.dialect.supports_geometric_types() => {
3554                Some(BinaryOperator::LtCaret)
3555            }
3556            Token::RightAngleBracketCaret if self.dialect.supports_geometric_types() => {
3557                Some(BinaryOperator::GtCaret)
3558            }
3559            Token::QuestionMarkSharp if self.dialect.supports_geometric_types() => {
3560                Some(BinaryOperator::QuestionHash)
3561            }
3562            Token::QuestionMarkDoubleVerticalBar if self.dialect.supports_geometric_types() => {
3563                Some(BinaryOperator::QuestionDoublePipe)
3564            }
3565            Token::QuestionMarkDashVerticalBar if self.dialect.supports_geometric_types() => {
3566                Some(BinaryOperator::QuestionDashPipe)
3567            }
3568            Token::TildeEqual if self.dialect.supports_geometric_types() => {
3569                Some(BinaryOperator::TildeEq)
3570            }
3571            Token::ShiftLeftVerticalBar if self.dialect.supports_geometric_types() => {
3572                Some(BinaryOperator::LtLtPipe)
3573            }
3574            Token::VerticalBarShiftRight if self.dialect.supports_geometric_types() => {
3575                Some(BinaryOperator::PipeGtGt)
3576            }
3577            Token::AtSign if self.dialect.supports_geometric_types() => Some(BinaryOperator::At),
3578
3579            Token::Word(w) => match w.keyword {
3580                Keyword::AND => Some(BinaryOperator::And),
3581                Keyword::OR => Some(BinaryOperator::Or),
3582                Keyword::XOR => Some(BinaryOperator::Xor),
3583                Keyword::OVERLAPS => Some(BinaryOperator::Overlaps),
3584                Keyword::OPERATOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3585                    self.expect_token(&Token::LParen)?;
3586                    // There are special rules for operator names in
3587                    // PostgreSQL, so we cannot use 'parse_object'
3588                    // or similar.
3589                    // See https://www.postgresql.org/docs/current/sql-createoperator.html
3590                    let mut idents = vec![];
3591                    loop {
3592                        self.advance_token();
3593                        idents.push(self.get_current_token().to_string());
3594                        if !self.consume_token(&Token::Period) {
3595                            break;
3596                        }
3597                    }
3598                    self.expect_token(&Token::RParen)?;
3599                    Some(BinaryOperator::PGCustomBinaryOperator(idents))
3600                }
3601                _ => None,
3602            },
3603            _ => None,
3604        };
3605
3606        let tok = self.token_at(tok_index);
3607        if let Some(op) = regular_binary_operator {
3608            if let Some(keyword) =
3609                self.parse_one_of_keywords(&[Keyword::ANY, Keyword::ALL, Keyword::SOME])
3610            {
3611                self.expect_token(&Token::LParen)?;
3612                let right = if self.peek_sub_query() {
3613                    // We have a subquery ahead (SELECT/WITH ...), so we need to rewind and
3614                    // use the parentheses to parse the subquery as an expression.
3615                    self.prev_token(); // LParen
3616                    self.parse_subexpr(precedence)?
3617                } else {
3618                    // Non-subquery expression
3619                    let right = self.parse_subexpr(precedence)?;
3620                    self.expect_token(&Token::RParen)?;
3621                    right
3622                };
3623
3624                if !matches!(
3625                    op,
3626                    BinaryOperator::Gt
3627                        | BinaryOperator::Lt
3628                        | BinaryOperator::GtEq
3629                        | BinaryOperator::LtEq
3630                        | BinaryOperator::Eq
3631                        | BinaryOperator::NotEq
3632                        | BinaryOperator::PGRegexMatch
3633                        | BinaryOperator::PGRegexIMatch
3634                        | BinaryOperator::PGRegexNotMatch
3635                        | BinaryOperator::PGRegexNotIMatch
3636                        | BinaryOperator::PGLikeMatch
3637                        | BinaryOperator::PGILikeMatch
3638                        | BinaryOperator::PGNotLikeMatch
3639                        | BinaryOperator::PGNotILikeMatch
3640                ) {
3641                    return parser_err!(
3642                        format!(
3643                        "Expected one of [=, >, <, >=, <=, !=, ~, ~*, !~, !~*, ~~, ~~*, !~~, !~~*] as comparison operator, found: {op}"
3644                    ),
3645                        span.start
3646                    );
3647                };
3648
3649                Ok(match keyword {
3650                    Keyword::ALL => Expr::AllOp {
3651                        left: Box::new(expr),
3652                        compare_op: op,
3653                        right: Box::new(right),
3654                    },
3655                    Keyword::ANY | Keyword::SOME => Expr::AnyOp {
3656                        left: Box::new(expr),
3657                        compare_op: op,
3658                        right: Box::new(right),
3659                        is_some: keyword == Keyword::SOME,
3660                    },
3661                    unexpected_keyword => return Err(ParserError::ParserError(
3662                        format!("Internal parser error: expected any of {{ALL, ANY, SOME}}, got {unexpected_keyword:?}"),
3663                    )),
3664                })
3665            } else {
3666                Ok(Expr::BinaryOp {
3667                    left: Box::new(expr),
3668                    op,
3669                    right: Box::new(self.parse_subexpr(precedence)?),
3670                })
3671            }
3672        } else if let Token::Word(w) = &tok.token {
3673            match w.keyword {
3674                Keyword::IS => {
3675                    if self.parse_keyword(Keyword::NULL) {
3676                        Ok(Expr::IsNull(Box::new(expr)))
3677                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
3678                        Ok(Expr::IsNotNull(Box::new(expr)))
3679                    } else if self.parse_keywords(&[Keyword::TRUE]) {
3680                        Ok(Expr::IsTrue(Box::new(expr)))
3681                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::TRUE]) {
3682                        Ok(Expr::IsNotTrue(Box::new(expr)))
3683                    } else if self.parse_keywords(&[Keyword::FALSE]) {
3684                        Ok(Expr::IsFalse(Box::new(expr)))
3685                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::FALSE]) {
3686                        Ok(Expr::IsNotFalse(Box::new(expr)))
3687                    } else if self.parse_keywords(&[Keyword::UNKNOWN]) {
3688                        Ok(Expr::IsUnknown(Box::new(expr)))
3689                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::UNKNOWN]) {
3690                        Ok(Expr::IsNotUnknown(Box::new(expr)))
3691                    } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::FROM]) {
3692                        let expr2 = self.parse_expr()?;
3693                        Ok(Expr::IsDistinctFrom(Box::new(expr), Box::new(expr2)))
3694                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::DISTINCT, Keyword::FROM])
3695                    {
3696                        let expr2 = self.parse_expr()?;
3697                        Ok(Expr::IsNotDistinctFrom(Box::new(expr), Box::new(expr2)))
3698                    } else if let Ok(is_normalized) = self.parse_unicode_is_normalized(expr) {
3699                        Ok(is_normalized)
3700                    } else {
3701                        self.expected(
3702                            "[NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS",
3703                            self.peek_token(),
3704                        )
3705                    }
3706                }
3707                Keyword::AT => {
3708                    self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
3709                    Ok(Expr::AtTimeZone {
3710                        timestamp: Box::new(expr),
3711                        time_zone: Box::new(self.parse_subexpr(precedence)?),
3712                    })
3713                }
3714                Keyword::NOT
3715                | Keyword::IN
3716                | Keyword::BETWEEN
3717                | Keyword::LIKE
3718                | Keyword::ILIKE
3719                | Keyword::SIMILAR
3720                | Keyword::REGEXP
3721                | Keyword::RLIKE => {
3722                    self.prev_token();
3723                    let negated = self.parse_keyword(Keyword::NOT);
3724                    let regexp = self.parse_keyword(Keyword::REGEXP);
3725                    let rlike = self.parse_keyword(Keyword::RLIKE);
3726                    let null = if !self.in_column_definition_state() {
3727                        self.parse_keyword(Keyword::NULL)
3728                    } else {
3729                        false
3730                    };
3731                    if regexp || rlike {
3732                        Ok(Expr::RLike {
3733                            negated,
3734                            expr: Box::new(expr),
3735                            pattern: Box::new(
3736                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3737                            ),
3738                            regexp,
3739                        })
3740                    } else if negated && null {
3741                        Ok(Expr::IsNotNull(Box::new(expr)))
3742                    } else if self.parse_keyword(Keyword::IN) {
3743                        self.parse_in(expr, negated)
3744                    } else if self.parse_keyword(Keyword::BETWEEN) {
3745                        self.parse_between(expr, negated)
3746                    } else if self.parse_keyword(Keyword::LIKE) {
3747                        Ok(Expr::Like {
3748                            negated,
3749                            any: self.parse_keyword(Keyword::ANY),
3750                            expr: Box::new(expr),
3751                            pattern: Box::new(
3752                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3753                            ),
3754                            escape_char: self.parse_escape_char()?,
3755                        })
3756                    } else if self.parse_keyword(Keyword::ILIKE) {
3757                        Ok(Expr::ILike {
3758                            negated,
3759                            any: self.parse_keyword(Keyword::ANY),
3760                            expr: Box::new(expr),
3761                            pattern: Box::new(
3762                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3763                            ),
3764                            escape_char: self.parse_escape_char()?,
3765                        })
3766                    } else if self.parse_keywords(&[Keyword::SIMILAR, Keyword::TO]) {
3767                        Ok(Expr::SimilarTo {
3768                            negated,
3769                            expr: Box::new(expr),
3770                            pattern: Box::new(
3771                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3772                            ),
3773                            escape_char: self.parse_escape_char()?,
3774                        })
3775                    } else {
3776                        self.expected("IN or BETWEEN after NOT", self.peek_token())
3777                    }
3778                }
3779                Keyword::NOTNULL if dialect.supports_notnull_operator() => {
3780                    Ok(Expr::IsNotNull(Box::new(expr)))
3781                }
3782                Keyword::MEMBER => {
3783                    if self.parse_keyword(Keyword::OF) {
3784                        self.expect_token(&Token::LParen)?;
3785                        let array = self.parse_expr()?;
3786                        self.expect_token(&Token::RParen)?;
3787                        Ok(Expr::MemberOf(MemberOf {
3788                            value: Box::new(expr),
3789                            array: Box::new(array),
3790                        }))
3791                    } else {
3792                        self.expected("OF after MEMBER", self.peek_token())
3793                    }
3794                }
3795                // Can only happen if `get_next_precedence` got out of sync with this function
3796                _ => parser_err!(
3797                    format!("No infix parser for token {:?}", tok.token),
3798                    tok.span.start
3799                ),
3800            }
3801        } else if Token::DoubleColon == *tok {
3802            Ok(Expr::Cast {
3803                kind: CastKind::DoubleColon,
3804                expr: Box::new(expr),
3805                data_type: self.parse_data_type()?,
3806                format: None,
3807            })
3808        } else if Token::ExclamationMark == *tok && self.dialect.supports_factorial_operator() {
3809            Ok(Expr::UnaryOp {
3810                op: UnaryOperator::PGPostfixFactorial,
3811                expr: Box::new(expr),
3812            })
3813        } else if Token::LBracket == *tok && self.dialect.supports_partiql()
3814            || (dialect_of!(self is SnowflakeDialect | GenericDialect) && Token::Colon == *tok)
3815        {
3816            self.prev_token();
3817            self.parse_json_access(expr)
3818        } else {
3819            // Can only happen if `get_next_precedence` got out of sync with this function
3820            parser_err!(
3821                format!("No infix parser for token {:?}", tok.token),
3822                tok.span.start
3823            )
3824        }
3825    }
3826
3827    /// Parse the `ESCAPE CHAR` portion of `LIKE`, `ILIKE`, and `SIMILAR TO`
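    ///
    /// For example (illustrative), the trailing clause in:
    /// ```sql
    /// name LIKE '50\%' ESCAPE '\'
    /// ```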
3828    pub fn parse_escape_char(&mut self) -> Result<Option<Value>, ParserError> {
3829        if self.parse_keyword(Keyword::ESCAPE) {
3830            Ok(Some(self.parse_value()?.into()))
3831        } else {
3832            Ok(None)
3833        }
3834    }
3835
3836    /// Parses an array subscript like
3837    /// * `[:]`
3838    /// * `[l]`
3839    /// * `[l:]`
3840    /// * `[:u]`
3841    /// * `[l:u]`
3842    /// * `[l:u:s]`
3843    ///
3844    /// Parser is right after `[`
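    ///
    /// For example (illustrative), `arr[2:5]` parses as a slice with lower bound `2` and
    /// upper bound `5`, while `arr[2:8:3]` additionally carries a stride of `3`.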
3845    fn parse_subscript_inner(&mut self) -> Result<Subscript, ParserError> {
3846        // at either `<lower>:(rest)` or `:(rest)]`
3847        let lower_bound = if self.consume_token(&Token::Colon) {
3848            None
3849        } else {
3850            Some(self.parse_expr()?)
3851        };
3852
3853        // check for end
3854        if self.consume_token(&Token::RBracket) {
3855            if let Some(lower_bound) = lower_bound {
3856                return Ok(Subscript::Index { index: lower_bound });
3857            };
3858            return Ok(Subscript::Slice {
3859                lower_bound,
3860                upper_bound: None,
3861                stride: None,
3862            });
3863        }
3864
3865        // consume the `:`
3866        if lower_bound.is_some() {
3867            self.expect_token(&Token::Colon)?;
3868        }
3869
3870        // we are now at either `]` or `<upper>(rest)]`
3871        let upper_bound = if self.consume_token(&Token::RBracket) {
3872            return Ok(Subscript::Slice {
3873                lower_bound,
3874                upper_bound: None,
3875                stride: None,
3876            });
3877        } else {
3878            Some(self.parse_expr()?)
3879        };
3880
3881        // check for end
3882        if self.consume_token(&Token::RBracket) {
3883            return Ok(Subscript::Slice {
3884                lower_bound,
3885                upper_bound,
3886                stride: None,
3887            });
3888        }
3889
3890        // we are now at `:]` or `:stride]`
3891        self.expect_token(&Token::Colon)?;
3892        let stride = if self.consume_token(&Token::RBracket) {
3893            None
3894        } else {
3895            Some(self.parse_expr()?)
3896        };
3897
3898        if stride.is_some() {
3899            self.expect_token(&Token::RBracket)?;
3900        }
3901
3902        Ok(Subscript::Slice {
3903            lower_bound,
3904            upper_bound,
3905            stride,
3906        })
3907    }
3908
3909    /// Parse multi-dimensional array access like `[1:3][1][1]`
3910    pub fn parse_multi_dim_subscript(
3911        &mut self,
3912        chain: &mut Vec<AccessExpr>,
3913    ) -> Result<(), ParserError> {
3914        while self.consume_token(&Token::LBracket) {
3915            self.parse_subscript(chain)?;
3916        }
3917        Ok(())
3918    }
3919
3920    /// Parses an array subscript like `[1:3]`
3921    ///
3922    /// Parser is right after `[`
3923    fn parse_subscript(&mut self, chain: &mut Vec<AccessExpr>) -> Result<(), ParserError> {
3924        let subscript = self.parse_subscript_inner()?;
3925        chain.push(AccessExpr::Subscript(subscript));
3926        Ok(())
3927    }
3928
3929    fn parse_json_path_object_key(&mut self) -> Result<JsonPathElem, ParserError> {
3930        let token = self.next_token();
3931        match token.token {
3932            Token::Word(Word {
3933                value,
3934                // path segments in Snowflake dot notation can be unquoted or double-quoted
3935                quote_style: quote_style @ (Some('"') | None),
3936                // some experimentation suggests that snowflake permits
3937                // any keyword here unquoted.
3938                keyword: _,
3939            }) => Ok(JsonPathElem::Dot {
3940                key: value,
3941                quoted: quote_style.is_some(),
3942            }),
3943
3944            // This token should never be generated on snowflake or generic
3945            // dialects, but we handle it just in case this is used on future
3946            // dialects.
3947            Token::DoubleQuotedString(key) => Ok(JsonPathElem::Dot { key, quoted: true }),
3948
3949            _ => self.expected("variant object key name", token),
3950        }
3951    }
3952
3953    fn parse_json_access(&mut self, expr: Expr) -> Result<Expr, ParserError> {
3954        let path = self.parse_json_path()?;
3955        Ok(Expr::JsonAccess {
3956            value: Box::new(expr),
3957            path,
3958        })
3959    }
3960
3961    fn parse_json_path(&mut self) -> Result<JsonPath, ParserError> {
3962        let mut path = Vec::new();
3963        loop {
3964            match self.next_token().token {
3965                Token::Colon if path.is_empty() => {
3966                    path.push(self.parse_json_path_object_key()?);
3967                }
3968                Token::Period if !path.is_empty() => {
3969                    path.push(self.parse_json_path_object_key()?);
3970                }
3971                Token::LBracket => {
3972                    let key = self.parse_expr()?;
3973                    self.expect_token(&Token::RBracket)?;
3974
3975                    path.push(JsonPathElem::Bracket { key });
3976                }
3977                _ => {
3978                    self.prev_token();
3979                    break;
3980                }
3981            };
3982        }
3983
3984        debug_assert!(!path.is_empty());
3985        Ok(JsonPath { path })
3986    }
3987
3988    /// Parses the parens following the `[ NOT ] IN` operator.
3989    pub fn parse_in(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
3990        // BigQuery allows `IN UNNEST(array_expression)`
3991        // https://cloud.google.com/bigquery/docs/reference/standard-sql/operators#in_operators
3992        if self.parse_keyword(Keyword::UNNEST) {
3993            self.expect_token(&Token::LParen)?;
3994            let array_expr = self.parse_expr()?;
3995            self.expect_token(&Token::RParen)?;
3996            return Ok(Expr::InUnnest {
3997                expr: Box::new(expr),
3998                array_expr: Box::new(array_expr),
3999                negated,
4000            });
4001        }
4002        self.expect_token(&Token::LParen)?;
4003        let in_op = match self.maybe_parse(|p| p.parse_query())? {
4004            Some(subquery) => Expr::InSubquery {
4005                expr: Box::new(expr),
4006                subquery,
4007                negated,
4008            },
4009            None => Expr::InList {
4010                expr: Box::new(expr),
4011                list: if self.dialect.supports_in_empty_list() {
4012                    self.parse_comma_separated0(Parser::parse_expr, Token::RParen)?
4013                } else {
4014                    self.parse_comma_separated(Parser::parse_expr)?
4015                },
4016                negated,
4017            },
4018        };
4019        self.expect_token(&Token::RParen)?;
4020        Ok(in_op)
4021    }
4022
4023    /// Parses `BETWEEN <low> AND <high>`, assuming the `BETWEEN` keyword was already consumed.
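    ///
    /// For illustration only, a small end-to-end sketch using [`Self::parse_expr`]
    /// (which consumes the `BETWEEN` keyword before dispatching here), assuming
    /// the [`GenericDialect`]:
    /// ```rust
    /// # use sqlparser::ast::Expr;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("x BETWEEN 1 AND 10").unwrap();
    /// // The whole expression parses to an `Expr::Between`
    /// assert!(matches!(parser.parse_expr().unwrap(), Expr::Between { negated: false, .. }));
    /// ```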
4024    pub fn parse_between(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
4025        // Stop parsing subexpressions for <low> and <high> on tokens with
4026        // precedence lower than that of `BETWEEN`, such as `AND`, `IS`, etc.
4027        let low = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
4028        self.expect_keyword_is(Keyword::AND)?;
4029        let high = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
4030        Ok(Expr::Between {
4031            expr: Box::new(expr),
4032            negated,
4033            low: Box::new(low),
4034            high: Box::new(high),
4035        })
4036    }
4037
4038    /// Parse a PostgreSQL casting style which is in the form of `expr::datatype`.
4039    pub fn parse_pg_cast(&mut self, expr: Expr) -> Result<Expr, ParserError> {
4040        Ok(Expr::Cast {
4041            kind: CastKind::DoubleColon,
4042            expr: Box::new(expr),
4043            data_type: self.parse_data_type()?,
4044            format: None,
4045        })
4046    }
4047
4048    /// Get the precedence of the next token
4049    pub fn get_next_precedence(&self) -> Result<u8, ParserError> {
4050        self.dialect.get_next_precedence_default(self)
4051    }
4052
4053    /// Return the token at the given location, or EOF if the index is beyond
4054    /// the length of the current set of tokens.
4055    pub fn token_at(&self, index: usize) -> &TokenWithSpan {
4056        self.tokens.get(index).unwrap_or(&EOF_TOKEN)
4057    }
4058
4059    /// Return the first non-whitespace token that has not yet been processed
4060    /// or Token::EOF
4061    ///
4062    /// See [`Self::peek_token_ref`] to avoid the copy.
4063    pub fn peek_token(&self) -> TokenWithSpan {
4064        self.peek_nth_token(0)
4065    }
4066
4067    /// Return a reference to the first non-whitespace token that has not yet
4068    /// been processed or Token::EOF
4069    pub fn peek_token_ref(&self) -> &TokenWithSpan {
4070        self.peek_nth_token_ref(0)
4071    }
4072
4073    /// Returns the `N` next non-whitespace tokens that have not yet been
4074    /// processed.
4075    ///
4076    /// Example:
4077    /// ```rust
4078    /// # use sqlparser::dialect::GenericDialect;
4079    /// # use sqlparser::parser::Parser;
4080    /// # use sqlparser::keywords::Keyword;
4081    /// # use sqlparser::tokenizer::{Token, Word};
4082    /// let dialect = GenericDialect {};
4083    /// let mut parser = Parser::new(&dialect).try_with_sql("ORDER BY foo, bar").unwrap();
4084    ///
4085    /// // Note that Rust infers the number of tokens to peek based on the
4086    /// // length of the slice pattern!
4087    /// assert!(matches!(
4088    ///     parser.peek_tokens(),
4089    ///     [
4090    ///         Token::Word(Word { keyword: Keyword::ORDER, .. }),
4091    ///         Token::Word(Word { keyword: Keyword::BY, .. }),
4092    ///     ]
4093    /// ));
4094    /// ```
4095    pub fn peek_tokens<const N: usize>(&self) -> [Token; N] {
4096        self.peek_tokens_with_location()
4097            .map(|with_loc| with_loc.token)
4098    }
4099
4100    /// Returns the `N` next non-whitespace tokens with locations that have not
4101    /// yet been processed.
4102    ///
4103    /// See [`Self::peek_tokens`] for an example.
4104    pub fn peek_tokens_with_location<const N: usize>(&self) -> [TokenWithSpan; N] {
4105        let mut index = self.index;
4106        core::array::from_fn(|_| loop {
4107            let token = self.tokens.get(index);
4108            index += 1;
4109            if let Some(TokenWithSpan {
4110                token: Token::Whitespace(_),
4111                span: _,
4112            }) = token
4113            {
4114                continue;
4115            }
4116            break token.cloned().unwrap_or(TokenWithSpan {
4117                token: Token::EOF,
4118                span: Span::empty(),
4119            });
4120        })
4121    }
4122
4123    /// Returns references to the `N` next non-whitespace tokens
4124    /// that have not yet been processed.
4125    ///
4126    /// See [`Self::peek_tokens`] for an example.
4127    pub fn peek_tokens_ref<const N: usize>(&self) -> [&TokenWithSpan; N] {
4128        let mut index = self.index;
4129        core::array::from_fn(|_| loop {
4130            let token = self.tokens.get(index);
4131            index += 1;
4132            if let Some(TokenWithSpan {
4133                token: Token::Whitespace(_),
4134                span: _,
4135            }) = token
4136            {
4137                continue;
4138            }
4139            break token.unwrap_or(&EOF_TOKEN);
4140        })
4141    }
4142
4143    /// Return the nth non-whitespace token that has not yet been processed
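    ///
    /// Illustrative example using the [`GenericDialect`]:
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let parser = Parser::new(&dialect).try_with_sql("a b c").unwrap();
    /// // Peeking does not advance the parser
    /// assert_eq!(parser.peek_nth_token(0).token.to_string(), "a");
    /// assert_eq!(parser.peek_nth_token(2).token.to_string(), "c");
    /// ```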
4144    pub fn peek_nth_token(&self, n: usize) -> TokenWithSpan {
4145        self.peek_nth_token_ref(n).clone()
4146    }
4147
4148    /// Return the nth non-whitespace token that has not yet been processed
4149    pub fn peek_nth_token_ref(&self, mut n: usize) -> &TokenWithSpan {
4150        let mut index = self.index;
4151        loop {
4152            index += 1;
4153            match self.tokens.get(index - 1) {
4154                Some(TokenWithSpan {
4155                    token: Token::Whitespace(_),
4156                    span: _,
4157                }) => continue,
4158                non_whitespace => {
4159                    if n == 0 {
4160                        return non_whitespace.unwrap_or(&EOF_TOKEN);
4161                    }
4162                    n -= 1;
4163                }
4164            }
4165        }
4166    }
4167
4168    /// Return the first token, possibly whitespace, that has not yet been processed
4169    /// (or a `Token::EOF` token if the end of the input has been reached).
4170    pub fn peek_token_no_skip(&self) -> TokenWithSpan {
4171        self.peek_nth_token_no_skip(0)
4172    }
4173
4174    /// Return the nth token, possibly whitespace, that has not yet been processed.
4175    pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithSpan {
4176        self.tokens
4177            .get(self.index + n)
4178            .cloned()
4179            .unwrap_or(TokenWithSpan {
4180                token: Token::EOF,
4181                span: Span::empty(),
4182            })
4183    }
4184
4185    /// Return true if the next tokens exactly match `expected`
4186    ///
4187    /// Does not advance the current token.
4188    fn peek_keywords(&mut self, expected: &[Keyword]) -> bool {
4189        let index = self.index;
4190        let matched = self.parse_keywords(expected);
4191        self.index = index;
4192        matched
4193    }
4194
4195    /// Advances to the next non-whitespace token and returns a copy.
4196    ///
4197    /// Please use [`Self::advance_token`] and [`Self::get_current_token`] to
4198    /// avoid the copy.
4199    pub fn next_token(&mut self) -> TokenWithSpan {
4200        self.advance_token();
4201        self.get_current_token().clone()
4202    }
4203
4204    /// Returns the index of the current token
4205    ///
4206    /// This can be used with APIs that expect an index, such as
4207    /// [`Self::token_at`]
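    ///
    /// A small usage sketch (illustrative only), assuming the [`GenericDialect`]:
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("SELECT 1").unwrap();
    /// parser.next_token(); // consume `SELECT`
    /// let index = parser.get_current_index();
    /// assert_eq!(parser.token_at(index), parser.get_current_token());
    /// ```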
4208    pub fn get_current_index(&self) -> usize {
4209        self.index.saturating_sub(1)
4210    }
4211
4212    /// Return the next unprocessed token, possibly whitespace.
4213    pub fn next_token_no_skip(&mut self) -> Option<&TokenWithSpan> {
4214        self.index += 1;
4215        self.tokens.get(self.index - 1)
4216    }
4217
4218    /// Advances the current token to the next non-whitespace token
4219    ///
4220    /// See [`Self::get_current_token`] to get the current token after advancing
4221    pub fn advance_token(&mut self) {
4222        loop {
4223            self.index += 1;
4224            match self.tokens.get(self.index - 1) {
4225                Some(TokenWithSpan {
4226                    token: Token::Whitespace(_),
4227                    span: _,
4228                }) => continue,
4229                _ => break,
4230            }
4231        }
4232    }
4233
4234    /// Returns a reference to the current token
4235    ///
4236    /// Does not advance the current token.
4237    pub fn get_current_token(&self) -> &TokenWithSpan {
4238        self.token_at(self.index.saturating_sub(1))
4239    }
4240
4241    /// Returns a reference to the previous token
4242    ///
4243    /// Does not advance the current token.
4244    pub fn get_previous_token(&self) -> &TokenWithSpan {
4245        self.token_at(self.index.saturating_sub(2))
4246    }
4247
4248    /// Returns a reference to the next token
4249    ///
4250    /// Does not advance the current token.
4251    pub fn get_next_token(&self) -> &TokenWithSpan {
4252        self.token_at(self.index)
4253    }
4254
4255    /// Seek back to the previous non-whitespace token.
4256    ///
4257    /// Must be called after `next_token()`, otherwise it might panic. It is OK
4258    /// to call it after `next_token()` has returned an EOF.
4259    ///
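    /// Illustrative round-trip, assuming the [`GenericDialect`]:
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("SELECT 1").unwrap();
    /// let first = parser.next_token();
    /// parser.prev_token();
    /// // The same token is returned again after seeking back
    /// assert_eq!(parser.next_token(), first);
    /// ```
    ///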
4260    // TODO rename to backup_token and deprecate prev_token?
4261    pub fn prev_token(&mut self) {
4262        loop {
4263            assert!(self.index > 0);
4264            self.index -= 1;
4265            if let Some(TokenWithSpan {
4266                token: Token::Whitespace(_),
4267                span: _,
4268            }) = self.tokens.get(self.index)
4269            {
4270                continue;
4271            }
4272            return;
4273        }
4274    }
4275
4276    /// Report `found` was encountered instead of `expected`
4277    pub fn expected<T>(&self, expected: &str, found: TokenWithSpan) -> Result<T, ParserError> {
4278        parser_err!(
4279            format!("Expected: {expected}, found: {found}"),
4280            found.span.start
4281        )
4282    }
4283
4284    /// Report `found` was encountered instead of `expected`
4285    pub fn expected_ref<T>(&self, expected: &str, found: &TokenWithSpan) -> Result<T, ParserError> {
4286        parser_err!(
4287            format!("Expected: {expected}, found: {found}"),
4288            found.span.start
4289        )
4290    }
4291
4292    /// Report that the token at `index` was found instead of `expected`.
4293    pub fn expected_at<T>(&self, expected: &str, index: usize) -> Result<T, ParserError> {
4294        let found = self.tokens.get(index).unwrap_or(&EOF_TOKEN);
4295        parser_err!(
4296            format!("Expected: {expected}, found: {found}"),
4297            found.span.start
4298        )
4299    }
4300
4301    /// If the current token is the `expected` keyword, consume it and return
4302    /// true. Otherwise, no tokens are consumed and false is returned.
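    ///
    /// Illustrative example using the [`GenericDialect`]:
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::keywords::Keyword;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("SELECT 1").unwrap();
    /// assert!(parser.parse_keyword(Keyword::SELECT));
    /// // The next token is `1`, not a keyword, so nothing is consumed
    /// assert!(!parser.parse_keyword(Keyword::FROM));
    /// ```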
4303    #[must_use]
4304    pub fn parse_keyword(&mut self, expected: Keyword) -> bool {
4305        if self.peek_keyword(expected) {
4306            self.advance_token();
4307            true
4308        } else {
4309            false
4310        }
4311    }
4312
4313    #[must_use]
4314    pub fn peek_keyword(&self, expected: Keyword) -> bool {
4315        matches!(&self.peek_token_ref().token, Token::Word(w) if expected == w.keyword)
4316    }
4317
4318    /// If the current token is the `expected` keyword followed by the
4319    /// specified tokens, consume them and return true.
4320    /// Otherwise, no tokens are consumed and false is returned.
4321    ///
4322    /// Note that if `tokens` is long, this function is not efficient, as it
4323    /// looks up each element with `peek_nth_token`, which scans from the
4324    /// current position each time.
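    ///
    /// A small illustrative sketch (checking for `ARRAY[`), assuming the [`GenericDialect`]:
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::keywords::Keyword;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::tokenizer::Token;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("ARRAY[1, 2]").unwrap();
    /// // Matches `ARRAY` immediately followed by `[`, consuming both
    /// assert!(parser.parse_keyword_with_tokens(Keyword::ARRAY, &[Token::LBracket]));
    /// ```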
4325    pub fn parse_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
4326        self.keyword_with_tokens(expected, tokens, true)
4327    }
4328
4329    /// Peeks to see if the current token is the `expected` keyword followed by specified tokens
4330    /// without consuming them.
4331    ///
4332    /// See [Self::parse_keyword_with_tokens] for details.
4333    pub(crate) fn peek_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
4334        self.keyword_with_tokens(expected, tokens, false)
4335    }
4336
4337    fn keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token], consume: bool) -> bool {
4338        match &self.peek_token_ref().token {
4339            Token::Word(w) if expected == w.keyword => {
4340                for (idx, token) in tokens.iter().enumerate() {
4341                    if self.peek_nth_token_ref(idx + 1).token != *token {
4342                        return false;
4343                    }
4344                }
4345
4346                if consume {
4347                    for _ in 0..(tokens.len() + 1) {
4348                        self.advance_token();
4349                    }
4350                }
4351
4352                true
4353            }
4354            _ => false,
4355        }
4356    }
4357
4358    /// If the current and subsequent tokens exactly match the `keywords`
4359    /// sequence, consume them and return true. Otherwise, no tokens are
4360    /// consumed and false is returned.
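    ///
    /// Illustrative example using the [`GenericDialect`]:
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::keywords::Keyword;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("ORDER BY x").unwrap();
    /// // The whole sequence must match for any tokens to be consumed
    /// assert!(!parser.parse_keywords(&[Keyword::GROUP, Keyword::BY]));
    /// assert!(parser.parse_keywords(&[Keyword::ORDER, Keyword::BY]));
    /// ```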
4361    #[must_use]
4362    pub fn parse_keywords(&mut self, keywords: &[Keyword]) -> bool {
4363        let index = self.index;
4364        for &keyword in keywords {
4365            if !self.parse_keyword(keyword) {
4366                // println!("parse_keywords aborting .. did not find {:?}", keyword);
4367                // reset index and return immediately
4368                self.index = index;
4369                return false;
4370            }
4371        }
4372        true
4373    }
4374
4375    /// If the current token is one of the given `keywords`, returns the keyword
4376    /// that matches, without consuming the token. Otherwise, returns [`None`].
4377    #[must_use]
4378    pub fn peek_one_of_keywords(&self, keywords: &[Keyword]) -> Option<Keyword> {
4379        for keyword in keywords {
4380            if self.peek_keyword(*keyword) {
4381                return Some(*keyword);
4382            }
4383        }
4384        None
4385    }
4386
4387    /// If the current token is one of the given `keywords`, consume the token
4388    /// and return the keyword that matches. Otherwise, no tokens are consumed
4389    /// and [`None`] is returned.
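    ///
    /// Illustrative example using the [`GenericDialect`]:
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::keywords::Keyword;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("ASC NULLS FIRST").unwrap();
    /// assert_eq!(parser.parse_one_of_keywords(&[Keyword::ASC, Keyword::DESC]), Some(Keyword::ASC));
    /// // `NULLS` is not in the list, so nothing is consumed
    /// assert_eq!(parser.parse_one_of_keywords(&[Keyword::ASC, Keyword::DESC]), None);
    /// ```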
4390    #[must_use]
4391    pub fn parse_one_of_keywords(&mut self, keywords: &[Keyword]) -> Option<Keyword> {
4392        match &self.peek_token_ref().token {
4393            Token::Word(w) => {
4394                keywords
4395                    .iter()
4396                    .find(|keyword| **keyword == w.keyword)
4397                    .map(|keyword| {
4398                        self.advance_token();
4399                        *keyword
4400                    })
4401            }
4402            _ => None,
4403        }
4404    }
4405
4406    /// If the current token is one of the expected keywords, consume the token
4407    /// and return the keyword that matches. Otherwise, return an error.
4408    pub fn expect_one_of_keywords(&mut self, keywords: &[Keyword]) -> Result<Keyword, ParserError> {
4409        if let Some(keyword) = self.parse_one_of_keywords(keywords) {
4410            Ok(keyword)
4411        } else {
4412            let keywords: Vec<String> = keywords.iter().map(|x| format!("{x:?}")).collect();
4413            self.expected_ref(
4414                &format!("one of {}", keywords.join(" or ")),
4415                self.peek_token_ref(),
4416            )
4417        }
4418    }
4419
4420    /// If the current token is the `expected` keyword, consume the token.
4421    /// Otherwise, return an error.
4422    ///
4423    // TODO: deprecate in favor of `expect_keyword_is`
4424    pub fn expect_keyword(&mut self, expected: Keyword) -> Result<TokenWithSpan, ParserError> {
4425        if self.parse_keyword(expected) {
4426            Ok(self.get_current_token().clone())
4427        } else {
4428            self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4429        }
4430    }
4431
4432    /// If the current token is the `expected` keyword, consume the token.
4433    /// Otherwise, return an error.
4434    ///
4435    /// This differs from expect_keyword only in that the matched keyword
4436    /// token is not returned.
4437    pub fn expect_keyword_is(&mut self, expected: Keyword) -> Result<(), ParserError> {
4438        if self.parse_keyword(expected) {
4439            Ok(())
4440        } else {
4441            self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4442        }
4443    }
4444
4445    /// If the current and subsequent tokens exactly match the `keywords`
4446    /// sequence, consume them and return `Ok(())`. Otherwise, return an error.
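    ///
    /// Illustrative example using the [`GenericDialect`]:
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::keywords::Keyword;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("GROUP BY x").unwrap();
    /// assert!(parser.expect_keywords(&[Keyword::GROUP, Keyword::BY]).is_ok());
    /// ```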
4447    pub fn expect_keywords(&mut self, expected: &[Keyword]) -> Result<(), ParserError> {
4448        for &kw in expected {
4449            self.expect_keyword_is(kw)?;
4450        }
4451        Ok(())
4452    }
4453
4454    /// Consume the next token if it matches the expected token, otherwise return false
4455    ///
4456    /// See [Self::advance_token] to consume the token unconditionally
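    ///
    /// Illustrative example using the [`GenericDialect`]:
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::tokenizer::Token;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("(1)").unwrap();
    /// assert!(parser.consume_token(&Token::LParen));
    /// // The next token is `1`, so `,` is not consumed
    /// assert!(!parser.consume_token(&Token::Comma));
    /// ```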
4457    #[must_use]
4458    pub fn consume_token(&mut self, expected: &Token) -> bool {
4459        if self.peek_token_ref() == expected {
4460            self.advance_token();
4461            true
4462        } else {
4463            false
4464        }
4465    }
4466
4467    /// If the current and subsequent tokens exactly match the `tokens`
4468    /// sequence, consume them and return true. Otherwise, no tokens are
4469    /// consumed and false is returned.
4470    #[must_use]
4471    pub fn consume_tokens(&mut self, tokens: &[Token]) -> bool {
4472        let index = self.index;
4473        for token in tokens {
4474            if !self.consume_token(token) {
4475                self.index = index;
4476                return false;
4477            }
4478        }
4479        true
4480    }
4481
4482    /// Bail out if the current token is not the expected token, or consume it if it is
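    ///
    /// Illustrative example using the [`GenericDialect`]:
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::tokenizer::Token;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("(x").unwrap();
    /// assert!(parser.expect_token(&Token::LParen).is_ok());
    /// // The next token is `x`, not `)`, so an error is returned
    /// assert!(parser.expect_token(&Token::RParen).is_err());
    /// ```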
4483    pub fn expect_token(&mut self, expected: &Token) -> Result<TokenWithSpan, ParserError> {
4484        if self.peek_token_ref() == expected {
4485            Ok(self.next_token())
4486        } else {
4487            self.expected_ref(&expected.to_string(), self.peek_token_ref())
4488        }
4489    }
4490
4491    fn parse<T: FromStr>(s: String, loc: Location) -> Result<T, ParserError>
4492    where
4493        <T as FromStr>::Err: Display,
4494    {
4495        s.parse::<T>().map_err(|e| {
4496            ParserError::ParserError(format!(
4497                "Could not parse '{s}' as {}: {e}{loc}",
4498                core::any::type_name::<T>()
4499            ))
4500        })
4501    }
4502
4503    /// Parse a comma-separated list of 1+ SelectItem
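    ///
    /// A small usage sketch (illustrative only), assuming the [`GenericDialect`]:
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("a, b AS c FROM t").unwrap();
    /// // Parsing stops before `FROM`, leaving it for the caller
    /// let items = parser.parse_projection().unwrap();
    /// assert_eq!(items.len(), 2);
    /// ```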
4504    pub fn parse_projection(&mut self) -> Result<Vec<SelectItem>, ParserError> {
4505        // BigQuery and Snowflake allow trailing commas, but only in projection lists
4506        // e.g. `SELECT 1, 2, FROM t`
4507        // https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#trailing_commas
4508        // https://docs.snowflake.com/en/release-notes/2024/8_11#select-supports-trailing-commas
4509
4510        let trailing_commas =
4511            self.options.trailing_commas | self.dialect.supports_projection_trailing_commas();
4512
4513        self.parse_comma_separated_with_trailing_commas(
4514            |p| p.parse_select_item(),
4515            trailing_commas,
4516            Self::is_reserved_for_column_alias,
4517        )
4518    }
4519
4520    pub fn parse_actions_list(&mut self) -> Result<Vec<Action>, ParserError> {
4521        let mut values = vec![];
4522        loop {
4523            values.push(self.parse_grant_permission()?);
4524            if !self.consume_token(&Token::Comma) {
4525                break;
4526            } else if self.options.trailing_commas {
4527                match self.peek_token().token {
4528                    Token::Word(kw) if kw.keyword == Keyword::ON => {
4529                        break;
4530                    }
4531                    Token::RParen
4532                    | Token::SemiColon
4533                    | Token::EOF
4534                    | Token::RBracket
4535                    | Token::RBrace => break,
4536                    _ => continue,
4537                }
4538            }
4539        }
4540        Ok(values)
4541    }
4542
4543    /// Parse a list of [TableWithJoins]
4544    fn parse_table_with_joins(&mut self) -> Result<Vec<TableWithJoins>, ParserError> {
4545        let trailing_commas = self.dialect.supports_from_trailing_commas();
4546
4547        self.parse_comma_separated_with_trailing_commas(
4548            Parser::parse_table_and_joins,
4549            trailing_commas,
4550            |kw, parser| !parser.dialect.is_table_factor(kw, parser),
4551        )
4552    }
4553
4554    /// Parse the comma of a comma-separated syntax element.
4555    /// `R` is a predicate that should return true if the next
4556    /// keyword is a reserved keyword.
4557    /// Allows for control over trailing commas.
4558    ///
4559    /// Returns true if the end of the list has been reached (i.e. there is no next element).
4560    fn is_parse_comma_separated_end_with_trailing_commas<R>(
4561        &mut self,
4562        trailing_commas: bool,
4563        is_reserved_keyword: &R,
4564    ) -> bool
4565    where
4566        R: Fn(&Keyword, &mut Parser) -> bool,
4567    {
4568        if !self.consume_token(&Token::Comma) {
4569            true
4570        } else if trailing_commas {
4571            let token = self.next_token().token;
4572            let is_end = match token {
4573                Token::Word(ref kw) if is_reserved_keyword(&kw.keyword, self) => true,
4574                Token::RParen | Token::SemiColon | Token::EOF | Token::RBracket | Token::RBrace => {
4575                    true
4576                }
4577                _ => false,
4578            };
4579            self.prev_token();
4580
4581            is_end
4582        } else {
4583            false
4584        }
4585    }
4586
4587    /// Parse the comma of a comma-separated syntax element.
4588    /// Returns true if the end of the list has been reached (i.e. there is no next element).
4589    fn is_parse_comma_separated_end(&mut self) -> bool {
4590        self.is_parse_comma_separated_end_with_trailing_commas(
4591            self.options.trailing_commas,
4592            &Self::is_reserved_for_column_alias,
4593        )
4594    }
4595
4596    /// Parse a comma-separated list of 1+ items accepted by `F`
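    ///
    /// Illustrative example using the [`GenericDialect`]:
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("a, b, c").unwrap();
    /// let idents = parser.parse_comma_separated(|p| p.parse_identifier()).unwrap();
    /// assert_eq!(idents.len(), 3);
    /// ```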
4597    pub fn parse_comma_separated<T, F>(&mut self, f: F) -> Result<Vec<T>, ParserError>
4598    where
4599        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4600    {
4601        self.parse_comma_separated_with_trailing_commas(
4602            f,
4603            self.options.trailing_commas,
4604            Self::is_reserved_for_column_alias,
4605        )
4606    }
4607
4608    /// Parse a comma-separated list of 1+ items accepted by `F`.
4609    /// `R` is a predicate that should return true if the next
4610    /// keyword is a reserved keyword.
4611    /// Allows for control over trailing commas.
4612    fn parse_comma_separated_with_trailing_commas<T, F, R>(
4613        &mut self,
4614        mut f: F,
4615        trailing_commas: bool,
4616        is_reserved_keyword: R,
4617    ) -> Result<Vec<T>, ParserError>
4618    where
4619        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4620        R: Fn(&Keyword, &mut Parser) -> bool,
4621    {
4622        let mut values = vec![];
4623        loop {
4624            values.push(f(self)?);
4625            if self.is_parse_comma_separated_end_with_trailing_commas(
4626                trailing_commas,
4627                &is_reserved_keyword,
4628            ) {
4629                break;
4630            }
4631        }
4632        Ok(values)
4633    }
4634
4635    /// Parse a period-separated list of 1+ items accepted by `F`
4636    fn parse_period_separated<T, F>(&mut self, mut f: F) -> Result<Vec<T>, ParserError>
4637    where
4638        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4639    {
4640        let mut values = vec![];
4641        loop {
4642            values.push(f(self)?);
4643            if !self.consume_token(&Token::Period) {
4644                break;
4645            }
4646        }
4647        Ok(values)
4648    }
4649
4650    /// Parse a keyword-separated list of 1+ items accepted by `F`
4651    pub fn parse_keyword_separated<T, F>(
4652        &mut self,
4653        keyword: Keyword,
4654        mut f: F,
4655    ) -> Result<Vec<T>, ParserError>
4656    where
4657        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4658    {
4659        let mut values = vec![];
4660        loop {
4661            values.push(f(self)?);
4662            if !self.parse_keyword(keyword) {
4663                break;
4664            }
4665        }
4666        Ok(values)
4667    }
4668
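    /// Parse a single item accepted by `F`, wrapped in parentheses.
    ///
    /// A small illustrative sketch, assuming the [`GenericDialect`]:
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("(42)").unwrap();
    /// let expr = parser.parse_parenthesized(|p| p.parse_expr()).unwrap();
    /// assert_eq!(expr.to_string(), "42");
    /// ```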
4669    pub fn parse_parenthesized<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
4670    where
4671        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4672    {
4673        self.expect_token(&Token::LParen)?;
4674        let res = f(self)?;
4675        self.expect_token(&Token::RParen)?;
4676        Ok(res)
4677    }
4678
4679    /// Parse a comma-separated list of 0+ items accepted by `F`
4680    /// * `end_token` - expected end token for the closure (e.g. [Token::RParen], [Token::RBrace] ...)
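    ///
    /// Illustrative example using the [`GenericDialect`]:
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::tokenizer::Token;
    /// let dialect = GenericDialect {};
    /// // An immediate `)` yields an empty list
    /// let mut parser = Parser::new(&dialect).try_with_sql(")").unwrap();
    /// let items = parser.parse_comma_separated0(|p| p.parse_expr(), Token::RParen).unwrap();
    /// assert!(items.is_empty());
    /// ```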
4681    pub fn parse_comma_separated0<T, F>(
4682        &mut self,
4683        f: F,
4684        end_token: Token,
4685    ) -> Result<Vec<T>, ParserError>
4686    where
4687        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4688    {
4689        if self.peek_token().token == end_token {
4690            return Ok(vec![]);
4691        }
4692
4693        if self.options.trailing_commas && self.peek_tokens() == [Token::Comma, end_token] {
4694            let _ = self.consume_token(&Token::Comma);
4695            return Ok(vec![]);
4696        }
4697
4698        self.parse_comma_separated(f)
4699    }
4700
4701    /// Parses 0 or more statements, each followed by a semicolon.
4702    /// If the next token is any of `terminal_keywords` then no more
4703    /// statements will be parsed.
4704    pub(crate) fn parse_statement_list(
4705        &mut self,
4706        terminal_keywords: &[Keyword],
4707    ) -> Result<Vec<Statement>, ParserError> {
4708        let mut values = vec![];
4709        loop {
4710            match &self.peek_nth_token_ref(0).token {
4711                Token::EOF => break,
4712                Token::Word(w) => {
4713                    if w.quote_style.is_none() && terminal_keywords.contains(&w.keyword) {
4714                        break;
4715                    }
4716                }
4717                _ => {}
4718            }
4719
4720            values.push(self.parse_statement()?);
4721            self.expect_token(&Token::SemiColon)?;
4722        }
4723        Ok(values)
4724    }
4725
4726    /// Default implementation of a predicate that returns true if
4727    /// the specified keyword is reserved for column alias.
4728    /// See [Dialect::is_column_alias]
4729    fn is_reserved_for_column_alias(kw: &Keyword, parser: &mut Parser) -> bool {
4730        !parser.dialect.is_column_alias(kw, parser)
4731    }
4732
4733    /// Run a parser method `f`, reverting back to the starting position if unsuccessful.
4734    /// Returns `ParserError::RecursionLimitExceeded` if `f` returns a `RecursionLimitExceeded`.
4735    /// Returns `Ok(None)` if `f` returns any other error.
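    ///
    /// A small sketch of the revert-on-failure behavior, assuming the [`GenericDialect`]:
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("1 + 2").unwrap();
    /// // `1 + 2` is not a query, so this returns Ok(None) and rewinds the parser...
    /// assert!(parser.maybe_parse(|p| p.parse_query()).unwrap().is_none());
    /// // ...leaving the expression available for a later call
    /// assert!(parser.parse_expr().is_ok());
    /// ```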
4736    pub fn maybe_parse<T, F>(&mut self, f: F) -> Result<Option<T>, ParserError>
4737    where
4738        F: FnMut(&mut Parser) -> Result<T, ParserError>,
4739    {
4740        match self.try_parse(f) {
4741            Ok(t) => Ok(Some(t)),
4742            Err(ParserError::RecursionLimitExceeded) => Err(ParserError::RecursionLimitExceeded),
4743            _ => Ok(None),
4744        }
4745    }
4746
4747    /// Run a parser method `f`, reverting back to the starting position if unsuccessful.
4748    pub fn try_parse<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
4749    where
4750        F: FnMut(&mut Parser) -> Result<T, ParserError>,
4751    {
4752        let index = self.index;
4753        match f(self) {
4754            Ok(t) => Ok(t),
4755            Err(e) => {
4756                // Reset the parser to the starting position before returning the error
4757                self.index = index;
4758                Err(e)
4759            }
4760        }
4761    }
4762
4763    /// Parse either `ALL`, `DISTINCT` or `DISTINCT ON (...)`. Returns [`None`] if `ALL` is parsed,
4764    /// and a [`ParserError`] if both `ALL` and `DISTINCT` are found.
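    ///
    /// A small usage sketch (illustrative only), assuming the [`GenericDialect`]:
    /// ```rust
    /// # use sqlparser::ast::Distinct;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("DISTINCT ON (a, b)").unwrap();
    /// assert!(matches!(parser.parse_all_or_distinct().unwrap(), Some(Distinct::On(_))));
    /// ```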
4765    pub fn parse_all_or_distinct(&mut self) -> Result<Option<Distinct>, ParserError> {
4766        let loc = self.peek_token().span.start;
4767        let all = self.parse_keyword(Keyword::ALL);
4768        let distinct = self.parse_keyword(Keyword::DISTINCT);
4769        if !distinct {
4770            return Ok(None);
4771        }
4772        if all {
4773            return parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc);
4774        }
4775        let on = self.parse_keyword(Keyword::ON);
4776        if !on {
4777            return Ok(Some(Distinct::Distinct));
4778        }
4779
4780        self.expect_token(&Token::LParen)?;
4781        let col_names = if self.consume_token(&Token::RParen) {
4782            self.prev_token();
4783            Vec::new()
4784        } else {
4785            self.parse_comma_separated(Parser::parse_expr)?
4786        };
4787        self.expect_token(&Token::RParen)?;
4788        Ok(Some(Distinct::On(col_names)))
4789    }
4790
4791    /// Parse a SQL CREATE statement
4792    pub fn parse_create(&mut self) -> Result<Statement, ParserError> {
4793        let or_replace = self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]);
4794        let or_alter = self.parse_keywords(&[Keyword::OR, Keyword::ALTER]);
4795        let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
4796        let global = self.parse_one_of_keywords(&[Keyword::GLOBAL]).is_some();
4797        let transient = self.parse_one_of_keywords(&[Keyword::TRANSIENT]).is_some();
4798        let global: Option<bool> = if global {
4799            Some(true)
4800        } else if local {
4801            Some(false)
4802        } else {
4803            None
4804        };
4805        let temporary = self
4806            .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
4807            .is_some();
4808        let persistent = dialect_of!(self is DuckDbDialect)
4809            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
4810        let create_view_params = self.parse_create_view_params()?;
4811        if self.parse_keyword(Keyword::TABLE) {
4812            self.parse_create_table(or_replace, temporary, global, transient)
4813        } else if self.peek_keyword(Keyword::MATERIALIZED)
4814            || self.peek_keyword(Keyword::VIEW)
4815            || self.peek_keywords(&[Keyword::SECURE, Keyword::MATERIALIZED, Keyword::VIEW])
4816            || self.peek_keywords(&[Keyword::SECURE, Keyword::VIEW])
4817        {
4818            self.parse_create_view(or_alter, or_replace, temporary, create_view_params)
4819        } else if self.parse_keyword(Keyword::POLICY) {
4820            self.parse_create_policy()
4821        } else if self.parse_keyword(Keyword::EXTERNAL) {
4822            self.parse_create_external_table(or_replace)
4823        } else if self.parse_keyword(Keyword::FUNCTION) {
4824            self.parse_create_function(or_alter, or_replace, temporary)
4825        } else if self.parse_keyword(Keyword::DOMAIN) {
4826            self.parse_create_domain()
4827        } else if self.parse_keyword(Keyword::TRIGGER) {
4828            self.parse_create_trigger(temporary, or_alter, or_replace, false)
4829        } else if self.parse_keywords(&[Keyword::CONSTRAINT, Keyword::TRIGGER]) {
4830            self.parse_create_trigger(temporary, or_alter, or_replace, true)
4831        } else if self.parse_keyword(Keyword::MACRO) {
4832            self.parse_create_macro(or_replace, temporary)
4833        } else if self.parse_keyword(Keyword::SECRET) {
4834            self.parse_create_secret(or_replace, temporary, persistent)
4835        } else if self.parse_keyword(Keyword::USER) {
4836            self.parse_create_user(or_replace)
4837        } else if or_replace {
4838            self.expected(
4839                "[EXTERNAL] TABLE or [MATERIALIZED] VIEW or FUNCTION after CREATE OR REPLACE",
4840                self.peek_token(),
4841            )
4842        } else if self.parse_keyword(Keyword::EXTENSION) {
4843            self.parse_create_extension()
4844        } else if self.parse_keyword(Keyword::INDEX) {
4845            self.parse_create_index(false)
4846        } else if self.parse_keywords(&[Keyword::UNIQUE, Keyword::INDEX]) {
4847            self.parse_create_index(true)
4848        } else if self.parse_keyword(Keyword::VIRTUAL) {
4849            self.parse_create_virtual_table()
4850        } else if self.parse_keyword(Keyword::SCHEMA) {
4851            self.parse_create_schema()
4852        } else if self.parse_keyword(Keyword::DATABASE) {
4853            self.parse_create_database()
4854        } else if self.parse_keyword(Keyword::ROLE) {
4855            self.parse_create_role()
4856        } else if self.parse_keyword(Keyword::SEQUENCE) {
4857            self.parse_create_sequence(temporary)
4858        } else if self.parse_keyword(Keyword::TYPE) {
4859            self.parse_create_type()
4860        } else if self.parse_keyword(Keyword::PROCEDURE) {
4861            self.parse_create_procedure(or_alter)
4862        } else if self.parse_keyword(Keyword::CONNECTOR) {
4863            self.parse_create_connector()
4864        } else if self.parse_keyword(Keyword::OPERATOR) {
4865            // Check if this is CREATE OPERATOR FAMILY or CREATE OPERATOR CLASS
4866            if self.parse_keyword(Keyword::FAMILY) {
4867                self.parse_create_operator_family()
4868            } else if self.parse_keyword(Keyword::CLASS) {
4869                self.parse_create_operator_class()
4870            } else {
4871                self.parse_create_operator()
4872            }
4873        } else if self.parse_keyword(Keyword::SERVER) {
4874            self.parse_pg_create_server()
4875        } else {
4876            self.expected("an object type after CREATE", self.peek_token())
4877        }
4878    }
4879
4880    fn parse_create_user(&mut self, or_replace: bool) -> Result<Statement, ParserError> {
4881        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
4882        let name = self.parse_identifier()?;
4883        let options = self
4884            .parse_key_value_options(false, &[Keyword::WITH, Keyword::TAG])?
4885            .options;
4886        let with_tags = self.parse_keyword(Keyword::WITH);
4887        let tags = if self.parse_keyword(Keyword::TAG) {
4888            self.parse_key_value_options(true, &[])?.options
4889        } else {
4890            vec![]
4891        };
4892        Ok(Statement::CreateUser(CreateUser {
4893            or_replace,
4894            if_not_exists,
4895            name,
4896            options: KeyValueOptions {
4897                options,
4898                delimiter: KeyValueOptionsDelimiter::Space,
4899            },
4900            with_tags,
4901            tags: KeyValueOptions {
4902                options: tags,
4903                delimiter: KeyValueOptionsDelimiter::Comma,
4904            },
4905        }))
4906    }
4907
4908    /// See [DuckDB Docs](https://duckdb.org/docs/sql/statements/create_secret.html) for more details.
4909    pub fn parse_create_secret(
4910        &mut self,
4911        or_replace: bool,
4912        temporary: bool,
4913        persistent: bool,
4914    ) -> Result<Statement, ParserError> {
4915        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
4916
4917        let mut storage_specifier = None;
4918        let mut name = None;
4919        if self.peek_token() != Token::LParen {
4920            if self.parse_keyword(Keyword::IN) {
4921                storage_specifier = self.parse_identifier().ok()
4922            } else {
4923                name = self.parse_identifier().ok();
4924            }
4925
4926            // Storage specifier may follow the name
4927            if storage_specifier.is_none()
4928                && self.peek_token() != Token::LParen
4929                && self.parse_keyword(Keyword::IN)
4930            {
4931                storage_specifier = self.parse_identifier().ok();
4932            }
4933        }
4934
4935        self.expect_token(&Token::LParen)?;
4936        self.expect_keyword_is(Keyword::TYPE)?;
4937        let secret_type = self.parse_identifier()?;
4938
4939        let mut options = Vec::new();
4940        if self.consume_token(&Token::Comma) {
4941            options.append(&mut self.parse_comma_separated(|p| {
4942                let key = p.parse_identifier()?;
4943                let value = p.parse_identifier()?;
4944                Ok(SecretOption { key, value })
4945            })?);
4946        }
4947        self.expect_token(&Token::RParen)?;
4948
4949        let temp = match (temporary, persistent) {
4950            (true, false) => Some(true),
4951            (false, true) => Some(false),
4952            (false, false) => None,
4953            _ => self.expected("TEMPORARY or PERSISTENT", self.peek_token())?,
4954        };
4955
4956        Ok(Statement::CreateSecret {
4957            or_replace,
4958            temporary: temp,
4959            if_not_exists,
4960            name,
4961            storage_specifier,
4962            secret_type,
4963            options,
4964        })
4965    }
4966
4967    /// Parse a CACHE TABLE statement
4968    pub fn parse_cache_table(&mut self) -> Result<Statement, ParserError> {
4969        let (mut table_flag, mut options, mut has_as, mut query) = (None, vec![], false, None);
4970        if self.parse_keyword(Keyword::TABLE) {
4971            let table_name = self.parse_object_name(false)?;
4972            if self.peek_token().token != Token::EOF {
4973                if let Token::Word(word) = self.peek_token().token {
4974                    if word.keyword == Keyword::OPTIONS {
4975                        options = self.parse_options(Keyword::OPTIONS)?
4976                    }
4977                };
4978
4979                if self.peek_token().token != Token::EOF {
4980                    let (a, q) = self.parse_as_query()?;
4981                    has_as = a;
4982                    query = Some(q);
4983                }
4984
4985                Ok(Statement::Cache {
4986                    table_flag,
4987                    table_name,
4988                    has_as,
4989                    options,
4990                    query,
4991                })
4992            } else {
4993                Ok(Statement::Cache {
4994                    table_flag,
4995                    table_name,
4996                    has_as,
4997                    options,
4998                    query,
4999                })
5000            }
5001        } else {
5002            table_flag = Some(self.parse_object_name(false)?);
5003            if self.parse_keyword(Keyword::TABLE) {
5004                let table_name = self.parse_object_name(false)?;
5005                if self.peek_token() != Token::EOF {
5006                    if let Token::Word(word) = self.peek_token().token {
5007                        if word.keyword == Keyword::OPTIONS {
5008                            options = self.parse_options(Keyword::OPTIONS)?
5009                        }
5010                    };
5011
5012                    if self.peek_token() != Token::EOF {
5013                        let (a, q) = self.parse_as_query()?;
5014                        has_as = a;
5015                        query = Some(q);
5016                    }
5017
5018                    Ok(Statement::Cache {
5019                        table_flag,
5020                        table_name,
5021                        has_as,
5022                        options,
5023                        query,
5024                    })
5025                } else {
5026                    Ok(Statement::Cache {
5027                        table_flag,
5028                        table_name,
5029                        has_as,
5030                        options,
5031                        query,
5032                    })
5033                }
5034            } else {
5035                if self.peek_token() == Token::EOF {
5036                    self.prev_token();
5037                }
5038                self.expected("a `TABLE` keyword", self.peek_token())
5039            }
5040        }
5041    }
5042
5043    /// Parse an optional 'AS' before a query, such as `WITH XXX AS SELECT XXX` or `CACHE TABLE AS SELECT XXX`
5044    pub fn parse_as_query(&mut self) -> Result<(bool, Box<Query>), ParserError> {
5045        match self.peek_token().token {
5046            Token::Word(word) => match word.keyword {
5047                Keyword::AS => {
5048                    self.next_token();
5049                    Ok((true, self.parse_query()?))
5050                }
5051                _ => Ok((false, self.parse_query()?)),
5052            },
5053            _ => self.expected("a QUERY statement", self.peek_token()),
5054        }
5055    }
5056
5057    /// Parse an UNCACHE TABLE statement
5058    pub fn parse_uncache_table(&mut self) -> Result<Statement, ParserError> {
5059        self.expect_keyword_is(Keyword::TABLE)?;
5060        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5061        let table_name = self.parse_object_name(false)?;
5062        Ok(Statement::UNCache {
5063            table_name,
5064            if_exists,
5065        })
5066    }
5067
5068    /// SQLite-specific `CREATE VIRTUAL TABLE`
5069    pub fn parse_create_virtual_table(&mut self) -> Result<Statement, ParserError> {
5070        self.expect_keyword_is(Keyword::TABLE)?;
5071        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5072        let table_name = self.parse_object_name(false)?;
5073        self.expect_keyword_is(Keyword::USING)?;
5074        let module_name = self.parse_identifier()?;
5075        // SQLite docs note that module "arguments syntax is sufficiently
5076        // general that the arguments can be made to appear as column
5077        // definitions in a traditional CREATE TABLE statement", but
5078        // we don't implement that.
5079        let module_args = self.parse_parenthesized_column_list(Optional, false)?;
5080        Ok(Statement::CreateVirtualTable {
5081            name: table_name,
5082            if_not_exists,
5083            module_name,
5084            module_args,
5085        })
5086    }
5087
5088    pub fn parse_create_schema(&mut self) -> Result<Statement, ParserError> {
5089        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5090
5091        let schema_name = self.parse_schema_name()?;
5092
5093        let default_collate_spec = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
5094            Some(self.parse_expr()?)
5095        } else {
5096            None
5097        };
5098
5099        let with = if self.peek_keyword(Keyword::WITH) {
5100            Some(self.parse_options(Keyword::WITH)?)
5101        } else {
5102            None
5103        };
5104
5105        let options = if self.peek_keyword(Keyword::OPTIONS) {
5106            Some(self.parse_options(Keyword::OPTIONS)?)
5107        } else {
5108            None
5109        };
5110
5111        let clone = if self.parse_keyword(Keyword::CLONE) {
5112            Some(self.parse_object_name(false)?)
5113        } else {
5114            None
5115        };
5116
5117        Ok(Statement::CreateSchema {
5118            schema_name,
5119            if_not_exists,
5120            with,
5121            options,
5122            default_collate_spec,
5123            clone,
5124        })
5125    }
5126
5127    fn parse_schema_name(&mut self) -> Result<SchemaName, ParserError> {
5128        if self.parse_keyword(Keyword::AUTHORIZATION) {
5129            Ok(SchemaName::UnnamedAuthorization(self.parse_identifier()?))
5130        } else {
5131            let name = self.parse_object_name(false)?;
5132
5133            if self.parse_keyword(Keyword::AUTHORIZATION) {
5134                Ok(SchemaName::NamedAuthorization(
5135                    name,
5136                    self.parse_identifier()?,
5137                ))
5138            } else {
5139                Ok(SchemaName::Simple(name))
5140            }
5141        }
5142    }
5143
5144    pub fn parse_create_database(&mut self) -> Result<Statement, ParserError> {
5145        let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5146        let db_name = self.parse_object_name(false)?;
5147        let mut location = None;
5148        let mut managed_location = None;
5149        loop {
5150            match self.parse_one_of_keywords(&[Keyword::LOCATION, Keyword::MANAGEDLOCATION]) {
5151                Some(Keyword::LOCATION) => location = Some(self.parse_literal_string()?),
5152                Some(Keyword::MANAGEDLOCATION) => {
5153                    managed_location = Some(self.parse_literal_string()?)
5154                }
5155                _ => break,
5156            }
5157        }
5158        let clone = if self.parse_keyword(Keyword::CLONE) {
5159            Some(self.parse_object_name(false)?)
5160        } else {
5161            None
5162        };
5163
5164        Ok(Statement::CreateDatabase {
5165            db_name,
5166            if_not_exists: ine,
5167            location,
5168            managed_location,
5169            or_replace: false,
5170            transient: false,
5171            clone,
5172            data_retention_time_in_days: None,
5173            max_data_extension_time_in_days: None,
5174            external_volume: None,
5175            catalog: None,
5176            replace_invalid_characters: None,
5177            default_ddl_collation: None,
5178            storage_serialization_policy: None,
5179            comment: None,
5180            catalog_sync: None,
5181            catalog_sync_namespace_mode: None,
5182            catalog_sync_namespace_flatten_delimiter: None,
5183            with_tags: None,
5184            with_contacts: None,
5185        })
5186    }
5187
5188    pub fn parse_optional_create_function_using(
5189        &mut self,
5190    ) -> Result<Option<CreateFunctionUsing>, ParserError> {
5191        if !self.parse_keyword(Keyword::USING) {
5192            return Ok(None);
5193        };
5194        let keyword =
5195            self.expect_one_of_keywords(&[Keyword::JAR, Keyword::FILE, Keyword::ARCHIVE])?;
5196
5197        let uri = self.parse_literal_string()?;
5198
5199        match keyword {
5200            Keyword::JAR => Ok(Some(CreateFunctionUsing::Jar(uri))),
5201            Keyword::FILE => Ok(Some(CreateFunctionUsing::File(uri))),
5202            Keyword::ARCHIVE => Ok(Some(CreateFunctionUsing::Archive(uri))),
5203            _ => self.expected(
5204                "JAR, FILE or ARCHIVE",
5205                TokenWithSpan::wrap(Token::make_keyword(format!("{keyword:?}").as_str())),
5206            ),
5207        }
5208    }
5209
5210    pub fn parse_create_function(
5211        &mut self,
5212        or_alter: bool,
5213        or_replace: bool,
5214        temporary: bool,
5215    ) -> Result<Statement, ParserError> {
5216        if dialect_of!(self is HiveDialect) {
5217            self.parse_hive_create_function(or_replace, temporary)
5218        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
5219            self.parse_postgres_create_function(or_replace, temporary)
5220        } else if dialect_of!(self is DuckDbDialect) {
5221            self.parse_create_macro(or_replace, temporary)
5222        } else if dialect_of!(self is BigQueryDialect) {
5223            self.parse_bigquery_create_function(or_replace, temporary)
5224        } else if dialect_of!(self is MsSqlDialect) {
5225            self.parse_mssql_create_function(or_alter, or_replace, temporary)
5226        } else {
5227            self.prev_token();
5228            self.expected("an object type after CREATE", self.peek_token())
5229        }
5230    }
5231
5232    /// Parse `CREATE FUNCTION` for [PostgreSQL]
5233    ///
5234    /// [PostgreSQL]: https://www.postgresql.org/docs/15/sql-createfunction.html
5235    fn parse_postgres_create_function(
5236        &mut self,
5237        or_replace: bool,
5238        temporary: bool,
5239    ) -> Result<Statement, ParserError> {
5240        let name = self.parse_object_name(false)?;
5241
5242        self.expect_token(&Token::LParen)?;
5243        let args = if Token::RParen != self.peek_token_ref().token {
5244            self.parse_comma_separated(Parser::parse_function_arg)?
5245        } else {
5246            vec![]
5247        };
5248        self.expect_token(&Token::RParen)?;
5249
5250        let return_type = if self.parse_keyword(Keyword::RETURNS) {
5251            Some(self.parse_data_type()?)
5252        } else {
5253            None
5254        };
5255
5256        #[derive(Default)]
5257        struct Body {
5258            language: Option<Ident>,
5259            behavior: Option<FunctionBehavior>,
5260            function_body: Option<CreateFunctionBody>,
5261            called_on_null: Option<FunctionCalledOnNull>,
5262            parallel: Option<FunctionParallel>,
5263            security: Option<FunctionSecurity>,
5264        }
5265        let mut body = Body::default();
5266        let mut set_params: Vec<FunctionDefinitionSetParam> = Vec::new();
5267        loop {
5268            fn ensure_not_set<T>(field: &Option<T>, name: &str) -> Result<(), ParserError> {
5269                if field.is_some() {
5270                    return Err(ParserError::ParserError(format!(
5271                        "{name} specified more than once",
5272                    )));
5273                }
5274                Ok(())
5275            }
5276            if self.parse_keyword(Keyword::AS) {
5277                ensure_not_set(&body.function_body, "AS")?;
5278                body.function_body = Some(self.parse_create_function_body_string()?);
5279            } else if self.parse_keyword(Keyword::LANGUAGE) {
5280                ensure_not_set(&body.language, "LANGUAGE")?;
5281                body.language = Some(self.parse_identifier()?);
5282            } else if self.parse_keyword(Keyword::IMMUTABLE) {
5283                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
5284                body.behavior = Some(FunctionBehavior::Immutable);
5285            } else if self.parse_keyword(Keyword::STABLE) {
5286                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
5287                body.behavior = Some(FunctionBehavior::Stable);
5288            } else if self.parse_keyword(Keyword::VOLATILE) {
5289                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
5290                body.behavior = Some(FunctionBehavior::Volatile);
5291            } else if self.parse_keywords(&[
5292                Keyword::CALLED,
5293                Keyword::ON,
5294                Keyword::NULL,
5295                Keyword::INPUT,
5296            ]) {
5297                ensure_not_set(
5298                    &body.called_on_null,
5299                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
5300                )?;
5301                body.called_on_null = Some(FunctionCalledOnNull::CalledOnNullInput);
5302            } else if self.parse_keywords(&[
5303                Keyword::RETURNS,
5304                Keyword::NULL,
5305                Keyword::ON,
5306                Keyword::NULL,
5307                Keyword::INPUT,
5308            ]) {
5309                ensure_not_set(
5310                    &body.called_on_null,
5311                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
5312                )?;
5313                body.called_on_null = Some(FunctionCalledOnNull::ReturnsNullOnNullInput);
5314            } else if self.parse_keyword(Keyword::STRICT) {
5315                ensure_not_set(
5316                    &body.called_on_null,
5317                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
5318                )?;
5319                body.called_on_null = Some(FunctionCalledOnNull::Strict);
5320            } else if self.parse_keyword(Keyword::PARALLEL) {
5321                ensure_not_set(&body.parallel, "PARALLEL { UNSAFE | RESTRICTED | SAFE }")?;
5322                if self.parse_keyword(Keyword::UNSAFE) {
5323                    body.parallel = Some(FunctionParallel::Unsafe);
5324                } else if self.parse_keyword(Keyword::RESTRICTED) {
5325                    body.parallel = Some(FunctionParallel::Restricted);
5326                } else if self.parse_keyword(Keyword::SAFE) {
5327                    body.parallel = Some(FunctionParallel::Safe);
5328                } else {
5329                    return self.expected("one of UNSAFE | RESTRICTED | SAFE", self.peek_token());
5330                }
5331            } else if self.parse_keyword(Keyword::SECURITY) {
5332                ensure_not_set(&body.security, "SECURITY { DEFINER | INVOKER }")?;
5333                if self.parse_keyword(Keyword::DEFINER) {
5334                    body.security = Some(FunctionSecurity::Definer);
5335                } else if self.parse_keyword(Keyword::INVOKER) {
5336                    body.security = Some(FunctionSecurity::Invoker);
5337                } else {
5338                    return self.expected("DEFINER or INVOKER", self.peek_token());
5339                }
5340            } else if self.parse_keyword(Keyword::SET) {
5341                let name = self.parse_identifier()?;
5342                let value = if self.parse_keywords(&[Keyword::FROM, Keyword::CURRENT]) {
5343                    FunctionSetValue::FromCurrent
5344                } else {
5345                    if !self.consume_token(&Token::Eq) && !self.parse_keyword(Keyword::TO) {
5346                        return self.expected("= or TO", self.peek_token());
5347                    }
5348                    let values = self.parse_comma_separated(Parser::parse_expr)?;
5349                    FunctionSetValue::Values(values)
5350                };
5351                set_params.push(FunctionDefinitionSetParam { name, value });
5352            } else if self.parse_keyword(Keyword::RETURN) {
5353                ensure_not_set(&body.function_body, "RETURN")?;
5354                body.function_body = Some(CreateFunctionBody::Return(self.parse_expr()?));
5355            } else {
5356                break;
5357            }
5358        }
5359
5360        Ok(Statement::CreateFunction(CreateFunction {
5361            or_alter: false,
5362            or_replace,
5363            temporary,
5364            name,
5365            args: Some(args),
5366            return_type,
5367            behavior: body.behavior,
5368            called_on_null: body.called_on_null,
5369            parallel: body.parallel,
5370            security: body.security,
5371            set_params,
5372            language: body.language,
5373            function_body: body.function_body,
5374            if_not_exists: false,
5375            using: None,
5376            determinism_specifier: None,
5377            options: None,
5378            remote_connection: None,
5379        }))
5380    }
5381
5382    /// Parse `CREATE FUNCTION` for [Hive]
5383    ///
5384    /// [Hive]: https://cwiki.apache.org/confluence/display/hive/languagemanual+ddl#LanguageManualDDL-Create/Drop/ReloadFunction
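    ///
    /// For illustration only, a statement of the general shape this method
    /// targets (database, function, class and JAR names are placeholders):
    ///
    /// ```sql
    /// CREATE FUNCTION mydb.my_lower AS 'com.example.udf.Lower'
    ///     USING JAR 'hdfs:///lib/udfs.jar';
    /// ```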
5385    fn parse_hive_create_function(
5386        &mut self,
5387        or_replace: bool,
5388        temporary: bool,
5389    ) -> Result<Statement, ParserError> {
5390        let name = self.parse_object_name(false)?;
5391        self.expect_keyword_is(Keyword::AS)?;
5392
5393        let body = self.parse_create_function_body_string()?;
5394        let using = self.parse_optional_create_function_using()?;
5395
5396        Ok(Statement::CreateFunction(CreateFunction {
5397            or_alter: false,
5398            or_replace,
5399            temporary,
5400            name,
5401            function_body: Some(body),
5402            using,
5403            if_not_exists: false,
5404            args: None,
5405            return_type: None,
5406            behavior: None,
5407            called_on_null: None,
5408            parallel: None,
5409            security: None,
5410            set_params: vec![],
5411            language: None,
5412            determinism_specifier: None,
5413            options: None,
5414            remote_connection: None,
5415        }))
5416    }
5417
5418    /// Parse `CREATE FUNCTION` for [BigQuery]
5419    ///
5420    /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_function_statement
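    ///
    /// For illustration only, a statement of the general shape this method
    /// targets (dataset, function and option values are placeholders):
    ///
    /// ```sql
    /// CREATE OR REPLACE FUNCTION mydataset.multiply_by_three(x INT64)
    /// RETURNS INT64
    /// AS (x * 3)
    /// OPTIONS (description = 'Multiplies the input by three');
    /// ```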
5421    fn parse_bigquery_create_function(
5422        &mut self,
5423        or_replace: bool,
5424        temporary: bool,
5425    ) -> Result<Statement, ParserError> {
5426        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5427        let (name, args) = self.parse_create_function_name_and_params()?;
5428
5429        let return_type = if self.parse_keyword(Keyword::RETURNS) {
5430            Some(self.parse_data_type()?)
5431        } else {
5432            None
5433        };
5434
5435        let determinism_specifier = if self.parse_keyword(Keyword::DETERMINISTIC) {
5436            Some(FunctionDeterminismSpecifier::Deterministic)
5437        } else if self.parse_keywords(&[Keyword::NOT, Keyword::DETERMINISTIC]) {
5438            Some(FunctionDeterminismSpecifier::NotDeterministic)
5439        } else {
5440            None
5441        };
5442
5443        let language = if self.parse_keyword(Keyword::LANGUAGE) {
5444            Some(self.parse_identifier()?)
5445        } else {
5446            None
5447        };
5448
5449        let remote_connection =
5450            if self.parse_keywords(&[Keyword::REMOTE, Keyword::WITH, Keyword::CONNECTION]) {
5451                Some(self.parse_object_name(false)?)
5452            } else {
5453                None
5454            };
5455
5456        // `OPTIONS` may come before or after the function body but
5457        // may be specified at most once.
5458        let mut options = self.maybe_parse_options(Keyword::OPTIONS)?;
5459
5460        let function_body = if remote_connection.is_none() {
5461            self.expect_keyword_is(Keyword::AS)?;
5462            let expr = self.parse_expr()?;
5463            if options.is_none() {
5464                options = self.maybe_parse_options(Keyword::OPTIONS)?;
5465                Some(CreateFunctionBody::AsBeforeOptions {
5466                    body: expr,
5467                    link_symbol: None,
5468                })
5469            } else {
5470                Some(CreateFunctionBody::AsAfterOptions(expr))
5471            }
5472        } else {
5473            None
5474        };
5475
5476        Ok(Statement::CreateFunction(CreateFunction {
5477            or_alter: false,
5478            or_replace,
5479            temporary,
5480            if_not_exists,
5481            name,
5482            args: Some(args),
5483            return_type,
5484            function_body,
5485            language,
5486            determinism_specifier,
5487            options,
5488            remote_connection,
5489            using: None,
5490            behavior: None,
5491            called_on_null: None,
5492            parallel: None,
5493            security: None,
5494            set_params: vec![],
5495        }))
5496    }
5497
5498    /// Parse `CREATE FUNCTION` for [MsSql]
5499    ///
5500    /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/statements/create-function-transact-sql
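    ///
    /// For illustration only, a scalar function of the general shape this
    /// method targets (schema, function and parameter names are placeholders):
    ///
    /// ```sql
    /// CREATE FUNCTION dbo.add_one(@x INT)
    /// RETURNS INT
    /// AS
    /// BEGIN
    ///     RETURN @x + 1;
    /// END
    /// ```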
5501    fn parse_mssql_create_function(
5502        &mut self,
5503        or_alter: bool,
5504        or_replace: bool,
5505        temporary: bool,
5506    ) -> Result<Statement, ParserError> {
5507        let (name, args) = self.parse_create_function_name_and_params()?;
5508
5509        self.expect_keyword(Keyword::RETURNS)?;
5510
5511        let return_table = self.maybe_parse(|p| {
5512            let return_table_name = p.parse_identifier()?;
5513
5514            p.expect_keyword_is(Keyword::TABLE)?;
5515            p.prev_token();
5516
5517            let table_column_defs = match p.parse_data_type()? {
5518                DataType::Table(Some(table_column_defs)) if !table_column_defs.is_empty() => {
5519                    table_column_defs
5520                }
5521                _ => parser_err!(
5522                    "Expected table column definitions after TABLE keyword",
5523                    p.peek_token().span.start
5524                )?,
5525            };
5526
5527            Ok(DataType::NamedTable {
5528                name: ObjectName(vec![ObjectNamePart::Identifier(return_table_name)]),
5529                columns: table_column_defs,
5530            })
5531        })?;
5532
5533        let return_type = if return_table.is_some() {
5534            return_table
5535        } else {
5536            Some(self.parse_data_type()?)
5537        };
5538
5539        let _ = self.parse_keyword(Keyword::AS);
5540
5541        let function_body = if self.peek_keyword(Keyword::BEGIN) {
5542            let begin_token = self.expect_keyword(Keyword::BEGIN)?;
5543            let statements = self.parse_statement_list(&[Keyword::END])?;
5544            let end_token = self.expect_keyword(Keyword::END)?;
5545
5546            Some(CreateFunctionBody::AsBeginEnd(BeginEndStatements {
5547                begin_token: AttachedToken(begin_token),
5548                statements,
5549                end_token: AttachedToken(end_token),
5550            }))
5551        } else if self.parse_keyword(Keyword::RETURN) {
5552            if self.peek_token() == Token::LParen {
5553                Some(CreateFunctionBody::AsReturnExpr(self.parse_expr()?))
5554            } else if self.peek_keyword(Keyword::SELECT) {
5555                let select = self.parse_select()?;
5556                Some(CreateFunctionBody::AsReturnSelect(select))
5557            } else {
5558                parser_err!(
5559                    "Expected a subquery (or bare SELECT statement) after RETURN",
5560                    self.peek_token().span.start
5561                )?
5562            }
5563        } else {
5564            parser_err!("Unparsable function body", self.peek_token().span.start)?
5565        };
5566
5567        Ok(Statement::CreateFunction(CreateFunction {
5568            or_alter,
5569            or_replace,
5570            temporary,
5571            if_not_exists: false,
5572            name,
5573            args: Some(args),
5574            return_type,
5575            function_body,
5576            language: None,
5577            determinism_specifier: None,
5578            options: None,
5579            remote_connection: None,
5580            using: None,
5581            behavior: None,
5582            called_on_null: None,
5583            parallel: None,
5584            security: None,
5585            set_params: vec![],
5586        }))
5587    }
5588
5589    fn parse_create_function_name_and_params(
5590        &mut self,
5591    ) -> Result<(ObjectName, Vec<OperateFunctionArg>), ParserError> {
5592        let name = self.parse_object_name(false)?;
5593        let parse_function_param =
5594            |parser: &mut Parser| -> Result<OperateFunctionArg, ParserError> {
5595                let name = parser.parse_identifier()?;
5596                let data_type = parser.parse_data_type()?;
5597                let default_expr = if parser.consume_token(&Token::Eq) {
5598                    Some(parser.parse_expr()?)
5599                } else {
5600                    None
5601                };
5602
5603                Ok(OperateFunctionArg {
5604                    mode: None,
5605                    name: Some(name),
5606                    data_type,
5607                    default_expr,
5608                })
5609            };
5610        self.expect_token(&Token::LParen)?;
5611        let args = self.parse_comma_separated0(parse_function_param, Token::RParen)?;
5612        self.expect_token(&Token::RParen)?;
5613        Ok((name, args))
5614    }
5615
5616    fn parse_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
5617        let mode = if self.parse_keyword(Keyword::IN) {
5618            Some(ArgMode::In)
5619        } else if self.parse_keyword(Keyword::OUT) {
5620            Some(ArgMode::Out)
5621        } else if self.parse_keyword(Keyword::INOUT) {
5622            Some(ArgMode::InOut)
5623        } else {
5624            None
5625        };
5626
5627        // parse: [ argname ] argtype
5628        let mut name = None;
5629        let mut data_type = self.parse_data_type()?;
5630
5631        // To decide whether what we just parsed is the argument name or its type,
5632        // remember the current position and try to parse a second data type: if that
5633        // succeeds, the first token was the argument name rather than a type itself.
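        // Illustrative case (hypothetical signature): for `my_func(val INTEGER)`,
        // `val` is first parsed as a custom data type; successfully parsing
        // `INTEGER` right after it reveals that `val` was the argument name.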
5634        let data_type_idx = self.get_current_index();
5635
5636        // DEFAULT will be parsed as `DataType::Custom`, which is undesirable in this context
5637        fn parse_data_type_no_default(parser: &mut Parser) -> Result<DataType, ParserError> {
5638            if parser.peek_keyword(Keyword::DEFAULT) {
5639                // This dummy error is ignored in `maybe_parse`
5640                parser_err!(
5641                    "The DEFAULT keyword is not a type",
5642                    parser.peek_token().span.start
5643                )
5644            } else {
5645                parser.parse_data_type()
5646            }
5647        }
5648
5649        if let Some(next_data_type) = self.maybe_parse(parse_data_type_no_default)? {
5650            let token = self.token_at(data_type_idx);
5651
5652            // We ensure that the token is a `Word` token rather than some other special token.
5653            if !matches!(token.token, Token::Word(_)) {
5654                return self.expected("a name or type", token.clone());
5655            }
5656
5657            name = Some(Ident::new(token.to_string()));
5658            data_type = next_data_type;
5659        }
5660
5661        let default_expr = if self.parse_keyword(Keyword::DEFAULT) || self.consume_token(&Token::Eq)
5662        {
5663            Some(self.parse_expr()?)
5664        } else {
5665            None
5666        };
5667        Ok(OperateFunctionArg {
5668            mode,
5669            name,
5670            data_type,
5671            default_expr,
5672        })
5673    }
5674
5675    /// Parse a `DROP TRIGGER` statement ([Statement::DropTrigger]), such as:
5676    ///
5677    /// ```sql
5678    /// DROP TRIGGER [ IF EXISTS ] name ON table_name [ CASCADE | RESTRICT ]
5679    /// ```
5680    pub fn parse_drop_trigger(&mut self) -> Result<Statement, ParserError> {
5681        if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
5682        {
5683            self.prev_token();
5684            return self.expected("an object type after DROP", self.peek_token());
5685        }
5686        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5687        let trigger_name = self.parse_object_name(false)?;
5688        let table_name = if self.parse_keyword(Keyword::ON) {
5689            Some(self.parse_object_name(false)?)
5690        } else {
5691            None
5692        };
5693        let option = match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
5694            Some(Keyword::CASCADE) => Some(ReferentialAction::Cascade),
5695            Some(Keyword::RESTRICT) => Some(ReferentialAction::Restrict),
5696            Some(unexpected_keyword) => return Err(ParserError::ParserError(
5697                format!("Internal parser error: expected any of {{CASCADE, RESTRICT}}, got {unexpected_keyword:?}"),
5698            )),
5699            None => None,
5700        };
5701        Ok(Statement::DropTrigger(DropTrigger {
5702            if_exists,
5703            trigger_name,
5704            table_name,
5705            option,
5706        }))
5707    }
5708
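    /// Parse the body of a `CREATE TRIGGER` statement (everything after the
    /// `TRIGGER` keyword, which is consumed by the caller).
    ///
    /// For illustration only, a PostgreSQL-flavored statement of the general
    /// shape this method targets (trigger, table and function names are
    /// placeholders):
    ///
    /// ```sql
    /// CREATE TRIGGER check_update
    ///     BEFORE UPDATE ON accounts
    ///     FOR EACH ROW
    ///     EXECUTE FUNCTION check_account_update();
    /// ```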
5709    pub fn parse_create_trigger(
5710        &mut self,
5711        temporary: bool,
5712        or_alter: bool,
5713        or_replace: bool,
5714        is_constraint: bool,
5715    ) -> Result<Statement, ParserError> {
5716        if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
5717        {
5718            self.prev_token();
5719            return self.expected("an object type after CREATE", self.peek_token());
5720        }
5721
5722        let name = self.parse_object_name(false)?;
5723        let period = self.maybe_parse(|parser| parser.parse_trigger_period())?;
5724
5725        let events = self.parse_keyword_separated(Keyword::OR, Parser::parse_trigger_event)?;
5726        self.expect_keyword_is(Keyword::ON)?;
5727        let table_name = self.parse_object_name(false)?;
5728
5729        let referenced_table_name = if self.parse_keyword(Keyword::FROM) {
5730            self.parse_object_name(true).ok()
5731        } else {
5732            None
5733        };
5734
5735        let characteristics = self.parse_constraint_characteristics()?;
5736
5737        let mut referencing = vec![];
5738        if self.parse_keyword(Keyword::REFERENCING) {
5739            while let Some(refer) = self.parse_trigger_referencing()? {
5740                referencing.push(refer);
5741            }
5742        }
5743
5744        let trigger_object = if self.parse_keyword(Keyword::FOR) {
5745            let include_each = self.parse_keyword(Keyword::EACH);
5746            let trigger_object =
5747                match self.expect_one_of_keywords(&[Keyword::ROW, Keyword::STATEMENT])? {
5748                    Keyword::ROW => TriggerObject::Row,
5749                    Keyword::STATEMENT => TriggerObject::Statement,
5750                    unexpected_keyword => return Err(ParserError::ParserError(
5751                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in ROW/STATEMENT"),
5752                    )),
5753                };
5754
5755            Some(if include_each {
5756                TriggerObjectKind::ForEach(trigger_object)
5757            } else {
5758                TriggerObjectKind::For(trigger_object)
5759            })
5760        } else {
5761            let _ = self.parse_keyword(Keyword::FOR);
5762
5763            None
5764        };
5765
5766        let condition = self
5767            .parse_keyword(Keyword::WHEN)
5768            .then(|| self.parse_expr())
5769            .transpose()?;
5770
5771        let mut exec_body = None;
5772        let mut statements = None;
5773        if self.parse_keyword(Keyword::EXECUTE) {
5774            exec_body = Some(self.parse_trigger_exec_body()?);
5775        } else {
5776            statements = Some(self.parse_conditional_statements(&[Keyword::END])?);
5777        }
5778
5779        Ok(CreateTrigger {
5780            or_alter,
5781            temporary,
5782            or_replace,
5783            is_constraint,
5784            name,
5785            period,
5786            period_before_table: true,
5787            events,
5788            table_name,
5789            referenced_table_name,
5790            referencing,
5791            trigger_object,
5792            condition,
5793            exec_body,
5794            statements_as: false,
5795            statements,
5796            characteristics,
5797        }
5798        .into())
5799    }
5800
5801    pub fn parse_trigger_period(&mut self) -> Result<TriggerPeriod, ParserError> {
5802        Ok(
5803            match self.expect_one_of_keywords(&[
5804                Keyword::FOR,
5805                Keyword::BEFORE,
5806                Keyword::AFTER,
5807                Keyword::INSTEAD,
5808            ])? {
5809                Keyword::FOR => TriggerPeriod::For,
5810                Keyword::BEFORE => TriggerPeriod::Before,
5811                Keyword::AFTER => TriggerPeriod::After,
5812                Keyword::INSTEAD => self
5813                    .expect_keyword_is(Keyword::OF)
5814                    .map(|_| TriggerPeriod::InsteadOf)?,
5815                unexpected_keyword => return Err(ParserError::ParserError(
5816                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger period"),
5817                )),
5818            },
5819        )
5820    }
5821
5822    pub fn parse_trigger_event(&mut self) -> Result<TriggerEvent, ParserError> {
5823        Ok(
5824            match self.expect_one_of_keywords(&[
5825                Keyword::INSERT,
5826                Keyword::UPDATE,
5827                Keyword::DELETE,
5828                Keyword::TRUNCATE,
5829            ])? {
5830                Keyword::INSERT => TriggerEvent::Insert,
5831                Keyword::UPDATE => {
5832                    if self.parse_keyword(Keyword::OF) {
5833                        let cols = self.parse_comma_separated(Parser::parse_identifier)?;
5834                        TriggerEvent::Update(cols)
5835                    } else {
5836                        TriggerEvent::Update(vec![])
5837                    }
5838                }
5839                Keyword::DELETE => TriggerEvent::Delete,
5840                Keyword::TRUNCATE => TriggerEvent::Truncate,
5841                unexpected_keyword => return Err(ParserError::ParserError(
5842                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger event"),
5843                )),
5844            },
5845        )
5846    }
5847
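    /// Parse a single transition-relation clause of a `REFERENCING` list in
    /// `CREATE TRIGGER`; the caller loops to collect several.
    ///
    /// For illustration only (relation names are placeholders), each call
    /// handles one clause such as:
    ///
    /// ```sql
    /// NEW TABLE AS inserted_rows
    /// ```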
5848    pub fn parse_trigger_referencing(&mut self) -> Result<Option<TriggerReferencing>, ParserError> {
5849        let refer_type = match self.parse_one_of_keywords(&[Keyword::OLD, Keyword::NEW]) {
5850            Some(Keyword::OLD) if self.parse_keyword(Keyword::TABLE) => {
5851                TriggerReferencingType::OldTable
5852            }
5853            Some(Keyword::NEW) if self.parse_keyword(Keyword::TABLE) => {
5854                TriggerReferencingType::NewTable
5855            }
5856            _ => {
5857                return Ok(None);
5858            }
5859        };
5860
5861        let is_as = self.parse_keyword(Keyword::AS);
5862        let transition_relation_name = self.parse_object_name(false)?;
5863        Ok(Some(TriggerReferencing {
5864            refer_type,
5865            is_as,
5866            transition_relation_name,
5867        }))
5868    }
5869
5870    pub fn parse_trigger_exec_body(&mut self) -> Result<TriggerExecBody, ParserError> {
5871        Ok(TriggerExecBody {
5872            exec_type: match self
5873                .expect_one_of_keywords(&[Keyword::FUNCTION, Keyword::PROCEDURE])?
5874            {
5875                Keyword::FUNCTION => TriggerExecBodyType::Function,
5876                Keyword::PROCEDURE => TriggerExecBodyType::Procedure,
5877                unexpected_keyword => return Err(ParserError::ParserError(
5878                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger exec body"),
5879                )),
5880            },
5881            func_desc: self.parse_function_desc()?,
5882        })
5883    }
5884
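    /// Parse the body of a DuckDB-style `CREATE MACRO` statement.
    ///
    /// For illustration only (macro and parameter names are placeholders):
    ///
    /// ```sql
    /// CREATE MACRO add_default(a, b := 5) AS a + b;
    /// ```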
5885    pub fn parse_create_macro(
5886        &mut self,
5887        or_replace: bool,
5888        temporary: bool,
5889    ) -> Result<Statement, ParserError> {
5890        if dialect_of!(self is DuckDbDialect | GenericDialect) {
5891            let name = self.parse_object_name(false)?;
5892            self.expect_token(&Token::LParen)?;
5893            let args = if self.consume_token(&Token::RParen) {
5894                self.prev_token();
5895                None
5896            } else {
5897                Some(self.parse_comma_separated(Parser::parse_macro_arg)?)
5898            };
5899
5900            self.expect_token(&Token::RParen)?;
5901            self.expect_keyword_is(Keyword::AS)?;
5902
5903            Ok(Statement::CreateMacro {
5904                or_replace,
5905                temporary,
5906                name,
5907                args,
5908                definition: if self.parse_keyword(Keyword::TABLE) {
5909                    MacroDefinition::Table(self.parse_query()?)
5910                } else {
5911                    MacroDefinition::Expr(self.parse_expr()?)
5912                },
5913            })
5914        } else {
5915            self.prev_token();
5916            self.expected("an object type after CREATE", self.peek_token())
5917        }
5918    }
5919
5920    fn parse_macro_arg(&mut self) -> Result<MacroArg, ParserError> {
5921        let name = self.parse_identifier()?;
5922
5923        let default_expr =
5924            if self.consume_token(&Token::Assignment) || self.consume_token(&Token::RArrow) {
5925                Some(self.parse_expr()?)
5926            } else {
5927                None
5928            };
5929        Ok(MacroArg { name, default_expr })
5930    }
5931
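    /// Parse the body of a Hive-style `CREATE EXTERNAL TABLE` statement
    /// (everything after the `EXTERNAL` keyword).
    ///
    /// For illustration only (table, column and location names are
    /// placeholders):
    ///
    /// ```sql
    /// CREATE EXTERNAL TABLE IF NOT EXISTS logs (id INT, msg STRING)
    ///     STORED AS PARQUET
    ///     LOCATION 's3://my-bucket/logs/';
    /// ```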
5932    pub fn parse_create_external_table(
5933        &mut self,
5934        or_replace: bool,
5935    ) -> Result<Statement, ParserError> {
5936        self.expect_keyword_is(Keyword::TABLE)?;
5937        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5938        let table_name = self.parse_object_name(false)?;
5939        let (columns, constraints) = self.parse_columns()?;
5940
5941        let hive_distribution = self.parse_hive_distribution()?;
5942        let hive_formats = self.parse_hive_formats()?;
5943
5944        let file_format = if let Some(ref hf) = hive_formats {
5945            if let Some(ref ff) = hf.storage {
5946                match ff {
5947                    HiveIOFormat::FileFormat { format } => Some(*format),
5948                    _ => None,
5949                }
5950            } else {
5951                None
5952            }
5953        } else {
5954            None
5955        };
5956        let location = hive_formats.as_ref().and_then(|hf| hf.location.clone());
5957        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
5958        let table_options = if !table_properties.is_empty() {
5959            CreateTableOptions::TableProperties(table_properties)
5960        } else {
5961            CreateTableOptions::None
5962        };
5963        Ok(CreateTableBuilder::new(table_name)
5964            .columns(columns)
5965            .constraints(constraints)
5966            .hive_distribution(hive_distribution)
5967            .hive_formats(hive_formats)
5968            .table_options(table_options)
5969            .or_replace(or_replace)
5970            .if_not_exists(if_not_exists)
5971            .external(true)
5972            .file_format(file_format)
5973            .location(location)
5974            .build())
5975    }
5976
5977    pub fn parse_file_format(&mut self) -> Result<FileFormat, ParserError> {
5978        let next_token = self.next_token();
5979        match &next_token.token {
5980            Token::Word(w) => match w.keyword {
5981                Keyword::AVRO => Ok(FileFormat::AVRO),
5982                Keyword::JSONFILE => Ok(FileFormat::JSONFILE),
5983                Keyword::ORC => Ok(FileFormat::ORC),
5984                Keyword::PARQUET => Ok(FileFormat::PARQUET),
5985                Keyword::RCFILE => Ok(FileFormat::RCFILE),
5986                Keyword::SEQUENCEFILE => Ok(FileFormat::SEQUENCEFILE),
5987                Keyword::TEXTFILE => Ok(FileFormat::TEXTFILE),
5988                _ => self.expected("fileformat", next_token),
5989            },
5990            _ => self.expected("fileformat", next_token),
5991        }
5992    }
5993
5994    fn parse_analyze_format_kind(&mut self) -> Result<AnalyzeFormatKind, ParserError> {
5995        if self.consume_token(&Token::Eq) {
5996            Ok(AnalyzeFormatKind::Assignment(self.parse_analyze_format()?))
5997        } else {
5998            Ok(AnalyzeFormatKind::Keyword(self.parse_analyze_format()?))
5999        }
6000    }
6001
6002    pub fn parse_analyze_format(&mut self) -> Result<AnalyzeFormat, ParserError> {
6003        let next_token = self.next_token();
6004        match &next_token.token {
6005            Token::Word(w) => match w.keyword {
6006                Keyword::TEXT => Ok(AnalyzeFormat::TEXT),
6007                Keyword::GRAPHVIZ => Ok(AnalyzeFormat::GRAPHVIZ),
6008                Keyword::JSON => Ok(AnalyzeFormat::JSON),
6009                _ => self.expected("TEXT, GRAPHVIZ or JSON", next_token),
6010            },
6011            _ => self.expected("TEXT, GRAPHVIZ or JSON", next_token),
6012        }
6013    }
6014
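    /// Parse the body of a `CREATE VIEW` statement (everything after
    /// `CREATE [OR REPLACE] [TEMPORARY]`, with any MySQL-style view parameters
    /// passed in via `create_view_params`).
    ///
    /// For illustration only (view, column and table names are placeholders):
    ///
    /// ```sql
    /// CREATE MATERIALIZED VIEW IF NOT EXISTS analytics.daily_totals (day, total)
    /// AS SELECT order_date, SUM(amount) FROM orders GROUP BY order_date;
    /// ```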
6015    pub fn parse_create_view(
6016        &mut self,
6017        or_alter: bool,
6018        or_replace: bool,
6019        temporary: bool,
6020        create_view_params: Option<CreateViewParams>,
6021    ) -> Result<Statement, ParserError> {
6022        let secure = self.parse_keyword(Keyword::SECURE);
6023        let materialized = self.parse_keyword(Keyword::MATERIALIZED);
6024        self.expect_keyword_is(Keyword::VIEW)?;
6025        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
6026        // Try to parse IF NOT EXISTS either before or after the view name.
6027        // Placing the name before IF NOT EXISTS is accepted by Snowflake but undocumented.
6028        let if_not_exists_first =
6029            self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6030        let name = self.parse_object_name(allow_unquoted_hyphen)?;
6031        let name_before_not_exists = !if_not_exists_first
6032            && self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6033        let if_not_exists = if_not_exists_first || name_before_not_exists;
6034        // `OR ALTER` immediately after `CREATE` is parsed by the caller and passed in as `or_alter`.
6035        // ANSI SQL and Postgres support RECURSIVE here, but we don't support it yet.
6036        let columns = self.parse_view_columns()?;
6037        let mut options = CreateTableOptions::None;
6038        let with_options = self.parse_options(Keyword::WITH)?;
6039        if !with_options.is_empty() {
6040            options = CreateTableOptions::With(with_options);
6041        }
6042
6043        let cluster_by = if self.parse_keyword(Keyword::CLUSTER) {
6044            self.expect_keyword_is(Keyword::BY)?;
6045            self.parse_parenthesized_column_list(Optional, false)?
6046        } else {
6047            vec![]
6048        };
6049
6050        if dialect_of!(self is BigQueryDialect | GenericDialect) {
6051            if let Some(opts) = self.maybe_parse_options(Keyword::OPTIONS)? {
6052                if !opts.is_empty() {
6053                    options = CreateTableOptions::Options(opts);
6054                }
6055            };
6056        }
6057
6058        let to = if dialect_of!(self is ClickHouseDialect | GenericDialect)
6059            && self.parse_keyword(Keyword::TO)
6060        {
6061            Some(self.parse_object_name(false)?)
6062        } else {
6063            None
6064        };
6065
6066        let comment = if dialect_of!(self is SnowflakeDialect | GenericDialect)
6067            && self.parse_keyword(Keyword::COMMENT)
6068        {
6069            self.expect_token(&Token::Eq)?;
6070            Some(self.parse_comment_value()?)
6071        } else {
6072            None
6073        };
6074
6075        self.expect_keyword_is(Keyword::AS)?;
6076        let query = self.parse_query()?;
6077        // Optional `WITH [ CASCADED | LOCAL ] CHECK OPTION` is widely supported here.
6078
6079        let with_no_schema_binding = dialect_of!(self is RedshiftSqlDialect | GenericDialect)
6080            && self.parse_keywords(&[
6081                Keyword::WITH,
6082                Keyword::NO,
6083                Keyword::SCHEMA,
6084                Keyword::BINDING,
6085            ]);
6086
6087        Ok(CreateView {
6088            or_alter,
6089            name,
6090            columns,
6091            query,
6092            materialized,
6093            secure,
6094            or_replace,
6095            options,
6096            cluster_by,
6097            comment,
6098            with_no_schema_binding,
6099            if_not_exists,
6100            temporary,
6101            to,
6102            params: create_view_params,
6103            name_before_not_exists,
6104        }
6105        .into())
6106    }
6107
6108    /// Parse optional parameters for the `CREATE VIEW` statement supported by [MySQL].
6109    ///
6110    /// [MySQL]: https://dev.mysql.com/doc/refman/9.1/en/create-view.html
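    ///
    /// For illustration only, these are the clauses that may appear between
    /// `CREATE` and `VIEW` (view name and body are placeholders):
    ///
    /// ```sql
    /// CREATE ALGORITHM = MERGE SQL SECURITY INVOKER VIEW v_active AS SELECT id FROM users;
    /// ```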
6111    fn parse_create_view_params(&mut self) -> Result<Option<CreateViewParams>, ParserError> {
6112        let algorithm = if self.parse_keyword(Keyword::ALGORITHM) {
6113            self.expect_token(&Token::Eq)?;
6114            Some(
6115                match self.expect_one_of_keywords(&[
6116                    Keyword::UNDEFINED,
6117                    Keyword::MERGE,
6118                    Keyword::TEMPTABLE,
6119                ])? {
6120                    Keyword::UNDEFINED => CreateViewAlgorithm::Undefined,
6121                    Keyword::MERGE => CreateViewAlgorithm::Merge,
6122                    Keyword::TEMPTABLE => CreateViewAlgorithm::TempTable,
6123                    _ => {
6124                        self.prev_token();
6125                        let found = self.next_token();
6126                        return self
6127                            .expected("UNDEFINED or MERGE or TEMPTABLE after ALGORITHM =", found);
6128                    }
6129                },
6130            )
6131        } else {
6132            None
6133        };
6134        let definer = if self.parse_keyword(Keyword::DEFINER) {
6135            self.expect_token(&Token::Eq)?;
6136            Some(self.parse_grantee_name()?)
6137        } else {
6138            None
6139        };
6140        let security = if self.parse_keywords(&[Keyword::SQL, Keyword::SECURITY]) {
6141            Some(
6142                match self.expect_one_of_keywords(&[Keyword::DEFINER, Keyword::INVOKER])? {
6143                    Keyword::DEFINER => CreateViewSecurity::Definer,
6144                    Keyword::INVOKER => CreateViewSecurity::Invoker,
6145                    _ => {
6146                        self.prev_token();
6147                        let found = self.next_token();
6148                        return self.expected("DEFINER or INVOKER after SQL SECURITY", found);
6149                    }
6150                },
6151            )
6152        } else {
6153            None
6154        };
6155        if algorithm.is_some() || definer.is_some() || security.is_some() {
6156            Ok(Some(CreateViewParams {
6157                algorithm,
6158                definer,
6159                security,
6160            }))
6161        } else {
6162            Ok(None)
6163        }
6164    }
6165
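    /// Parse the body of a `CREATE ROLE` statement (everything after the
    /// `ROLE` keyword).
    ///
    /// For illustration only, a PostgreSQL-flavored statement of the general
    /// shape this method targets (role name and option values are
    /// placeholders):
    ///
    /// ```sql
    /// CREATE ROLE analyst WITH LOGIN PASSWORD 'secret'
    ///     CONNECTION LIMIT 10 VALID UNTIL '2030-01-01';
    /// ```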
6166    pub fn parse_create_role(&mut self) -> Result<Statement, ParserError> {
6167        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6168        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
6169
6170        let _ = self.parse_keyword(Keyword::WITH); // [ WITH ]
6171
6172        let optional_keywords = if dialect_of!(self is MsSqlDialect) {
6173            vec![Keyword::AUTHORIZATION]
6174        } else if dialect_of!(self is PostgreSqlDialect) {
6175            vec![
6176                Keyword::LOGIN,
6177                Keyword::NOLOGIN,
6178                Keyword::INHERIT,
6179                Keyword::NOINHERIT,
6180                Keyword::BYPASSRLS,
6181                Keyword::NOBYPASSRLS,
6182                Keyword::PASSWORD,
6183                Keyword::CREATEDB,
6184                Keyword::NOCREATEDB,
6185                Keyword::CREATEROLE,
6186                Keyword::NOCREATEROLE,
6187                Keyword::SUPERUSER,
6188                Keyword::NOSUPERUSER,
6189                Keyword::REPLICATION,
6190                Keyword::NOREPLICATION,
6191                Keyword::CONNECTION,
6192                Keyword::VALID,
6193                Keyword::IN,
6194                Keyword::ROLE,
6195                Keyword::ADMIN,
6196                Keyword::USER,
6197            ]
6198        } else {
6199            vec![]
6200        };
6201
6202        // MSSQL
6203        let mut authorization_owner = None;
6204        // Postgres
6205        let mut login = None;
6206        let mut inherit = None;
6207        let mut bypassrls = None;
6208        let mut password = None;
6209        let mut create_db = None;
6210        let mut create_role = None;
6211        let mut superuser = None;
6212        let mut replication = None;
6213        let mut connection_limit = None;
6214        let mut valid_until = None;
6215        let mut in_role = vec![];
6216        let mut in_group = vec![];
6217        let mut role = vec![];
6218        let mut user = vec![];
6219        let mut admin = vec![];
6220
6221        while let Some(keyword) = self.parse_one_of_keywords(&optional_keywords) {
6222            let loc = self
6223                .tokens
6224                .get(self.index - 1)
6225                .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
6226            match keyword {
6227                Keyword::AUTHORIZATION => {
6228                    if authorization_owner.is_some() {
6229                        parser_err!("Found multiple AUTHORIZATION", loc)
6230                    } else {
6231                        authorization_owner = Some(self.parse_object_name(false)?);
6232                        Ok(())
6233                    }
6234                }
6235                Keyword::LOGIN | Keyword::NOLOGIN => {
6236                    if login.is_some() {
6237                        parser_err!("Found multiple LOGIN or NOLOGIN", loc)
6238                    } else {
6239                        login = Some(keyword == Keyword::LOGIN);
6240                        Ok(())
6241                    }
6242                }
6243                Keyword::INHERIT | Keyword::NOINHERIT => {
6244                    if inherit.is_some() {
6245                        parser_err!("Found multiple INHERIT or NOINHERIT", loc)
6246                    } else {
6247                        inherit = Some(keyword == Keyword::INHERIT);
6248                        Ok(())
6249                    }
6250                }
6251                Keyword::BYPASSRLS | Keyword::NOBYPASSRLS => {
6252                    if bypassrls.is_some() {
6253                        parser_err!("Found multiple BYPASSRLS or NOBYPASSRLS", loc)
6254                    } else {
6255                        bypassrls = Some(keyword == Keyword::BYPASSRLS);
6256                        Ok(())
6257                    }
6258                }
6259                Keyword::CREATEDB | Keyword::NOCREATEDB => {
6260                    if create_db.is_some() {
6261                        parser_err!("Found multiple CREATEDB or NOCREATEDB", loc)
6262                    } else {
6263                        create_db = Some(keyword == Keyword::CREATEDB);
6264                        Ok(())
6265                    }
6266                }
6267                Keyword::CREATEROLE | Keyword::NOCREATEROLE => {
6268                    if create_role.is_some() {
6269                        parser_err!("Found multiple CREATEROLE or NOCREATEROLE", loc)
6270                    } else {
6271                        create_role = Some(keyword == Keyword::CREATEROLE);
6272                        Ok(())
6273                    }
6274                }
6275                Keyword::SUPERUSER | Keyword::NOSUPERUSER => {
6276                    if superuser.is_some() {
6277                        parser_err!("Found multiple SUPERUSER or NOSUPERUSER", loc)
6278                    } else {
6279                        superuser = Some(keyword == Keyword::SUPERUSER);
6280                        Ok(())
6281                    }
6282                }
6283                Keyword::REPLICATION | Keyword::NOREPLICATION => {
6284                    if replication.is_some() {
6285                        parser_err!("Found multiple REPLICATION or NOREPLICATION", loc)
6286                    } else {
6287                        replication = Some(keyword == Keyword::REPLICATION);
6288                        Ok(())
6289                    }
6290                }
6291                Keyword::PASSWORD => {
6292                    if password.is_some() {
6293                        parser_err!("Found multiple PASSWORD", loc)
6294                    } else {
6295                        password = if self.parse_keyword(Keyword::NULL) {
6296                            Some(Password::NullPassword)
6297                        } else {
6298                            Some(Password::Password(Expr::Value(self.parse_value()?)))
6299                        };
6300                        Ok(())
6301                    }
6302                }
6303                Keyword::CONNECTION => {
6304                    self.expect_keyword_is(Keyword::LIMIT)?;
6305                    if connection_limit.is_some() {
6306                        parser_err!("Found multiple CONNECTION LIMIT", loc)
6307                    } else {
6308                        connection_limit = Some(Expr::Value(self.parse_number_value()?));
6309                        Ok(())
6310                    }
6311                }
6312                Keyword::VALID => {
6313                    self.expect_keyword_is(Keyword::UNTIL)?;
6314                    if valid_until.is_some() {
6315                        parser_err!("Found multiple VALID UNTIL", loc)
6316                    } else {
6317                        valid_until = Some(Expr::Value(self.parse_value()?));
6318                        Ok(())
6319                    }
6320                }
6321                Keyword::IN => {
6322                    if self.parse_keyword(Keyword::ROLE) {
6323                        if !in_role.is_empty() {
6324                            parser_err!("Found multiple IN ROLE", loc)
6325                        } else {
6326                            in_role = self.parse_comma_separated(|p| p.parse_identifier())?;
6327                            Ok(())
6328                        }
6329                    } else if self.parse_keyword(Keyword::GROUP) {
6330                        if !in_group.is_empty() {
6331                            parser_err!("Found multiple IN GROUP", loc)
6332                        } else {
6333                            in_group = self.parse_comma_separated(|p| p.parse_identifier())?;
6334                            Ok(())
6335                        }
6336                    } else {
6337                        self.expected("ROLE or GROUP after IN", self.peek_token())
6338                    }
6339                }
6340                Keyword::ROLE => {
6341                    if !role.is_empty() {
6342                        parser_err!("Found multiple ROLE", loc)
6343                    } else {
6344                        role = self.parse_comma_separated(|p| p.parse_identifier())?;
6345                        Ok(())
6346                    }
6347                }
6348                Keyword::USER => {
6349                    if !user.is_empty() {
6350                        parser_err!("Found multiple USER", loc)
6351                    } else {
6352                        user = self.parse_comma_separated(|p| p.parse_identifier())?;
6353                        Ok(())
6354                    }
6355                }
6356                Keyword::ADMIN => {
6357                    if !admin.is_empty() {
6358                        parser_err!("Found multiple ADMIN", loc)
6359                    } else {
6360                        admin = self.parse_comma_separated(|p| p.parse_identifier())?;
6361                        Ok(())
6362                    }
6363                }
6364                _ => break,
6365            }?
6366        }
6367
6368        Ok(CreateRole {
6369            names,
6370            if_not_exists,
6371            login,
6372            inherit,
6373            bypassrls,
6374            password,
6375            create_db,
6376            create_role,
6377            replication,
6378            superuser,
6379            connection_limit,
6380            valid_until,
6381            in_role,
6382            in_group,
6383            role,
6384            user,
6385            admin,
6386            authorization_owner,
6387        }
6388        .into())
6389    }
6390
6391    pub fn parse_owner(&mut self) -> Result<Owner, ParserError> {
6392        let owner = match self.parse_one_of_keywords(&[Keyword::CURRENT_USER, Keyword::CURRENT_ROLE, Keyword::SESSION_USER]) {
6393            Some(Keyword::CURRENT_USER) => Owner::CurrentUser,
6394            Some(Keyword::CURRENT_ROLE) => Owner::CurrentRole,
6395            Some(Keyword::SESSION_USER) => Owner::SessionUser,
6396            Some(unexpected_keyword) => return Err(ParserError::ParserError(
6397                format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in owner"),
6398            )),
6399            None => {
6400                match self.parse_identifier() {
6401                    Ok(ident) => Owner::Ident(ident),
6402                    Err(e) => {
6403                        return Err(ParserError::ParserError(format!("Expected: CURRENT_USER, CURRENT_ROLE, SESSION_USER or identifier after OWNER TO. {e}")))
6404                    }
6405                }
6406            }
6407        };
6408        Ok(owner)
6409    }
6410
6411    /// Parses a [Statement::CreateDomain] statement.
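    ///
    /// For illustration only (domain name and constraint are placeholders):
    ///
    /// ```sql
    /// CREATE DOMAIN positive_int AS INTEGER DEFAULT 1 CHECK (VALUE > 0);
    /// ```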
6412    fn parse_create_domain(&mut self) -> Result<Statement, ParserError> {
6413        let name = self.parse_object_name(false)?;
6414        self.expect_keyword_is(Keyword::AS)?;
6415        let data_type = self.parse_data_type()?;
6416        let collation = if self.parse_keyword(Keyword::COLLATE) {
6417            Some(self.parse_identifier()?)
6418        } else {
6419            None
6420        };
6421        let default = if self.parse_keyword(Keyword::DEFAULT) {
6422            Some(self.parse_expr()?)
6423        } else {
6424            None
6425        };
6426        let mut constraints = Vec::new();
6427        while let Some(constraint) = self.parse_optional_table_constraint()? {
6428            constraints.push(constraint);
6429        }
6430
6431        Ok(Statement::CreateDomain(CreateDomain {
6432            name,
6433            data_type,
6434            collation,
6435            default,
6436            constraints,
6437        }))
6438    }
6439
6440    /// ```sql
6441    ///     CREATE POLICY name ON table_name [ AS { PERMISSIVE | RESTRICTIVE } ]
6442    ///     [ FOR { ALL | SELECT | INSERT | UPDATE | DELETE } ]
6443    ///     [ TO { role_name | PUBLIC | CURRENT_USER | CURRENT_ROLE | SESSION_USER } [, ...] ]
6444    ///     [ USING ( using_expression ) ]
6445    ///     [ WITH CHECK ( with_check_expression ) ]
6446    /// ```
6447    ///
6448    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createpolicy.html)
6449    pub fn parse_create_policy(&mut self) -> Result<Statement, ParserError> {
6450        let name = self.parse_identifier()?;
6451        self.expect_keyword_is(Keyword::ON)?;
6452        let table_name = self.parse_object_name(false)?;
6453
6454        let policy_type = if self.parse_keyword(Keyword::AS) {
6455            let keyword =
6456                self.expect_one_of_keywords(&[Keyword::PERMISSIVE, Keyword::RESTRICTIVE])?;
6457            Some(match keyword {
6458                Keyword::PERMISSIVE => CreatePolicyType::Permissive,
6459                Keyword::RESTRICTIVE => CreatePolicyType::Restrictive,
6460                unexpected_keyword => return Err(ParserError::ParserError(
6461                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in policy type"),
6462                )),
6463            })
6464        } else {
6465            None
6466        };
6467
6468        let command = if self.parse_keyword(Keyword::FOR) {
6469            let keyword = self.expect_one_of_keywords(&[
6470                Keyword::ALL,
6471                Keyword::SELECT,
6472                Keyword::INSERT,
6473                Keyword::UPDATE,
6474                Keyword::DELETE,
6475            ])?;
6476            Some(match keyword {
6477                Keyword::ALL => CreatePolicyCommand::All,
6478                Keyword::SELECT => CreatePolicyCommand::Select,
6479                Keyword::INSERT => CreatePolicyCommand::Insert,
6480                Keyword::UPDATE => CreatePolicyCommand::Update,
6481                Keyword::DELETE => CreatePolicyCommand::Delete,
6482                unexpected_keyword => return Err(ParserError::ParserError(
6483                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in policy command"),
6484                )),
6485            })
6486        } else {
6487            None
6488        };
6489
6490        let to = if self.parse_keyword(Keyword::TO) {
6491            Some(self.parse_comma_separated(|p| p.parse_owner())?)
6492        } else {
6493            None
6494        };
6495
6496        let using = if self.parse_keyword(Keyword::USING) {
6497            self.expect_token(&Token::LParen)?;
6498            let expr = self.parse_expr()?;
6499            self.expect_token(&Token::RParen)?;
6500            Some(expr)
6501        } else {
6502            None
6503        };
6504
6505        let with_check = if self.parse_keywords(&[Keyword::WITH, Keyword::CHECK]) {
6506            self.expect_token(&Token::LParen)?;
6507            let expr = self.parse_expr()?;
6508            self.expect_token(&Token::RParen)?;
6509            Some(expr)
6510        } else {
6511            None
6512        };
6513
6514        Ok(CreatePolicy {
6515            name,
6516            table_name,
6517            policy_type,
6518            command,
6519            to,
6520            using,
6521            with_check,
6522        })
6523    }
6524
6525    /// ```sql
6526    /// CREATE CONNECTOR [IF NOT EXISTS] connector_name
6527    /// [TYPE datasource_type]
6528    /// [URL datasource_url]
6529    /// [COMMENT connector_comment]
6530    /// [WITH DCPROPERTIES(property_name=property_value, ...)]
6531    /// ```
6532    ///
6533    /// [Hive Documentation](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362034#LanguageManualDDL-CreateDataConnectorCreateConnector)
6534    pub fn parse_create_connector(&mut self) -> Result<Statement, ParserError> {
6535        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6536        let name = self.parse_identifier()?;
6537
6538        let connector_type = if self.parse_keyword(Keyword::TYPE) {
6539            Some(self.parse_literal_string()?)
6540        } else {
6541            None
6542        };
6543
6544        let url = if self.parse_keyword(Keyword::URL) {
6545            Some(self.parse_literal_string()?)
6546        } else {
6547            None
6548        };
6549
6550        let comment = self.parse_optional_inline_comment()?;
6551
6552        let with_dcproperties =
6553            match self.parse_options_with_keywords(&[Keyword::WITH, Keyword::DCPROPERTIES])? {
6554                properties if !properties.is_empty() => Some(properties),
6555                _ => None,
6556            };
6557
6558        Ok(Statement::CreateConnector(CreateConnector {
6559            name,
6560            if_not_exists,
6561            connector_type,
6562            url,
6563            comment,
6564            with_dcproperties,
6565        }))
6566    }
6567
6568    /// Parse an operator name, which can contain special characters like +, -, <, >, =
6569    /// that are tokenized as operator tokens rather than identifiers.
6570    /// This is used for PostgreSQL CREATE OPERATOR statements.
6571    ///
6572    /// Examples: `+`, `myschema.+`, `pg_catalog.<=`
6573    fn parse_operator_name(&mut self) -> Result<ObjectName, ParserError> {
6574        let mut parts = vec![];
6575        loop {
6576            parts.push(ObjectNamePart::Identifier(Ident::new(
6577                self.next_token().to_string(),
6578            )));
6579            if !self.consume_token(&Token::Period) {
6580                break;
6581            }
6582        }
6583        Ok(ObjectName(parts))
6584    }
6585
6586    /// Parse a [Statement::CreateOperator]
6587    ///
6588    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createoperator.html)
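    ///
    /// For illustration only (schema, type and function names are
    /// placeholders):
    ///
    /// ```sql
    /// CREATE OPERATOR myschema.+ (
    ///     LEFTARG = complex,
    ///     RIGHTARG = complex,
    ///     FUNCTION = complex_add,
    ///     COMMUTATOR = +
    /// );
    /// ```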
6589    pub fn parse_create_operator(&mut self) -> Result<Statement, ParserError> {
6590        let name = self.parse_operator_name()?;
6591        self.expect_token(&Token::LParen)?;
6592
6593        let mut function: Option<ObjectName> = None;
6594        let mut is_procedure = false;
6595        let mut left_arg: Option<DataType> = None;
6596        let mut right_arg: Option<DataType> = None;
6597        let mut options: Vec<OperatorOption> = Vec::new();
6598
6599        loop {
6600            let keyword = self.expect_one_of_keywords(&[
6601                Keyword::FUNCTION,
6602                Keyword::PROCEDURE,
6603                Keyword::LEFTARG,
6604                Keyword::RIGHTARG,
6605                Keyword::COMMUTATOR,
6606                Keyword::NEGATOR,
6607                Keyword::RESTRICT,
6608                Keyword::JOIN,
6609                Keyword::HASHES,
6610                Keyword::MERGES,
6611            ])?;
6612
6613            match keyword {
6614                Keyword::HASHES if !options.iter().any(|o| matches!(o, OperatorOption::Hashes)) => {
6615                    options.push(OperatorOption::Hashes);
6616                }
6617                Keyword::MERGES if !options.iter().any(|o| matches!(o, OperatorOption::Merges)) => {
6618                    options.push(OperatorOption::Merges);
6619                }
6620                Keyword::FUNCTION | Keyword::PROCEDURE if function.is_none() => {
6621                    self.expect_token(&Token::Eq)?;
6622                    function = Some(self.parse_object_name(false)?);
6623                    is_procedure = keyword == Keyword::PROCEDURE;
6624                }
6625                Keyword::LEFTARG if left_arg.is_none() => {
6626                    self.expect_token(&Token::Eq)?;
6627                    left_arg = Some(self.parse_data_type()?);
6628                }
6629                Keyword::RIGHTARG if right_arg.is_none() => {
6630                    self.expect_token(&Token::Eq)?;
6631                    right_arg = Some(self.parse_data_type()?);
6632                }
6633                Keyword::COMMUTATOR
6634                    if !options
6635                        .iter()
6636                        .any(|o| matches!(o, OperatorOption::Commutator(_))) =>
6637                {
6638                    self.expect_token(&Token::Eq)?;
6639                    if self.parse_keyword(Keyword::OPERATOR) {
6640                        self.expect_token(&Token::LParen)?;
6641                        let op = self.parse_operator_name()?;
6642                        self.expect_token(&Token::RParen)?;
6643                        options.push(OperatorOption::Commutator(op));
6644                    } else {
6645                        options.push(OperatorOption::Commutator(self.parse_operator_name()?));
6646                    }
6647                }
6648                Keyword::NEGATOR
6649                    if !options
6650                        .iter()
6651                        .any(|o| matches!(o, OperatorOption::Negator(_))) =>
6652                {
6653                    self.expect_token(&Token::Eq)?;
6654                    if self.parse_keyword(Keyword::OPERATOR) {
6655                        self.expect_token(&Token::LParen)?;
6656                        let op = self.parse_operator_name()?;
6657                        self.expect_token(&Token::RParen)?;
6658                        options.push(OperatorOption::Negator(op));
6659                    } else {
6660                        options.push(OperatorOption::Negator(self.parse_operator_name()?));
6661                    }
6662                }
6663                Keyword::RESTRICT
6664                    if !options
6665                        .iter()
6666                        .any(|o| matches!(o, OperatorOption::Restrict(_))) =>
6667                {
6668                    self.expect_token(&Token::Eq)?;
6669                    options.push(OperatorOption::Restrict(Some(
6670                        self.parse_object_name(false)?,
6671                    )));
6672                }
6673                Keyword::JOIN if !options.iter().any(|o| matches!(o, OperatorOption::Join(_))) => {
6674                    self.expect_token(&Token::Eq)?;
6675                    options.push(OperatorOption::Join(Some(self.parse_object_name(false)?)));
6676                }
6677                _ => {
6678                    return Err(ParserError::ParserError(format!(
6679                        "Duplicate or unexpected keyword {keyword:?} in CREATE OPERATOR"
6680                    )))
6682                }
6683            }
6684
6685            if !self.consume_token(&Token::Comma) {
6686                break;
6687            }
6688        }
6689
6690        // Expect closing parenthesis
6691        self.expect_token(&Token::RParen)?;
6692
6693        // FUNCTION is required
6694        let function = function.ok_or_else(|| {
6695            ParserError::ParserError("CREATE OPERATOR requires FUNCTION parameter".to_string())
6696        })?;
6697
6698        Ok(Statement::CreateOperator(CreateOperator {
6699            name,
6700            function,
6701            is_procedure,
6702            left_arg,
6703            right_arg,
6704            options,
6705        }))
6706    }
6707
6708    /// Parse a [Statement::CreateOperatorFamily]
6709    ///
6710    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createopfamily.html)
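    ///
    /// For illustration, a statement of the kind this method accepts (names are examples):
    /// ```sql
    /// CREATE OPERATOR FAMILY integer_ops USING btree;
    /// ```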
6711    pub fn parse_create_operator_family(&mut self) -> Result<Statement, ParserError> {
6712        let name = self.parse_object_name(false)?;
6713        self.expect_keyword(Keyword::USING)?;
6714        let using = self.parse_identifier()?;
6715
6716        Ok(Statement::CreateOperatorFamily(CreateOperatorFamily {
6717            name,
6718            using,
6719        }))
6720    }
6721
6722    /// Parse a [Statement::CreateOperatorClass]
6723    ///
6724    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createopclass.html)
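    ///
    /// For illustration, a statement of the kind this method accepts (names are examples):
    /// ```sql
    /// CREATE OPERATOR CLASS int4_ops_custom DEFAULT FOR TYPE int4 USING btree AS
    ///     OPERATOR 1 <,
    ///     FUNCTION 1 btint4cmp(int4, int4);
    /// ```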
6725    pub fn parse_create_operator_class(&mut self) -> Result<Statement, ParserError> {
6726        let name = self.parse_object_name(false)?;
6727        let default = self.parse_keyword(Keyword::DEFAULT);
6728        self.expect_keywords(&[Keyword::FOR, Keyword::TYPE])?;
6729        let for_type = self.parse_data_type()?;
6730        self.expect_keyword(Keyword::USING)?;
6731        let using = self.parse_identifier()?;
6732
6733        let family = if self.parse_keyword(Keyword::FAMILY) {
6734            Some(self.parse_object_name(false)?)
6735        } else {
6736            None
6737        };
6738
6739        self.expect_keyword(Keyword::AS)?;
6740
6741        let mut items = vec![];
6742        loop {
6743            if self.parse_keyword(Keyword::OPERATOR) {
6744                let strategy_number = self.parse_literal_uint()?;
6745                let operator_name = self.parse_operator_name()?;
6746
6747                // Optional operator argument types
6748                let op_types = if self.consume_token(&Token::LParen) {
6749                    let left = self.parse_data_type()?;
6750                    self.expect_token(&Token::Comma)?;
6751                    let right = self.parse_data_type()?;
6752                    self.expect_token(&Token::RParen)?;
6753                    Some(OperatorArgTypes { left, right })
6754                } else {
6755                    None
6756                };
6757
6758                // Optional purpose
6759                let purpose = if self.parse_keyword(Keyword::FOR) {
6760                    if self.parse_keyword(Keyword::SEARCH) {
6761                        Some(OperatorPurpose::ForSearch)
6762                    } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
6763                        let sort_family = self.parse_object_name(false)?;
6764                        Some(OperatorPurpose::ForOrderBy { sort_family })
6765                    } else {
6766                        return self.expected("SEARCH or ORDER BY after FOR", self.peek_token());
6767                    }
6768                } else {
6769                    None
6770                };
6771
6772                items.push(OperatorClassItem::Operator {
6773                    strategy_number,
6774                    operator_name,
6775                    op_types,
6776                    purpose,
6777                });
6778            } else if self.parse_keyword(Keyword::FUNCTION) {
6779                let support_number = self.parse_literal_uint()?;
6780
6781                // Optional operator types (consume the `(` once so an empty `()` list is handled)
6782                let op_types = if self.consume_token(&Token::LParen) {
6783                    if self.consume_token(&Token::RParen) {
6784                        Some(vec![])
6785                    } else {
6786                        let mut types = vec![];
6787                        loop {
6788                            types.push(self.parse_data_type()?);
6789                            if !self.consume_token(&Token::Comma) {
6790                                break;
6791                            }
6792                        }
6793                        self.expect_token(&Token::RParen)?;
6794                        Some(types)
6795                    }
6796                } else {
6797                    None
6798                };
6799
6800                let function_name = self.parse_object_name(false)?;
6801
6802                // Function argument types
6803                let argument_types = if self.consume_token(&Token::LParen) {
6804                    let mut types = vec![];
6805                    loop {
6806                        if self.peek_token() == Token::RParen {
6807                            break;
6808                        }
6809                        types.push(self.parse_data_type()?);
6810                        if !self.consume_token(&Token::Comma) {
6811                            break;
6812                        }
6813                    }
6814                    self.expect_token(&Token::RParen)?;
6815                    types
6816                } else {
6817                    vec![]
6818                };
6819
6820                items.push(OperatorClassItem::Function {
6821                    support_number,
6822                    op_types,
6823                    function_name,
6824                    argument_types,
6825                });
6826            } else if self.parse_keyword(Keyword::STORAGE) {
6827                let storage_type = self.parse_data_type()?;
6828                items.push(OperatorClassItem::Storage { storage_type });
6829            } else {
6830                break;
6831            }
6832
6833            // Check for comma separator
6834            if !self.consume_token(&Token::Comma) {
6835                break;
6836            }
6837        }
6838
6839        Ok(Statement::CreateOperatorClass(CreateOperatorClass {
6840            name,
6841            default,
6842            for_type,
6843            using,
6844            family,
6845            items,
6846        }))
6847    }
6848
6849    pub fn parse_drop(&mut self) -> Result<Statement, ParserError> {
6850        // MySQL, DuckDB and the generic dialect support `TEMPORARY`
6851        let temporary = dialect_of!(self is MySqlDialect | GenericDialect | DuckDbDialect)
6852            && self.parse_keyword(Keyword::TEMPORARY);
6853        let persistent = dialect_of!(self is DuckDbDialect)
6854            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
6855
6856        let object_type = if self.parse_keyword(Keyword::TABLE) {
6857            ObjectType::Table
6858        } else if self.parse_keyword(Keyword::VIEW) {
6859            ObjectType::View
6860        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEW]) {
6861            ObjectType::MaterializedView
6862        } else if self.parse_keyword(Keyword::INDEX) {
6863            ObjectType::Index
6864        } else if self.parse_keyword(Keyword::ROLE) {
6865            ObjectType::Role
6866        } else if self.parse_keyword(Keyword::SCHEMA) {
6867            ObjectType::Schema
6868        } else if self.parse_keyword(Keyword::DATABASE) {
6869            ObjectType::Database
6870        } else if self.parse_keyword(Keyword::SEQUENCE) {
6871            ObjectType::Sequence
6872        } else if self.parse_keyword(Keyword::STAGE) {
6873            ObjectType::Stage
6874        } else if self.parse_keyword(Keyword::TYPE) {
6875            ObjectType::Type
6876        } else if self.parse_keyword(Keyword::USER) {
6877            ObjectType::User
6878        } else if self.parse_keyword(Keyword::STREAM) {
6879            ObjectType::Stream
6880        } else if self.parse_keyword(Keyword::FUNCTION) {
6881            return self.parse_drop_function();
6882        } else if self.parse_keyword(Keyword::POLICY) {
6883            return self.parse_drop_policy();
6884        } else if self.parse_keyword(Keyword::CONNECTOR) {
6885            return self.parse_drop_connector();
6886        } else if self.parse_keyword(Keyword::DOMAIN) {
6887            return self.parse_drop_domain();
6888        } else if self.parse_keyword(Keyword::PROCEDURE) {
6889            return self.parse_drop_procedure();
6890        } else if self.parse_keyword(Keyword::SECRET) {
6891            return self.parse_drop_secret(temporary, persistent);
6892        } else if self.parse_keyword(Keyword::TRIGGER) {
6893            return self.parse_drop_trigger();
6894        } else if self.parse_keyword(Keyword::EXTENSION) {
6895            return self.parse_drop_extension();
6896        } else if self.parse_keyword(Keyword::OPERATOR) {
6897            // Check if this is DROP OPERATOR FAMILY or DROP OPERATOR CLASS
6898            return if self.parse_keyword(Keyword::FAMILY) {
6899                self.parse_drop_operator_family()
6900            } else if self.parse_keyword(Keyword::CLASS) {
6901                self.parse_drop_operator_class()
6902            } else {
6903                self.parse_drop_operator()
6904            };
6905        } else {
6906            return self.expected(
6907                "CONNECTOR, DATABASE, EXTENSION, FUNCTION, INDEX, OPERATOR, POLICY, PROCEDURE, ROLE, SCHEMA, SECRET, SEQUENCE, STAGE, TABLE, TRIGGER, TYPE, VIEW, MATERIALIZED VIEW or USER after DROP",
6908                self.peek_token(),
6909            );
6910        };
6911        // Many dialects support the non-standard `IF EXISTS` clause and allow
6912        // specifying multiple objects to delete in a single statement
6913        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6914        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
6915
6916        let loc = self.peek_token().span.start;
6917        let cascade = self.parse_keyword(Keyword::CASCADE);
6918        let restrict = self.parse_keyword(Keyword::RESTRICT);
6919        let purge = self.parse_keyword(Keyword::PURGE);
6920        if cascade && restrict {
6921            return parser_err!("Cannot specify both CASCADE and RESTRICT in DROP", loc);
6922        }
6923        if object_type == ObjectType::Role && (cascade || restrict || purge) {
6924            return parser_err!(
6925                "Cannot specify CASCADE, RESTRICT, or PURGE in DROP ROLE",
6926                loc
6927            );
6928        }
6929        let table = if self.parse_keyword(Keyword::ON) {
6930            Some(self.parse_object_name(false)?)
6931        } else {
6932            None
6933        };
6934        Ok(Statement::Drop {
6935            object_type,
6936            if_exists,
6937            names,
6938            cascade,
6939            restrict,
6940            purge,
6941            temporary,
6942            table,
6943        })
6944    }
6945
6946    fn parse_optional_drop_behavior(&mut self) -> Option<DropBehavior> {
6947        match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
6948            Some(Keyword::CASCADE) => Some(DropBehavior::Cascade),
6949            Some(Keyword::RESTRICT) => Some(DropBehavior::Restrict),
6950            _ => None,
6951        }
6952    }
6953
6954    /// ```sql
6955    /// DROP FUNCTION [ IF EXISTS ] name [ ( [ [ argmode ] [ argname ] argtype [, ...] ] ) ] [, ...]
6956    /// [ CASCADE | RESTRICT ]
6957    /// ```
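    ///
    /// For illustration (names are examples):
    /// ```sql
    /// DROP FUNCTION IF EXISTS sqrt(integer) CASCADE;
    /// ```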
6958    fn parse_drop_function(&mut self) -> Result<Statement, ParserError> {
6959        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6960        let func_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
6961        let drop_behavior = self.parse_optional_drop_behavior();
6962        Ok(Statement::DropFunction(DropFunction {
6963            if_exists,
6964            func_desc,
6965            drop_behavior,
6966        }))
6967    }
6968
6969    /// ```sql
6970    /// DROP POLICY [ IF EXISTS ] name ON table_name [ CASCADE | RESTRICT ]
6971    /// ```
6972    ///
6973    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-droppolicy.html)
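    ///
    /// For illustration (names are examples):
    /// ```sql
    /// DROP POLICY IF EXISTS p1 ON my_table RESTRICT;
    /// ```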
6974    fn parse_drop_policy(&mut self) -> Result<Statement, ParserError> {
6975        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6976        let name = self.parse_identifier()?;
6977        self.expect_keyword_is(Keyword::ON)?;
6978        let table_name = self.parse_object_name(false)?;
6979        let drop_behavior = self.parse_optional_drop_behavior();
6980        Ok(Statement::DropPolicy {
6981            if_exists,
6982            name,
6983            table_name,
6984            drop_behavior,
6985        })
6986    }
6987    /// ```sql
6988    /// DROP CONNECTOR [IF EXISTS] name
6989    /// ```
6990    ///
6991    /// See [Hive](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362034#LanguageManualDDL-DropConnector)
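    ///
    /// For illustration (the connector name is an example):
    /// ```sql
    /// DROP CONNECTOR IF EXISTS mysql_connector;
    /// ```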
6992    fn parse_drop_connector(&mut self) -> Result<Statement, ParserError> {
6993        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6994        let name = self.parse_identifier()?;
6995        Ok(Statement::DropConnector { if_exists, name })
6996    }
6997
6998    /// ```sql
6999    /// DROP DOMAIN [ IF EXISTS ] name [ CASCADE | RESTRICT ]
7000    /// ```
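    ///
    /// For illustration (names are examples):
    /// ```sql
    /// DROP DOMAIN IF EXISTS us_postal_code CASCADE;
    /// ```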
7001    fn parse_drop_domain(&mut self) -> Result<Statement, ParserError> {
7002        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7003        let name = self.parse_object_name(false)?;
7004        let drop_behavior = self.parse_optional_drop_behavior();
7005        Ok(Statement::DropDomain(DropDomain {
7006            if_exists,
7007            name,
7008            drop_behavior,
7009        }))
7010    }
7011
7012    /// ```sql
7013    /// DROP PROCEDURE [ IF EXISTS ] name [ ( [ [ argmode ] [ argname ] argtype [, ...] ] ) ] [, ...]
7014    /// [ CASCADE | RESTRICT ]
7015    /// ```
7016    fn parse_drop_procedure(&mut self) -> Result<Statement, ParserError> {
7017        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7018        let proc_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
7019        let drop_behavior = self.parse_optional_drop_behavior();
7020        Ok(Statement::DropProcedure {
7021            if_exists,
7022            proc_desc,
7023            drop_behavior,
7024        })
7025    }
7026
7027    fn parse_function_desc(&mut self) -> Result<FunctionDesc, ParserError> {
7028        let name = self.parse_object_name(false)?;
7029
7030        let args = if self.consume_token(&Token::LParen) {
7031            if self.consume_token(&Token::RParen) {
7032                Some(vec![])
7033            } else {
7034                let args = self.parse_comma_separated(Parser::parse_function_arg)?;
7035                self.expect_token(&Token::RParen)?;
7036                Some(args)
7037            }
7038        } else {
7039            None
7040        };
7041
7042        Ok(FunctionDesc { name, args })
7043    }
7044
7045    /// See [DuckDB Docs](https://duckdb.org/docs/sql/statements/create_secret.html) for more details.
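    ///
    /// For illustration (names are examples; the `PERSISTENT`/`TEMPORARY` modifier is
    /// consumed by the caller, `parse_drop`, before this method is reached):
    /// ```sql
    /// DROP PERSISTENT SECRET IF EXISTS my_secret;
    /// ```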
7046    fn parse_drop_secret(
7047        &mut self,
7048        temporary: bool,
7049        persistent: bool,
7050    ) -> Result<Statement, ParserError> {
7051        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7052        let name = self.parse_identifier()?;
7053        let storage_specifier = if self.parse_keyword(Keyword::FROM) {
7054            self.parse_identifier().ok()
7055        } else {
7056            None
7057        };
7058        let temp = match (temporary, persistent) {
7059            (true, false) => Some(true),
7060            (false, true) => Some(false),
7061            (false, false) => None,
7062            _ => self.expected("TEMPORARY or PERSISTENT", self.peek_token())?,
7063        };
7064
7065        Ok(Statement::DropSecret {
7066            if_exists,
7067            temporary: temp,
7068            name,
7069            storage_specifier,
7070        })
7071    }
7072
7073    /// Parse a `DECLARE` statement.
7074    ///
7075    /// ```sql
7076    /// DECLARE name [ BINARY ] [ ASENSITIVE | INSENSITIVE ] [ [ NO ] SCROLL ]
7077    ///     CURSOR [ { WITH | WITHOUT } HOLD ] FOR query
7078    /// ```
7079    ///
7080    /// The syntax can vary significantly between warehouses; see the grammar
7081    /// documented on the warehouse-specific parsing functions in such cases.
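    ///
    /// For illustration, a statement in the default (PostgreSQL-style) form (names are examples):
    /// ```sql
    /// DECLARE my_cursor BINARY INSENSITIVE SCROLL CURSOR WITH HOLD FOR SELECT * FROM films;
    /// ```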
7082    pub fn parse_declare(&mut self) -> Result<Statement, ParserError> {
7083        if dialect_of!(self is BigQueryDialect) {
7084            return self.parse_big_query_declare();
7085        }
7086        if dialect_of!(self is SnowflakeDialect) {
7087            return self.parse_snowflake_declare();
7088        }
7089        if dialect_of!(self is MsSqlDialect) {
7090            return self.parse_mssql_declare();
7091        }
7092
7093        let name = self.parse_identifier()?;
7094
7095        let binary = Some(self.parse_keyword(Keyword::BINARY));
7096        let sensitive = if self.parse_keyword(Keyword::INSENSITIVE) {
7097            Some(true)
7098        } else if self.parse_keyword(Keyword::ASENSITIVE) {
7099            Some(false)
7100        } else {
7101            None
7102        };
7103        let scroll = if self.parse_keyword(Keyword::SCROLL) {
7104            Some(true)
7105        } else if self.parse_keywords(&[Keyword::NO, Keyword::SCROLL]) {
7106            Some(false)
7107        } else {
7108            None
7109        };
7110
7111        self.expect_keyword_is(Keyword::CURSOR)?;
7112        let declare_type = Some(DeclareType::Cursor);
7113
7114        let hold = match self.parse_one_of_keywords(&[Keyword::WITH, Keyword::WITHOUT]) {
7115            Some(keyword) => {
7116                self.expect_keyword_is(Keyword::HOLD)?;
7117
7118                match keyword {
7119                    Keyword::WITH => Some(true),
7120                    Keyword::WITHOUT => Some(false),
7121                    unexpected_keyword => return Err(ParserError::ParserError(
7122                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in cursor hold"),
7123                    )),
7124                }
7125            }
7126            None => None,
7127        };
7128
7129        self.expect_keyword_is(Keyword::FOR)?;
7130
7131        let query = Some(self.parse_query()?);
7132
7133        Ok(Statement::Declare {
7134            stmts: vec![Declare {
7135                names: vec![name],
7136                data_type: None,
7137                assignment: None,
7138                declare_type,
7139                binary,
7140                sensitive,
7141                scroll,
7142                hold,
7143                for_query: query,
7144            }],
7145        })
7146    }
7147
7148    /// Parse a [BigQuery] `DECLARE` statement.
7149    ///
7150    /// Syntax:
7151    /// ```text
7152    /// DECLARE variable_name[, ...] [{ <variable_type> | <DEFAULT expression> }];
7153    /// ```
7154    /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#declare
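    ///
    /// For illustration (names are examples):
    /// ```sql
    /// DECLARE x, y INT64 DEFAULT 0;
    /// ```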
7155    pub fn parse_big_query_declare(&mut self) -> Result<Statement, ParserError> {
7156        let names = self.parse_comma_separated(Parser::parse_identifier)?;
7157
7158        let data_type = match self.peek_token().token {
7159            Token::Word(w) if w.keyword == Keyword::DEFAULT => None,
7160            _ => Some(self.parse_data_type()?),
7161        };
7162
7163        let expr = if data_type.is_some() {
7164            if self.parse_keyword(Keyword::DEFAULT) {
7165                Some(self.parse_expr()?)
7166            } else {
7167                None
7168            }
7169        } else {
7170            // If no variable type is given, a default expression must be specified, per BQ docs.
7171            // i.e. `DECLARE foo;` is invalid.
7172            self.expect_keyword_is(Keyword::DEFAULT)?;
7173            Some(self.parse_expr()?)
7174        };
7175
7176        Ok(Statement::Declare {
7177            stmts: vec![Declare {
7178                names,
7179                data_type,
7180                assignment: expr.map(|expr| DeclareAssignment::Default(Box::new(expr))),
7181                declare_type: None,
7182                binary: None,
7183                sensitive: None,
7184                scroll: None,
7185                hold: None,
7186                for_query: None,
7187            }],
7188        })
7189    }
7190
7191    /// Parse a [Snowflake] `DECLARE` statement.
7192    ///
7193    /// Syntax:
7194    /// ```text
7195    /// DECLARE
7196    ///   [{ <variable_declaration>
7197    ///      | <cursor_declaration>
7198    ///      | <resultset_declaration>
7199    ///      | <exception_declaration> }; ... ]
7200    ///
7201    /// <variable_declaration>
7202    /// <variable_name> [<type>] [ { DEFAULT | := } <expression>]
7203    ///
7204    /// <cursor_declaration>
7205    /// <cursor_name> CURSOR FOR <query>
7206    ///
7207    /// <resultset_declaration>
7208    /// <resultset_name> RESULTSET [ { DEFAULT | := } ( <query> ) ] ;
7209    ///
7210    /// <exception_declaration>
7211    /// <exception_name> EXCEPTION [ ( <exception_number> , '<exception_message>' ) ] ;
7212    /// ```
7213    ///
7214    /// [Snowflake]: https://docs.snowflake.com/en/sql-reference/snowflake-scripting/declare
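    ///
    /// For illustration (names are examples):
    /// ```sql
    /// DECLARE
    ///   profit NUMBER(38, 2) DEFAULT 0.0;
    ///   c1 CURSOR FOR SELECT price FROM invoices;
    /// ```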
7215    pub fn parse_snowflake_declare(&mut self) -> Result<Statement, ParserError> {
7216        let mut stmts = vec![];
7217        loop {
7218            let name = self.parse_identifier()?;
7219            let (declare_type, for_query, assigned_expr, data_type) =
7220                if self.parse_keyword(Keyword::CURSOR) {
7221                    self.expect_keyword_is(Keyword::FOR)?;
7222                    match self.peek_token().token {
7223                        Token::Word(w) if w.keyword == Keyword::SELECT => (
7224                            Some(DeclareType::Cursor),
7225                            Some(self.parse_query()?),
7226                            None,
7227                            None,
7228                        ),
7229                        _ => (
7230                            Some(DeclareType::Cursor),
7231                            None,
7232                            Some(DeclareAssignment::For(Box::new(self.parse_expr()?))),
7233                            None,
7234                        ),
7235                    }
7236                } else if self.parse_keyword(Keyword::RESULTSET) {
7237                    let assigned_expr = if self.peek_token().token != Token::SemiColon {
7238                        self.parse_snowflake_variable_declaration_expression()?
7239                    } else {
7240                        // Nothing more to do. The statement has no further parameters.
7241                        None
7242                    };
7243
7244                    (Some(DeclareType::ResultSet), None, assigned_expr, None)
7245                } else if self.parse_keyword(Keyword::EXCEPTION) {
7246                    let assigned_expr = if self.peek_token().token == Token::LParen {
7247                        Some(DeclareAssignment::Expr(Box::new(self.parse_expr()?)))
7248                    } else {
7249                        // Nothing more to do. The statement has no further parameters.
7250                        None
7251                    };
7252
7253                    (Some(DeclareType::Exception), None, assigned_expr, None)
7254                } else {
7255                    // Without an explicit keyword, the only valid option is variable declaration.
7256                    let (assigned_expr, data_type) = if let Some(assigned_expr) =
7257                        self.parse_snowflake_variable_declaration_expression()?
7258                    {
7259                        (Some(assigned_expr), None)
7260                    } else if let Token::Word(_) = self.peek_token().token {
7261                        let data_type = self.parse_data_type()?;
7262                        (
7263                            self.parse_snowflake_variable_declaration_expression()?,
7264                            Some(data_type),
7265                        )
7266                    } else {
7267                        (None, None)
7268                    };
7269                    (None, None, assigned_expr, data_type)
7270                };
7271            let stmt = Declare {
7272                names: vec![name],
7273                data_type,
7274                assignment: assigned_expr,
7275                declare_type,
7276                binary: None,
7277                sensitive: None,
7278                scroll: None,
7279                hold: None,
7280                for_query,
7281            };
7282
7283            stmts.push(stmt);
7284            if self.consume_token(&Token::SemiColon) {
7285                match self.peek_token().token {
7286                    Token::Word(w)
7287                        if ALL_KEYWORDS
7288                            .binary_search(&w.value.to_uppercase().as_str())
7289                            .is_err() =>
7290                    {
7291                        // Not a keyword - start of a new declaration.
7292                        continue;
7293                    }
7294                    _ => {
7295                        // Put back the semicolon, this is the end of the DECLARE statement.
7296                        self.prev_token();
7297                    }
7298                }
7299            }
7300
7301            break;
7302        }
7303
7304        Ok(Statement::Declare { stmts })
7305    }
7306
7307    /// Parse a [MsSql] `DECLARE` statement.
7308    ///
7309    /// Syntax:
7310    /// ```text
7311    /// DECLARE
7312    /// {
7313    ///   { @local_variable [AS] data_type [ = value ] }
7314    ///   | { @cursor_variable_name CURSOR [ FOR ] }
7315    /// } [ ,...n ]
7316    /// ```
7317    /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/language-elements/declare-local-variable-transact-sql?view=sql-server-ver16
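    ///
    /// For illustration (names are examples):
    /// ```sql
    /// DECLARE @price INT = 100, @my_cursor CURSOR;
    /// ```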
7318    pub fn parse_mssql_declare(&mut self) -> Result<Statement, ParserError> {
7319        let stmts = self.parse_comma_separated(Parser::parse_mssql_declare_stmt)?;
7320
7321        Ok(Statement::Declare { stmts })
7322    }
7323
7324    /// Parse the body of a [MsSql] `DECLARE` statement.
7325    ///
7326    /// Syntax:
7327    /// ```text
7328    /// {
7329    ///   { @local_variable [AS] data_type [ = value ] }
7330    ///   | { @cursor_variable_name CURSOR [ FOR ] }
7331    /// } [ ,...n ]
7332    /// ```
7333    /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/language-elements/declare-local-variable-transact-sql?view=sql-server-ver16
7334    pub fn parse_mssql_declare_stmt(&mut self) -> Result<Declare, ParserError> {
7335        let name = {
7336            let ident = self.parse_identifier()?;
7337            if !ident.value.starts_with('@')
7338                && !matches!(
7339                    self.peek_token().token,
7340                    Token::Word(w) if w.keyword == Keyword::CURSOR
7341                )
7342            {
7343                Err(ParserError::TokenizerError(
7344                    "Invalid MsSql variable declaration.".to_string(),
7345                ))
7346            } else {
7347                Ok(ident)
7348            }
7349        }?;
7350
7351        let (declare_type, data_type) = match self.peek_token().token {
7352            Token::Word(w) => match w.keyword {
7353                Keyword::CURSOR => {
7354                    self.next_token();
7355                    (Some(DeclareType::Cursor), None)
7356                }
7357                Keyword::AS => {
7358                    self.next_token();
7359                    (None, Some(self.parse_data_type()?))
7360                }
7361                _ => (None, Some(self.parse_data_type()?)),
7362            },
7363            _ => (None, Some(self.parse_data_type()?)),
7364        };
7365
7366        let (for_query, assignment) = if self.peek_keyword(Keyword::FOR) {
7367            self.next_token();
7368            let query = Some(self.parse_query()?);
7369            (query, None)
7370        } else {
7371            let assignment = self.parse_mssql_variable_declaration_expression()?;
7372            (None, assignment)
7373        };
7374
7375        Ok(Declare {
7376            names: vec![name],
7377            data_type,
7378            assignment,
7379            declare_type,
7380            binary: None,
7381            sensitive: None,
7382            scroll: None,
7383            hold: None,
7384            for_query,
7385        })
7386    }
7387
7388    /// Parses the assigned expression in a variable declaration.
7389    ///
7390    /// Syntax:
7391    /// ```text
7392    /// [ { DEFAULT | := } <expression>]
7393    /// ```
7394    /// <https://docs.snowflake.com/en/sql-reference/snowflake-scripting/declare#variable-declaration-syntax>
7395    pub fn parse_snowflake_variable_declaration_expression(
7396        &mut self,
7397    ) -> Result<Option<DeclareAssignment>, ParserError> {
7398        Ok(match self.peek_token().token {
7399            Token::Word(w) if w.keyword == Keyword::DEFAULT => {
7400                self.next_token(); // Skip `DEFAULT`
7401                Some(DeclareAssignment::Default(Box::new(self.parse_expr()?)))
7402            }
7403            Token::Assignment => {
7404                self.next_token(); // Skip `:=`
7405                Some(DeclareAssignment::DuckAssignment(Box::new(
7406                    self.parse_expr()?,
7407                )))
7408            }
7409            _ => None,
7410        })
7411    }
7412
7413    /// Parses the assigned expression in a variable declaration.
7414    ///
7415    /// Syntax:
7416    /// ```text
7417    /// [ = <expression>]
7418    /// ```
7419    pub fn parse_mssql_variable_declaration_expression(
7420        &mut self,
7421    ) -> Result<Option<DeclareAssignment>, ParserError> {
7422        Ok(match self.peek_token().token {
7423            Token::Eq => {
7424                self.next_token(); // Skip `=`
7425                Some(DeclareAssignment::MsSqlAssignment(Box::new(
7426                    self.parse_expr()?,
7427                )))
7428            }
7429            _ => None,
7430        })
7431    }
7432
7433    // FETCH [ direction ] { FROM | IN } cursor [ INTO target ];
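    // e.g. `FETCH FORWARD 5 FROM my_cursor INTO my_target;` (names are examples)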
7434    pub fn parse_fetch_statement(&mut self) -> Result<Statement, ParserError> {
7435        let direction = if self.parse_keyword(Keyword::NEXT) {
7436            FetchDirection::Next
7437        } else if self.parse_keyword(Keyword::PRIOR) {
7438            FetchDirection::Prior
7439        } else if self.parse_keyword(Keyword::FIRST) {
7440            FetchDirection::First
7441        } else if self.parse_keyword(Keyword::LAST) {
7442            FetchDirection::Last
7443        } else if self.parse_keyword(Keyword::ABSOLUTE) {
7444            FetchDirection::Absolute {
7445                limit: self.parse_number_value()?.value,
7446            }
7447        } else if self.parse_keyword(Keyword::RELATIVE) {
7448            FetchDirection::Relative {
7449                limit: self.parse_number_value()?.value,
7450            }
7451        } else if self.parse_keyword(Keyword::FORWARD) {
7452            if self.parse_keyword(Keyword::ALL) {
7453                FetchDirection::ForwardAll
7454            } else {
7455                FetchDirection::Forward {
7456                    // TODO: Support optional
7457                    limit: Some(self.parse_number_value()?.value),
7458                }
7459            }
7460        } else if self.parse_keyword(Keyword::BACKWARD) {
7461            if self.parse_keyword(Keyword::ALL) {
7462                FetchDirection::BackwardAll
7463            } else {
7464                FetchDirection::Backward {
7465                    // TODO: Support optional
7466                    limit: Some(self.parse_number_value()?.value),
7467                }
7468            }
7469        } else if self.parse_keyword(Keyword::ALL) {
7470            FetchDirection::All
7471        } else {
7472            FetchDirection::Count {
7473                limit: self.parse_number_value()?.value,
7474            }
7475        };
7476
7477        let position = if self.peek_keyword(Keyword::FROM) {
7478            self.expect_keyword(Keyword::FROM)?;
7479            FetchPosition::From
7480        } else if self.peek_keyword(Keyword::IN) {
7481            self.expect_keyword(Keyword::IN)?;
7482            FetchPosition::In
7483        } else {
7484            return parser_err!("Expected FROM or IN", self.peek_token().span.start);
7485        };
7486
7487        let name = self.parse_identifier()?;
7488
7489        let into = if self.parse_keyword(Keyword::INTO) {
7490            Some(self.parse_object_name(false)?)
7491        } else {
7492            None
7493        };
7494
7495        Ok(Statement::Fetch {
7496            name,
7497            direction,
7498            position,
7499            into,
7500        })
7501    }
7502
7503    pub fn parse_discard(&mut self) -> Result<Statement, ParserError> {
7504        let object_type = if self.parse_keyword(Keyword::ALL) {
7505            DiscardObject::ALL
7506        } else if self.parse_keyword(Keyword::PLANS) {
7507            DiscardObject::PLANS
7508        } else if self.parse_keyword(Keyword::SEQUENCES) {
7509            DiscardObject::SEQUENCES
7510        } else if self.parse_keyword(Keyword::TEMP) || self.parse_keyword(Keyword::TEMPORARY) {
7511            DiscardObject::TEMP
7512        } else {
7513            return self.expected(
7514                "ALL, PLANS, SEQUENCES, TEMP or TEMPORARY after DISCARD",
7515                self.peek_token(),
7516            );
7517        };
7518        Ok(Statement::Discard { object_type })
7519    }
7520
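    /// Parse a `CREATE [UNIQUE] INDEX` statement; the leading `CREATE [UNIQUE] INDEX`
    /// keywords are assumed to already have been consumed by the caller.
    ///
    /// For illustration (names are examples):
    /// ```sql
    /// CREATE UNIQUE INDEX CONCURRENTLY idx_users_email ON users (email) WHERE deleted_at IS NULL;
    /// ```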
7521    pub fn parse_create_index(&mut self, unique: bool) -> Result<Statement, ParserError> {
7522        let concurrently = self.parse_keyword(Keyword::CONCURRENTLY);
7523        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7524
7525        let mut using = None;
7526
7527        let index_name = if if_not_exists || !self.parse_keyword(Keyword::ON) {
7528            let index_name = self.parse_object_name(false)?;
7529            // MySQL allows `USING index_type` either before or after `ON table_name`
7530            using = self.parse_optional_using_then_index_type()?;
7531            self.expect_keyword_is(Keyword::ON)?;
7532            Some(index_name)
7533        } else {
7534            None
7535        };
7536
7537        let table_name = self.parse_object_name(false)?;
7538
7539        // MySQL allows having two `USING` clauses.
7540        // In that case, the second clause overwrites the first.
7541        using = self.parse_optional_using_then_index_type()?.or(using);
7542
7543        let columns = self.parse_parenthesized_index_column_list()?;
7544
7545        let include = if self.parse_keyword(Keyword::INCLUDE) {
7546            self.expect_token(&Token::LParen)?;
7547            let columns = self.parse_comma_separated(|p| p.parse_identifier())?;
7548            self.expect_token(&Token::RParen)?;
7549            columns
7550        } else {
7551            vec![]
7552        };
7553
7554        let nulls_distinct = if self.parse_keyword(Keyword::NULLS) {
7555            let not = self.parse_keyword(Keyword::NOT);
7556            self.expect_keyword_is(Keyword::DISTINCT)?;
7557            Some(!not)
7558        } else {
7559            None
7560        };
7561
7562        let with = if self.dialect.supports_create_index_with_clause()
7563            && self.parse_keyword(Keyword::WITH)
7564        {
7565            self.expect_token(&Token::LParen)?;
7566            let with_params = self.parse_comma_separated(Parser::parse_expr)?;
7567            self.expect_token(&Token::RParen)?;
7568            with_params
7569        } else {
7570            Vec::new()
7571        };
7572
7573        let predicate = if self.parse_keyword(Keyword::WHERE) {
7574            Some(self.parse_expr()?)
7575        } else {
7576            None
7577        };
7578
7579        // MySQL options (including the modern style of `USING` after the column list instead of
7580        // before, which is deprecated) shouldn't conflict with other preceding options (e.g. `WITH
7581        // PARSER` won't be caught by the above `WITH` clause parsing because MySQL doesn't set that
7582        // support flag). This is probably invalid syntax for other dialects, but it is simpler to
7583        // parse it anyway (as we do inside `ALTER TABLE` and `CREATE TABLE` parsing).
7584        let index_options = self.parse_index_options()?;
7585
7586        // MySQL allows `ALGORITHM` and `LOCK` options. Unlike in `ALTER TABLE`, they need not be comma separated.
7587        let mut alter_options = Vec::new();
7588        while self
7589            .peek_one_of_keywords(&[Keyword::ALGORITHM, Keyword::LOCK])
7590            .is_some()
7591        {
7592            alter_options.push(self.parse_alter_table_operation()?)
7593        }
7594
7595        Ok(Statement::CreateIndex(CreateIndex {
7596            name: index_name,
7597            table_name,
7598            using,
7599            columns,
7600            unique,
7601            concurrently,
7602            if_not_exists,
7603            include,
7604            nulls_distinct,
7605            with,
7606            predicate,
7607            index_options,
7608            alter_options,
7609        }))
7610    }
7611
7612    pub fn parse_create_extension(&mut self) -> Result<Statement, ParserError> {
7613        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7614        let name = self.parse_identifier()?;
7615
7616        let (schema, version, cascade) = if self.parse_keyword(Keyword::WITH) {
7617            let schema = if self.parse_keyword(Keyword::SCHEMA) {
7618                Some(self.parse_identifier()?)
7619            } else {
7620                None
7621            };
7622
7623            let version = if self.parse_keyword(Keyword::VERSION) {
7624                Some(self.parse_identifier()?)
7625            } else {
7626                None
7627            };
7628
7629            let cascade = self.parse_keyword(Keyword::CASCADE);
7630
7631            (schema, version, cascade)
7632        } else {
7633            (None, None, false)
7634        };
7635
7636        Ok(CreateExtension {
7637            name,
7638            if_not_exists,
7639            schema,
7640            version,
7641            cascade,
7642        }
7643        .into())
7644    }
7645
7646    /// Parse a PostgreSQL-specific [Statement::DropExtension] statement.
7647    pub fn parse_drop_extension(&mut self) -> Result<Statement, ParserError> {
7648        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7649        let names = self.parse_comma_separated(|p| p.parse_identifier())?;
7650        let cascade_or_restrict =
7651            self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]);
7652        Ok(Statement::DropExtension(DropExtension {
7653            names,
7654            if_exists,
7655            cascade_or_restrict: cascade_or_restrict
7656                .map(|k| match k {
7657                    Keyword::CASCADE => Ok(ReferentialAction::Cascade),
7658                    Keyword::RESTRICT => Ok(ReferentialAction::Restrict),
7659                    _ => self.expected("CASCADE or RESTRICT", self.peek_token()),
7660                })
7661                .transpose()?,
7662        }))
7663    }
7664
7665    /// Parse a [Statement::DropOperator] statement.
7666    ///
7667    pub fn parse_drop_operator(&mut self) -> Result<Statement, ParserError> {
7668        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7669        let operators = self.parse_comma_separated(|p| p.parse_drop_operator_signature())?;
7670        let drop_behavior = self.parse_optional_drop_behavior();
7671        Ok(Statement::DropOperator(DropOperator {
7672            if_exists,
7673            operators,
7674            drop_behavior,
7675        }))
7676    }
7677
7678    /// Parse an operator signature for a [Statement::DropOperator]
7679    /// Format: `name ( { left_type | NONE } , right_type )`
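    ///
    /// For illustration: `- (NONE, integer)` for a prefix operator, or `+ (integer, integer)`.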
7680    fn parse_drop_operator_signature(&mut self) -> Result<DropOperatorSignature, ParserError> {
7681        let name = self.parse_operator_name()?;
7682        self.expect_token(&Token::LParen)?;
7683
7684        // Parse left operand type (or NONE for prefix operators)
7685        let left_type = if self.parse_keyword(Keyword::NONE) {
7686            None
7687        } else {
7688            Some(self.parse_data_type()?)
7689        };
7690
7691        self.expect_token(&Token::Comma)?;
7692
7693        // Parse right operand type (always required)
7694        let right_type = self.parse_data_type()?;
7695
7696        self.expect_token(&Token::RParen)?;
7697
7698        Ok(DropOperatorSignature {
7699            name,
7700            left_type,
7701            right_type,
7702        })
7703    }
7704
7705    /// Parse a [Statement::DropOperatorFamily]
7706    ///
7707    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-dropopfamily.html)
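    ///
    /// For illustration (names are examples):
    /// ```sql
    /// DROP OPERATOR FAMILY IF EXISTS integer_ops USING btree CASCADE;
    /// ```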
7708    pub fn parse_drop_operator_family(&mut self) -> Result<Statement, ParserError> {
7709        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7710        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
7711        self.expect_keyword(Keyword::USING)?;
7712        let using = self.parse_identifier()?;
7713        let drop_behavior = self.parse_optional_drop_behavior();
7714        Ok(Statement::DropOperatorFamily(DropOperatorFamily {
7715            if_exists,
7716            names,
7717            using,
7718            drop_behavior,
7719        }))
7720    }
7721
7722    /// Parse a [Statement::DropOperatorClass]
7723    ///
7724    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-dropopclass.html)
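    ///
    /// For illustration (names are examples):
    /// ```sql
    /// DROP OPERATOR CLASS IF EXISTS widget_ops USING btree RESTRICT;
    /// ```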
7725    pub fn parse_drop_operator_class(&mut self) -> Result<Statement, ParserError> {
7726        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7727        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
7728        self.expect_keyword(Keyword::USING)?;
7729        let using = self.parse_identifier()?;
7730        let drop_behavior = self.parse_optional_drop_behavior();
7731        Ok(Statement::DropOperatorClass(DropOperatorClass {
7732            if_exists,
7733            names,
7734            using,
7735            drop_behavior,
7736        }))
7737    }
7738
7739    // TODO: Implement parsing for Skewed
7740    pub fn parse_hive_distribution(&mut self) -> Result<HiveDistributionStyle, ParserError> {
7741        if self.parse_keywords(&[Keyword::PARTITIONED, Keyword::BY]) {
7742            self.expect_token(&Token::LParen)?;
7743            let columns = self.parse_comma_separated(Parser::parse_column_def)?;
7744            self.expect_token(&Token::RParen)?;
7745            Ok(HiveDistributionStyle::PARTITIONED { columns })
7746        } else {
7747            Ok(HiveDistributionStyle::NONE)
7748        }
7749    }
7750
7751    pub fn parse_hive_formats(&mut self) -> Result<Option<HiveFormat>, ParserError> {
7752        let mut hive_format: Option<HiveFormat> = None;
7753        loop {
7754            match self.parse_one_of_keywords(&[
7755                Keyword::ROW,
7756                Keyword::STORED,
7757                Keyword::LOCATION,
7758                Keyword::WITH,
7759            ]) {
7760                Some(Keyword::ROW) => {
7761                    hive_format
7762                        .get_or_insert_with(HiveFormat::default)
7763                        .row_format = Some(self.parse_row_format()?);
7764                }
7765                Some(Keyword::STORED) => {
7766                    self.expect_keyword_is(Keyword::AS)?;
7767                    if self.parse_keyword(Keyword::INPUTFORMAT) {
7768                        let input_format = self.parse_expr()?;
7769                        self.expect_keyword_is(Keyword::OUTPUTFORMAT)?;
7770                        let output_format = self.parse_expr()?;
7771                        hive_format.get_or_insert_with(HiveFormat::default).storage =
7772                            Some(HiveIOFormat::IOF {
7773                                input_format,
7774                                output_format,
7775                            });
7776                    } else {
7777                        let format = self.parse_file_format()?;
7778                        hive_format.get_or_insert_with(HiveFormat::default).storage =
7779                            Some(HiveIOFormat::FileFormat { format });
7780                    }
7781                }
7782                Some(Keyword::LOCATION) => {
7783                    hive_format.get_or_insert_with(HiveFormat::default).location =
7784                        Some(self.parse_literal_string()?);
7785                }
7786                Some(Keyword::WITH) => {
7787                    self.prev_token();
7788                    let properties = self
7789                        .parse_options_with_keywords(&[Keyword::WITH, Keyword::SERDEPROPERTIES])?;
7790                    if !properties.is_empty() {
7791                        hive_format
7792                            .get_or_insert_with(HiveFormat::default)
7793                            .serde_properties = Some(properties);
7794                    } else {
7795                        break;
7796                    }
7797                }
7798                None => break,
7799                _ => break,
7800            }
7801        }
7802
7803        Ok(hive_format)
7804    }
7805
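    /// Parse a Hive `ROW FORMAT` clause; the `ROW` keyword is assumed to have been consumed
    /// by the caller. For illustration (delimiters are examples):
    /// ```sql
    /// ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' LINES TERMINATED BY '\n'
    /// ```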
7806    pub fn parse_row_format(&mut self) -> Result<HiveRowFormat, ParserError> {
7807        self.expect_keyword_is(Keyword::FORMAT)?;
7808        match self.parse_one_of_keywords(&[Keyword::SERDE, Keyword::DELIMITED]) {
7809            Some(Keyword::SERDE) => {
7810                let class = self.parse_literal_string()?;
7811                Ok(HiveRowFormat::SERDE { class })
7812            }
7813            _ => {
7814                let mut row_delimiters = vec![];
7815
7816                loop {
7817                    match self.parse_one_of_keywords(&[
7818                        Keyword::FIELDS,
7819                        Keyword::COLLECTION,
7820                        Keyword::MAP,
7821                        Keyword::LINES,
7822                        Keyword::NULL,
7823                    ]) {
7824                        Some(Keyword::FIELDS) => {
7825                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
7826                                row_delimiters.push(HiveRowDelimiter {
7827                                    delimiter: HiveDelimiter::FieldsTerminatedBy,
7828                                    char: self.parse_identifier()?,
7829                                });
7830
7831                                if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) {
7832                                    row_delimiters.push(HiveRowDelimiter {
7833                                        delimiter: HiveDelimiter::FieldsEscapedBy,
7834                                        char: self.parse_identifier()?,
7835                                    });
7836                                }
7837                            } else {
7838                                break;
7839                            }
7840                        }
7841                        Some(Keyword::COLLECTION) => {
7842                            if self.parse_keywords(&[
7843                                Keyword::ITEMS,
7844                                Keyword::TERMINATED,
7845                                Keyword::BY,
7846                            ]) {
7847                                row_delimiters.push(HiveRowDelimiter {
7848                                    delimiter: HiveDelimiter::CollectionItemsTerminatedBy,
7849                                    char: self.parse_identifier()?,
7850                                });
7851                            } else {
7852                                break;
7853                            }
7854                        }
7855                        Some(Keyword::MAP) => {
7856                            if self.parse_keywords(&[
7857                                Keyword::KEYS,
7858                                Keyword::TERMINATED,
7859                                Keyword::BY,
7860                            ]) {
7861                                row_delimiters.push(HiveRowDelimiter {
7862                                    delimiter: HiveDelimiter::MapKeysTerminatedBy,
7863                                    char: self.parse_identifier()?,
7864                                });
7865                            } else {
7866                                break;
7867                            }
7868                        }
7869                        Some(Keyword::LINES) => {
7870                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
7871                                row_delimiters.push(HiveRowDelimiter {
7872                                    delimiter: HiveDelimiter::LinesTerminatedBy,
7873                                    char: self.parse_identifier()?,
7874                                });
7875                            } else {
7876                                break;
7877                            }
7878                        }
7879                        Some(Keyword::NULL) => {
7880                            if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) {
7881                                row_delimiters.push(HiveRowDelimiter {
7882                                    delimiter: HiveDelimiter::NullDefinedAs,
7883                                    char: self.parse_identifier()?,
7884                                });
7885                            } else {
7886                                break;
7887                            }
7888                        }
7889                        _ => {
7890                            break;
7891                        }
7892                    }
7893                }
7894
7895                Ok(HiveRowFormat::DELIMITED {
7896                    delimiters: row_delimiters,
7897                })
7898            }
7899        }
7900    }
7901
7902    fn parse_optional_on_cluster(&mut self) -> Result<Option<Ident>, ParserError> {
7903        if self.parse_keywords(&[Keyword::ON, Keyword::CLUSTER]) {
7904            Ok(Some(self.parse_identifier()?))
7905        } else {
7906            Ok(None)
7907        }
7908    }
7909
7910    pub fn parse_create_table(
7911        &mut self,
7912        or_replace: bool,
7913        temporary: bool,
7914        global: Option<bool>,
7915        transient: bool,
7916    ) -> Result<Statement, ParserError> {
7917        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
7918        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7919        let table_name = self.parse_object_name(allow_unquoted_hyphen)?;
7920
7921        // PostgreSQL PARTITION OF for child partition tables
7922        // Note: This is a PostgreSQL-specific feature, but the dialect check was intentionally
7923        // removed to allow GenericDialect and other dialects to parse this syntax. This enables
7924        // multi-dialect SQL tools to work with PostgreSQL-specific DDL statements.
7925        //
7926        // PARTITION OF can be combined with other table definition clauses in the AST,
7927        // though PostgreSQL itself prohibits PARTITION OF with AS SELECT or LIKE clauses.
7928        // The parser accepts these combinations for flexibility; semantic validation
7929        // is left to downstream tools.
7930        // Child partitions can have their own constraints and indexes.
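        // e.g. (names are examples):
        //   CREATE TABLE cities_ab PARTITION OF cities FOR VALUES IN ('a', 'b');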
7931        let partition_of = if self.parse_keywords(&[Keyword::PARTITION, Keyword::OF]) {
7932            Some(self.parse_object_name(allow_unquoted_hyphen)?)
7933        } else {
7934            None
7935        };
7936
7937        // Clickhouse has `ON CLUSTER 'cluster'` syntax for DDLs
7938        let on_cluster = self.parse_optional_on_cluster()?;
7939
7940        let like = self.maybe_parse_create_table_like(allow_unquoted_hyphen)?;
7941
7942        let clone = if self.parse_keyword(Keyword::CLONE) {
7943            self.parse_object_name(allow_unquoted_hyphen).ok()
7944        } else {
7945            None
7946        };
7947
7948        // parse optional column list (schema)
7949        let (columns, constraints) = self.parse_columns()?;
7950        let comment_after_column_def =
7951            if dialect_of!(self is HiveDialect) && self.parse_keyword(Keyword::COMMENT) {
7952                let next_token = self.next_token();
7953                match next_token.token {
7954                    Token::SingleQuotedString(str) => Some(CommentDef::WithoutEq(str)),
7955                    _ => self.expected("comment", next_token)?,
7956                }
7957            } else {
7958                None
7959            };
7960
7961        // PostgreSQL PARTITION OF: partition bound specification
7962        let for_values = if partition_of.is_some() {
7963            if self.peek_keyword(Keyword::FOR) || self.peek_keyword(Keyword::DEFAULT) {
7964                Some(self.parse_partition_for_values()?)
7965            } else {
7966                return self.expected(
7967                    "FOR VALUES or DEFAULT after PARTITION OF",
7968                    self.peek_token(),
7969                );
7970            }
7971        } else {
7972            None
7973        };
7974
7975        // SQLite supports `WITHOUT ROWID` at the end of `CREATE TABLE`
7976        let without_rowid = self.parse_keywords(&[Keyword::WITHOUT, Keyword::ROWID]);
7977
7978        let hive_distribution = self.parse_hive_distribution()?;
7979        let clustered_by = self.parse_optional_clustered_by()?;
7980        let hive_formats = self.parse_hive_formats()?;
7981
7982        let create_table_config = self.parse_optional_create_table_config()?;
7983
7984        // ClickHouse supports `PRIMARY KEY`, before `ORDER BY`
7985        // https://clickhouse.com/docs/en/sql-reference/statements/create/table#primary-key
7986        let primary_key = if dialect_of!(self is ClickHouseDialect | GenericDialect)
7987            && self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY])
7988        {
7989            Some(Box::new(self.parse_expr()?))
7990        } else {
7991            None
7992        };
7993
7994        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
7995            if self.consume_token(&Token::LParen) {
7996                let columns = if self.peek_token() != Token::RParen {
7997                    self.parse_comma_separated(|p| p.parse_expr())?
7998                } else {
7999                    vec![]
8000                };
8001                self.expect_token(&Token::RParen)?;
8002                Some(OneOrManyWithParens::Many(columns))
8003            } else {
8004                Some(OneOrManyWithParens::One(self.parse_expr()?))
8005            }
8006        } else {
8007            None
8008        };
8009
8010        let on_commit = if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT]) {
8011            Some(self.parse_create_table_on_commit()?)
8012        } else {
8013            None
8014        };
8015
8016        let strict = self.parse_keyword(Keyword::STRICT);
8017
8018        // Parse optional `AS ( query )`
8019        let query = if self.parse_keyword(Keyword::AS) {
8020            Some(self.parse_query()?)
8021        } else if self.dialect.supports_create_table_select() && self.parse_keyword(Keyword::SELECT)
8022        {
8023            // rewind the SELECT keyword
8024            self.prev_token();
8025            Some(self.parse_query()?)
8026        } else {
8027            None
8028        };
8029
8030        Ok(CreateTableBuilder::new(table_name)
8031            .temporary(temporary)
8032            .columns(columns)
8033            .constraints(constraints)
8034            .or_replace(or_replace)
8035            .if_not_exists(if_not_exists)
8036            .transient(transient)
8037            .hive_distribution(hive_distribution)
8038            .hive_formats(hive_formats)
8039            .global(global)
8040            .query(query)
8041            .without_rowid(without_rowid)
8042            .like(like)
8043            .clone_clause(clone)
8044            .comment_after_column_def(comment_after_column_def)
8045            .order_by(order_by)
8046            .on_commit(on_commit)
8047            .on_cluster(on_cluster)
8048            .clustered_by(clustered_by)
8049            .partition_by(create_table_config.partition_by)
8050            .cluster_by(create_table_config.cluster_by)
8051            .inherits(create_table_config.inherits)
8052            .partition_of(partition_of)
8053            .for_values(for_values)
8054            .table_options(create_table_config.table_options)
8055            .primary_key(primary_key)
8056            .strict(strict)
8057            .build())
8058    }
8059
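    /// Parse an optional `LIKE` clause of `CREATE TABLE`, if present.
    ///
    /// A sketch of the kinds of statements this is meant to cover (illustrative only):
    /// ```sql
    /// CREATE TABLE t2 (LIKE t1 INCLUDING DEFAULTS);
    /// CREATE TABLE t2 LIKE t1;
    /// ```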
8060    fn maybe_parse_create_table_like(
8061        &mut self,
8062        allow_unquoted_hyphen: bool,
8063    ) -> Result<Option<CreateTableLikeKind>, ParserError> {
8064        let like = if self.dialect.supports_create_table_like_parenthesized()
8065            && self.consume_token(&Token::LParen)
8066        {
8067            if self.parse_keyword(Keyword::LIKE) {
8068                let name = self.parse_object_name(allow_unquoted_hyphen)?;
8069                let defaults = if self.parse_keywords(&[Keyword::INCLUDING, Keyword::DEFAULTS]) {
8070                    Some(CreateTableLikeDefaults::Including)
8071                } else if self.parse_keywords(&[Keyword::EXCLUDING, Keyword::DEFAULTS]) {
8072                    Some(CreateTableLikeDefaults::Excluding)
8073                } else {
8074                    None
8075                };
8076                self.expect_token(&Token::RParen)?;
8077                Some(CreateTableLikeKind::Parenthesized(CreateTableLike {
8078                    name,
8079                    defaults,
8080                }))
8081            } else {
8082                // Roll back the '('; it's probably the columns list
8083                self.prev_token();
8084                None
8085            }
8086        } else if self.parse_keyword(Keyword::LIKE) || self.parse_keyword(Keyword::ILIKE) {
8087            let name = self.parse_object_name(allow_unquoted_hyphen)?;
8088            Some(CreateTableLikeKind::Plain(CreateTableLike {
8089                name,
8090                defaults: None,
8091            }))
8092        } else {
8093            None
8094        };
8095        Ok(like)
8096    }
8097
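    /// Parse the target of an `ON COMMIT` clause.
    ///
    /// For example, a statement along these lines (illustrative only):
    /// ```sql
    /// CREATE TEMPORARY TABLE t (a INT) ON COMMIT DELETE ROWS;
    /// ```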
8098    pub(crate) fn parse_create_table_on_commit(&mut self) -> Result<OnCommit, ParserError> {
8099        if self.parse_keywords(&[Keyword::DELETE, Keyword::ROWS]) {
8100            Ok(OnCommit::DeleteRows)
8101        } else if self.parse_keywords(&[Keyword::PRESERVE, Keyword::ROWS]) {
8102            Ok(OnCommit::PreserveRows)
8103        } else if self.parse_keywords(&[Keyword::DROP]) {
8104            Ok(OnCommit::Drop)
8105        } else {
8106            parser_err!(
8107                "Expecting DELETE ROWS, PRESERVE ROWS or DROP",
8108                self.peek_token()
8109            )
8110        }
8111    }
8112
8113    /// Parse [ForValues] of a `PARTITION OF` clause.
8114    ///
8115    /// Parses: `FOR VALUES partition_bound_spec | DEFAULT`
8116    ///
8117    /// [PostgreSQL](https://www.postgresql.org/docs/current/sql-createtable.html)
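    ///
    /// For example, bound specifications along these lines (illustrative only):
    /// ```sql
    /// CREATE TABLE m_y2024 PARTITION OF m FOR VALUES FROM ('2024-01-01') TO ('2025-01-01');
    /// CREATE TABLE m_other PARTITION OF m DEFAULT;
    /// ```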
8118    fn parse_partition_for_values(&mut self) -> Result<ForValues, ParserError> {
8119        if self.parse_keyword(Keyword::DEFAULT) {
8120            return Ok(ForValues::Default);
8121        }
8122
8123        self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;
8124
8125        if self.parse_keyword(Keyword::IN) {
8126            // FOR VALUES IN (expr, ...)
8127            self.expect_token(&Token::LParen)?;
8128            if self.peek_token() == Token::RParen {
8129                return self.expected("at least one value", self.peek_token());
8130            }
8131            let values = self.parse_comma_separated(Parser::parse_expr)?;
8132            self.expect_token(&Token::RParen)?;
8133            Ok(ForValues::In(values))
8134        } else if self.parse_keyword(Keyword::FROM) {
8135            // FOR VALUES FROM (...) TO (...)
8136            self.expect_token(&Token::LParen)?;
8137            if self.peek_token() == Token::RParen {
8138                return self.expected("at least one value", self.peek_token());
8139            }
8140            let from = self.parse_comma_separated(Parser::parse_partition_bound_value)?;
8141            self.expect_token(&Token::RParen)?;
8142            self.expect_keyword(Keyword::TO)?;
8143            self.expect_token(&Token::LParen)?;
8144            if self.peek_token() == Token::RParen {
8145                return self.expected("at least one value", self.peek_token());
8146            }
8147            let to = self.parse_comma_separated(Parser::parse_partition_bound_value)?;
8148            self.expect_token(&Token::RParen)?;
8149            Ok(ForValues::From { from, to })
8150        } else if self.parse_keyword(Keyword::WITH) {
8151            // FOR VALUES WITH (MODULUS n, REMAINDER r)
8152            self.expect_token(&Token::LParen)?;
8153            self.expect_keyword(Keyword::MODULUS)?;
8154            let modulus = self.parse_literal_uint()?;
8155            self.expect_token(&Token::Comma)?;
8156            self.expect_keyword(Keyword::REMAINDER)?;
8157            let remainder = self.parse_literal_uint()?;
8158            self.expect_token(&Token::RParen)?;
8159            Ok(ForValues::With { modulus, remainder })
8160        } else {
8161            self.expected("IN, FROM, or WITH after FOR VALUES", self.peek_token())
8162        }
8163    }
8164
8165    /// Parse a single [PartitionBoundValue].
8166    fn parse_partition_bound_value(&mut self) -> Result<PartitionBoundValue, ParserError> {
8167        if self.parse_keyword(Keyword::MINVALUE) {
8168            Ok(PartitionBoundValue::MinValue)
8169        } else if self.parse_keyword(Keyword::MAXVALUE) {
8170            Ok(PartitionBoundValue::MaxValue)
8171        } else {
8172            Ok(PartitionBoundValue::Expr(self.parse_expr()?))
8173        }
8174    }
8175
8176    /// Parse configuration such as inheritance, partitioning, and clustering information during table creation.
8177    ///
8178    /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#syntax_2)
8179    /// [PostgreSQL](https://www.postgresql.org/docs/current/ddl-partitioning.html)
8180    /// [MySql](https://dev.mysql.com/doc/refman/8.4/en/create-table.html)
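    ///
    /// For instance, trailing clauses along these lines (illustrative only):
    /// ```sql
    /// CREATE TABLE child (a INT) INHERITS (parent) WITH (fillfactor = 70);
    /// ```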
8181    fn parse_optional_create_table_config(
8182        &mut self,
8183    ) -> Result<CreateTableConfiguration, ParserError> {
8184        let mut table_options = CreateTableOptions::None;
8185
8186        let inherits = if self.parse_keyword(Keyword::INHERITS) {
8187            Some(self.parse_parenthesized_qualified_column_list(IsOptional::Mandatory, false)?)
8188        } else {
8189            None
8190        };
8191
8192        // PostgreSQL supports `WITH ( options )` before `AS`
8193        let with_options = self.parse_options(Keyword::WITH)?;
8194        if !with_options.is_empty() {
8195            table_options = CreateTableOptions::With(with_options)
8196        }
8197
8198        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
8199        if !table_properties.is_empty() {
8200            table_options = CreateTableOptions::TableProperties(table_properties);
8201        }
8202        let partition_by = if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
8203            && self.parse_keywords(&[Keyword::PARTITION, Keyword::BY])
8204        {
8205            Some(Box::new(self.parse_expr()?))
8206        } else {
8207            None
8208        };
8209
8210        let mut cluster_by = None;
8211        if dialect_of!(self is BigQueryDialect | GenericDialect) {
8212            if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
8213                cluster_by = Some(WrappedCollection::NoWrapping(
8214                    self.parse_comma_separated(|p| p.parse_expr())?,
8215                ));
8216            };
8217
8218            if let Token::Word(word) = self.peek_token().token {
8219                if word.keyword == Keyword::OPTIONS {
8220                    table_options =
8221                        CreateTableOptions::Options(self.parse_options(Keyword::OPTIONS)?)
8222                }
8223            };
8224        }
8225
8226        if !dialect_of!(self is HiveDialect) && table_options == CreateTableOptions::None {
8227            let plain_options = self.parse_plain_options()?;
8228            if !plain_options.is_empty() {
8229                table_options = CreateTableOptions::Plain(plain_options)
8230            }
8231        };
8232
8233        Ok(CreateTableConfiguration {
8234            partition_by,
8235            cluster_by,
8236            inherits,
8237            table_options,
8238        })
8239    }
8240
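    /// Parse a single "plain" (mostly MySQL/ClickHouse-flavored) table option.
    ///
    /// A representative sketch of the kind of options handled here (illustrative only):
    /// ```sql
    /// CREATE TABLE t (a INT) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='demo';
    /// ```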
8241    fn parse_plain_option(&mut self) -> Result<Option<SqlOption>, ParserError> {
8242        // Single parameter option
8243        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
8244        if self.parse_keywords(&[Keyword::START, Keyword::TRANSACTION]) {
8245            return Ok(Some(SqlOption::Ident(Ident::new("START TRANSACTION"))));
8246        }
8247
8248        // Custom option
8249        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
8250        if self.parse_keywords(&[Keyword::COMMENT]) {
8251            let has_eq = self.consume_token(&Token::Eq);
8252            let value = self.next_token();
8253
8254            let comment = match (has_eq, value.token) {
8255                (true, Token::SingleQuotedString(s)) => {
8256                    Ok(Some(SqlOption::Comment(CommentDef::WithEq(s))))
8257                }
8258                (false, Token::SingleQuotedString(s)) => {
8259                    Ok(Some(SqlOption::Comment(CommentDef::WithoutEq(s))))
8260                }
8261                (_, token) => {
8262                    self.expected("Token::SingleQuotedString", TokenWithSpan::wrap(token))
8263                }
8264            };
8265            return comment;
8266        }
8267
8268        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
8269        // <https://clickhouse.com/docs/sql-reference/statements/create/table>
8270        if self.parse_keywords(&[Keyword::ENGINE]) {
8271            let _ = self.consume_token(&Token::Eq);
8272            let value = self.next_token();
8273
8274            let engine = match value.token {
8275                Token::Word(w) => {
8276                    let parameters = if self.peek_token() == Token::LParen {
8277                        self.parse_parenthesized_identifiers()?
8278                    } else {
8279                        vec![]
8280                    };
8281
8282                    Ok(Some(SqlOption::NamedParenthesizedList(
8283                        NamedParenthesizedList {
8284                            key: Ident::new("ENGINE"),
8285                            name: Some(Ident::new(w.value)),
8286                            values: parameters,
8287                        },
8288                    )))
8289                }
8290                _ => {
8291                    return self.expected("Token::Word", value)?;
8292                }
8293            };
8294
8295            return engine;
8296        }
8297
8298        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
8299        if self.parse_keywords(&[Keyword::TABLESPACE]) {
8300            let _ = self.consume_token(&Token::Eq);
8301            let value = self.next_token();
8302
8303            let tablespace = match value.token {
8304                Token::Word(Word { value: name, .. }) | Token::SingleQuotedString(name) => {
8305                    let storage = match self.parse_keyword(Keyword::STORAGE) {
8306                        true => {
8307                            let _ = self.consume_token(&Token::Eq);
8308                            let storage_token = self.next_token();
8309                            match &storage_token.token {
8310                                Token::Word(w) => match w.value.to_uppercase().as_str() {
8311                                    "DISK" => Some(StorageType::Disk),
8312                                    "MEMORY" => Some(StorageType::Memory),
8313                                    _ => self
8314                                        .expected("Storage type (DISK or MEMORY)", storage_token)?,
8315                                },
8316                                _ => self.expected("Token::Word", storage_token)?,
8317                            }
8318                        }
8319                        false => None,
8320                    };
8321
8322                    Ok(Some(SqlOption::TableSpace(TablespaceOption {
8323                        name,
8324                        storage,
8325                    })))
8326                }
8327                _ => {
8328                    return self.expected("Token::Word", value)?;
8329                }
8330            };
8331
8332            return tablespace;
8333        }
8334
8335        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
8336        if self.parse_keyword(Keyword::UNION) {
8337            let _ = self.consume_token(&Token::Eq);
8338            let value = self.next_token();
8339
8340            match value.token {
8341                Token::LParen => {
8342                    let tables: Vec<Ident> =
8343                        self.parse_comma_separated0(Parser::parse_identifier, Token::RParen)?;
8344                    self.expect_token(&Token::RParen)?;
8345
8346                    return Ok(Some(SqlOption::NamedParenthesizedList(
8347                        NamedParenthesizedList {
8348                            key: Ident::new("UNION"),
8349                            name: None,
8350                            values: tables,
8351                        },
8352                    )));
8353                }
8354                _ => {
8355                    return self.expected("Token::LParen", value)?;
8356                }
8357            }
8358        }
8359
8360        // Key/Value parameter option
8361        let key = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARSET]) {
8362            Ident::new("DEFAULT CHARSET")
8363        } else if self.parse_keyword(Keyword::CHARSET) {
8364            Ident::new("CHARSET")
8365        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARACTER, Keyword::SET]) {
8366            Ident::new("DEFAULT CHARACTER SET")
8367        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
8368            Ident::new("CHARACTER SET")
8369        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
8370            Ident::new("DEFAULT COLLATE")
8371        } else if self.parse_keyword(Keyword::COLLATE) {
8372            Ident::new("COLLATE")
8373        } else if self.parse_keywords(&[Keyword::DATA, Keyword::DIRECTORY]) {
8374            Ident::new("DATA DIRECTORY")
8375        } else if self.parse_keywords(&[Keyword::INDEX, Keyword::DIRECTORY]) {
8376            Ident::new("INDEX DIRECTORY")
8377        } else if self.parse_keyword(Keyword::KEY_BLOCK_SIZE) {
8378            Ident::new("KEY_BLOCK_SIZE")
8379        } else if self.parse_keyword(Keyword::ROW_FORMAT) {
8380            Ident::new("ROW_FORMAT")
8381        } else if self.parse_keyword(Keyword::PACK_KEYS) {
8382            Ident::new("PACK_KEYS")
8383        } else if self.parse_keyword(Keyword::STATS_AUTO_RECALC) {
8384            Ident::new("STATS_AUTO_RECALC")
8385        } else if self.parse_keyword(Keyword::STATS_PERSISTENT) {
8386            Ident::new("STATS_PERSISTENT")
8387        } else if self.parse_keyword(Keyword::STATS_SAMPLE_PAGES) {
8388            Ident::new("STATS_SAMPLE_PAGES")
8389        } else if self.parse_keyword(Keyword::DELAY_KEY_WRITE) {
8390            Ident::new("DELAY_KEY_WRITE")
8391        } else if self.parse_keyword(Keyword::COMPRESSION) {
8392            Ident::new("COMPRESSION")
8393        } else if self.parse_keyword(Keyword::ENCRYPTION) {
8394            Ident::new("ENCRYPTION")
8395        } else if self.parse_keyword(Keyword::MAX_ROWS) {
8396            Ident::new("MAX_ROWS")
8397        } else if self.parse_keyword(Keyword::MIN_ROWS) {
8398            Ident::new("MIN_ROWS")
8399        } else if self.parse_keyword(Keyword::AUTOEXTEND_SIZE) {
8400            Ident::new("AUTOEXTEND_SIZE")
8401        } else if self.parse_keyword(Keyword::AVG_ROW_LENGTH) {
8402            Ident::new("AVG_ROW_LENGTH")
8403        } else if self.parse_keyword(Keyword::CHECKSUM) {
8404            Ident::new("CHECKSUM")
8405        } else if self.parse_keyword(Keyword::CONNECTION) {
8406            Ident::new("CONNECTION")
8407        } else if self.parse_keyword(Keyword::ENGINE_ATTRIBUTE) {
8408            Ident::new("ENGINE_ATTRIBUTE")
8409        } else if self.parse_keyword(Keyword::PASSWORD) {
8410            Ident::new("PASSWORD")
8411        } else if self.parse_keyword(Keyword::SECONDARY_ENGINE_ATTRIBUTE) {
8412            Ident::new("SECONDARY_ENGINE_ATTRIBUTE")
8413        } else if self.parse_keyword(Keyword::INSERT_METHOD) {
8414            Ident::new("INSERT_METHOD")
8415        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
8416            Ident::new("AUTO_INCREMENT")
8417        } else {
8418            return Ok(None);
8419        };
8420
8421        let _ = self.consume_token(&Token::Eq);
8422
8423        let value = match self
8424            .maybe_parse(|parser| parser.parse_value())?
8425            .map(Expr::Value)
8426        {
8427            Some(expr) => expr,
8428            None => Expr::Identifier(self.parse_identifier()?),
8429        };
8430
8431        Ok(Some(SqlOption::KeyValue { key, value }))
8432    }
8433
8434    pub fn parse_plain_options(&mut self) -> Result<Vec<SqlOption>, ParserError> {
8435        let mut options = Vec::new();
8436
8437        while let Some(option) = self.parse_plain_option()? {
8438            options.push(option);
8439            // Some dialects support comma-separated options; consuming the comma for all
8440            // dialects shouldn't introduce any ambiguity.
8441            let _ = self.consume_token(&Token::Comma);
8442        }
8443
8444        Ok(options)
8445    }
8446
8447    pub fn parse_optional_inline_comment(&mut self) -> Result<Option<CommentDef>, ParserError> {
8448        let comment = if self.parse_keyword(Keyword::COMMENT) {
8449            let has_eq = self.consume_token(&Token::Eq);
8450            let comment = self.parse_comment_value()?;
8451            Some(if has_eq {
8452                CommentDef::WithEq(comment)
8453            } else {
8454                CommentDef::WithoutEq(comment)
8455            })
8456        } else {
8457            None
8458        };
8459        Ok(comment)
8460    }
8461
8462    pub fn parse_comment_value(&mut self) -> Result<String, ParserError> {
8463        let next_token = self.next_token();
8464        let value = match next_token.token {
8465            Token::SingleQuotedString(str) => str,
8466            Token::DollarQuotedString(str) => str.value,
8467            _ => self.expected("string literal", next_token)?,
8468        };
8469        Ok(value)
8470    }
8471
8472    pub fn parse_optional_procedure_parameters(
8473        &mut self,
8474    ) -> Result<Option<Vec<ProcedureParam>>, ParserError> {
8475        let mut params = vec![];
8476        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
8477            return Ok(Some(params));
8478        }
8479        loop {
8480            if let Token::Word(_) = self.peek_token().token {
8481                params.push(self.parse_procedure_param()?)
8482            }
8483            let comma = self.consume_token(&Token::Comma);
8484            if self.consume_token(&Token::RParen) {
8485                // allow a trailing comma, even though it's not in the standard
8486                break;
8487            } else if !comma {
8488                return self.expected("',' or ')' after parameter definition", self.peek_token());
8489            }
8490        }
8491        Ok(Some(params))
8492    }
8493
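    /// Parse the parenthesized column and constraint list of a `CREATE TABLE`.
    ///
    /// For example, a list along these lines (illustrative only):
    /// ```sql
    /// (id INT PRIMARY KEY, name TEXT NOT NULL, CONSTRAINT uq_name UNIQUE (name))
    /// ```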
8494    pub fn parse_columns(&mut self) -> Result<(Vec<ColumnDef>, Vec<TableConstraint>), ParserError> {
8495        let mut columns = vec![];
8496        let mut constraints = vec![];
8497        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
8498            return Ok((columns, constraints));
8499        }
8500
8501        loop {
8502            if let Some(constraint) = self.parse_optional_table_constraint()? {
8503                constraints.push(constraint);
8504            } else if let Token::Word(_) = self.peek_token().token {
8505                columns.push(self.parse_column_def()?);
8506            } else {
8507                return self.expected("column name or constraint definition", self.peek_token());
8508            }
8509
8510            let comma = self.consume_token(&Token::Comma);
8511            let rparen = self.peek_token().token == Token::RParen;
8512
8513            if !comma && !rparen {
8514                return self.expected("',' or ')' after column definition", self.peek_token());
8515            };
8516
8517            if rparen
8518                && (!comma
8519                    || self.dialect.supports_column_definition_trailing_commas()
8520                    || self.options.trailing_commas)
8521            {
8522                let _ = self.consume_token(&Token::RParen);
8523                break;
8524            }
8525        }
8526
8527        Ok((columns, constraints))
8528    }
8529
8530    pub fn parse_procedure_param(&mut self) -> Result<ProcedureParam, ParserError> {
8531        let mode = if self.parse_keyword(Keyword::IN) {
8532            Some(ArgMode::In)
8533        } else if self.parse_keyword(Keyword::OUT) {
8534            Some(ArgMode::Out)
8535        } else if self.parse_keyword(Keyword::INOUT) {
8536            Some(ArgMode::InOut)
8537        } else {
8538            None
8539        };
8540        let name = self.parse_identifier()?;
8541        let data_type = self.parse_data_type()?;
8542        let default = if self.consume_token(&Token::Eq) {
8543            Some(self.parse_expr()?)
8544        } else {
8545            None
8546        };
8547
8548        Ok(ProcedureParam {
8549            name,
8550            data_type,
8551            mode,
8552            default,
8553        })
8554    }
8555
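    /// Parse a single column definition.
    ///
    /// For example, a definition along these lines (illustrative only):
    /// ```sql
    /// name VARCHAR(100) NOT NULL DEFAULT 'unknown'
    /// ```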
8556    pub fn parse_column_def(&mut self) -> Result<ColumnDef, ParserError> {
8557        let col_name = self.parse_identifier()?;
8558        let data_type = if self.is_column_type_sqlite_unspecified() {
8559            DataType::Unspecified
8560        } else {
8561            self.parse_data_type()?
8562        };
8563        let mut options = vec![];
8564        loop {
8565            if self.parse_keyword(Keyword::CONSTRAINT) {
8566                let name = Some(self.parse_identifier()?);
8567                if let Some(option) = self.parse_optional_column_option()? {
8568                    options.push(ColumnOptionDef { name, option });
8569                } else {
8570                    return self.expected(
8571                        "constraint details after CONSTRAINT <name>",
8572                        self.peek_token(),
8573                    );
8574                }
8575            } else if let Some(option) = self.parse_optional_column_option()? {
8576                options.push(ColumnOptionDef { name: None, option });
8577            } else {
8578                break;
8579            };
8580        }
8581        Ok(ColumnDef {
8582            name: col_name,
8583            data_type,
8584            options,
8585        })
8586    }
8587
8588    fn is_column_type_sqlite_unspecified(&mut self) -> bool {
8589        if dialect_of!(self is SQLiteDialect) {
8590            match self.peek_token().token {
8591                Token::Word(word) => matches!(
8592                    word.keyword,
8593                    Keyword::CONSTRAINT
8594                        | Keyword::PRIMARY
8595                        | Keyword::NOT
8596                        | Keyword::UNIQUE
8597                        | Keyword::CHECK
8598                        | Keyword::DEFAULT
8599                        | Keyword::COLLATE
8600                        | Keyword::REFERENCES
8601                        | Keyword::GENERATED
8602                        | Keyword::AS
8603                ),
8604                _ => true, // e.g. comma immediately after column name
8605            }
8606        } else {
8607            false
8608        }
8609    }
8610
8611    pub fn parse_optional_column_option(&mut self) -> Result<Option<ColumnOption>, ParserError> {
8612        if let Some(option) = self.dialect.parse_column_option(self)? {
8613            return option;
8614        }
8615
8616        self.with_state(
8617            ColumnDefinition,
8618            |parser| -> Result<Option<ColumnOption>, ParserError> {
8619                parser.parse_optional_column_option_inner()
8620            },
8621        )
8622    }
8623
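    /// Parse a single column option, if one is present.
    ///
    /// Illustrative fragments of the options handled below (not exhaustive):
    /// ```sql
    /// NOT NULL
    /// DEFAULT 0
    /// REFERENCES users(id) ON DELETE CASCADE
    /// GENERATED ALWAYS AS IDENTITY
    /// ```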
8624    fn parse_optional_column_option_inner(&mut self) -> Result<Option<ColumnOption>, ParserError> {
8625        if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
8626            Ok(Some(ColumnOption::CharacterSet(
8627                self.parse_object_name(false)?,
8628            )))
8629        } else if self.parse_keywords(&[Keyword::COLLATE]) {
8630            Ok(Some(ColumnOption::Collation(
8631                self.parse_object_name(false)?,
8632            )))
8633        } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
8634            Ok(Some(ColumnOption::NotNull))
8635        } else if self.parse_keywords(&[Keyword::COMMENT]) {
8636            Ok(Some(ColumnOption::Comment(self.parse_comment_value()?)))
8637        } else if self.parse_keyword(Keyword::NULL) {
8638            Ok(Some(ColumnOption::Null))
8639        } else if self.parse_keyword(Keyword::DEFAULT) {
8640            Ok(Some(ColumnOption::Default(
8641                self.parse_column_option_expr()?,
8642            )))
8643        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
8644            && self.parse_keyword(Keyword::MATERIALIZED)
8645        {
8646            Ok(Some(ColumnOption::Materialized(
8647                self.parse_column_option_expr()?,
8648            )))
8649        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
8650            && self.parse_keyword(Keyword::ALIAS)
8651        {
8652            Ok(Some(ColumnOption::Alias(self.parse_column_option_expr()?)))
8653        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
8654            && self.parse_keyword(Keyword::EPHEMERAL)
8655        {
8656            // The expression is optional for the EPHEMERAL syntax, so we need to check
8657            // if the column definition has remaining tokens before parsing the expression.
8658            if matches!(self.peek_token().token, Token::Comma | Token::RParen) {
8659                Ok(Some(ColumnOption::Ephemeral(None)))
8660            } else {
8661                Ok(Some(ColumnOption::Ephemeral(Some(
8662                    self.parse_column_option_expr()?,
8663                ))))
8664            }
8665        } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
8666            let characteristics = self.parse_constraint_characteristics()?;
8667            Ok(Some(
8668                PrimaryKeyConstraint {
8669                    name: None,
8670                    index_name: None,
8671                    index_type: None,
8672                    columns: vec![],
8673                    index_options: vec![],
8674                    characteristics,
8675                }
8676                .into(),
8677            ))
8678        } else if self.parse_keyword(Keyword::UNIQUE) {
8679            let characteristics = self.parse_constraint_characteristics()?;
8680            Ok(Some(
8681                UniqueConstraint {
8682                    name: None,
8683                    index_name: None,
8684                    index_type_display: KeyOrIndexDisplay::None,
8685                    index_type: None,
8686                    columns: vec![],
8687                    index_options: vec![],
8688                    characteristics,
8689                    nulls_distinct: NullsDistinctOption::None,
8690                }
8691                .into(),
8692            ))
8693        } else if self.parse_keyword(Keyword::REFERENCES) {
8694            let foreign_table = self.parse_object_name(false)?;
8695            // PostgreSQL allows omitting the column list and
8696            // uses the primary key column of the foreign table by default
8697            let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
8698            let mut match_kind = None;
8699            let mut on_delete = None;
8700            let mut on_update = None;
8701            loop {
8702                if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
8703                    match_kind = Some(self.parse_match_kind()?);
8704                } else if on_delete.is_none()
8705                    && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
8706                {
8707                    on_delete = Some(self.parse_referential_action()?);
8708                } else if on_update.is_none()
8709                    && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
8710                {
8711                    on_update = Some(self.parse_referential_action()?);
8712                } else {
8713                    break;
8714                }
8715            }
8716            let characteristics = self.parse_constraint_characteristics()?;
8717
8718            Ok(Some(
8719                ForeignKeyConstraint {
8720                    name: None,       // Column-level constraints don't have names
8721                    index_name: None, // Not applicable for column-level constraints
8722                    columns: vec![],  // Not applicable for column-level constraints
8723                    foreign_table,
8724                    referred_columns,
8725                    on_delete,
8726                    on_update,
8727                    match_kind,
8728                    characteristics,
8729                }
8730                .into(),
8731            ))
8732        } else if self.parse_keyword(Keyword::CHECK) {
8733            self.expect_token(&Token::LParen)?;
8734            // since `CHECK` requires parentheses, we can parse the inner expression in ParserState::Normal
8735            let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
8736            self.expect_token(&Token::RParen)?;
8737            Ok(Some(
8738                CheckConstraint {
8739                    name: None, // Column-level check constraints don't have names
8740                    expr: Box::new(expr),
8741                    enforced: None, // Could be extended later to support MySQL ENFORCED/NOT ENFORCED
8742                }
8743                .into(),
8744            ))
8745        } else if self.parse_keyword(Keyword::AUTO_INCREMENT)
8746            && dialect_of!(self is MySqlDialect | GenericDialect)
8747        {
8748            // Support AUTO_INCREMENT for MySQL
8749            Ok(Some(ColumnOption::DialectSpecific(vec![
8750                Token::make_keyword("AUTO_INCREMENT"),
8751            ])))
8752        } else if self.parse_keyword(Keyword::AUTOINCREMENT)
8753            && dialect_of!(self is SQLiteDialect |  GenericDialect)
8754        {
8755            // Support AUTOINCREMENT for SQLite
8756            Ok(Some(ColumnOption::DialectSpecific(vec![
8757                Token::make_keyword("AUTOINCREMENT"),
8758            ])))
8759        } else if self.parse_keyword(Keyword::ASC)
8760            && self.dialect.supports_asc_desc_in_column_definition()
8761        {
8762            // Support ASC for SQLite
8763            Ok(Some(ColumnOption::DialectSpecific(vec![
8764                Token::make_keyword("ASC"),
8765            ])))
8766        } else if self.parse_keyword(Keyword::DESC)
8767            && self.dialect.supports_asc_desc_in_column_definition()
8768        {
8769            // Support DESC for SQLite
8770            Ok(Some(ColumnOption::DialectSpecific(vec![
8771                Token::make_keyword("DESC"),
8772            ])))
8773        } else if self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
8774            && dialect_of!(self is MySqlDialect | GenericDialect)
8775        {
8776            let expr = self.parse_column_option_expr()?;
8777            Ok(Some(ColumnOption::OnUpdate(expr)))
8778        } else if self.parse_keyword(Keyword::GENERATED) {
8779            self.parse_optional_column_option_generated()
8780        } else if dialect_of!(self is BigQueryDialect | GenericDialect)
8781            && self.parse_keyword(Keyword::OPTIONS)
8782        {
8783            self.prev_token();
8784            Ok(Some(ColumnOption::Options(
8785                self.parse_options(Keyword::OPTIONS)?,
8786            )))
8787        } else if self.parse_keyword(Keyword::AS)
8788            && dialect_of!(self is MySqlDialect | SQLiteDialect | DuckDbDialect | GenericDialect)
8789        {
8790            self.parse_optional_column_option_as()
8791        } else if self.parse_keyword(Keyword::SRID)
8792            && dialect_of!(self is MySqlDialect | GenericDialect)
8793        {
8794            Ok(Some(ColumnOption::Srid(Box::new(
8795                self.parse_column_option_expr()?,
8796            ))))
8797        } else if self.parse_keyword(Keyword::IDENTITY)
8798            && dialect_of!(self is MsSqlDialect | GenericDialect)
8799        {
8800            let parameters = if self.consume_token(&Token::LParen) {
8801                let seed = self.parse_number()?;
8802                self.expect_token(&Token::Comma)?;
8803                let increment = self.parse_number()?;
8804                self.expect_token(&Token::RParen)?;
8805
8806                Some(IdentityPropertyFormatKind::FunctionCall(
8807                    IdentityParameters { seed, increment },
8808                ))
8809            } else {
8810                None
8811            };
8812            Ok(Some(ColumnOption::Identity(
8813                IdentityPropertyKind::Identity(IdentityProperty {
8814                    parameters,
8815                    order: None,
8816                }),
8817            )))
8818        } else if dialect_of!(self is SQLiteDialect | GenericDialect)
8819            && self.parse_keywords(&[Keyword::ON, Keyword::CONFLICT])
8820        {
8821            // Support ON CONFLICT for SQLite
8822            Ok(Some(ColumnOption::OnConflict(
8823                self.expect_one_of_keywords(&[
8824                    Keyword::ROLLBACK,
8825                    Keyword::ABORT,
8826                    Keyword::FAIL,
8827                    Keyword::IGNORE,
8828                    Keyword::REPLACE,
8829                ])?,
8830            )))
8831        } else if self.parse_keyword(Keyword::INVISIBLE) {
8832            Ok(Some(ColumnOption::Invisible))
8833        } else {
8834            Ok(None)
8835        }
8836    }
8837
8838    /// When parsing some column option expressions we need to revert to [ParserState::Normal] since
8839    /// `NOT NULL` is allowed as an alias for `IS NOT NULL`.
8840    /// In those cases we use this helper instead of calling [Parser::parse_expr] directly.
8841    ///
8842    /// For example, consider these `CREATE TABLE` statements:
8843    /// ```sql
8844    /// CREATE TABLE foo (abc BOOL DEFAULT (42 NOT NULL) NOT NULL);
8845    /// ```
8846    /// vs
8847    /// ```sql
8848    /// CREATE TABLE foo (abc BOOL NOT NULL);
8849    /// ```
8850    ///
8851    /// In the first statement, the inner portion of `(42 NOT NULL)` should be parsed as [Expr::IsNotNull],
8852    /// whereas in both statements the trailing `NOT NULL` should only be parsed as a
8853    /// [ColumnOption::NotNull].
8854    fn parse_column_option_expr(&mut self) -> Result<Expr, ParserError> {
8855        if self.peek_token_ref().token == Token::LParen {
8856            let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_prefix())?;
8857            Ok(expr)
8858        } else {
8859            Ok(self.parse_expr()?)
8860        }
8861    }
8862
8863    pub(crate) fn parse_tag(&mut self) -> Result<Tag, ParserError> {
8864        let name = self.parse_object_name(false)?;
8865        self.expect_token(&Token::Eq)?;
8866        let value = self.parse_literal_string()?;
8867
8868        Ok(Tag::new(name, value))
8869    }
8870
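    /// Parse the tail of a `GENERATED ...` column option (the `GENERATED` keyword has already
    /// been consumed by the caller).
    ///
    /// Full forms look roughly like the following (illustrative only):
    /// ```sql
    /// GENERATED ALWAYS AS IDENTITY (START WITH 1 INCREMENT BY 1)
    /// GENERATED BY DEFAULT AS IDENTITY
    /// GENERATED ALWAYS AS (price * qty) STORED
    /// ```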
8871    fn parse_optional_column_option_generated(
8872        &mut self,
8873    ) -> Result<Option<ColumnOption>, ParserError> {
8874        if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS, Keyword::IDENTITY]) {
8875            let mut sequence_options = vec![];
8876            if self.expect_token(&Token::LParen).is_ok() {
8877                sequence_options = self.parse_create_sequence_options()?;
8878                self.expect_token(&Token::RParen)?;
8879            }
8880            Ok(Some(ColumnOption::Generated {
8881                generated_as: GeneratedAs::Always,
8882                sequence_options: Some(sequence_options),
8883                generation_expr: None,
8884                generation_expr_mode: None,
8885                generated_keyword: true,
8886            }))
8887        } else if self.parse_keywords(&[
8888            Keyword::BY,
8889            Keyword::DEFAULT,
8890            Keyword::AS,
8891            Keyword::IDENTITY,
8892        ]) {
8893            let mut sequence_options = vec![];
8894            if self.expect_token(&Token::LParen).is_ok() {
8895                sequence_options = self.parse_create_sequence_options()?;
8896                self.expect_token(&Token::RParen)?;
8897            }
8898            Ok(Some(ColumnOption::Generated {
8899                generated_as: GeneratedAs::ByDefault,
8900                sequence_options: Some(sequence_options),
8901                generation_expr: None,
8902                generation_expr_mode: None,
8903                generated_keyword: true,
8904            }))
8905        } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS]) {
8906            if self.expect_token(&Token::LParen).is_ok() {
8907                let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
8908                self.expect_token(&Token::RParen)?;
8909                let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
8910                    Ok((
8911                        GeneratedAs::ExpStored,
8912                        Some(GeneratedExpressionMode::Stored),
8913                    ))
8914                } else if dialect_of!(self is PostgreSqlDialect) {
8915                    // Postgres' AS IDENTITY branches are handled above; this one requires STORED
8916                    self.expected("STORED", self.peek_token())
8917                } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
8918                    Ok((GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual)))
8919                } else {
8920                    Ok((GeneratedAs::Always, None))
8921                }?;
8922
8923                Ok(Some(ColumnOption::Generated {
8924                    generated_as: gen_as,
8925                    sequence_options: None,
8926                    generation_expr: Some(expr),
8927                    generation_expr_mode: expr_mode,
8928                    generated_keyword: true,
8929                }))
8930            } else {
8931                Ok(None)
8932            }
8933        } else {
8934            Ok(None)
8935        }
8936    }
8937
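    /// Parse the `AS (expr) [STORED | VIRTUAL]` shorthand for a generated column.
    ///
    /// For example, a MySQL/SQLite-style column definition along these lines (illustrative only):
    /// ```sql
    /// total INT AS (price * qty) STORED
    /// ```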
8938    fn parse_optional_column_option_as(&mut self) -> Result<Option<ColumnOption>, ParserError> {
8939        // Some DBs allow 'AS (expr)', shorthand for GENERATED ALWAYS AS
8940        self.expect_token(&Token::LParen)?;
8941        let expr = self.parse_expr()?;
8942        self.expect_token(&Token::RParen)?;
8943
8944        let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
8945            (
8946                GeneratedAs::ExpStored,
8947                Some(GeneratedExpressionMode::Stored),
8948            )
8949        } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
8950            (GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual))
8951        } else {
8952            (GeneratedAs::Always, None)
8953        };
8954
8955        Ok(Some(ColumnOption::Generated {
8956            generated_as: gen_as,
8957            sequence_options: None,
8958            generation_expr: Some(expr),
8959            generation_expr_mode: expr_mode,
8960            generated_keyword: false,
8961        }))
8962    }
8963
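    /// Parse an optional Hive-style `CLUSTERED BY` clause.
    ///
    /// For example, a clause along these lines (illustrative only):
    /// ```sql
    /// CLUSTERED BY (user_id) SORTED BY (user_id ASC) INTO 32 BUCKETS
    /// ```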
8964    pub fn parse_optional_clustered_by(&mut self) -> Result<Option<ClusteredBy>, ParserError> {
8965        let clustered_by = if dialect_of!(self is HiveDialect|GenericDialect)
8966            && self.parse_keywords(&[Keyword::CLUSTERED, Keyword::BY])
8967        {
8968            let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
8969
8970            let sorted_by = if self.parse_keywords(&[Keyword::SORTED, Keyword::BY]) {
8971                self.expect_token(&Token::LParen)?;
8972                let sorted_by_columns = self.parse_comma_separated(|p| p.parse_order_by_expr())?;
8973                self.expect_token(&Token::RParen)?;
8974                Some(sorted_by_columns)
8975            } else {
8976                None
8977            };
8978
8979            self.expect_keyword_is(Keyword::INTO)?;
8980            let num_buckets = self.parse_number_value()?.value;
8981            self.expect_keyword_is(Keyword::BUCKETS)?;
8982            Some(ClusteredBy {
8983                columns,
8984                sorted_by,
8985                num_buckets,
8986            })
8987        } else {
8988            None
8989        };
8990        Ok(clustered_by)
8991    }
8992
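    /// Parse the action of an `ON DELETE` / `ON UPDATE` referential clause.
    ///
    /// For example (illustrative only):
    /// ```sql
    /// FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE ON UPDATE NO ACTION
    /// ```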
8993    pub fn parse_referential_action(&mut self) -> Result<ReferentialAction, ParserError> {
8994        if self.parse_keyword(Keyword::RESTRICT) {
8995            Ok(ReferentialAction::Restrict)
8996        } else if self.parse_keyword(Keyword::CASCADE) {
8997            Ok(ReferentialAction::Cascade)
8998        } else if self.parse_keywords(&[Keyword::SET, Keyword::NULL]) {
8999            Ok(ReferentialAction::SetNull)
9000        } else if self.parse_keywords(&[Keyword::NO, Keyword::ACTION]) {
9001            Ok(ReferentialAction::NoAction)
9002        } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
9003            Ok(ReferentialAction::SetDefault)
9004        } else {
9005            self.expected(
9006                "one of RESTRICT, CASCADE, SET NULL, NO ACTION or SET DEFAULT",
9007                self.peek_token(),
9008            )
9009        }
9010    }
9011
9012    pub fn parse_match_kind(&mut self) -> Result<ConstraintReferenceMatchKind, ParserError> {
9013        if self.parse_keyword(Keyword::FULL) {
9014            Ok(ConstraintReferenceMatchKind::Full)
9015        } else if self.parse_keyword(Keyword::PARTIAL) {
9016            Ok(ConstraintReferenceMatchKind::Partial)
9017        } else if self.parse_keyword(Keyword::SIMPLE) {
9018            Ok(ConstraintReferenceMatchKind::Simple)
9019        } else {
9020            self.expected("one of FULL, PARTIAL or SIMPLE", self.peek_token())
9021        }
9022    }
9023
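    /// Parse optional constraint characteristics (`DEFERRABLE`, `INITIALLY ...`, `[NOT] ENFORCED`).
    ///
    /// For example, a constraint along these lines (illustrative only):
    /// ```sql
    /// UNIQUE (email) DEFERRABLE INITIALLY DEFERRED
    /// ```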
9024    pub fn parse_constraint_characteristics(
9025        &mut self,
9026    ) -> Result<Option<ConstraintCharacteristics>, ParserError> {
9027        let mut cc = ConstraintCharacteristics::default();
9028
9029        loop {
9030            if cc.deferrable.is_none() && self.parse_keywords(&[Keyword::NOT, Keyword::DEFERRABLE])
9031            {
9032                cc.deferrable = Some(false);
9033            } else if cc.deferrable.is_none() && self.parse_keyword(Keyword::DEFERRABLE) {
9034                cc.deferrable = Some(true);
9035            } else if cc.initially.is_none() && self.parse_keyword(Keyword::INITIALLY) {
9036                if self.parse_keyword(Keyword::DEFERRED) {
9037                    cc.initially = Some(DeferrableInitial::Deferred);
9038                } else if self.parse_keyword(Keyword::IMMEDIATE) {
9039                    cc.initially = Some(DeferrableInitial::Immediate);
9040                } else {
9041                    self.expected("one of DEFERRED or IMMEDIATE", self.peek_token())?;
9042                }
9043            } else if cc.enforced.is_none() && self.parse_keyword(Keyword::ENFORCED) {
9044                cc.enforced = Some(true);
9045            } else if cc.enforced.is_none()
9046                && self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED])
9047            {
9048                cc.enforced = Some(false);
9049            } else {
9050                break;
9051            }
9052        }
9053
9054        if cc.deferrable.is_some() || cc.initially.is_some() || cc.enforced.is_some() {
9055            Ok(Some(cc))
9056        } else {
9057            Ok(None)
9058        }
9059    }
9060
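    /// Parse an optional table-level constraint.
    ///
    /// For example, constraints along these lines (illustrative only):
    /// ```sql
    /// CONSTRAINT fk_user FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE SET NULL
    /// PRIMARY KEY (id)
    /// CHECK (qty > 0)
    /// ```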
9061    pub fn parse_optional_table_constraint(
9062        &mut self,
9063    ) -> Result<Option<TableConstraint>, ParserError> {
9064        let name = if self.parse_keyword(Keyword::CONSTRAINT) {
9065            Some(self.parse_identifier()?)
9066        } else {
9067            None
9068        };
9069
9070        let next_token = self.next_token();
9071        match next_token.token {
9072            Token::Word(w) if w.keyword == Keyword::UNIQUE => {
9073                let index_type_display = self.parse_index_type_display();
9074                if !dialect_of!(self is GenericDialect | MySqlDialect)
9075                    && !index_type_display.is_none()
9076                {
9077                    return self
9078                        .expected("`index_name` or `(column_name [, ...])`", self.peek_token());
9079                }
9080
9081                let nulls_distinct = self.parse_optional_nulls_distinct()?;
9082
9083                // optional index name
9084                let index_name = self.parse_optional_ident()?;
9085                let index_type = self.parse_optional_using_then_index_type()?;
9086
9087                let columns = self.parse_parenthesized_index_column_list()?;
9088                let index_options = self.parse_index_options()?;
9089                let characteristics = self.parse_constraint_characteristics()?;
9090                Ok(Some(
9091                    UniqueConstraint {
9092                        name,
9093                        index_name,
9094                        index_type_display,
9095                        index_type,
9096                        columns,
9097                        index_options,
9098                        characteristics,
9099                        nulls_distinct,
9100                    }
9101                    .into(),
9102                ))
9103            }
9104            Token::Word(w) if w.keyword == Keyword::PRIMARY => {
9105                // `PRIMARY` is always followed by `KEY`
9106                self.expect_keyword_is(Keyword::KEY)?;
9107
9108                // optional index name
9109                let index_name = self.parse_optional_ident()?;
9110                let index_type = self.parse_optional_using_then_index_type()?;
9111
9112                let columns = self.parse_parenthesized_index_column_list()?;
9113                let index_options = self.parse_index_options()?;
9114                let characteristics = self.parse_constraint_characteristics()?;
9115                Ok(Some(
9116                    PrimaryKeyConstraint {
9117                        name,
9118                        index_name,
9119                        index_type,
9120                        columns,
9121                        index_options,
9122                        characteristics,
9123                    }
9124                    .into(),
9125                ))
9126            }
9127            Token::Word(w) if w.keyword == Keyword::FOREIGN => {
9128                self.expect_keyword_is(Keyword::KEY)?;
9129                let index_name = self.parse_optional_ident()?;
9130                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
9131                self.expect_keyword_is(Keyword::REFERENCES)?;
9132                let foreign_table = self.parse_object_name(false)?;
9133                let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
9134                let mut match_kind = None;
9135                let mut on_delete = None;
9136                let mut on_update = None;
9137                loop {
9138                    if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
9139                        match_kind = Some(self.parse_match_kind()?);
9140                    } else if on_delete.is_none()
9141                        && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
9142                    {
9143                        on_delete = Some(self.parse_referential_action()?);
9144                    } else if on_update.is_none()
9145                        && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
9146                    {
9147                        on_update = Some(self.parse_referential_action()?);
9148                    } else {
9149                        break;
9150                    }
9151                }
9152
9153                let characteristics = self.parse_constraint_characteristics()?;
9154
9155                Ok(Some(
9156                    ForeignKeyConstraint {
9157                        name,
9158                        index_name,
9159                        columns,
9160                        foreign_table,
9161                        referred_columns,
9162                        on_delete,
9163                        on_update,
9164                        match_kind,
9165                        characteristics,
9166                    }
9167                    .into(),
9168                ))
9169            }
9170            Token::Word(w) if w.keyword == Keyword::CHECK => {
9171                self.expect_token(&Token::LParen)?;
9172                let expr = Box::new(self.parse_expr()?);
9173                self.expect_token(&Token::RParen)?;
9174
9175                let enforced = if self.parse_keyword(Keyword::ENFORCED) {
9176                    Some(true)
9177                } else if self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED]) {
9178                    Some(false)
9179                } else {
9180                    None
9181                };
9182
9183                Ok(Some(
9184                    CheckConstraint {
9185                        name,
9186                        expr,
9187                        enforced,
9188                    }
9189                    .into(),
9190                ))
9191            }
9192            Token::Word(w)
9193                if (w.keyword == Keyword::INDEX || w.keyword == Keyword::KEY)
9194                    && dialect_of!(self is GenericDialect | MySqlDialect)
9195                    && name.is_none() =>
9196            {
9197                let display_as_key = w.keyword == Keyword::KEY;
9198
9199                let name = match self.peek_token().token {
9200                    Token::Word(word) if word.keyword == Keyword::USING => None,
9201                    _ => self.parse_optional_ident()?,
9202                };
9203
9204                let index_type = self.parse_optional_using_then_index_type()?;
9205                let columns = self.parse_parenthesized_index_column_list()?;
9206                let index_options = self.parse_index_options()?;
9207
9208                Ok(Some(
9209                    IndexConstraint {
9210                        display_as_key,
9211                        name,
9212                        index_type,
9213                        columns,
9214                        index_options,
9215                    }
9216                    .into(),
9217                ))
9218            }
9219            Token::Word(w)
9220                if (w.keyword == Keyword::FULLTEXT || w.keyword == Keyword::SPATIAL)
9221                    && dialect_of!(self is GenericDialect | MySqlDialect) =>
9222            {
9223                if let Some(name) = name {
9224                    return self.expected(
9225                        "FULLTEXT or SPATIAL option without constraint name",
9226                        TokenWithSpan {
9227                            token: Token::make_keyword(&name.to_string()),
9228                            span: next_token.span,
9229                        },
9230                    );
9231                }
9232
9233                let fulltext = w.keyword == Keyword::FULLTEXT;
9234
9235                let index_type_display = self.parse_index_type_display();
9236
9237                let opt_index_name = self.parse_optional_ident()?;
9238
9239                let columns = self.parse_parenthesized_index_column_list()?;
9240
9241                Ok(Some(
9242                    FullTextOrSpatialConstraint {
9243                        fulltext,
9244                        index_type_display,
9245                        opt_index_name,
9246                        columns,
9247                    }
9248                    .into(),
9249                ))
9250            }
9251            _ => {
9252                if name.is_some() {
9253                    self.expected("PRIMARY, UNIQUE, FOREIGN, or CHECK", next_token)
9254                } else {
9255                    self.prev_token();
9256                    Ok(None)
9257                }
9258            }
9259        }
9260    }
9261
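    /// Parse an optional `NULLS [NOT] DISTINCT` modifier of a `UNIQUE` constraint.
    ///
    /// For example, a PostgreSQL-style constraint along these lines (illustrative only):
    /// ```sql
    /// UNIQUE NULLS NOT DISTINCT (email)
    /// ```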
9262    fn parse_optional_nulls_distinct(&mut self) -> Result<NullsDistinctOption, ParserError> {
9263        Ok(if self.parse_keyword(Keyword::NULLS) {
9264            let not = self.parse_keyword(Keyword::NOT);
9265            self.expect_keyword_is(Keyword::DISTINCT)?;
9266            if not {
9267                NullsDistinctOption::NotDistinct
9268            } else {
9269                NullsDistinctOption::Distinct
9270            }
9271        } else {
9272            NullsDistinctOption::None
9273        })
9274    }
9275
9276    pub fn maybe_parse_options(
9277        &mut self,
9278        keyword: Keyword,
9279    ) -> Result<Option<Vec<SqlOption>>, ParserError> {
9280        if let Token::Word(word) = self.peek_token().token {
9281            if word.keyword == keyword {
9282                return Ok(Some(self.parse_options(keyword)?));
9283            }
9284        };
9285        Ok(None)
9286    }
9287
9288    pub fn parse_options(&mut self, keyword: Keyword) -> Result<Vec<SqlOption>, ParserError> {
9289        if self.parse_keyword(keyword) {
9290            self.expect_token(&Token::LParen)?;
9291            let options = self.parse_comma_separated0(Parser::parse_sql_option, Token::RParen)?;
9292            self.expect_token(&Token::RParen)?;
9293            Ok(options)
9294        } else {
9295            Ok(vec![])
9296        }
9297    }
9298
9299    pub fn parse_options_with_keywords(
9300        &mut self,
9301        keywords: &[Keyword],
9302    ) -> Result<Vec<SqlOption>, ParserError> {
9303        if self.parse_keywords(keywords) {
9304            self.expect_token(&Token::LParen)?;
9305            let options = self.parse_comma_separated(Parser::parse_sql_option)?;
9306            self.expect_token(&Token::RParen)?;
9307            Ok(options)
9308        } else {
9309            Ok(vec![])
9310        }
9311    }
9312
9313    pub fn parse_index_type(&mut self) -> Result<IndexType, ParserError> {
9314        Ok(if self.parse_keyword(Keyword::BTREE) {
9315            IndexType::BTree
9316        } else if self.parse_keyword(Keyword::HASH) {
9317            IndexType::Hash
9318        } else if self.parse_keyword(Keyword::GIN) {
9319            IndexType::GIN
9320        } else if self.parse_keyword(Keyword::GIST) {
9321            IndexType::GiST
9322        } else if self.parse_keyword(Keyword::SPGIST) {
9323            IndexType::SPGiST
9324        } else if self.parse_keyword(Keyword::BRIN) {
9325            IndexType::BRIN
9326        } else if self.parse_keyword(Keyword::BLOOM) {
9327            IndexType::Bloom
9328        } else {
9329            IndexType::Custom(self.parse_identifier()?)
9330        })
9331    }
9332
9333    /// Optionally parse the `USING` keyword, followed by an [IndexType]
9334    /// Example:
9335    /// ```sql
9336    /// USING BTREE (name, age DESC)
9337    /// ```
9338    pub fn parse_optional_using_then_index_type(
9339        &mut self,
9340    ) -> Result<Option<IndexType>, ParserError> {
9341        if self.parse_keyword(Keyword::USING) {
9342            Ok(Some(self.parse_index_type()?))
9343        } else {
9344            Ok(None)
9345        }
9346    }
9347
9348    /// Parse an optional identifier, typically a name such as
9349    /// `window_name` or `index_name`.
9350    pub fn parse_optional_ident(&mut self) -> Result<Option<Ident>, ParserError> {
9351        self.maybe_parse(|parser| parser.parse_identifier())
9352    }
9353
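    /// Consume an optional `KEY` or `INDEX` keyword and report which one was
    /// seen, as in MySQL-style full-text definitions (illustrative sketch;
    /// dialect support may vary):
    /// ```sql
    /// CREATE TABLE articles (body TEXT, FULLTEXT INDEX ft_body (body))
    /// ```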
9354    #[must_use]
9355    pub fn parse_index_type_display(&mut self) -> KeyOrIndexDisplay {
9356        if self.parse_keyword(Keyword::KEY) {
9357            KeyOrIndexDisplay::Key
9358        } else if self.parse_keyword(Keyword::INDEX) {
9359            KeyOrIndexDisplay::Index
9360        } else {
9361            KeyOrIndexDisplay::None
9362        }
9363    }
9364
9365    pub fn parse_optional_index_option(&mut self) -> Result<Option<IndexOption>, ParserError> {
9366        if let Some(index_type) = self.parse_optional_using_then_index_type()? {
9367            Ok(Some(IndexOption::Using(index_type)))
9368        } else if self.parse_keyword(Keyword::COMMENT) {
9369            let s = self.parse_literal_string()?;
9370            Ok(Some(IndexOption::Comment(s)))
9371        } else {
9372            Ok(None)
9373        }
9374    }
9375
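    /// Parse zero or more trailing index options (`USING <index_type>` and/or
    /// `COMMENT '<text>'`), stopping at the first token that is neither.
    /// Illustrative MySQL-style fragment:
    /// ```sql
    /// USING BTREE COMMENT 'covering index'
    /// ```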
9376    pub fn parse_index_options(&mut self) -> Result<Vec<IndexOption>, ParserError> {
9377        let mut options = Vec::new();
9378
9379        loop {
9380            match self.parse_optional_index_option()? {
9381                Some(index_option) => options.push(index_option),
9382                None => return Ok(options),
9383            }
9384        }
9385    }
9386
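    /// Parse a single option inside a parenthesized option list. For most
    /// dialects this is `name = <expr>`; for MSSQL-flavored dialects it also
    /// accepts `HEAP`, `PARTITION (...)`, and `CLUSTERED ...` forms. A sketch of
    /// an MSSQL-style list whose elements land here (illustrative only):
    /// ```sql
    /// WITH (DISTRIBUTION = HASH(id), HEAP)
    /// ```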
9387    pub fn parse_sql_option(&mut self) -> Result<SqlOption, ParserError> {
9388        let is_mssql = dialect_of!(self is MsSqlDialect|GenericDialect);
9389
9390        match self.peek_token().token {
9391            Token::Word(w) if w.keyword == Keyword::HEAP && is_mssql => {
9392                Ok(SqlOption::Ident(self.parse_identifier()?))
9393            }
9394            Token::Word(w) if w.keyword == Keyword::PARTITION && is_mssql => {
9395                self.parse_option_partition()
9396            }
9397            Token::Word(w) if w.keyword == Keyword::CLUSTERED && is_mssql => {
9398                self.parse_option_clustered()
9399            }
9400            _ => {
9401                let name = self.parse_identifier()?;
9402                self.expect_token(&Token::Eq)?;
9403                let value = self.parse_expr()?;
9404
9405                Ok(SqlOption::KeyValue { key: name, value })
9406            }
9407        }
9408    }
9409
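    /// Parse the MSSQL `CLUSTERED ...` table option variants. Illustrative
    /// fragments (Azure Synapse-style; support may vary):
    /// ```sql
    /// CLUSTERED COLUMNSTORE INDEX
    /// CLUSTERED COLUMNSTORE INDEX ORDER (a, b)
    /// CLUSTERED INDEX (a ASC, b DESC)
    /// ```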
9410    pub fn parse_option_clustered(&mut self) -> Result<SqlOption, ParserError> {
9411        if self.parse_keywords(&[
9412            Keyword::CLUSTERED,
9413            Keyword::COLUMNSTORE,
9414            Keyword::INDEX,
9415            Keyword::ORDER,
9416        ]) {
9417            Ok(SqlOption::Clustered(
9418                TableOptionsClustered::ColumnstoreIndexOrder(
9419                    self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
9420                ),
9421            ))
9422        } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::COLUMNSTORE, Keyword::INDEX]) {
9423            Ok(SqlOption::Clustered(
9424                TableOptionsClustered::ColumnstoreIndex,
9425            ))
9426        } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::INDEX]) {
9427            self.expect_token(&Token::LParen)?;
9428
9429            let columns = self.parse_comma_separated(|p| {
9430                let name = p.parse_identifier()?;
9431                let asc = p.parse_asc_desc();
9432
9433                Ok(ClusteredIndex { name, asc })
9434            })?;
9435
9436            self.expect_token(&Token::RParen)?;
9437
9438            Ok(SqlOption::Clustered(TableOptionsClustered::Index(columns)))
9439        } else {
9440            Err(ParserError::ParserError(
9441                "invalid CLUSTERED sequence".to_string(),
9442            ))
9443        }
9444    }
9445
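    /// Parse the MSSQL `PARTITION (...)` table option. Illustrative fragment
    /// (Azure Synapse-style; support may vary):
    /// ```sql
    /// PARTITION (order_date RANGE RIGHT FOR VALUES ('2020-01-01', '2021-01-01'))
    /// ```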
9446    pub fn parse_option_partition(&mut self) -> Result<SqlOption, ParserError> {
9447        self.expect_keyword_is(Keyword::PARTITION)?;
9448        self.expect_token(&Token::LParen)?;
9449        let column_name = self.parse_identifier()?;
9450
9451        self.expect_keyword_is(Keyword::RANGE)?;
9452        let range_direction = if self.parse_keyword(Keyword::LEFT) {
9453            Some(PartitionRangeDirection::Left)
9454        } else if self.parse_keyword(Keyword::RIGHT) {
9455            Some(PartitionRangeDirection::Right)
9456        } else {
9457            None
9458        };
9459
9460        self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;
9461        self.expect_token(&Token::LParen)?;
9462
9463        let for_values = self.parse_comma_separated(Parser::parse_expr)?;
9464
9465        self.expect_token(&Token::RParen)?;
9466        self.expect_token(&Token::RParen)?;
9467
9468        Ok(SqlOption::Partition {
9469            column_name,
9470            range_direction,
9471            for_values,
9472        })
9473    }
9474
9475    pub fn parse_partition(&mut self) -> Result<Partition, ParserError> {
9476        self.expect_token(&Token::LParen)?;
9477        let partitions = self.parse_comma_separated(Parser::parse_expr)?;
9478        self.expect_token(&Token::RParen)?;
9479        Ok(Partition::Partitions(partitions))
9480    }
9481
9482    pub fn parse_projection_select(&mut self) -> Result<ProjectionSelect, ParserError> {
9483        self.expect_token(&Token::LParen)?;
9484        self.expect_keyword_is(Keyword::SELECT)?;
9485        let projection = self.parse_projection()?;
9486        let group_by = self.parse_optional_group_by()?;
9487        let order_by = self.parse_optional_order_by()?;
9488        self.expect_token(&Token::RParen)?;
9489        Ok(ProjectionSelect {
9490            projection,
9491            group_by,
9492            order_by,
9493        })
9494    }
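
    /// Parse the ClickHouse-style `ADD PROJECTION` clause of `ALTER TABLE`; the
    /// leading `ADD PROJECTION` keywords are consumed by the caller.
    /// Illustrative statement (support may vary by dialect):
    /// ```sql
    /// ALTER TABLE t ADD PROJECTION IF NOT EXISTS p (SELECT a, b ORDER BY a)
    /// ```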
9495    pub fn parse_alter_table_add_projection(&mut self) -> Result<AlterTableOperation, ParserError> {
9496        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
9497        let name = self.parse_identifier()?;
9498        let query = self.parse_projection_select()?;
9499        Ok(AlterTableOperation::AddProjection {
9500            if_not_exists,
9501            name,
9502            select: query,
9503        })
9504    }
9505
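    /// Parse a single operation of an `ALTER TABLE` statement; multiple
    /// operations may be comma separated. Illustrative operations
    /// (dialect support varies):
    /// ```sql
    /// ADD COLUMN c INT
    /// DROP COLUMN IF EXISTS c CASCADE
    /// RENAME TO new_name
    /// ```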
9506    pub fn parse_alter_table_operation(&mut self) -> Result<AlterTableOperation, ParserError> {
9507        let operation = if self.parse_keyword(Keyword::ADD) {
9508            if let Some(constraint) = self.parse_optional_table_constraint()? {
9509                let not_valid = self.parse_keywords(&[Keyword::NOT, Keyword::VALID]);
9510                AlterTableOperation::AddConstraint {
9511                    constraint,
9512                    not_valid,
9513                }
9514            } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
9515                && self.parse_keyword(Keyword::PROJECTION)
9516            {
9517                return self.parse_alter_table_add_projection();
9518            } else {
9519                let if_not_exists =
9520                    self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
9521                let mut new_partitions = vec![];
9522                loop {
9523                    if self.parse_keyword(Keyword::PARTITION) {
9524                        new_partitions.push(self.parse_partition()?);
9525                    } else {
9526                        break;
9527                    }
9528                }
9529                if !new_partitions.is_empty() {
9530                    AlterTableOperation::AddPartitions {
9531                        if_not_exists,
9532                        new_partitions,
9533                    }
9534                } else {
9535                    let column_keyword = self.parse_keyword(Keyword::COLUMN);
9536
9537                    let if_not_exists = if dialect_of!(self is PostgreSqlDialect | BigQueryDialect | DuckDbDialect | GenericDialect)
9538                    {
9539                        self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS])
9540                            || if_not_exists
9541                    } else {
9542                        false
9543                    };
9544
9545                    let column_def = self.parse_column_def()?;
9546
9547                    let column_position = self.parse_column_position()?;
9548
9549                    AlterTableOperation::AddColumn {
9550                        column_keyword,
9551                        if_not_exists,
9552                        column_def,
9553                        column_position,
9554                    }
9555                }
9556            }
9557        } else if self.parse_keyword(Keyword::RENAME) {
9558            if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::CONSTRAINT) {
9559                let old_name = self.parse_identifier()?;
9560                self.expect_keyword_is(Keyword::TO)?;
9561                let new_name = self.parse_identifier()?;
9562                AlterTableOperation::RenameConstraint { old_name, new_name }
9563            } else if self.parse_keyword(Keyword::TO) {
9564                let table_name = self.parse_object_name(false)?;
9565                AlterTableOperation::RenameTable {
9566                    table_name: RenameTableNameKind::To(table_name),
9567                }
9568            } else if self.parse_keyword(Keyword::AS) {
9569                let table_name = self.parse_object_name(false)?;
9570                AlterTableOperation::RenameTable {
9571                    table_name: RenameTableNameKind::As(table_name),
9572                }
9573            } else {
9574                let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
9575                let old_column_name = self.parse_identifier()?;
9576                self.expect_keyword_is(Keyword::TO)?;
9577                let new_column_name = self.parse_identifier()?;
9578                AlterTableOperation::RenameColumn {
9579                    old_column_name,
9580                    new_column_name,
9581                }
9582            }
9583        } else if self.parse_keyword(Keyword::DISABLE) {
9584            if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
9585                AlterTableOperation::DisableRowLevelSecurity {}
9586            } else if self.parse_keyword(Keyword::RULE) {
9587                let name = self.parse_identifier()?;
9588                AlterTableOperation::DisableRule { name }
9589            } else if self.parse_keyword(Keyword::TRIGGER) {
9590                let name = self.parse_identifier()?;
9591                AlterTableOperation::DisableTrigger { name }
9592            } else {
9593                return self.expected(
9594                    "ROW LEVEL SECURITY, RULE, or TRIGGER after DISABLE",
9595                    self.peek_token(),
9596                );
9597            }
9598        } else if self.parse_keyword(Keyword::ENABLE) {
9599            if self.parse_keywords(&[Keyword::ALWAYS, Keyword::RULE]) {
9600                let name = self.parse_identifier()?;
9601                AlterTableOperation::EnableAlwaysRule { name }
9602            } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::TRIGGER]) {
9603                let name = self.parse_identifier()?;
9604                AlterTableOperation::EnableAlwaysTrigger { name }
9605            } else if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
9606                AlterTableOperation::EnableRowLevelSecurity {}
9607            } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::RULE]) {
9608                let name = self.parse_identifier()?;
9609                AlterTableOperation::EnableReplicaRule { name }
9610            } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::TRIGGER]) {
9611                let name = self.parse_identifier()?;
9612                AlterTableOperation::EnableReplicaTrigger { name }
9613            } else if self.parse_keyword(Keyword::RULE) {
9614                let name = self.parse_identifier()?;
9615                AlterTableOperation::EnableRule { name }
9616            } else if self.parse_keyword(Keyword::TRIGGER) {
9617                let name = self.parse_identifier()?;
9618                AlterTableOperation::EnableTrigger { name }
9619            } else {
9620                return self.expected(
9621                    "ALWAYS, REPLICA, ROW LEVEL SECURITY, RULE, or TRIGGER after ENABLE",
9622                    self.peek_token(),
9623                );
9624            }
9625        } else if self.parse_keywords(&[Keyword::CLEAR, Keyword::PROJECTION])
9626            && dialect_of!(self is ClickHouseDialect|GenericDialect)
9627        {
9628            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9629            let name = self.parse_identifier()?;
9630            let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
9631                Some(self.parse_identifier()?)
9632            } else {
9633                None
9634            };
9635            AlterTableOperation::ClearProjection {
9636                if_exists,
9637                name,
9638                partition,
9639            }
9640        } else if self.parse_keywords(&[Keyword::MATERIALIZE, Keyword::PROJECTION])
9641            && dialect_of!(self is ClickHouseDialect|GenericDialect)
9642        {
9643            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9644            let name = self.parse_identifier()?;
9645            let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
9646                Some(self.parse_identifier()?)
9647            } else {
9648                None
9649            };
9650            AlterTableOperation::MaterializeProjection {
9651                if_exists,
9652                name,
9653                partition,
9654            }
9655        } else if self.parse_keyword(Keyword::DROP) {
9656            if self.parse_keywords(&[Keyword::IF, Keyword::EXISTS, Keyword::PARTITION]) {
9657                self.expect_token(&Token::LParen)?;
9658                let partitions = self.parse_comma_separated(Parser::parse_expr)?;
9659                self.expect_token(&Token::RParen)?;
9660                AlterTableOperation::DropPartitions {
9661                    partitions,
9662                    if_exists: true,
9663                }
9664            } else if self.parse_keyword(Keyword::PARTITION) {
9665                self.expect_token(&Token::LParen)?;
9666                let partitions = self.parse_comma_separated(Parser::parse_expr)?;
9667                self.expect_token(&Token::RParen)?;
9668                AlterTableOperation::DropPartitions {
9669                    partitions,
9670                    if_exists: false,
9671                }
9672            } else if self.parse_keyword(Keyword::CONSTRAINT) {
9673                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9674                let name = self.parse_identifier()?;
9675                let drop_behavior = self.parse_optional_drop_behavior();
9676                AlterTableOperation::DropConstraint {
9677                    if_exists,
9678                    name,
9679                    drop_behavior,
9680                }
9681            } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
9682                let drop_behavior = self.parse_optional_drop_behavior();
9683                AlterTableOperation::DropPrimaryKey { drop_behavior }
9684            } else if self.parse_keywords(&[Keyword::FOREIGN, Keyword::KEY]) {
9685                let name = self.parse_identifier()?;
9686                let drop_behavior = self.parse_optional_drop_behavior();
9687                AlterTableOperation::DropForeignKey {
9688                    name,
9689                    drop_behavior,
9690                }
9691            } else if self.parse_keyword(Keyword::INDEX) {
9692                let name = self.parse_identifier()?;
9693                AlterTableOperation::DropIndex { name }
9694            } else if self.parse_keyword(Keyword::PROJECTION)
9695                && dialect_of!(self is ClickHouseDialect|GenericDialect)
9696            {
9697                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9698                let name = self.parse_identifier()?;
9699                AlterTableOperation::DropProjection { if_exists, name }
9700            } else if self.parse_keywords(&[Keyword::CLUSTERING, Keyword::KEY]) {
9701                AlterTableOperation::DropClusteringKey
9702            } else {
9703                let has_column_keyword = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
9704                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9705                let column_names = if self.dialect.supports_comma_separated_drop_column_list() {
9706                    self.parse_comma_separated(Parser::parse_identifier)?
9707                } else {
9708                    vec![self.parse_identifier()?]
9709                };
9710                let drop_behavior = self.parse_optional_drop_behavior();
9711                AlterTableOperation::DropColumn {
9712                    has_column_keyword,
9713                    column_names,
9714                    if_exists,
9715                    drop_behavior,
9716                }
9717            }
9718        } else if self.parse_keyword(Keyword::PARTITION) {
9719            self.expect_token(&Token::LParen)?;
9720            let before = self.parse_comma_separated(Parser::parse_expr)?;
9721            self.expect_token(&Token::RParen)?;
9722            self.expect_keyword_is(Keyword::RENAME)?;
9723            self.expect_keywords(&[Keyword::TO, Keyword::PARTITION])?;
9724            self.expect_token(&Token::LParen)?;
9725            let renames = self.parse_comma_separated(Parser::parse_expr)?;
9726            self.expect_token(&Token::RParen)?;
9727            AlterTableOperation::RenamePartitions {
9728                old_partitions: before,
9729                new_partitions: renames,
9730            }
9731        } else if self.parse_keyword(Keyword::CHANGE) {
9732            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
9733            let old_name = self.parse_identifier()?;
9734            let new_name = self.parse_identifier()?;
9735            let data_type = self.parse_data_type()?;
9736            let mut options = vec![];
9737            while let Some(option) = self.parse_optional_column_option()? {
9738                options.push(option);
9739            }
9740
9741            let column_position = self.parse_column_position()?;
9742
9743            AlterTableOperation::ChangeColumn {
9744                old_name,
9745                new_name,
9746                data_type,
9747                options,
9748                column_position,
9749            }
9750        } else if self.parse_keyword(Keyword::MODIFY) {
9751            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
9752            let col_name = self.parse_identifier()?;
9753            let data_type = self.parse_data_type()?;
9754            let mut options = vec![];
9755            while let Some(option) = self.parse_optional_column_option()? {
9756                options.push(option);
9757            }
9758
9759            let column_position = self.parse_column_position()?;
9760
9761            AlterTableOperation::ModifyColumn {
9762                col_name,
9763                data_type,
9764                options,
9765                column_position,
9766            }
9767        } else if self.parse_keyword(Keyword::ALTER) {
9768            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
9769            let column_name = self.parse_identifier()?;
9770            let is_postgresql = dialect_of!(self is PostgreSqlDialect);
9771
9772            let op: AlterColumnOperation = if self.parse_keywords(&[
9773                Keyword::SET,
9774                Keyword::NOT,
9775                Keyword::NULL,
9776            ]) {
9777                AlterColumnOperation::SetNotNull {}
9778            } else if self.parse_keywords(&[Keyword::DROP, Keyword::NOT, Keyword::NULL]) {
9779                AlterColumnOperation::DropNotNull {}
9780            } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
9781                AlterColumnOperation::SetDefault {
9782                    value: self.parse_expr()?,
9783                }
9784            } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
9785                AlterColumnOperation::DropDefault {}
9786            } else if self.parse_keywords(&[Keyword::SET, Keyword::DATA, Keyword::TYPE]) {
9787                self.parse_set_data_type(true)?
9788            } else if self.parse_keyword(Keyword::TYPE) {
9789                self.parse_set_data_type(false)?
9790            } else if self.parse_keywords(&[Keyword::ADD, Keyword::GENERATED]) {
9791                let generated_as = if self.parse_keyword(Keyword::ALWAYS) {
9792                    Some(GeneratedAs::Always)
9793                } else if self.parse_keywords(&[Keyword::BY, Keyword::DEFAULT]) {
9794                    Some(GeneratedAs::ByDefault)
9795                } else {
9796                    None
9797                };
9798
9799                self.expect_keywords(&[Keyword::AS, Keyword::IDENTITY])?;
9800
9801                let mut sequence_options: Option<Vec<SequenceOptions>> = None;
9802
9803                if self.peek_token().token == Token::LParen {
9804                    self.expect_token(&Token::LParen)?;
9805                    sequence_options = Some(self.parse_create_sequence_options()?);
9806                    self.expect_token(&Token::RParen)?;
9807                }
9808
9809                AlterColumnOperation::AddGenerated {
9810                    generated_as,
9811                    sequence_options,
9812                }
9813            } else {
9814                let message = if is_postgresql {
9815                    "SET/DROP NOT NULL, SET DEFAULT, SET DATA TYPE, or ADD GENERATED after ALTER COLUMN"
9816                } else {
9817                    "SET/DROP NOT NULL, SET DEFAULT, or SET DATA TYPE after ALTER COLUMN"
9818                };
9819
9820                return self.expected(message, self.peek_token());
9821            };
9822            AlterTableOperation::AlterColumn { column_name, op }
9823        } else if self.parse_keyword(Keyword::SWAP) {
9824            self.expect_keyword_is(Keyword::WITH)?;
9825            let table_name = self.parse_object_name(false)?;
9826            AlterTableOperation::SwapWith { table_name }
9827        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect)
9828            && self.parse_keywords(&[Keyword::OWNER, Keyword::TO])
9829        {
9830            let new_owner = self.parse_owner()?;
9831            AlterTableOperation::OwnerTo { new_owner }
9832        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
9833            && self.parse_keyword(Keyword::ATTACH)
9834        {
9835            AlterTableOperation::AttachPartition {
9836                partition: self.parse_part_or_partition()?,
9837            }
9838        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
9839            && self.parse_keyword(Keyword::DETACH)
9840        {
9841            AlterTableOperation::DetachPartition {
9842                partition: self.parse_part_or_partition()?,
9843            }
9844        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
9845            && self.parse_keyword(Keyword::FREEZE)
9846        {
9847            let partition = self.parse_part_or_partition()?;
9848            let with_name = if self.parse_keyword(Keyword::WITH) {
9849                self.expect_keyword_is(Keyword::NAME)?;
9850                Some(self.parse_identifier()?)
9851            } else {
9852                None
9853            };
9854            AlterTableOperation::FreezePartition {
9855                partition,
9856                with_name,
9857            }
9858        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
9859            && self.parse_keyword(Keyword::UNFREEZE)
9860        {
9861            let partition = self.parse_part_or_partition()?;
9862            let with_name = if self.parse_keyword(Keyword::WITH) {
9863                self.expect_keyword_is(Keyword::NAME)?;
9864                Some(self.parse_identifier()?)
9865            } else {
9866                None
9867            };
9868            AlterTableOperation::UnfreezePartition {
9869                partition,
9870                with_name,
9871            }
9872        } else if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
9873            self.expect_token(&Token::LParen)?;
9874            let exprs = self.parse_comma_separated(|parser| parser.parse_expr())?;
9875            self.expect_token(&Token::RParen)?;
9876            AlterTableOperation::ClusterBy { exprs }
9877        } else if self.parse_keywords(&[Keyword::SUSPEND, Keyword::RECLUSTER]) {
9878            AlterTableOperation::SuspendRecluster
9879        } else if self.parse_keywords(&[Keyword::RESUME, Keyword::RECLUSTER]) {
9880            AlterTableOperation::ResumeRecluster
9881        } else if self.parse_keyword(Keyword::LOCK) {
9882            let equals = self.consume_token(&Token::Eq);
9883            let lock = match self.parse_one_of_keywords(&[
9884                Keyword::DEFAULT,
9885                Keyword::EXCLUSIVE,
9886                Keyword::NONE,
9887                Keyword::SHARED,
9888            ]) {
9889                Some(Keyword::DEFAULT) => AlterTableLock::Default,
9890                Some(Keyword::EXCLUSIVE) => AlterTableLock::Exclusive,
9891                Some(Keyword::NONE) => AlterTableLock::None,
9892                Some(Keyword::SHARED) => AlterTableLock::Shared,
9893                _ => self.expected(
9894                    "DEFAULT, EXCLUSIVE, NONE or SHARED after LOCK [=]",
9895                    self.peek_token(),
9896                )?,
9897            };
9898            AlterTableOperation::Lock { equals, lock }
9899        } else if self.parse_keyword(Keyword::ALGORITHM) {
9900            let equals = self.consume_token(&Token::Eq);
9901            let algorithm = match self.parse_one_of_keywords(&[
9902                Keyword::DEFAULT,
9903                Keyword::INSTANT,
9904                Keyword::INPLACE,
9905                Keyword::COPY,
9906            ]) {
9907                Some(Keyword::DEFAULT) => AlterTableAlgorithm::Default,
9908                Some(Keyword::INSTANT) => AlterTableAlgorithm::Instant,
9909                Some(Keyword::INPLACE) => AlterTableAlgorithm::Inplace,
9910                Some(Keyword::COPY) => AlterTableAlgorithm::Copy,
9911                _ => self.expected(
9912                    "DEFAULT, INSTANT, INPLACE, or COPY after ALGORITHM [=]",
9913                    self.peek_token(),
9914                )?,
9915            };
9916            AlterTableOperation::Algorithm { equals, algorithm }
9917        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
9918            let equals = self.consume_token(&Token::Eq);
9919            let value = self.parse_number_value()?;
9920            AlterTableOperation::AutoIncrement { equals, value }
9921        } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::IDENTITY]) {
9922            let identity = if self.parse_keyword(Keyword::NONE) {
9923                ReplicaIdentity::None
9924            } else if self.parse_keyword(Keyword::FULL) {
9925                ReplicaIdentity::Full
9926            } else if self.parse_keyword(Keyword::DEFAULT) {
9927                ReplicaIdentity::Default
9928            } else if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
9929                ReplicaIdentity::Index(self.parse_identifier()?)
9930            } else {
9931                return self.expected(
9932                    "NONE, FULL, DEFAULT, or USING INDEX index_name after REPLICA IDENTITY",
9933                    self.peek_token(),
9934                );
9935            };
9936
9937            AlterTableOperation::ReplicaIdentity { identity }
9938        } else if self.parse_keywords(&[Keyword::VALIDATE, Keyword::CONSTRAINT]) {
9939            let name = self.parse_identifier()?;
9940            AlterTableOperation::ValidateConstraint { name }
9941        } else {
9942            let mut options =
9943                self.parse_options_with_keywords(&[Keyword::SET, Keyword::TBLPROPERTIES])?;
9944            if !options.is_empty() {
9945                AlterTableOperation::SetTblProperties {
9946                    table_properties: options,
9947                }
9948            } else {
9949                options = self.parse_options(Keyword::SET)?;
9950                if !options.is_empty() {
9951                    AlterTableOperation::SetOptionsParens { options }
9952                } else {
9953                    return self.expected(
9954                        "ADD, RENAME, PARTITION, SWAP, DROP, REPLICA IDENTITY, SET, or SET TBLPROPERTIES after ALTER TABLE",
9955                        self.peek_token(),
9956                    );
9957                }
9958            }
9959        };
9960        Ok(operation)
9961    }
9962
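    /// Parse the remainder of `ALTER COLUMN <name> [SET DATA] TYPE <type> [USING <expr>]`
    /// once the `SET DATA TYPE` or `TYPE` keywords have been consumed.
    /// Illustrative statement (PostgreSQL-style; `USING` is dialect gated):
    /// ```sql
    /// ALTER TABLE t ALTER COLUMN c SET DATA TYPE BIGINT USING c::BIGINT
    /// ```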
9963    fn parse_set_data_type(&mut self, had_set: bool) -> Result<AlterColumnOperation, ParserError> {
9964        let data_type = self.parse_data_type()?;
9965        let using = if self.dialect.supports_alter_column_type_using()
9966            && self.parse_keyword(Keyword::USING)
9967        {
9968            Some(self.parse_expr()?)
9969        } else {
9970            None
9971        };
9972        Ok(AlterColumnOperation::SetDataType {
9973            data_type,
9974            using,
9975            had_set,
9976        })
9977    }
9978
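    /// Parse a ClickHouse `PART <expr>` or `PARTITION <expr>` argument, as used
    /// by `ATTACH`, `DETACH`, `FREEZE`, and `UNFREEZE`. Illustrative fragments:
    /// ```sql
    /// PARTITION '2024-01-01'
    /// PART 'all_1_1_0'
    /// ```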
9979    fn parse_part_or_partition(&mut self) -> Result<Partition, ParserError> {
9980        let keyword = self.expect_one_of_keywords(&[Keyword::PART, Keyword::PARTITION])?;
9981        match keyword {
9982            Keyword::PART => Ok(Partition::Part(self.parse_expr()?)),
9983            Keyword::PARTITION => Ok(Partition::Expr(self.parse_expr()?)),
9984            // unreachable because expect_one_of_keywords used above
9985            unexpected_keyword => Err(ParserError::ParserError(
9986                format!("Internal parser error: expected any of {{PART, PARTITION}}, got {unexpected_keyword:?}"),
9987            )),
9988        }
9989    }
9990
9991    pub fn parse_alter(&mut self) -> Result<Statement, ParserError> {
9992        let object_type = self.expect_one_of_keywords(&[
9993            Keyword::VIEW,
9994            Keyword::TYPE,
9995            Keyword::TABLE,
9996            Keyword::INDEX,
9997            Keyword::ROLE,
9998            Keyword::POLICY,
9999            Keyword::CONNECTOR,
10000            Keyword::ICEBERG,
10001            Keyword::SCHEMA,
10002            Keyword::USER,
10003            Keyword::OPERATOR,
10004        ])?;
10005        match object_type {
10006            Keyword::SCHEMA => {
10007                self.prev_token();
10008                self.prev_token();
10009                self.parse_alter_schema()
10010            }
10011            Keyword::VIEW => self.parse_alter_view(),
10012            Keyword::TYPE => self.parse_alter_type(),
10013            Keyword::TABLE => self.parse_alter_table(false),
10014            Keyword::ICEBERG => {
10015                self.expect_keyword(Keyword::TABLE)?;
10016                self.parse_alter_table(true)
10017            }
10018            Keyword::INDEX => {
10019                let index_name = self.parse_object_name(false)?;
10020                let operation = if self.parse_keyword(Keyword::RENAME) {
10021                    if self.parse_keyword(Keyword::TO) {
10022                        let index_name = self.parse_object_name(false)?;
10023                        AlterIndexOperation::RenameIndex { index_name }
10024                    } else {
10025                        return self.expected("TO after RENAME", self.peek_token());
10026                    }
10027                } else {
10028                    return self.expected("RENAME after ALTER INDEX", self.peek_token());
10029                };
10030
10031                Ok(Statement::AlterIndex {
10032                    name: index_name,
10033                    operation,
10034                })
10035            }
10036            Keyword::OPERATOR => {
10037                if self.parse_keyword(Keyword::FAMILY) {
10038                    self.parse_alter_operator_family()
10039                } else {
10040                    self.parse_alter_operator()
10041                }
10042            }
10043            Keyword::ROLE => self.parse_alter_role(),
10044            Keyword::POLICY => self.parse_alter_policy(),
10045            Keyword::CONNECTOR => self.parse_alter_connector(),
10046            Keyword::USER => self.parse_alter_user(),
10047            // unreachable because expect_one_of_keywords used above
10048            unexpected_keyword => Err(ParserError::ParserError(
10049                format!("Internal parser error: expected any of {{VIEW, TYPE, TABLE, INDEX, ROLE, POLICY, CONNECTOR, ICEBERG, SCHEMA, USER, OPERATOR}}, got {unexpected_keyword:?}"),
10050            )),
10051        }
10052    }
10053
10054    /// Parse a [Statement::AlterTable]
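    ///
    /// Illustrative statement (clauses such as `IF EXISTS`, `ONLY`, and
    /// `ON CLUSTER` are dialect specific):
    /// ```sql
    /// ALTER TABLE IF EXISTS ONLY t ADD COLUMN c INT, DROP COLUMN d
    /// ```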
10055    pub fn parse_alter_table(&mut self, iceberg: bool) -> Result<Statement, ParserError> {
10056        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10057        let only = self.parse_keyword(Keyword::ONLY); // [ ONLY ]
10058        let table_name = self.parse_object_name(false)?;
10059        let on_cluster = self.parse_optional_on_cluster()?;
10060        let operations = self.parse_comma_separated(Parser::parse_alter_table_operation)?;
10061
10062        let mut location = None;
10063        if self.parse_keyword(Keyword::LOCATION) {
10064            location = Some(HiveSetLocation {
10065                has_set: false,
10066                location: self.parse_identifier()?,
10067            });
10068        } else if self.parse_keywords(&[Keyword::SET, Keyword::LOCATION]) {
10069            location = Some(HiveSetLocation {
10070                has_set: true,
10071                location: self.parse_identifier()?,
10072            });
10073        }
10074
10075        let end_token = if self.peek_token_ref().token == Token::SemiColon {
10076            self.peek_token_ref().clone()
10077        } else {
10078            self.get_current_token().clone()
10079        };
10080
10081        Ok(AlterTable {
10082            name: table_name,
10083            if_exists,
10084            only,
10085            operations,
10086            location,
10087            on_cluster,
10088            table_type: if iceberg {
10089                Some(AlterTableType::Iceberg)
10090            } else {
10091                None
10092            },
10093            end_token: AttachedToken(end_token),
10094        }
10095        .into())
10096    }
10097
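    /// Parse the remainder of an `ALTER VIEW` statement; the `ALTER VIEW`
    /// keywords are consumed by the caller. Illustrative statement:
    /// ```sql
    /// ALTER VIEW v (a, b) AS SELECT 1, 2
    /// ```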
10098    pub fn parse_alter_view(&mut self) -> Result<Statement, ParserError> {
10099        let name = self.parse_object_name(false)?;
10100        let columns = self.parse_parenthesized_column_list(Optional, false)?;
10101
10102        let with_options = self.parse_options(Keyword::WITH)?;
10103
10104        self.expect_keyword_is(Keyword::AS)?;
10105        let query = self.parse_query()?;
10106
10107        Ok(Statement::AlterView {
10108            name,
10109            columns,
10110            query,
10111            with_options,
10112        })
10113    }
10114
10115    /// Parse a [Statement::AlterType]
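    ///
    /// Illustrative statements (PostgreSQL-style; support may vary):
    /// ```sql
    /// ALTER TYPE mood ADD VALUE IF NOT EXISTS 'content' BEFORE 'happy'
    /// ALTER TYPE mood RENAME TO feeling
    /// ```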
10116    pub fn parse_alter_type(&mut self) -> Result<Statement, ParserError> {
10117        let name = self.parse_object_name(false)?;
10118
10119        if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
10120            let new_name = self.parse_identifier()?;
10121            Ok(Statement::AlterType(AlterType {
10122                name,
10123                operation: AlterTypeOperation::Rename(AlterTypeRename { new_name }),
10124            }))
10125        } else if self.parse_keywords(&[Keyword::ADD, Keyword::VALUE]) {
10126            let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
10127            let new_enum_value = self.parse_identifier()?;
10128            let position = if self.parse_keyword(Keyword::BEFORE) {
10129                Some(AlterTypeAddValuePosition::Before(self.parse_identifier()?))
10130            } else if self.parse_keyword(Keyword::AFTER) {
10131                Some(AlterTypeAddValuePosition::After(self.parse_identifier()?))
10132            } else {
10133                None
10134            };
10135
10136            Ok(Statement::AlterType(AlterType {
10137                name,
10138                operation: AlterTypeOperation::AddValue(AlterTypeAddValue {
10139                    if_not_exists,
10140                    value: new_enum_value,
10141                    position,
10142                }),
10143            }))
10144        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::VALUE]) {
10145            let existing_enum_value = self.parse_identifier()?;
10146            self.expect_keyword(Keyword::TO)?;
10147            let new_enum_value = self.parse_identifier()?;
10148
10149            Ok(Statement::AlterType(AlterType {
10150                name,
10151                operation: AlterTypeOperation::RenameValue(AlterTypeRenameValue {
10152                    from: existing_enum_value,
10153                    to: new_enum_value,
10154                }),
10155            }))
10156        } else {
10157            self.expected_ref(
10158                "{RENAME TO | { RENAME | ADD } VALUE}",
10159                self.peek_token_ref(),
10160            )
10161        }
10162    }
10163
10164    /// Parse a [Statement::AlterOperator]
10165    ///
10166    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-alteroperator.html)
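    ///
    /// Illustrative statements (PostgreSQL):
    /// ```sql
    /// ALTER OPERATOR @@ (text, text) OWNER TO joe
    /// ALTER OPERATOR @@ (text, text) SET SCHEMA analytics
    /// ```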
10167    pub fn parse_alter_operator(&mut self) -> Result<Statement, ParserError> {
10168        let name = self.parse_operator_name()?;
10169
10170        // Parse (left_type, right_type)
10171        self.expect_token(&Token::LParen)?;
10172
10173        let left_type = if self.parse_keyword(Keyword::NONE) {
10174            None
10175        } else {
10176            Some(self.parse_data_type()?)
10177        };
10178
10179        self.expect_token(&Token::Comma)?;
10180        let right_type = self.parse_data_type()?;
10181        self.expect_token(&Token::RParen)?;
10182
10183        // Parse the operation
10184        let operation = if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
10185            let owner = if self.parse_keyword(Keyword::CURRENT_ROLE) {
10186                Owner::CurrentRole
10187            } else if self.parse_keyword(Keyword::CURRENT_USER) {
10188                Owner::CurrentUser
10189            } else if self.parse_keyword(Keyword::SESSION_USER) {
10190                Owner::SessionUser
10191            } else {
10192                Owner::Ident(self.parse_identifier()?)
10193            };
10194            AlterOperatorOperation::OwnerTo(owner)
10195        } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
10196            let schema_name = self.parse_object_name(false)?;
10197            AlterOperatorOperation::SetSchema { schema_name }
10198        } else if self.parse_keyword(Keyword::SET) {
10199            self.expect_token(&Token::LParen)?;
10200
10201            let mut options = Vec::new();
10202            loop {
10203                let keyword = self.expect_one_of_keywords(&[
10204                    Keyword::RESTRICT,
10205                    Keyword::JOIN,
10206                    Keyword::COMMUTATOR,
10207                    Keyword::NEGATOR,
10208                    Keyword::HASHES,
10209                    Keyword::MERGES,
10210                ])?;
10211
10212                match keyword {
10213                    Keyword::RESTRICT => {
10214                        self.expect_token(&Token::Eq)?;
10215                        let proc_name = if self.parse_keyword(Keyword::NONE) {
10216                            None
10217                        } else {
10218                            Some(self.parse_object_name(false)?)
10219                        };
10220                        options.push(OperatorOption::Restrict(proc_name));
10221                    }
10222                    Keyword::JOIN => {
10223                        self.expect_token(&Token::Eq)?;
10224                        let proc_name = if self.parse_keyword(Keyword::NONE) {
10225                            None
10226                        } else {
10227                            Some(self.parse_object_name(false)?)
10228                        };
10229                        options.push(OperatorOption::Join(proc_name));
10230                    }
10231                    Keyword::COMMUTATOR => {
10232                        self.expect_token(&Token::Eq)?;
10233                        let op_name = self.parse_operator_name()?;
10234                        options.push(OperatorOption::Commutator(op_name));
10235                    }
10236                    Keyword::NEGATOR => {
10237                        self.expect_token(&Token::Eq)?;
10238                        let op_name = self.parse_operator_name()?;
10239                        options.push(OperatorOption::Negator(op_name));
10240                    }
10241                    Keyword::HASHES => {
10242                        options.push(OperatorOption::Hashes);
10243                    }
10244                    Keyword::MERGES => {
10245                        options.push(OperatorOption::Merges);
10246                    }
10247                    unexpected_keyword => return Err(ParserError::ParserError(
10248                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in operator option"),
10249                    )),
10250                }
10251
10252                if !self.consume_token(&Token::Comma) {
10253                    break;
10254                }
10255            }
10256
10257            self.expect_token(&Token::RParen)?;
10258            AlterOperatorOperation::Set { options }
10259        } else {
10260            return self.expected_ref(
10261                "OWNER TO, SET SCHEMA, or SET after ALTER OPERATOR",
10262                self.peek_token_ref(),
10263            );
10264        };
10265
10266        Ok(Statement::AlterOperator(AlterOperator {
10267            name,
10268            left_type,
10269            right_type,
10270            operation,
10271        }))
10272    }
10273
10274    /// Parse an operator item for ALTER OPERATOR FAMILY ADD operations
10275    fn parse_operator_family_add_operator(&mut self) -> Result<OperatorFamilyItem, ParserError> {
10276        let strategy_number = self.parse_literal_uint()?;
10277        let operator_name = self.parse_operator_name()?;
10278
10279        // Operator argument types (required for ALTER OPERATOR FAMILY)
10280        self.expect_token(&Token::LParen)?;
10281        let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
10282        self.expect_token(&Token::RParen)?;
10283
10284        // Optional purpose
10285        let purpose = if self.parse_keyword(Keyword::FOR) {
10286            if self.parse_keyword(Keyword::SEARCH) {
10287                Some(OperatorPurpose::ForSearch)
10288            } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
10289                let sort_family = self.parse_object_name(false)?;
10290                Some(OperatorPurpose::ForOrderBy { sort_family })
10291            } else {
10292                return self.expected("SEARCH or ORDER BY after FOR", self.peek_token());
10293            }
10294        } else {
10295            None
10296        };
10297
10298        Ok(OperatorFamilyItem::Operator {
10299            strategy_number,
10300            operator_name,
10301            op_types,
10302            purpose,
10303        })
10304    }
10305
10306    /// Parse a function item for ALTER OPERATOR FAMILY ADD operations
10307    fn parse_operator_family_add_function(&mut self) -> Result<OperatorFamilyItem, ParserError> {
10308        let support_number = self.parse_literal_uint()?;
10309
10310        // Optional operator types: absent, an empty `()`, or `(type, ...)`
10311        let op_types = if !self.consume_token(&Token::LParen) {
10312            None
10313        } else if self.consume_token(&Token::RParen) {
10314            Some(vec![])
10315        } else {
10316            let types = self.parse_comma_separated(Parser::parse_data_type)?;
10317            self.expect_token(&Token::RParen)?;
10318            Some(types)
10319        };
10321
10322        let function_name = self.parse_object_name(false)?;
10323
10324        // Function argument types
10325        let argument_types = if self.consume_token(&Token::LParen) {
10326            if self.peek_token() == Token::RParen {
10327                self.expect_token(&Token::RParen)?;
10328                vec![]
10329            } else {
10330                let types = self.parse_comma_separated(Parser::parse_data_type)?;
10331                self.expect_token(&Token::RParen)?;
10332                types
10333            }
10334        } else {
10335            vec![]
10336        };
10337
10338        Ok(OperatorFamilyItem::Function {
10339            support_number,
10340            op_types,
10341            function_name,
10342            argument_types,
10343        })
10344    }
10345
10346    /// Parse an operator item for ALTER OPERATOR FAMILY DROP operations
10347    fn parse_operator_family_drop_operator(
10348        &mut self,
10349    ) -> Result<OperatorFamilyDropItem, ParserError> {
10350        let strategy_number = self.parse_literal_uint()?;
10351
10352        // Operator argument types (required for DROP)
10353        self.expect_token(&Token::LParen)?;
10354        let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
10355        self.expect_token(&Token::RParen)?;
10356
10357        Ok(OperatorFamilyDropItem::Operator {
10358            strategy_number,
10359            op_types,
10360        })
10361    }
10362
10363    /// Parse a function item for ALTER OPERATOR FAMILY DROP operations
10364    fn parse_operator_family_drop_function(
10365        &mut self,
10366    ) -> Result<OperatorFamilyDropItem, ParserError> {
10367        let support_number = self.parse_literal_uint()?;
10368
10369        // Operator types (required for DROP)
10370        self.expect_token(&Token::LParen)?;
10371        let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
10372        self.expect_token(&Token::RParen)?;
10373
10374        Ok(OperatorFamilyDropItem::Function {
10375            support_number,
10376            op_types,
10377        })
10378    }
10379
10380    /// Parse an operator family item for ADD operations (dispatches to operator or function parsing)
10381    fn parse_operator_family_add_item(&mut self) -> Result<OperatorFamilyItem, ParserError> {
10382        if self.parse_keyword(Keyword::OPERATOR) {
10383            self.parse_operator_family_add_operator()
10384        } else if self.parse_keyword(Keyword::FUNCTION) {
10385            self.parse_operator_family_add_function()
10386        } else {
10387            self.expected("OPERATOR or FUNCTION", self.peek_token())
10388        }
10389    }
10390
10391    /// Parse an operator family item for DROP operations (dispatches to operator or function parsing)
10392    fn parse_operator_family_drop_item(&mut self) -> Result<OperatorFamilyDropItem, ParserError> {
10393        if self.parse_keyword(Keyword::OPERATOR) {
10394            self.parse_operator_family_drop_operator()
10395        } else if self.parse_keyword(Keyword::FUNCTION) {
10396            self.parse_operator_family_drop_function()
10397        } else {
10398            self.expected("OPERATOR or FUNCTION", self.peek_token())
10399        }
10400    }
10401
10402    /// Parse a [Statement::AlterOperatorFamily]
10403    /// See <https://www.postgresql.org/docs/current/sql-alteropfamily.html>
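    ///
    /// Illustrative statement (adapted from the PostgreSQL documentation):
    /// ```sql
    /// ALTER OPERATOR FAMILY integer_ops USING btree ADD
    ///     OPERATOR 1 < (int4, int2),
    ///     FUNCTION 1 btint42cmp(int4, int2)
    /// ```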
10404    pub fn parse_alter_operator_family(&mut self) -> Result<Statement, ParserError> {
10405        let name = self.parse_object_name(false)?;
10406        self.expect_keyword(Keyword::USING)?;
10407        let using = self.parse_identifier()?;
10408
10409        let operation = if self.parse_keyword(Keyword::ADD) {
10410            let items = self.parse_comma_separated(Parser::parse_operator_family_add_item)?;
10411            AlterOperatorFamilyOperation::Add { items }
10412        } else if self.parse_keyword(Keyword::DROP) {
10413            let items = self.parse_comma_separated(Parser::parse_operator_family_drop_item)?;
10414            AlterOperatorFamilyOperation::Drop { items }
10415        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
10416            let new_name = self.parse_object_name(false)?;
10417            AlterOperatorFamilyOperation::RenameTo { new_name }
10418        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
10419            let owner = self.parse_owner()?;
10420            AlterOperatorFamilyOperation::OwnerTo(owner)
10421        } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
10422            let schema_name = self.parse_object_name(false)?;
10423            AlterOperatorFamilyOperation::SetSchema { schema_name }
10424        } else {
10425            return self.expected_ref(
10426                "ADD, DROP, RENAME TO, OWNER TO, or SET SCHEMA after ALTER OPERATOR FAMILY",
10427                self.peek_token_ref(),
10428            );
10429        };
10430
10431        Ok(Statement::AlterOperatorFamily(AlterOperatorFamily {
10432            name,
10433            using,
10434            operation,
10435        }))
10436    }
10437
10438    /// Parse a [Statement::AlterSchema]:
10439    /// `ALTER SCHEMA [ IF EXISTS ] schema_name <operation>`
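    ///
    /// Illustrative statements (support may vary by dialect):
    /// ```sql
    /// ALTER SCHEMA sales RENAME TO archive
    /// ALTER SCHEMA IF EXISTS sales OWNER TO admin
    /// ```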
10440    pub fn parse_alter_schema(&mut self) -> Result<Statement, ParserError> {
10441        self.expect_keywords(&[Keyword::ALTER, Keyword::SCHEMA])?;
10442        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10443        let name = self.parse_object_name(false)?;
10444        let operation = if self.parse_keywords(&[Keyword::SET, Keyword::OPTIONS]) {
10445            self.prev_token();
10446            let options = self.parse_options(Keyword::OPTIONS)?;
10447            AlterSchemaOperation::SetOptionsParens { options }
10448        } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT, Keyword::COLLATE]) {
10449            let collate = self.parse_expr()?;
10450            AlterSchemaOperation::SetDefaultCollate { collate }
10451        } else if self.parse_keywords(&[Keyword::ADD, Keyword::REPLICA]) {
10452            let replica = self.parse_identifier()?;
10453            let options = if self.peek_keyword(Keyword::OPTIONS) {
10454                Some(self.parse_options(Keyword::OPTIONS)?)
10455            } else {
10456                None
10457            };
10458            AlterSchemaOperation::AddReplica { replica, options }
10459        } else if self.parse_keywords(&[Keyword::DROP, Keyword::REPLICA]) {
10460            let replica = self.parse_identifier()?;
10461            AlterSchemaOperation::DropReplica { replica }
10462        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
10463            let new_name = self.parse_object_name(false)?;
10464            AlterSchemaOperation::Rename { name: new_name }
10465        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
10466            let owner = self.parse_owner()?;
10467            AlterSchemaOperation::OwnerTo { owner }
10468        } else {
10469            return self.expected_ref("ALTER SCHEMA operation", self.peek_token_ref());
10470        };
10471        Ok(Statement::AlterSchema(AlterSchema {
10472            name,
10473            if_exists,
10474            operations: vec![operation],
10475        }))
10476    }
10477
10478    /// Parse a `CALL procedure_name(arg1, arg2, ...)`
10479    /// or `CALL procedure_name` statement
10480    pub fn parse_call(&mut self) -> Result<Statement, ParserError> {
10481        let object_name = self.parse_object_name(false)?;
10482        if self.peek_token().token == Token::LParen {
10483            match self.parse_function(object_name)? {
10484                Expr::Function(f) => Ok(Statement::Call(f)),
10485                other => parser_err!(
10486                    format!("Expected a simple procedure call but found: {other}"),
10487                    self.peek_token().span.start
10488                ),
10489            }
10490        } else {
10491            Ok(Statement::Call(Function {
10492                name: object_name,
10493                uses_odbc_syntax: false,
10494                parameters: FunctionArguments::None,
10495                args: FunctionArguments::None,
10496                over: None,
10497                filter: None,
10498                null_treatment: None,
10499                within_group: vec![],
10500            }))
10501        }
10502    }
10503
10504    /// Parse a `COPY` statement.
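    ///
    /// Illustrative statements (PostgreSQL-style):
    /// ```sql
    /// COPY country TO STDOUT (DELIMITER '|')
    /// COPY country FROM '/tmp/data.csv' WITH (FORMAT csv, HEADER true)
    /// ```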
10505    pub fn parse_copy(&mut self) -> Result<Statement, ParserError> {
10506        let source;
10507        if self.consume_token(&Token::LParen) {
10508            source = CopySource::Query(self.parse_query()?);
10509            self.expect_token(&Token::RParen)?;
10510        } else {
10511            let table_name = self.parse_object_name(false)?;
10512            let columns = self.parse_parenthesized_column_list(Optional, false)?;
10513            source = CopySource::Table {
10514                table_name,
10515                columns,
10516            };
10517        }
10518        let to = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::TO]) {
10519            Some(Keyword::FROM) => false,
10520            Some(Keyword::TO) => true,
10521            _ => self.expected("FROM or TO", self.peek_token())?,
10522        };
10523        if !to {
10524            // Use a separate `if` statement to prevent the Rust compiler from complaining about
10525            // "if statement in this position is unstable: https://github.com/rust-lang/rust/issues/53667"
10526            if let CopySource::Query(_) = source {
10527                return Err(ParserError::ParserError(
10528                    "COPY ... FROM does not support query as a source".to_string(),
10529                ));
10530            }
10531        }
10532        let target = if self.parse_keyword(Keyword::STDIN) {
10533            CopyTarget::Stdin
10534        } else if self.parse_keyword(Keyword::STDOUT) {
10535            CopyTarget::Stdout
10536        } else if self.parse_keyword(Keyword::PROGRAM) {
10537            CopyTarget::Program {
10538                command: self.parse_literal_string()?,
10539            }
10540        } else {
10541            CopyTarget::File {
10542                filename: self.parse_literal_string()?,
10543            }
10544        };
10545        let _ = self.parse_keyword(Keyword::WITH); // [ WITH ]
10546        let mut options = vec![];
10547        if self.consume_token(&Token::LParen) {
10548            options = self.parse_comma_separated(Parser::parse_copy_option)?;
10549            self.expect_token(&Token::RParen)?;
10550        }
10551        let mut legacy_options = vec![];
10552        while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
10553            legacy_options.push(opt);
10554        }
10555        let values = if let CopyTarget::Stdin = target {
10556            self.expect_token(&Token::SemiColon)?;
10557            self.parse_tsv()
10558        } else {
10559            vec![]
10560        };
10561        Ok(Statement::Copy {
10562            source,
10563            to,
10564            target,
10565            options,
10566            legacy_options,
10567            values,
10568        })
10569    }
10570
10571    /// Parse [Statement::Open]
10572    fn parse_open(&mut self) -> Result<Statement, ParserError> {
10573        self.expect_keyword(Keyword::OPEN)?;
10574        Ok(Statement::Open(OpenStatement {
10575            cursor_name: self.parse_identifier()?,
10576        }))
10577    }
10578
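    /// Parse a `CLOSE` statement, e.g. `CLOSE my_cursor` or `CLOSE ALL`.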
10579    pub fn parse_close(&mut self) -> Result<Statement, ParserError> {
10580        let cursor = if self.parse_keyword(Keyword::ALL) {
10581            CloseCursor::All
10582        } else {
10583            let name = self.parse_identifier()?;
10584
10585            CloseCursor::Specific { name }
10586        };
10587
10588        Ok(Statement::Close { cursor })
10589    }
10590
10591    fn parse_copy_option(&mut self) -> Result<CopyOption, ParserError> {
10592        let ret = match self.parse_one_of_keywords(&[
10593            Keyword::FORMAT,
10594            Keyword::FREEZE,
10595            Keyword::DELIMITER,
10596            Keyword::NULL,
10597            Keyword::HEADER,
10598            Keyword::QUOTE,
10599            Keyword::ESCAPE,
10600            Keyword::FORCE_QUOTE,
10601            Keyword::FORCE_NOT_NULL,
10602            Keyword::FORCE_NULL,
10603            Keyword::ENCODING,
10604        ]) {
10605            Some(Keyword::FORMAT) => CopyOption::Format(self.parse_identifier()?),
10606            Some(Keyword::FREEZE) => CopyOption::Freeze(!matches!(
10607                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
10608                Some(Keyword::FALSE)
10609            )),
10610            Some(Keyword::DELIMITER) => CopyOption::Delimiter(self.parse_literal_char()?),
10611            Some(Keyword::NULL) => CopyOption::Null(self.parse_literal_string()?),
10612            Some(Keyword::HEADER) => CopyOption::Header(!matches!(
10613                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
10614                Some(Keyword::FALSE)
10615            )),
10616            Some(Keyword::QUOTE) => CopyOption::Quote(self.parse_literal_char()?),
10617            Some(Keyword::ESCAPE) => CopyOption::Escape(self.parse_literal_char()?),
10618            Some(Keyword::FORCE_QUOTE) => {
10619                CopyOption::ForceQuote(self.parse_parenthesized_column_list(Mandatory, false)?)
10620            }
10621            Some(Keyword::FORCE_NOT_NULL) => {
10622                CopyOption::ForceNotNull(self.parse_parenthesized_column_list(Mandatory, false)?)
10623            }
10624            Some(Keyword::FORCE_NULL) => {
10625                CopyOption::ForceNull(self.parse_parenthesized_column_list(Mandatory, false)?)
10626            }
10627            Some(Keyword::ENCODING) => CopyOption::Encoding(self.parse_literal_string()?),
10628            _ => self.expected("option", self.peek_token())?,
10629        };
10630        Ok(ret)
10631    }
10632
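    /// Parse a single legacy (unparenthesized) `COPY` option, e.g. `CSV HEADER`
    /// or `DELIMITER AS ','`. The keyword list below also covers options used
    /// by Redshift-style `COPY`/`UNLOAD` statements (e.g. `IAM_ROLE`,
    /// `MAXFILESIZE`, `PARTITION BY`).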
10633    fn parse_copy_legacy_option(&mut self) -> Result<CopyLegacyOption, ParserError> {
10634        // FORMAT [ AS ] is optional
10635        if self.parse_keyword(Keyword::FORMAT) {
10636            let _ = self.parse_keyword(Keyword::AS);
10637        }
10638
10639        let ret = match self.parse_one_of_keywords(&[
10640            Keyword::ACCEPTANYDATE,
10641            Keyword::ACCEPTINVCHARS,
10642            Keyword::ADDQUOTES,
10643            Keyword::ALLOWOVERWRITE,
10644            Keyword::BINARY,
10645            Keyword::BLANKSASNULL,
10646            Keyword::BZIP2,
10647            Keyword::CLEANPATH,
10648            Keyword::COMPUPDATE,
10649            Keyword::CSV,
10650            Keyword::DATEFORMAT,
10651            Keyword::DELIMITER,
10652            Keyword::EMPTYASNULL,
10653            Keyword::ENCRYPTED,
10654            Keyword::ESCAPE,
10655            Keyword::EXTENSION,
10656            Keyword::FIXEDWIDTH,
10657            Keyword::GZIP,
10658            Keyword::HEADER,
10659            Keyword::IAM_ROLE,
10660            Keyword::IGNOREHEADER,
10661            Keyword::JSON,
10662            Keyword::MANIFEST,
10663            Keyword::MAXFILESIZE,
10664            Keyword::NULL,
10665            Keyword::PARALLEL,
10666            Keyword::PARQUET,
10667            Keyword::PARTITION,
10668            Keyword::REGION,
10669            Keyword::REMOVEQUOTES,
10670            Keyword::ROWGROUPSIZE,
10671            Keyword::STATUPDATE,
10672            Keyword::TIMEFORMAT,
10673            Keyword::TRUNCATECOLUMNS,
10674            Keyword::ZSTD,
10675        ]) {
10676            Some(Keyword::ACCEPTANYDATE) => CopyLegacyOption::AcceptAnyDate,
10677            Some(Keyword::ACCEPTINVCHARS) => {
10678                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
10679                let ch = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) {
10680                    Some(self.parse_literal_string()?)
10681                } else {
10682                    None
10683                };
10684                CopyLegacyOption::AcceptInvChars(ch)
10685            }
10686            Some(Keyword::ADDQUOTES) => CopyLegacyOption::AddQuotes,
10687            Some(Keyword::ALLOWOVERWRITE) => CopyLegacyOption::AllowOverwrite,
10688            Some(Keyword::BINARY) => CopyLegacyOption::Binary,
10689            Some(Keyword::BLANKSASNULL) => CopyLegacyOption::BlankAsNull,
10690            Some(Keyword::BZIP2) => CopyLegacyOption::Bzip2,
10691            Some(Keyword::CLEANPATH) => CopyLegacyOption::CleanPath,
10692            Some(Keyword::COMPUPDATE) => {
10693                let preset = self.parse_keyword(Keyword::PRESET);
10694                let enabled = match self.parse_one_of_keywords(&[
10695                    Keyword::TRUE,
10696                    Keyword::FALSE,
10697                    Keyword::ON,
10698                    Keyword::OFF,
10699                ]) {
10700                    Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
10701                    Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
10702                    _ => None,
10703                };
10704                CopyLegacyOption::CompUpdate { preset, enabled }
10705            }
10706            Some(Keyword::CSV) => CopyLegacyOption::Csv({
10707                let mut opts = vec![];
10708                while let Some(opt) =
10709                    self.maybe_parse(|parser| parser.parse_copy_legacy_csv_option())?
10710                {
10711                    opts.push(opt);
10712                }
10713                opts
10714            }),
10715            Some(Keyword::DATEFORMAT) => {
10716                let _ = self.parse_keyword(Keyword::AS);
10717                let fmt = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) {
10718                    Some(self.parse_literal_string()?)
10719                } else {
10720                    None
10721                };
10722                CopyLegacyOption::DateFormat(fmt)
10723            }
10724            Some(Keyword::DELIMITER) => {
10725                let _ = self.parse_keyword(Keyword::AS);
10726                CopyLegacyOption::Delimiter(self.parse_literal_char()?)
10727            }
10728            Some(Keyword::EMPTYASNULL) => CopyLegacyOption::EmptyAsNull,
10729            Some(Keyword::ENCRYPTED) => {
10730                let auto = self.parse_keyword(Keyword::AUTO);
10731                CopyLegacyOption::Encrypted { auto }
10732            }
10733            Some(Keyword::ESCAPE) => CopyLegacyOption::Escape,
10734            Some(Keyword::EXTENSION) => {
10735                let ext = self.parse_literal_string()?;
10736                CopyLegacyOption::Extension(ext)
10737            }
10738            Some(Keyword::FIXEDWIDTH) => {
10739                let spec = self.parse_literal_string()?;
10740                CopyLegacyOption::FixedWidth(spec)
10741            }
10742            Some(Keyword::GZIP) => CopyLegacyOption::Gzip,
10743            Some(Keyword::HEADER) => CopyLegacyOption::Header,
10744            Some(Keyword::IAM_ROLE) => CopyLegacyOption::IamRole(self.parse_iam_role_kind()?),
10745            Some(Keyword::IGNOREHEADER) => {
10746                let _ = self.parse_keyword(Keyword::AS);
10747                let num_rows = self.parse_literal_uint()?;
10748                CopyLegacyOption::IgnoreHeader(num_rows)
10749            }
10750            Some(Keyword::JSON) => CopyLegacyOption::Json,
10751            Some(Keyword::MANIFEST) => {
10752                let verbose = self.parse_keyword(Keyword::VERBOSE);
10753                CopyLegacyOption::Manifest { verbose }
10754            }
10755            Some(Keyword::MAXFILESIZE) => {
10756                let _ = self.parse_keyword(Keyword::AS);
10757                let size = self.parse_number_value()?.value;
10758                let unit = match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
10759                    Some(Keyword::MB) => Some(FileSizeUnit::MB),
10760                    Some(Keyword::GB) => Some(FileSizeUnit::GB),
10761                    _ => None,
10762                };
10763                CopyLegacyOption::MaxFileSize(FileSize { size, unit })
10764            }
10765            Some(Keyword::NULL) => {
10766                let _ = self.parse_keyword(Keyword::AS);
10767                CopyLegacyOption::Null(self.parse_literal_string()?)
10768            }
10769            Some(Keyword::PARALLEL) => {
10770                let enabled = match self.parse_one_of_keywords(&[
10771                    Keyword::TRUE,
10772                    Keyword::FALSE,
10773                    Keyword::ON,
10774                    Keyword::OFF,
10775                ]) {
10776                    Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
10777                    Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
10778                    _ => None,
10779                };
10780                CopyLegacyOption::Parallel(enabled)
10781            }
10782            Some(Keyword::PARQUET) => CopyLegacyOption::Parquet,
10783            Some(Keyword::PARTITION) => {
10784                self.expect_keyword(Keyword::BY)?;
10785                let columns = self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?;
10786                let include = self.parse_keyword(Keyword::INCLUDE);
10787                CopyLegacyOption::PartitionBy(UnloadPartitionBy { columns, include })
10788            }
10789            Some(Keyword::REGION) => {
10790                let _ = self.parse_keyword(Keyword::AS);
10791                let region = self.parse_literal_string()?;
10792                CopyLegacyOption::Region(region)
10793            }
10794            Some(Keyword::REMOVEQUOTES) => CopyLegacyOption::RemoveQuotes,
10795            Some(Keyword::ROWGROUPSIZE) => {
10796                let _ = self.parse_keyword(Keyword::AS);
10797                let file_size = self.parse_file_size()?;
10798                CopyLegacyOption::RowGroupSize(file_size)
10799            }
10800            Some(Keyword::STATUPDATE) => {
10801                let enabled = match self.parse_one_of_keywords(&[
10802                    Keyword::TRUE,
10803                    Keyword::FALSE,
10804                    Keyword::ON,
10805                    Keyword::OFF,
10806                ]) {
10807                    Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
10808                    Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
10809                    _ => None,
10810                };
10811                CopyLegacyOption::StatUpdate(enabled)
10812            }
10813            Some(Keyword::TIMEFORMAT) => {
10814                let _ = self.parse_keyword(Keyword::AS);
10815                let fmt = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) {
10816                    Some(self.parse_literal_string()?)
10817                } else {
10818                    None
10819                };
10820                CopyLegacyOption::TimeFormat(fmt)
10821            }
10822            Some(Keyword::TRUNCATECOLUMNS) => CopyLegacyOption::TruncateColumns,
10823            Some(Keyword::ZSTD) => CopyLegacyOption::Zstd,
10824            _ => self.expected("option", self.peek_token())?,
10825        };
10826        Ok(ret)
10827    }
10828
10829    fn parse_file_size(&mut self) -> Result<FileSize, ParserError> {
10830        let size = self.parse_number_value()?.value;
10831        let unit = self.maybe_parse_file_size_unit();
10832        Ok(FileSize { size, unit })
10833    }
10834
10835    fn maybe_parse_file_size_unit(&mut self) -> Option<FileSizeUnit> {
10836        match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
10837            Some(Keyword::MB) => Some(FileSizeUnit::MB),
10838            Some(Keyword::GB) => Some(FileSizeUnit::GB),
10839            _ => None,
10840        }
10841    }
10842
10843    fn parse_iam_role_kind(&mut self) -> Result<IamRoleKind, ParserError> {
10844        if self.parse_keyword(Keyword::DEFAULT) {
10845            Ok(IamRoleKind::Default)
10846        } else {
10847            let arn = self.parse_literal_string()?;
10848            Ok(IamRoleKind::Arn(arn))
10849        }
10850    }
10851
10852    fn parse_copy_legacy_csv_option(&mut self) -> Result<CopyLegacyCsvOption, ParserError> {
10853        let ret = match self.parse_one_of_keywords(&[
10854            Keyword::HEADER,
10855            Keyword::QUOTE,
10856            Keyword::ESCAPE,
10857            Keyword::FORCE,
10858        ]) {
10859            Some(Keyword::HEADER) => CopyLegacyCsvOption::Header,
10860            Some(Keyword::QUOTE) => {
10861                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
10862                CopyLegacyCsvOption::Quote(self.parse_literal_char()?)
10863            }
10864            Some(Keyword::ESCAPE) => {
10865                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
10866                CopyLegacyCsvOption::Escape(self.parse_literal_char()?)
10867            }
10868            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) => {
10869                CopyLegacyCsvOption::ForceNotNull(
10870                    self.parse_comma_separated(|p| p.parse_identifier())?,
10871                )
10872            }
10873            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::QUOTE]) => {
10874                CopyLegacyCsvOption::ForceQuote(
10875                    self.parse_comma_separated(|p| p.parse_identifier())?,
10876                )
10877            }
10878            _ => self.expected("csv option", self.peek_token())?,
10879        };
10880        Ok(ret)
10881    }
10882
10883    fn parse_literal_char(&mut self) -> Result<char, ParserError> {
10884        let s = self.parse_literal_string()?;
10885        if s.len() != 1 {
10886            let loc = self
10887                .tokens
10888                .get(self.index - 1)
10889                .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
10890            return parser_err!(format!("Expected a char, found {s:?}"), loc);
10891        }
10892        Ok(s.chars().next().unwrap())
10893    }
10894
10895    /// Parse tab-separated values in a
10896    /// `COPY` payload
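    ///
    /// Values are separated by tabs or newlines; `\N` is read as a NULL value
    /// and `\.` terminates the payload (see `parse_tab_value`).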
10897    pub fn parse_tsv(&mut self) -> Vec<Option<String>> {
10898        self.parse_tab_value()
10899    }
10900
10901    pub fn parse_tab_value(&mut self) -> Vec<Option<String>> {
10902        let mut values = vec![];
10903        let mut content = String::from("");
10904        while let Some(t) = self.next_token_no_skip().map(|t| &t.token) {
10905            match t {
10906                Token::Whitespace(Whitespace::Tab) => {
10907                    values.push(Some(content.to_string()));
10908                    content.clear();
10909                }
10910                Token::Whitespace(Whitespace::Newline) => {
10911                    values.push(Some(content.to_string()));
10912                    content.clear();
10913                }
10914                Token::Backslash => {
10915                    if self.consume_token(&Token::Period) {
10916                        return values;
10917                    }
10918                    if let Token::Word(w) = self.next_token().token {
10919                        if w.value == "N" {
10920                            values.push(None);
10921                        }
10922                    }
10923                }
10924                _ => {
10925                    content.push_str(&t.to_string());
10926                }
10927            }
10928        }
10929        values
10930    }
10931
10932    /// Parse a literal value (numbers, strings, date/time, booleans)
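    ///
    /// # Example
    ///
    /// A minimal sketch (assuming the `GenericDialect` and the usual
    /// `Parser::new(..).try_with_sql(..)` entry point):
    ///
    /// ```
    /// # use sqlparser::ast::Value;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let value = Parser::new(&dialect)
    ///     .try_with_sql("'hello'").unwrap()
    ///     .parse_value().unwrap();
    /// assert_eq!(value.value, Value::SingleQuotedString("hello".to_string()));
    /// ```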
10933    pub fn parse_value(&mut self) -> Result<ValueWithSpan, ParserError> {
10934        let next_token = self.next_token();
10935        let span = next_token.span;
10936        let ok_value = |value: Value| Ok(value.with_span(span));
10937        match next_token.token {
10938            Token::Word(w) => match w.keyword {
10939                Keyword::TRUE if self.dialect.supports_boolean_literals() => {
10940                    ok_value(Value::Boolean(true))
10941                }
10942                Keyword::FALSE if self.dialect.supports_boolean_literals() => {
10943                    ok_value(Value::Boolean(false))
10944                }
10945                Keyword::NULL => ok_value(Value::Null),
10946                Keyword::NoKeyword if w.quote_style.is_some() => match w.quote_style {
10947                    Some('"') => ok_value(Value::DoubleQuotedString(w.value)),
10948                    Some('\'') => ok_value(Value::SingleQuotedString(w.value)),
10949                    _ => self.expected(
10950                        "A value?",
10951                        TokenWithSpan {
10952                            token: Token::Word(w),
10953                            span,
10954                        },
10955                    )?,
10956                },
10957                _ => self.expected(
10958                    "a concrete value",
10959                    TokenWithSpan {
10960                        token: Token::Word(w),
10961                        span,
10962                    },
10963                ),
10964            },
10965            // The call to `Self::parse(n, ..)` returns a bigdecimal when the
10966            // bigdecimal feature is enabled, and is otherwise a no-op
10967            // (i.e., the number is kept as the input string).
10968            Token::Number(n, l) => ok_value(Value::Number(Self::parse(n, span.start)?, l)),
10969            Token::SingleQuotedString(ref s) => ok_value(Value::SingleQuotedString(
10970                self.maybe_concat_string_literal(s.to_string()),
10971            )),
10972            Token::DoubleQuotedString(ref s) => ok_value(Value::DoubleQuotedString(
10973                self.maybe_concat_string_literal(s.to_string()),
10974            )),
10975            Token::TripleSingleQuotedString(ref s) => {
10976                ok_value(Value::TripleSingleQuotedString(s.to_string()))
10977            }
10978            Token::TripleDoubleQuotedString(ref s) => {
10979                ok_value(Value::TripleDoubleQuotedString(s.to_string()))
10980            }
10981            Token::DollarQuotedString(ref s) => ok_value(Value::DollarQuotedString(s.clone())),
10982            Token::SingleQuotedByteStringLiteral(ref s) => {
10983                ok_value(Value::SingleQuotedByteStringLiteral(s.clone()))
10984            }
10985            Token::DoubleQuotedByteStringLiteral(ref s) => {
10986                ok_value(Value::DoubleQuotedByteStringLiteral(s.clone()))
10987            }
10988            Token::TripleSingleQuotedByteStringLiteral(ref s) => {
10989                ok_value(Value::TripleSingleQuotedByteStringLiteral(s.clone()))
10990            }
10991            Token::TripleDoubleQuotedByteStringLiteral(ref s) => {
10992                ok_value(Value::TripleDoubleQuotedByteStringLiteral(s.clone()))
10993            }
10994            Token::SingleQuotedRawStringLiteral(ref s) => {
10995                ok_value(Value::SingleQuotedRawStringLiteral(s.clone()))
10996            }
10997            Token::DoubleQuotedRawStringLiteral(ref s) => {
10998                ok_value(Value::DoubleQuotedRawStringLiteral(s.clone()))
10999            }
11000            Token::TripleSingleQuotedRawStringLiteral(ref s) => {
11001                ok_value(Value::TripleSingleQuotedRawStringLiteral(s.clone()))
11002            }
11003            Token::TripleDoubleQuotedRawStringLiteral(ref s) => {
11004                ok_value(Value::TripleDoubleQuotedRawStringLiteral(s.clone()))
11005            }
11006            Token::NationalStringLiteral(ref s) => {
11007                ok_value(Value::NationalStringLiteral(s.to_string()))
11008            }
11009            Token::QuoteDelimitedStringLiteral(v) => {
11010                ok_value(Value::QuoteDelimitedStringLiteral(v))
11011            }
11012            Token::NationalQuoteDelimitedStringLiteral(v) => {
11013                ok_value(Value::NationalQuoteDelimitedStringLiteral(v))
11014            }
11015            Token::EscapedStringLiteral(ref s) => {
11016                ok_value(Value::EscapedStringLiteral(s.to_string()))
11017            }
11018            Token::UnicodeStringLiteral(ref s) => {
11019                ok_value(Value::UnicodeStringLiteral(s.to_string()))
11020            }
11021            Token::HexStringLiteral(ref s) => ok_value(Value::HexStringLiteral(s.to_string())),
11022            Token::Placeholder(ref s) => ok_value(Value::Placeholder(s.to_string())),
11023            tok @ Token::Colon | tok @ Token::AtSign => {
11024                // 1. Not calling self.parse_identifier(false)?
11025                //    because only for placeholders do we want to
11026                //    accept numbers as identifiers; this is because
11027                //    Snowflake allows numbers as placeholders.
11028                // 2. Not calling self.next_token() so that `tok` must
11029                //    be followed immediately by a word/number, i.e.
11030                //    without any whitespace in between.
11031                let next_token = self.next_token_no_skip().unwrap_or(&EOF_TOKEN).clone();
11032                let ident = match next_token.token {
11033                    Token::Word(w) => Ok(w.into_ident(next_token.span)),
11034                    Token::Number(w, false) => Ok(Ident::with_span(next_token.span, w)),
11035                    _ => self.expected("placeholder", next_token),
11036                }?;
11037                Ok(Value::Placeholder(tok.to_string() + &ident.value)
11038                    .with_span(Span::new(span.start, ident.span.end)))
11039            }
11040            unexpected => self.expected(
11041                "a value",
11042                TokenWithSpan {
11043                    token: unexpected,
11044                    span,
11045                },
11046            ),
11047        }
11048    }
11049
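    /// For dialects that support implicit string literal concatenation, append
    /// any immediately following string literals to `str`, e.g. `'foo' 'bar'`
    /// is read as a single literal `'foobar'`.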
11050    fn maybe_concat_string_literal(&mut self, mut str: String) -> String {
11051        if self.dialect.supports_string_literal_concatenation() {
11052            while let Token::SingleQuotedString(ref s) | Token::DoubleQuotedString(ref s) =
11053                self.peek_token_ref().token
11054            {
11055                str.push_str(s.clone().as_str());
11056                self.advance_token();
11057            }
11058        }
11059        str
11060    }
11061
11062    /// Parse an unsigned numeric literal
11063    pub fn parse_number_value(&mut self) -> Result<ValueWithSpan, ParserError> {
11064        let value_wrapper = self.parse_value()?;
11065        match &value_wrapper.value {
11066            Value::Number(_, _) => Ok(value_wrapper),
11067            Value::Placeholder(_) => Ok(value_wrapper),
11068            _ => {
11069                self.prev_token();
11070                self.expected("literal number", self.peek_token())
11071            }
11072        }
11073    }
11074
11075    /// Parse a numeric literal as an expression. Returns an [`Expr::UnaryOp`] if the number is signed,
11076    /// otherwise returns an [`Expr::Value`]
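    ///
    /// # Example
    ///
    /// A minimal sketch (assuming the `GenericDialect`), showing that a leading
    /// sign is returned as a unary operator:
    ///
    /// ```
    /// # use sqlparser::ast::{Expr, UnaryOperator};
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let expr = Parser::new(&dialect)
    ///     .try_with_sql("-5").unwrap()
    ///     .parse_number().unwrap();
    /// assert!(matches!(expr, Expr::UnaryOp { op: UnaryOperator::Minus, .. }));
    /// ```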
11077    pub fn parse_number(&mut self) -> Result<Expr, ParserError> {
11078        let next_token = self.next_token();
11079        match next_token.token {
11080            Token::Plus => Ok(Expr::UnaryOp {
11081                op: UnaryOperator::Plus,
11082                expr: Box::new(Expr::Value(self.parse_number_value()?)),
11083            }),
11084            Token::Minus => Ok(Expr::UnaryOp {
11085                op: UnaryOperator::Minus,
11086                expr: Box::new(Expr::Value(self.parse_number_value()?)),
11087            }),
11088            _ => {
11089                self.prev_token();
11090                Ok(Expr::Value(self.parse_number_value()?))
11091            }
11092        }
11093    }
11094
11095    fn parse_introduced_string_expr(&mut self) -> Result<Expr, ParserError> {
11096        let next_token = self.next_token();
11097        let span = next_token.span;
11098        match next_token.token {
11099            Token::SingleQuotedString(ref s) => Ok(Expr::Value(
11100                Value::SingleQuotedString(s.to_string()).with_span(span),
11101            )),
11102            Token::DoubleQuotedString(ref s) => Ok(Expr::Value(
11103                Value::DoubleQuotedString(s.to_string()).with_span(span),
11104            )),
11105            Token::HexStringLiteral(ref s) => Ok(Expr::Value(
11106                Value::HexStringLiteral(s.to_string()).with_span(span),
11107            )),
11108            unexpected => self.expected(
11109                "a string value",
11110                TokenWithSpan {
11111                    token: unexpected,
11112                    span,
11113                },
11114            ),
11115        }
11116    }
11117
11118    /// Parse an unsigned literal integer/long
11119    pub fn parse_literal_uint(&mut self) -> Result<u64, ParserError> {
11120        let next_token = self.next_token();
11121        match next_token.token {
11122            Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start),
11123            _ => self.expected("literal int", next_token),
11124        }
11125    }
11126
11127    /// Parse the body of a `CREATE FUNCTION` specified as a string.
11128    /// e.g. `CREATE FUNCTION ... AS $$ body $$`.
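    ///
    /// For example, in `CREATE FUNCTION ... AS 'obj_file', 'link_symbol'` the
    /// first string is parsed as the body and the optional second,
    /// comma-separated string as the link symbol.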
11129    fn parse_create_function_body_string(&mut self) -> Result<CreateFunctionBody, ParserError> {
11130        let parse_string_expr = |parser: &mut Parser| -> Result<Expr, ParserError> {
11131            let peek_token = parser.peek_token();
11132            let span = peek_token.span;
11133            match peek_token.token {
11134                Token::DollarQuotedString(s) if dialect_of!(parser is PostgreSqlDialect | GenericDialect) =>
11135                {
11136                    parser.next_token();
11137                    Ok(Expr::Value(Value::DollarQuotedString(s).with_span(span)))
11138                }
11139                _ => Ok(Expr::Value(
11140                    Value::SingleQuotedString(parser.parse_literal_string()?).with_span(span),
11141                )),
11142            }
11143        };
11144
11145        Ok(CreateFunctionBody::AsBeforeOptions {
11146            body: parse_string_expr(self)?,
11147            link_symbol: if self.consume_token(&Token::Comma) {
11148                Some(parse_string_expr(self)?)
11149            } else {
11150                None
11151            },
11152        })
11153    }
11154
11155    /// Parse a literal string
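    ///
    /// Accepts single- or double-quoted strings as well as bare non-keyword
    /// words, e.g. both `'abc'` and `abc` yield the string `abc`.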
11156    pub fn parse_literal_string(&mut self) -> Result<String, ParserError> {
11157        let next_token = self.next_token();
11158        match next_token.token {
11159            Token::Word(Word {
11160                value,
11161                keyword: Keyword::NoKeyword,
11162                ..
11163            }) => Ok(value),
11164            Token::SingleQuotedString(s) => Ok(s),
11165            Token::DoubleQuotedString(s) => Ok(s),
11166            Token::EscapedStringLiteral(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
11167                Ok(s)
11168            }
11169            Token::UnicodeStringLiteral(s) => Ok(s),
11170            _ => self.expected("literal string", next_token),
11171        }
11172    }
11173
11174    /// Parse a boolean value (`TRUE` or `FALSE`)
11175    pub(crate) fn parse_boolean_string(&mut self) -> Result<bool, ParserError> {
11176        match self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]) {
11177            Some(Keyword::TRUE) => Ok(true),
11178            Some(Keyword::FALSE) => Ok(false),
11179            _ => self.expected("TRUE or FALSE", self.peek_token()),
11180        }
11181    }
11182
11183    /// Parse a Unicode normalization predicate
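    /// (e.g. `expr IS [NOT] [NFC|NFD|NFKC|NFKD] NORMALIZED`, as in
    /// `name IS NFC NORMALIZED`), where `expr` has already been parsed.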
11184    pub fn parse_unicode_is_normalized(&mut self, expr: Expr) -> Result<Expr, ParserError> {
11185        let neg = self.parse_keyword(Keyword::NOT);
11186        let normalized_form = self.maybe_parse(|parser| {
11187            match parser.parse_one_of_keywords(&[
11188                Keyword::NFC,
11189                Keyword::NFD,
11190                Keyword::NFKC,
11191                Keyword::NFKD,
11192            ]) {
11193                Some(Keyword::NFC) => Ok(NormalizationForm::NFC),
11194                Some(Keyword::NFD) => Ok(NormalizationForm::NFD),
11195                Some(Keyword::NFKC) => Ok(NormalizationForm::NFKC),
11196                Some(Keyword::NFKD) => Ok(NormalizationForm::NFKD),
11197                _ => parser.expected("unicode normalization form", parser.peek_token()),
11198            }
11199        })?;
11200        if self.parse_keyword(Keyword::NORMALIZED) {
11201            return Ok(Expr::IsNormalized {
11202                expr: Box::new(expr),
11203                form: normalized_form,
11204                negated: neg,
11205            });
11206        }
11207        self.expected("unicode normalization form", self.peek_token())
11208    }
11209
11210    pub fn parse_enum_values(&mut self) -> Result<Vec<EnumMember>, ParserError> {
11211        self.expect_token(&Token::LParen)?;
11212        let values = self.parse_comma_separated(|parser| {
11213            let name = parser.parse_literal_string()?;
11214            let e = if parser.consume_token(&Token::Eq) {
11215                let value = parser.parse_number()?;
11216                EnumMember::NamedValue(name, value)
11217            } else {
11218                EnumMember::Name(name)
11219            };
11220            Ok(e)
11221        })?;
11222        self.expect_token(&Token::RParen)?;
11223
11224        Ok(values)
11225    }
11226
11227    /// Parse a SQL data type (e.g. in the context of a CREATE TABLE statement)
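    ///
    /// # Example
    ///
    /// A minimal sketch (assuming the `GenericDialect`):
    ///
    /// ```
    /// # use sqlparser::ast::DataType;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let ty = Parser::new(&dialect)
    ///     .try_with_sql("VARCHAR(20)").unwrap()
    ///     .parse_data_type().unwrap();
    /// assert!(matches!(ty, DataType::Varchar(Some(_))));
    /// ```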
11228    pub fn parse_data_type(&mut self) -> Result<DataType, ParserError> {
11229        let (ty, trailing_bracket) = self.parse_data_type_helper()?;
11230        if trailing_bracket.0 {
11231            return parser_err!(
11232                format!("unmatched > after parsing data type {ty}"),
11233                self.peek_token()
11234            );
11235        }
11236
11237        Ok(ty)
11238    }
11239
11240    fn parse_data_type_helper(
11241        &mut self,
11242    ) -> Result<(DataType, MatchedTrailingBracket), ParserError> {
11243        let dialect = self.dialect;
11244        self.advance_token();
11245        let next_token = self.get_current_token();
11246        let next_token_index = self.get_current_index();
11247
11248        let mut trailing_bracket: MatchedTrailingBracket = false.into();
11249        let mut data = match &next_token.token {
11250            Token::Word(w) => match w.keyword {
11251                Keyword::BOOLEAN => Ok(DataType::Boolean),
11252                Keyword::BOOL => Ok(DataType::Bool),
11253                Keyword::FLOAT => {
11254                    let precision = self.parse_exact_number_optional_precision_scale()?;
11255
11256                    if self.parse_keyword(Keyword::UNSIGNED) {
11257                        Ok(DataType::FloatUnsigned(precision))
11258                    } else {
11259                        Ok(DataType::Float(precision))
11260                    }
11261                }
11262                Keyword::REAL => {
11263                    if self.parse_keyword(Keyword::UNSIGNED) {
11264                        Ok(DataType::RealUnsigned)
11265                    } else {
11266                        Ok(DataType::Real)
11267                    }
11268                }
11269                Keyword::FLOAT4 => Ok(DataType::Float4),
11270                Keyword::FLOAT32 => Ok(DataType::Float32),
11271                Keyword::FLOAT64 => Ok(DataType::Float64),
11272                Keyword::FLOAT8 => Ok(DataType::Float8),
11273                Keyword::DOUBLE => {
11274                    if self.parse_keyword(Keyword::PRECISION) {
11275                        if self.parse_keyword(Keyword::UNSIGNED) {
11276                            Ok(DataType::DoublePrecisionUnsigned)
11277                        } else {
11278                            Ok(DataType::DoublePrecision)
11279                        }
11280                    } else {
11281                        let precision = self.parse_exact_number_optional_precision_scale()?;
11282
11283                        if self.parse_keyword(Keyword::UNSIGNED) {
11284                            Ok(DataType::DoubleUnsigned(precision))
11285                        } else {
11286                            Ok(DataType::Double(precision))
11287                        }
11288                    }
11289                }
11290                Keyword::TINYINT => {
11291                    let optional_precision = self.parse_optional_precision();
11292                    if self.parse_keyword(Keyword::UNSIGNED) {
11293                        Ok(DataType::TinyIntUnsigned(optional_precision?))
11294                    } else {
11295                        if dialect.supports_data_type_signed_suffix() {
11296                            let _ = self.parse_keyword(Keyword::SIGNED);
11297                        }
11298                        Ok(DataType::TinyInt(optional_precision?))
11299                    }
11300                }
11301                Keyword::INT2 => {
11302                    let optional_precision = self.parse_optional_precision();
11303                    if self.parse_keyword(Keyword::UNSIGNED) {
11304                        Ok(DataType::Int2Unsigned(optional_precision?))
11305                    } else {
11306                        Ok(DataType::Int2(optional_precision?))
11307                    }
11308                }
11309                Keyword::SMALLINT => {
11310                    let optional_precision = self.parse_optional_precision();
11311                    if self.parse_keyword(Keyword::UNSIGNED) {
11312                        Ok(DataType::SmallIntUnsigned(optional_precision?))
11313                    } else {
11314                        if dialect.supports_data_type_signed_suffix() {
11315                            let _ = self.parse_keyword(Keyword::SIGNED);
11316                        }
11317                        Ok(DataType::SmallInt(optional_precision?))
11318                    }
11319                }
11320                Keyword::MEDIUMINT => {
11321                    let optional_precision = self.parse_optional_precision();
11322                    if self.parse_keyword(Keyword::UNSIGNED) {
11323                        Ok(DataType::MediumIntUnsigned(optional_precision?))
11324                    } else {
11325                        if dialect.supports_data_type_signed_suffix() {
11326                            let _ = self.parse_keyword(Keyword::SIGNED);
11327                        }
11328                        Ok(DataType::MediumInt(optional_precision?))
11329                    }
11330                }
11331                Keyword::INT => {
11332                    let optional_precision = self.parse_optional_precision();
11333                    if self.parse_keyword(Keyword::UNSIGNED) {
11334                        Ok(DataType::IntUnsigned(optional_precision?))
11335                    } else {
11336                        if dialect.supports_data_type_signed_suffix() {
11337                            let _ = self.parse_keyword(Keyword::SIGNED);
11338                        }
11339                        Ok(DataType::Int(optional_precision?))
11340                    }
11341                }
11342                Keyword::INT4 => {
11343                    let optional_precision = self.parse_optional_precision();
11344                    if self.parse_keyword(Keyword::UNSIGNED) {
11345                        Ok(DataType::Int4Unsigned(optional_precision?))
11346                    } else {
11347                        Ok(DataType::Int4(optional_precision?))
11348                    }
11349                }
11350                Keyword::INT8 => {
11351                    let optional_precision = self.parse_optional_precision();
11352                    if self.parse_keyword(Keyword::UNSIGNED) {
11353                        Ok(DataType::Int8Unsigned(optional_precision?))
11354                    } else {
11355                        Ok(DataType::Int8(optional_precision?))
11356                    }
11357                }
11358                Keyword::INT16 => Ok(DataType::Int16),
11359                Keyword::INT32 => Ok(DataType::Int32),
11360                Keyword::INT64 => Ok(DataType::Int64),
11361                Keyword::INT128 => Ok(DataType::Int128),
11362                Keyword::INT256 => Ok(DataType::Int256),
11363                Keyword::INTEGER => {
11364                    let optional_precision = self.parse_optional_precision();
11365                    if self.parse_keyword(Keyword::UNSIGNED) {
11366                        Ok(DataType::IntegerUnsigned(optional_precision?))
11367                    } else {
11368                        if dialect.supports_data_type_signed_suffix() {
11369                            let _ = self.parse_keyword(Keyword::SIGNED);
11370                        }
11371                        Ok(DataType::Integer(optional_precision?))
11372                    }
11373                }
11374                Keyword::BIGINT => {
11375                    let optional_precision = self.parse_optional_precision();
11376                    if self.parse_keyword(Keyword::UNSIGNED) {
11377                        Ok(DataType::BigIntUnsigned(optional_precision?))
11378                    } else {
11379                        if dialect.supports_data_type_signed_suffix() {
11380                            let _ = self.parse_keyword(Keyword::SIGNED);
11381                        }
11382                        Ok(DataType::BigInt(optional_precision?))
11383                    }
11384                }
11385                Keyword::HUGEINT => Ok(DataType::HugeInt),
11386                Keyword::UBIGINT => Ok(DataType::UBigInt),
11387                Keyword::UHUGEINT => Ok(DataType::UHugeInt),
11388                Keyword::USMALLINT => Ok(DataType::USmallInt),
11389                Keyword::UTINYINT => Ok(DataType::UTinyInt),
11390                Keyword::UINT8 => Ok(DataType::UInt8),
11391                Keyword::UINT16 => Ok(DataType::UInt16),
11392                Keyword::UINT32 => Ok(DataType::UInt32),
11393                Keyword::UINT64 => Ok(DataType::UInt64),
11394                Keyword::UINT128 => Ok(DataType::UInt128),
11395                Keyword::UINT256 => Ok(DataType::UInt256),
11396                Keyword::VARCHAR => Ok(DataType::Varchar(self.parse_optional_character_length()?)),
11397                Keyword::NVARCHAR => {
11398                    Ok(DataType::Nvarchar(self.parse_optional_character_length()?))
11399                }
11400                Keyword::CHARACTER => {
11401                    if self.parse_keyword(Keyword::VARYING) {
11402                        Ok(DataType::CharacterVarying(
11403                            self.parse_optional_character_length()?,
11404                        ))
11405                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
11406                        Ok(DataType::CharacterLargeObject(
11407                            self.parse_optional_precision()?,
11408                        ))
11409                    } else {
11410                        Ok(DataType::Character(self.parse_optional_character_length()?))
11411                    }
11412                }
11413                Keyword::CHAR => {
11414                    if self.parse_keyword(Keyword::VARYING) {
11415                        Ok(DataType::CharVarying(
11416                            self.parse_optional_character_length()?,
11417                        ))
11418                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
11419                        Ok(DataType::CharLargeObject(self.parse_optional_precision()?))
11420                    } else {
11421                        Ok(DataType::Char(self.parse_optional_character_length()?))
11422                    }
11423                }
11424                Keyword::CLOB => Ok(DataType::Clob(self.parse_optional_precision()?)),
11425                Keyword::BINARY => Ok(DataType::Binary(self.parse_optional_precision()?)),
11426                Keyword::VARBINARY => Ok(DataType::Varbinary(self.parse_optional_binary_length()?)),
11427                Keyword::BLOB => Ok(DataType::Blob(self.parse_optional_precision()?)),
11428                Keyword::TINYBLOB => Ok(DataType::TinyBlob),
11429                Keyword::MEDIUMBLOB => Ok(DataType::MediumBlob),
11430                Keyword::LONGBLOB => Ok(DataType::LongBlob),
11431                Keyword::BYTES => Ok(DataType::Bytes(self.parse_optional_precision()?)),
11432                Keyword::BIT => {
11433                    if self.parse_keyword(Keyword::VARYING) {
11434                        Ok(DataType::BitVarying(self.parse_optional_precision()?))
11435                    } else {
11436                        Ok(DataType::Bit(self.parse_optional_precision()?))
11437                    }
11438                }
11439                Keyword::VARBIT => Ok(DataType::VarBit(self.parse_optional_precision()?)),
11440                Keyword::UUID => Ok(DataType::Uuid),
11441                Keyword::DATE => Ok(DataType::Date),
11442                Keyword::DATE32 => Ok(DataType::Date32),
11443                Keyword::DATETIME => Ok(DataType::Datetime(self.parse_optional_precision()?)),
11444                Keyword::DATETIME64 => {
11445                    self.prev_token();
11446                    let (precision, time_zone) = self.parse_datetime_64()?;
11447                    Ok(DataType::Datetime64(precision, time_zone))
11448                }
11449                Keyword::TIMESTAMP => {
11450                    let precision = self.parse_optional_precision()?;
11451                    let tz = if self.parse_keyword(Keyword::WITH) {
11452                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
11453                        TimezoneInfo::WithTimeZone
11454                    } else if self.parse_keyword(Keyword::WITHOUT) {
11455                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
11456                        TimezoneInfo::WithoutTimeZone
11457                    } else {
11458                        TimezoneInfo::None
11459                    };
11460                    Ok(DataType::Timestamp(precision, tz))
11461                }
11462                Keyword::TIMESTAMPTZ => Ok(DataType::Timestamp(
11463                    self.parse_optional_precision()?,
11464                    TimezoneInfo::Tz,
11465                )),
11466                Keyword::TIMESTAMP_NTZ => {
11467                    Ok(DataType::TimestampNtz(self.parse_optional_precision()?))
11468                }
11469                Keyword::TIME => {
11470                    let precision = self.parse_optional_precision()?;
11471                    let tz = if self.parse_keyword(Keyword::WITH) {
11472                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
11473                        TimezoneInfo::WithTimeZone
11474                    } else if self.parse_keyword(Keyword::WITHOUT) {
11475                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
11476                        TimezoneInfo::WithoutTimeZone
11477                    } else {
11478                        TimezoneInfo::None
11479                    };
11480                    Ok(DataType::Time(precision, tz))
11481                }
11482                Keyword::TIMETZ => Ok(DataType::Time(
11483                    self.parse_optional_precision()?,
11484                    TimezoneInfo::Tz,
11485                )),
11486                Keyword::INTERVAL => {
11487                    if self.dialect.supports_interval_options() {
11488                        let fields = self.maybe_parse_optional_interval_fields()?;
11489                        let precision = self.parse_optional_precision()?;
11490                        Ok(DataType::Interval { fields, precision })
11491                    } else {
11492                        Ok(DataType::Interval {
11493                            fields: None,
11494                            precision: None,
11495                        })
11496                    }
11497                }
11498                Keyword::JSON => Ok(DataType::JSON),
11499                Keyword::JSONB => Ok(DataType::JSONB),
11500                Keyword::REGCLASS => Ok(DataType::Regclass),
11501                Keyword::STRING => Ok(DataType::String(self.parse_optional_precision()?)),
11502                Keyword::FIXEDSTRING => {
11503                    self.expect_token(&Token::LParen)?;
11504                    let character_length = self.parse_literal_uint()?;
11505                    self.expect_token(&Token::RParen)?;
11506                    Ok(DataType::FixedString(character_length))
11507                }
11508                Keyword::TEXT => Ok(DataType::Text),
11509                Keyword::TINYTEXT => Ok(DataType::TinyText),
11510                Keyword::MEDIUMTEXT => Ok(DataType::MediumText),
11511                Keyword::LONGTEXT => Ok(DataType::LongText),
11512                Keyword::BYTEA => Ok(DataType::Bytea),
11513                Keyword::NUMERIC => Ok(DataType::Numeric(
11514                    self.parse_exact_number_optional_precision_scale()?,
11515                )),
11516                Keyword::DECIMAL => {
11517                    let precision = self.parse_exact_number_optional_precision_scale()?;
11518
11519                    if self.parse_keyword(Keyword::UNSIGNED) {
11520                        Ok(DataType::DecimalUnsigned(precision))
11521                    } else {
11522                        Ok(DataType::Decimal(precision))
11523                    }
11524                }
11525                Keyword::DEC => {
11526                    let precision = self.parse_exact_number_optional_precision_scale()?;
11527
11528                    if self.parse_keyword(Keyword::UNSIGNED) {
11529                        Ok(DataType::DecUnsigned(precision))
11530                    } else {
11531                        Ok(DataType::Dec(precision))
11532                    }
11533                }
11534                Keyword::BIGNUMERIC => Ok(DataType::BigNumeric(
11535                    self.parse_exact_number_optional_precision_scale()?,
11536                )),
11537                Keyword::BIGDECIMAL => Ok(DataType::BigDecimal(
11538                    self.parse_exact_number_optional_precision_scale()?,
11539                )),
11540                Keyword::ENUM => Ok(DataType::Enum(self.parse_enum_values()?, None)),
11541                Keyword::ENUM8 => Ok(DataType::Enum(self.parse_enum_values()?, Some(8))),
11542                Keyword::ENUM16 => Ok(DataType::Enum(self.parse_enum_values()?, Some(16))),
11543                Keyword::SET => Ok(DataType::Set(self.parse_string_values()?)),
11544                Keyword::ARRAY => {
11545                    if dialect_of!(self is SnowflakeDialect) {
11546                        Ok(DataType::Array(ArrayElemTypeDef::None))
11547                    } else if dialect_of!(self is ClickHouseDialect) {
11548                        Ok(self.parse_sub_type(|internal_type| {
11549                            DataType::Array(ArrayElemTypeDef::Parenthesis(internal_type))
11550                        })?)
11551                    } else {
11552                        self.expect_token(&Token::Lt)?;
11553                        let (inside_type, _trailing_bracket) = self.parse_data_type_helper()?;
11554                        trailing_bracket = self.expect_closing_angle_bracket(_trailing_bracket)?;
11555                        Ok(DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
11556                            inside_type,
11557                        ))))
11558                    }
11559                }
11560                Keyword::STRUCT if dialect_is!(dialect is DuckDbDialect) => {
11561                    self.prev_token();
11562                    let field_defs = self.parse_duckdb_struct_type_def()?;
11563                    Ok(DataType::Struct(field_defs, StructBracketKind::Parentheses))
11564                }
11565                Keyword::STRUCT if dialect_is!(dialect is BigQueryDialect | GenericDialect) => {
11566                    self.prev_token();
11567                    let (field_defs, _trailing_bracket) =
11568                        self.parse_struct_type_def(Self::parse_struct_field_def)?;
11569                    trailing_bracket = _trailing_bracket;
11570                    Ok(DataType::Struct(
11571                        field_defs,
11572                        StructBracketKind::AngleBrackets,
11573                    ))
11574                }
11575                Keyword::UNION if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
11576                    self.prev_token();
11577                    let fields = self.parse_union_type_def()?;
11578                    Ok(DataType::Union(fields))
11579                }
11580                Keyword::NULLABLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11581                    Ok(self.parse_sub_type(DataType::Nullable)?)
11582                }
11583                Keyword::LOWCARDINALITY if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11584                    Ok(self.parse_sub_type(DataType::LowCardinality)?)
11585                }
11586                Keyword::MAP if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11587                    self.prev_token();
11588                    let (key_data_type, value_data_type) = self.parse_click_house_map_def()?;
11589                    Ok(DataType::Map(
11590                        Box::new(key_data_type),
11591                        Box::new(value_data_type),
11592                    ))
11593                }
11594                Keyword::NESTED if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11595                    self.expect_token(&Token::LParen)?;
11596                    let field_defs = self.parse_comma_separated(Parser::parse_column_def)?;
11597                    self.expect_token(&Token::RParen)?;
11598                    Ok(DataType::Nested(field_defs))
11599                }
11600                Keyword::TUPLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11601                    self.prev_token();
11602                    let field_defs = self.parse_click_house_tuple_def()?;
11603                    Ok(DataType::Tuple(field_defs))
11604                }
11605                Keyword::SETOF => {
11606                    let inner = self.parse_data_type()?;
11607                    Ok(DataType::SetOf(Box::new(inner)))
11608                }
11609                Keyword::TRIGGER => Ok(DataType::Trigger),
11610                Keyword::ANY if self.peek_keyword(Keyword::TYPE) => {
11611                    let _ = self.parse_keyword(Keyword::TYPE);
11612                    Ok(DataType::AnyType)
11613                }
11614                Keyword::TABLE => {
11615                    // an LParen after the TABLE keyword indicates that table columns are being defined
11616                    // whereas no LParen indicates an anonymous table expression will be returned
11617                    if self.peek_token() == Token::LParen {
11618                        let columns = self.parse_returns_table_columns()?;
11619                        Ok(DataType::Table(Some(columns)))
11620                    } else {
11621                        Ok(DataType::Table(None))
11622                    }
11623                }
11624                Keyword::SIGNED => {
11625                    if self.parse_keyword(Keyword::INTEGER) {
11626                        Ok(DataType::SignedInteger)
11627                    } else {
11628                        Ok(DataType::Signed)
11629                    }
11630                }
11631                Keyword::UNSIGNED => {
11632                    if self.parse_keyword(Keyword::INTEGER) {
11633                        Ok(DataType::UnsignedInteger)
11634                    } else {
11635                        Ok(DataType::Unsigned)
11636                    }
11637                }
11638                Keyword::TSVECTOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
11639                    Ok(DataType::TsVector)
11640                }
11641                Keyword::TSQUERY if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
11642                    Ok(DataType::TsQuery)
11643                }
11644                _ => {
11645                    self.prev_token();
11646                    let type_name = self.parse_object_name(false)?;
11647                    if let Some(modifiers) = self.parse_optional_type_modifiers()? {
11648                        Ok(DataType::Custom(type_name, modifiers))
11649                    } else {
11650                        Ok(DataType::Custom(type_name, vec![]))
11651                    }
11652                }
11653            },
11654            _ => self.expected_at("a data type name", next_token_index),
11655        }?;
11656
11657        if self.dialect.supports_array_typedef_with_brackets() {
11658            while self.consume_token(&Token::LBracket) {
11659                // Parse optional array data type size
11660                let size = self.maybe_parse(|p| p.parse_literal_uint())?;
11661                self.expect_token(&Token::RBracket)?;
11662                data = DataType::Array(ArrayElemTypeDef::SquareBracket(Box::new(data), size))
11663            }
11664        }
11665        Ok((data, trailing_bracket))
11666    }
11667
11668    fn parse_returns_table_column(&mut self) -> Result<ColumnDef, ParserError> {
11669        self.parse_column_def()
11670    }
11671
11672    fn parse_returns_table_columns(&mut self) -> Result<Vec<ColumnDef>, ParserError> {
11673        self.expect_token(&Token::LParen)?;
11674        let columns = self.parse_comma_separated(Parser::parse_returns_table_column)?;
11675        self.expect_token(&Token::RParen)?;
11676        Ok(columns)
11677    }
11678
11679    pub fn parse_string_values(&mut self) -> Result<Vec<String>, ParserError> {
11680        self.expect_token(&Token::LParen)?;
11681        let mut values = Vec::new();
11682        loop {
11683            let next_token = self.next_token();
11684            match next_token.token {
11685                Token::SingleQuotedString(value) => values.push(value),
11686                _ => self.expected("a string", next_token)?,
11687            }
11688            let next_token = self.next_token();
11689            match next_token.token {
11690                Token::Comma => (),
11691                Token::RParen => break,
11692                _ => self.expected(", or )", next_token)?,
11693            }
11694        }
11695        Ok(values)
11696    }
11697
11698    /// Strictly parse `identifier AS identifier`
11699    pub fn parse_identifier_with_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
11700        let ident = self.parse_identifier()?;
11701        self.expect_keyword_is(Keyword::AS)?;
11702        let alias = self.parse_identifier()?;
11703        Ok(IdentWithAlias { ident, alias })
11704    }
11705
11706    /// Parse `identifier [AS] identifier` where the AS keyword is optional
11707    fn parse_identifier_with_optional_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
11708        let ident = self.parse_identifier()?;
11709        let _after_as = self.parse_keyword(Keyword::AS);
11710        let alias = self.parse_identifier()?;
11711        Ok(IdentWithAlias { ident, alias })
11712    }
11713
11714    /// Parse comma-separated list of parenthesized queries for pipe operators
11715    fn parse_pipe_operator_queries(&mut self) -> Result<Vec<Query>, ParserError> {
11716        self.parse_comma_separated(|parser| {
11717            parser.expect_token(&Token::LParen)?;
11718            let query = parser.parse_query()?;
11719            parser.expect_token(&Token::RParen)?;
11720            Ok(*query)
11721        })
11722    }
11723
11724    /// Parse the set quantifier for pipe operators that require DISTINCT, e.g. INTERSECT and EXCEPT.
11725    fn parse_distinct_required_set_quantifier(
11726        &mut self,
11727        operator_name: &str,
11728    ) -> Result<SetQuantifier, ParserError> {
11729        let quantifier = self.parse_set_quantifier(&Some(SetOperator::Intersect));
11730        match quantifier {
11731            SetQuantifier::Distinct | SetQuantifier::DistinctByName => Ok(quantifier),
11732            _ => Err(ParserError::ParserError(format!(
11733                "{operator_name} pipe operator requires DISTINCT modifier",
11734            ))),
11735        }
11736    }
11737
11738    /// Parse optional identifier alias (with or without AS keyword)
11739    fn parse_identifier_optional_alias(&mut self) -> Result<Option<Ident>, ParserError> {
11740        if self.parse_keyword(Keyword::AS) {
11741            Ok(Some(self.parse_identifier()?))
11742        } else {
11743            // Check if the next token is an identifier (implicit alias)
11744            self.maybe_parse(|parser| parser.parse_identifier())
11745        }
11746    }
11747
11748    /// Optionally parses an alias for a select list item
11749    fn maybe_parse_select_item_alias(&mut self) -> Result<Option<Ident>, ParserError> {
11750        fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
11751            parser.dialect.is_select_item_alias(explicit, kw, parser)
11752        }
11753        self.parse_optional_alias_inner(None, validator)
11754    }
11755
11756    /// Optionally parses an alias for a table like in `... FROM generate_series(1, 10) AS t (col)`.
11757    /// In this case, the alias is allowed to optionally name the columns in the table, in
11758    /// addition to the table itself.
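    ///
    /// A minimal usage sketch (illustrative; the SQL fragment and `GenericDialect` are
    /// assumptions for demonstration):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// // illustrative fragment: the alias part that would follow a table factor
    /// let mut parser = Parser::new(&dialect).try_with_sql("AS t (a, b)").unwrap();
    /// let alias = parser.maybe_parse_table_alias().unwrap().unwrap();
    /// assert_eq!(alias.name.value, "t");
    /// assert_eq!(alias.columns.len(), 2);
    /// ```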
11759    pub fn maybe_parse_table_alias(&mut self) -> Result<Option<TableAlias>, ParserError> {
11760        fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
11761            parser.dialect.is_table_factor_alias(explicit, kw, parser)
11762        }
11763        let explicit = self.peek_keyword(Keyword::AS);
11764        match self.parse_optional_alias_inner(None, validator)? {
11765            Some(name) => {
11766                let columns = self.parse_table_alias_column_defs()?;
11767                Ok(Some(TableAlias {
11768                    explicit,
11769                    name,
11770                    columns,
11771                }))
11772            }
11773            None => Ok(None),
11774        }
11775    }
11776
11777    fn parse_table_index_hints(&mut self) -> Result<Vec<TableIndexHints>, ParserError> {
11778        let mut hints = vec![];
11779        while let Some(hint_type) =
11780            self.parse_one_of_keywords(&[Keyword::USE, Keyword::IGNORE, Keyword::FORCE])
11781        {
11782            let hint_type = match hint_type {
11783                Keyword::USE => TableIndexHintType::Use,
11784                Keyword::IGNORE => TableIndexHintType::Ignore,
11785                Keyword::FORCE => TableIndexHintType::Force,
11786                _ => {
11787                    return self.expected(
11788                        "expected to match USE/IGNORE/FORCE keyword",
11789                        self.peek_token(),
11790                    )
11791                }
11792            };
11793            let index_type = match self.parse_one_of_keywords(&[Keyword::INDEX, Keyword::KEY]) {
11794                Some(Keyword::INDEX) => TableIndexType::Index,
11795                Some(Keyword::KEY) => TableIndexType::Key,
11796                _ => {
11797                    return self.expected("expected to match INDEX/KEY keyword", self.peek_token())
11798                }
11799            };
11800            let for_clause = if self.parse_keyword(Keyword::FOR) {
11801                let clause = if self.parse_keyword(Keyword::JOIN) {
11802                    TableIndexHintForClause::Join
11803                } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
11804                    TableIndexHintForClause::OrderBy
11805                } else if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
11806                    TableIndexHintForClause::GroupBy
11807                } else {
11808                    return self.expected(
11809                        "expected to match JOIN/ORDER BY/GROUP BY table hint in the FOR clause",
11810                        self.peek_token(),
11811                    );
11812                };
11813                Some(clause)
11814            } else {
11815                None
11816            };
11817
11818            self.expect_token(&Token::LParen)?;
11819            let index_names = if self.peek_token().token != Token::RParen {
11820                self.parse_comma_separated(Parser::parse_identifier)?
11821            } else {
11822                vec![]
11823            };
11824            self.expect_token(&Token::RParen)?;
11825            hints.push(TableIndexHints {
11826                hint_type,
11827                index_type,
11828                for_clause,
11829                index_names,
11830            });
11831        }
11832        Ok(hints)
11833    }
11834
11835    /// Wrapper around `parse_optional_alias_inner`, kept for backwards compatibility;
11836    /// new flows should use the context-specific methods such as `maybe_parse_select_item_alias`
11837    /// and `maybe_parse_table_alias`.
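    ///
    /// A minimal usage sketch (illustrative; the SQL fragment, dialect, and reserved-keyword
    /// list are assumptions for demonstration):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::keywords::Keyword;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("t1 AS x").unwrap();
    /// let _name = parser.parse_identifier().unwrap();
    /// let alias = parser.parse_optional_alias(&[Keyword::FROM]).unwrap();
    /// assert_eq!(alias.unwrap().value, "x");
    /// ```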
11838    pub fn parse_optional_alias(
11839        &mut self,
11840        reserved_kwds: &[Keyword],
11841    ) -> Result<Option<Ident>, ParserError> {
11842        fn validator(_explicit: bool, _kw: &Keyword, _parser: &mut Parser) -> bool {
11843            false
11844        }
11845        self.parse_optional_alias_inner(Some(reserved_kwds), validator)
11846    }
11847
11848    /// Parses an optional alias after a SQL element such as a select list item
11849    /// or a table name.
11850    ///
11851    /// This method accepts an optional list of reserved keywords or a function
11852    /// to call to validate if a keyword should be parsed as an alias, to allow
11853    /// callers to customize the parsing logic based on their context.
11854    fn parse_optional_alias_inner<F>(
11855        &mut self,
11856        reserved_kwds: Option<&[Keyword]>,
11857        validator: F,
11858    ) -> Result<Option<Ident>, ParserError>
11859    where
11860        F: Fn(bool, &Keyword, &mut Parser) -> bool,
11861    {
11862        let after_as = self.parse_keyword(Keyword::AS);
11863
11864        let next_token = self.next_token();
11865        match next_token.token {
11866            // Accepts a keyword as an alias if the AS keyword explicitly indicates an alias or if the
11867            // caller provided a list of reserved keywords and the keyword is not on that list.
11868            Token::Word(w)
11869                if reserved_kwds.is_some()
11870                    && (after_as || reserved_kwds.is_some_and(|x| !x.contains(&w.keyword))) =>
11871            {
11872                Ok(Some(w.into_ident(next_token.span)))
11873            }
11874            // Accepts a keyword as an alias based on the caller's context, i.e. which SQL element
11875            // this word could be an alias of, as decided by the validator callback. This allows for
11876            // dialect-specific logic.
11877            Token::Word(w) if validator(after_as, &w.keyword, self) => {
11878                Ok(Some(w.into_ident(next_token.span)))
11879            }
11880            // For backwards-compatibility, we accept quoted strings as aliases regardless of the context.
11881            Token::SingleQuotedString(s) => Ok(Some(Ident::with_quote('\'', s))),
11882            Token::DoubleQuotedString(s) => Ok(Some(Ident::with_quote('\"', s))),
11883            _ => {
11884                if after_as {
11885                    return self.expected("an identifier after AS", next_token);
11886                }
11887                self.prev_token();
11888                Ok(None) // no alias found
11889            }
11890        }
11891    }
11892
11893    pub fn parse_optional_group_by(&mut self) -> Result<Option<GroupByExpr>, ParserError> {
11894        if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
11895            let expressions = if self.parse_keyword(Keyword::ALL) {
11896                None
11897            } else {
11898                Some(self.parse_comma_separated(Parser::parse_group_by_expr)?)
11899            };
11900
11901            let mut modifiers = vec![];
11902            if self.dialect.supports_group_by_with_modifier() {
11903                loop {
11904                    if !self.parse_keyword(Keyword::WITH) {
11905                        break;
11906                    }
11907                    let keyword = self.expect_one_of_keywords(&[
11908                        Keyword::ROLLUP,
11909                        Keyword::CUBE,
11910                        Keyword::TOTALS,
11911                    ])?;
11912                    modifiers.push(match keyword {
11913                        Keyword::ROLLUP => GroupByWithModifier::Rollup,
11914                        Keyword::CUBE => GroupByWithModifier::Cube,
11915                        Keyword::TOTALS => GroupByWithModifier::Totals,
11916                        _ => {
11917                            return parser_err!(
11918                                "BUG: expected to match GroupBy modifier keyword",
11919                                self.peek_token().span.start
11920                            )
11921                        }
11922                    });
11923                }
11924            }
11925            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
11926                self.expect_token(&Token::LParen)?;
11927                let result = self.parse_comma_separated(|p| {
11928                    if p.peek_token_ref().token == Token::LParen {
11929                        p.parse_tuple(true, true)
11930                    } else {
11931                        Ok(vec![p.parse_expr()?])
11932                    }
11933                })?;
11934                self.expect_token(&Token::RParen)?;
11935                modifiers.push(GroupByWithModifier::GroupingSets(Expr::GroupingSets(
11936                    result,
11937                )));
11938            };
11939            let group_by = match expressions {
11940                None => GroupByExpr::All(modifiers),
11941                Some(exprs) => GroupByExpr::Expressions(exprs, modifiers),
11942            };
11943            Ok(Some(group_by))
11944        } else {
11945            Ok(None)
11946        }
11947    }
11948
11949    pub fn parse_optional_order_by(&mut self) -> Result<Option<OrderBy>, ParserError> {
11950        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
11951            let order_by =
11952                if self.dialect.supports_order_by_all() && self.parse_keyword(Keyword::ALL) {
11953                    let order_by_options = self.parse_order_by_options()?;
11954                    OrderBy {
11955                        kind: OrderByKind::All(order_by_options),
11956                        interpolate: None,
11957                    }
11958                } else {
11959                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
11960                    let interpolate = if dialect_of!(self is ClickHouseDialect | GenericDialect) {
11961                        self.parse_interpolations()?
11962                    } else {
11963                        None
11964                    };
11965                    OrderBy {
11966                        kind: OrderByKind::Expressions(exprs),
11967                        interpolate,
11968                    }
11969                };
11970            Ok(Some(order_by))
11971        } else {
11972            Ok(None)
11973        }
11974    }
11975
11976    fn parse_optional_limit_clause(&mut self) -> Result<Option<LimitClause>, ParserError> {
11977        let mut offset = if self.parse_keyword(Keyword::OFFSET) {
11978            Some(self.parse_offset()?)
11979        } else {
11980            None
11981        };
11982
11983        let (limit, limit_by) = if self.parse_keyword(Keyword::LIMIT) {
11984            let expr = self.parse_limit()?;
11985
11986            if self.dialect.supports_limit_comma()
11987                && offset.is_none()
11988                && expr.is_some() // ALL not supported with comma
11989                && self.consume_token(&Token::Comma)
11990            {
11991                let offset = expr.ok_or_else(|| {
11992                    ParserError::ParserError(
11993                        "Missing offset for LIMIT <offset>, <limit>".to_string(),
11994                    )
11995                })?;
11996                return Ok(Some(LimitClause::OffsetCommaLimit {
11997                    offset,
11998                    limit: self.parse_expr()?,
11999                }));
12000            }
12001
12002            let limit_by = if dialect_of!(self is ClickHouseDialect | GenericDialect)
12003                && self.parse_keyword(Keyword::BY)
12004            {
12005                Some(self.parse_comma_separated(Parser::parse_expr)?)
12006            } else {
12007                None
12008            };
12009
12010            (Some(expr), limit_by)
12011        } else {
12012            (None, None)
12013        };
12014
12015        if offset.is_none() && limit.is_some() && self.parse_keyword(Keyword::OFFSET) {
12016            offset = Some(self.parse_offset()?);
12017        }
12018
12019        if offset.is_some() || (limit.is_some() && limit != Some(None)) || limit_by.is_some() {
12020            Ok(Some(LimitClause::LimitOffset {
12021                limit: limit.unwrap_or_default(),
12022                offset,
12023                limit_by: limit_by.unwrap_or_default(),
12024            }))
12025        } else {
12026            Ok(None)
12027        }
12028    }
12029
12030    /// Parse a table object for insertion
12031    /// e.g. `some_database.some_table` or `FUNCTION some_table_func(...)`
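    ///
    /// A minimal usage sketch (illustrative; the table name and `GenericDialect` are
    /// assumptions for demonstration):
    ///
    /// ```rust
    /// use sqlparser::ast::TableObject;
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("some_database.some_table").unwrap();
    /// let table = parser.parse_table_object().unwrap();
    /// assert!(matches!(table, TableObject::TableName(_)));
    /// ```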
12032    pub fn parse_table_object(&mut self) -> Result<TableObject, ParserError> {
12033        if self.dialect.supports_insert_table_function() && self.parse_keyword(Keyword::FUNCTION) {
12034            let fn_name = self.parse_object_name(false)?;
12035            self.parse_function_call(fn_name)
12036                .map(TableObject::TableFunction)
12037        } else {
12038            self.parse_object_name(false).map(TableObject::TableName)
12039        }
12040    }
12041
12042    /// Parse a possibly qualified, possibly quoted identifier, e.g.
12043    /// `foo` or `myschema."table"`
12044    ///
12045    /// The `in_table_clause` parameter indicates whether the object name is a table in a FROM, JOIN,
12046    /// or similar table clause. Currently, this is used only to support unquoted hyphenated identifiers
12047    /// in this context on BigQuery.
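    ///
    /// A minimal usage sketch (illustrative; the object name and `GenericDialect` are
    /// assumptions for demonstration):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql(r#"myschema."table""#).unwrap();
    /// let name = parser.parse_object_name(false).unwrap();
    /// assert_eq!(name.to_string(), r#"myschema."table""#);
    /// ```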
12048    pub fn parse_object_name(&mut self, in_table_clause: bool) -> Result<ObjectName, ParserError> {
12049        self.parse_object_name_inner(in_table_clause, false)
12050    }
12051
12052    /// Parse a possibly qualified, possibly quoted identifier, e.g.
12053    /// `foo` or `myschema."table"`
12054    ///
12055    /// The `in_table_clause` parameter indicates whether the object name is a table in a FROM, JOIN,
12056    /// or similar table clause. Currently, this is used only to support unquoted hyphenated identifiers
12057    /// in this context on BigQuery.
12058    ///
12059    /// The `allow_wildcards` parameter indicates whether to allow wildcards in the object name,
12060    /// e.g. *, *.*, `foo`.*, or "foo"."bar"
12061    fn parse_object_name_inner(
12062        &mut self,
12063        in_table_clause: bool,
12064        allow_wildcards: bool,
12065    ) -> Result<ObjectName, ParserError> {
12066        let mut parts = vec![];
12067        if dialect_of!(self is BigQueryDialect) && in_table_clause {
12068            loop {
12069                let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
12070                parts.push(ObjectNamePart::Identifier(ident));
12071                if !self.consume_token(&Token::Period) && !end_with_period {
12072                    break;
12073                }
12074            }
12075        } else {
12076            loop {
12077                if allow_wildcards && self.peek_token().token == Token::Mul {
12078                    let span = self.next_token().span;
12079                    parts.push(ObjectNamePart::Identifier(Ident {
12080                        value: Token::Mul.to_string(),
12081                        quote_style: None,
12082                        span,
12083                    }));
12084                } else if dialect_of!(self is BigQueryDialect) && in_table_clause {
12085                    let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
12086                    parts.push(ObjectNamePart::Identifier(ident));
12087                    if !self.consume_token(&Token::Period) && !end_with_period {
12088                        break;
12089                    }
12090                } else if self.dialect.supports_object_name_double_dot_notation()
12091                    && parts.len() == 1
12092                    && matches!(self.peek_token().token, Token::Period)
12093                {
12094                    // Empty string here means default schema
12095                    parts.push(ObjectNamePart::Identifier(Ident::new("")));
12096                } else {
12097                    let ident = self.parse_identifier()?;
12098                    let part = if self
12099                        .dialect
12100                        .is_identifier_generating_function_name(&ident, &parts)
12101                    {
12102                        self.expect_token(&Token::LParen)?;
12103                        let args: Vec<FunctionArg> =
12104                            self.parse_comma_separated0(Self::parse_function_args, Token::RParen)?;
12105                        self.expect_token(&Token::RParen)?;
12106                        ObjectNamePart::Function(ObjectNamePartFunction { name: ident, args })
12107                    } else {
12108                        ObjectNamePart::Identifier(ident)
12109                    };
12110                    parts.push(part);
12111                }
12112
12113                if !self.consume_token(&Token::Period) {
12114                    break;
12115                }
12116            }
12117        }
12118
12119        // BigQuery accepts any number of quoted identifiers in a table name.
12120        // https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#quoted_identifiers
12121        if dialect_of!(self is BigQueryDialect)
12122            && parts.iter().any(|part| {
12123                part.as_ident()
12124                    .is_some_and(|ident| ident.value.contains('.'))
12125            })
12126        {
12127            parts = parts
12128                .into_iter()
12129                .flat_map(|part| match part.as_ident() {
12130                    Some(ident) => ident
12131                        .value
12132                        .split('.')
12133                        .map(|value| {
12134                            ObjectNamePart::Identifier(Ident {
12135                                value: value.into(),
12136                                quote_style: ident.quote_style,
12137                                span: ident.span,
12138                            })
12139                        })
12140                        .collect::<Vec<_>>(),
12141                    None => vec![part],
12142                })
12143                .collect()
12144        }
12145
12146        Ok(ObjectName(parts))
12147    }
12148
12149    /// Parse identifiers
12150    pub fn parse_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
12151        let mut idents = vec![];
12152        loop {
12153            match &self.peek_token_ref().token {
12154                Token::Word(w) => {
12155                    idents.push(w.clone().into_ident(self.peek_token_ref().span));
12156                }
12157                Token::EOF | Token::Eq | Token::SemiColon => break,
12158                _ => {}
12159            }
12160            self.advance_token();
12161        }
12162        Ok(idents)
12163    }
12164
12165    /// Parse identifiers of form ident1[.identN]*
12166    ///
12167    /// Similar in functionality to [parse_identifiers], the difference being that this
12168    /// function is much stricter: it only accepts a valid multipart identifier and fails
12169    /// if it encounters any extraneous tokens.
12170    ///
12171    /// For example:
12172    ///
12173    /// ```rust
12174    /// use sqlparser::ast::Ident;
12175    /// use sqlparser::dialect::GenericDialect;
12176    /// use sqlparser::parser::Parser;
12177    ///
12178    /// let dialect = GenericDialect {};
12179    /// let expected = vec![Ident::new("one"), Ident::new("two")];
12180    ///
12181    /// // expected usage
12182    /// let sql = "one.two";
12183    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
12184    /// let actual = parser.parse_multipart_identifier().unwrap();
12185    /// assert_eq!(&actual, &expected);
12186    ///
12187    /// // parse_identifiers is more loose on what it allows, parsing successfully
12188    /// let sql = "one + two";
12189    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
12190    /// let actual = parser.parse_identifiers().unwrap();
12191    /// assert_eq!(&actual, &expected);
12192    ///
12193    /// // expected to strictly fail due to + separator
12194    /// let sql = "one + two";
12195    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
12196    /// let actual = parser.parse_multipart_identifier().unwrap_err();
12197    /// assert_eq!(
12198    ///     actual.to_string(),
12199    ///     "sql parser error: Unexpected token in identifier: +"
12200    /// );
12201    /// ```
12202    ///
12203    /// [parse_identifiers]: Parser::parse_identifiers
12204    pub fn parse_multipart_identifier(&mut self) -> Result<Vec<Ident>, ParserError> {
12205        let mut idents = vec![];
12206
12207        // expecting at least one word for identifier
12208        let next_token = self.next_token();
12209        match next_token.token {
12210            Token::Word(w) => idents.push(w.into_ident(next_token.span)),
12211            Token::EOF => {
12212                return Err(ParserError::ParserError(
12213                    "Empty input when parsing identifier".to_string(),
12214                ))?
12215            }
12216            token => {
12217                return Err(ParserError::ParserError(format!(
12218                    "Unexpected token in identifier: {token}"
12219                )))?
12220            }
12221        };
12222
12223        // parse optional subsequent parts if they exist
12224        loop {
12225            match self.next_token().token {
12226                // ensure that an optional period is followed by another identifier
12227                Token::Period => {
12228                    let next_token = self.next_token();
12229                    match next_token.token {
12230                        Token::Word(w) => idents.push(w.into_ident(next_token.span)),
12231                        Token::EOF => {
12232                            return Err(ParserError::ParserError(
12233                                "Trailing period in identifier".to_string(),
12234                            ))?
12235                        }
12236                        token => {
12237                            return Err(ParserError::ParserError(format!(
12238                                "Unexpected token following period in identifier: {token}"
12239                            )))?
12240                        }
12241                    }
12242                }
12243                Token::EOF => break,
12244                token => {
12245                    return Err(ParserError::ParserError(format!(
12246                        "Unexpected token in identifier: {token}"
12247                    )))?;
12248                }
12249            }
12250        }
12251
12252        Ok(idents)
12253    }
12254
12255    /// Parse a simple one-word identifier (possibly quoted, possibly a keyword)
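    ///
    /// A minimal usage sketch (illustrative; the quoted identifier and `GenericDialect` are
    /// assumptions for demonstration):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql(r#""quoted id""#).unwrap();
    /// let ident = parser.parse_identifier().unwrap();
    /// assert_eq!(ident.value, "quoted id");
    /// assert_eq!(ident.quote_style, Some('"'));
    /// ```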
12256    pub fn parse_identifier(&mut self) -> Result<Ident, ParserError> {
12257        let next_token = self.next_token();
12258        match next_token.token {
12259            Token::Word(w) => Ok(w.into_ident(next_token.span)),
12260            Token::SingleQuotedString(s) => Ok(Ident::with_quote('\'', s)),
12261            Token::DoubleQuotedString(s) => Ok(Ident::with_quote('\"', s)),
12262            _ => self.expected("identifier", next_token),
12263        }
12264    }
12265
12266    /// On BigQuery, hyphens are permitted in unquoted identifiers inside a FROM or
12267    /// TABLE clause.
12268    ///
12269    /// The first segment must be an ordinary unquoted identifier, e.g. it must not start
12270    /// with a digit. Subsequent segments must either be valid identifiers or
12271    /// integers, e.g. foo-123 is allowed, but foo-123a is not.
12272    ///
12273    /// [BigQuery-lexical](https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical)
12274    ///
12275    /// Return a tuple of the identifier and a boolean indicating it ends with a period.
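    ///
    /// Because this method is internal, the behavior is easiest to observe through the
    /// public API. A minimal sketch (the query text is an assumption for demonstration):
    ///
    /// ```rust
    /// use sqlparser::dialect::BigQueryDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// // `foo-123` is accepted as a single hyphenated identifier in a table clause
    /// let statements = Parser::parse_sql(&BigQueryDialect {}, "SELECT * FROM foo-123.bar").unwrap();
    /// assert_eq!(statements.len(), 1);
    /// ```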
12276    fn parse_unquoted_hyphenated_identifier(&mut self) -> Result<(Ident, bool), ParserError> {
12277        match self.peek_token().token {
12278            Token::Word(w) => {
12279                let quote_style_is_none = w.quote_style.is_none();
12280                let mut requires_whitespace = false;
12281                let mut ident = w.into_ident(self.next_token().span);
12282                if quote_style_is_none {
12283                    while matches!(self.peek_token_no_skip().token, Token::Minus) {
12284                        self.next_token();
12285                        ident.value.push('-');
12286
12287                        let token = self
12288                            .next_token_no_skip()
12289                            .cloned()
12290                            .unwrap_or(TokenWithSpan::wrap(Token::EOF));
12291                        requires_whitespace = match token.token {
12292                            Token::Word(next_word) if next_word.quote_style.is_none() => {
12293                                ident.value.push_str(&next_word.value);
12294                                false
12295                            }
12296                            Token::Number(s, false) => {
12297                                // A number token can represent a decimal value ending with a period, e.g., `Number('123.')`.
12298                                // However, for an [ObjectName], it is part of a hyphenated identifier, e.g., `foo-123.bar`.
12299                                //
12300                                // If a number token is followed by a period, it is part of an [ObjectName].
12301                                // Return the identifier with `true` if the number token is followed by a period, indicating that
12302                                // parsing should continue for the next part of the hyphenated identifier.
12303                                if s.ends_with('.') {
12304                                    let Some(s) = s.split('.').next().filter(|s| {
12305                                        !s.is_empty() && s.chars().all(|c| c.is_ascii_digit())
12306                                    }) else {
12307                                        return self.expected(
12308                                            "continuation of hyphenated identifier",
12309                                            TokenWithSpan::new(Token::Number(s, false), token.span),
12310                                        );
12311                                    };
12312                                    ident.value.push_str(s);
12313                                    return Ok((ident, true));
12314                                } else {
12315                                    ident.value.push_str(&s);
12316                                }
12317                                // If next token is period, then it is part of an ObjectName and we don't expect whitespace
12318                                // after the number.
12319                                !matches!(self.peek_token().token, Token::Period)
12320                            }
12321                            _ => {
12322                                return self
12323                                    .expected("continuation of hyphenated identifier", token);
12324                            }
12325                        }
12326                    }
12327
12328                    // If the last segment was a number, we must check that it's followed by whitespace,
12329                    // otherwise foo-123a will be parsed as `foo-123` with the alias `a`.
12330                    if requires_whitespace {
12331                        let token = self.next_token();
12332                        if !matches!(token.token, Token::EOF | Token::Whitespace(_)) {
12333                            return self
12334                                .expected("whitespace following hyphenated identifier", token);
12335                        }
12336                    }
12337                }
12338                Ok((ident, false))
12339            }
12340            _ => Ok((self.parse_identifier()?, false)),
12341        }
12342    }
12343
12344    /// Parses a parenthesized, comma-separated list of column definitions within a view.
12345    fn parse_view_columns(&mut self) -> Result<Vec<ViewColumnDef>, ParserError> {
12346        if self.consume_token(&Token::LParen) {
12347            if self.peek_token().token == Token::RParen {
12348                self.next_token();
12349                Ok(vec![])
12350            } else {
12351                let cols = self.parse_comma_separated_with_trailing_commas(
12352                    Parser::parse_view_column,
12353                    self.dialect.supports_column_definition_trailing_commas(),
12354                    Self::is_reserved_for_column_alias,
12355                )?;
12356                self.expect_token(&Token::RParen)?;
12357                Ok(cols)
12358            }
12359        } else {
12360            Ok(vec![])
12361        }
12362    }
12363
12364    /// Parses a column definition within a view.
12365    fn parse_view_column(&mut self) -> Result<ViewColumnDef, ParserError> {
12366        let name = self.parse_identifier()?;
12367        let options = self.parse_view_column_options()?;
12368        let data_type = if dialect_of!(self is ClickHouseDialect) {
12369            Some(self.parse_data_type()?)
12370        } else {
12371            None
12372        };
12373        Ok(ViewColumnDef {
12374            name,
12375            data_type,
12376            options,
12377        })
12378    }
12379
12380    fn parse_view_column_options(&mut self) -> Result<Option<ColumnOptions>, ParserError> {
12381        let mut options = Vec::new();
12382        loop {
12383            let option = self.parse_optional_column_option()?;
12384            if let Some(option) = option {
12385                options.push(option);
12386            } else {
12387                break;
12388            }
12389        }
12390        if options.is_empty() {
12391            Ok(None)
12392        } else if self.dialect.supports_space_separated_column_options() {
12393            Ok(Some(ColumnOptions::SpaceSeparated(options)))
12394        } else {
12395            Ok(Some(ColumnOptions::CommaSeparated(options)))
12396        }
12397    }
12398
12399    /// Parses a parenthesized comma-separated list of unqualified, possibly quoted identifiers.
12400    /// For example: `(col1, "col 2", ...)`
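    ///
    /// A minimal usage sketch (illustrative; the column list and `GenericDialect` are
    /// assumptions for demonstration):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::{IsOptional, Parser};
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql(r#"(col1, "col 2")"#).unwrap();
    /// let cols = parser.parse_parenthesized_column_list(IsOptional::Mandatory, false).unwrap();
    /// assert_eq!(cols.len(), 2);
    /// assert_eq!(cols[1].value, "col 2");
    /// ```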
12401    pub fn parse_parenthesized_column_list(
12402        &mut self,
12403        optional: IsOptional,
12404        allow_empty: bool,
12405    ) -> Result<Vec<Ident>, ParserError> {
12406        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| p.parse_identifier())
12407    }
12408
12409    pub fn parse_parenthesized_compound_identifier_list(
12410        &mut self,
12411        optional: IsOptional,
12412        allow_empty: bool,
12413    ) -> Result<Vec<Expr>, ParserError> {
12414        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
12415            Ok(Expr::CompoundIdentifier(
12416                p.parse_period_separated(|p| p.parse_identifier())?,
12417            ))
12418        })
12419    }
12420
12421    /// Parses a parenthesized comma-separated list of index columns, which can be arbitrary
12422    /// expressions with ordering information (and an opclass in some dialects).
12423    fn parse_parenthesized_index_column_list(&mut self) -> Result<Vec<IndexColumn>, ParserError> {
12424        self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
12425            p.parse_create_index_expr()
12426        })
12427    }
12428
12429    /// Parses a parenthesized comma-separated list of qualified, possibly quoted identifiers.
12430    /// For example: `(db1.sc1.tbl1.col1, db1.sc1.tbl1."col 2", ...)`
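    ///
    /// A minimal usage sketch (illustrative; the column list and `GenericDialect` are
    /// assumptions for demonstration):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::{IsOptional, Parser};
    ///
    /// let dialect = GenericDialect {};
    /// let sql = r#"(db1.sc1.tbl1.col1, db1.sc1.tbl1."col 2")"#;
    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
    /// let cols = parser
    ///     .parse_parenthesized_qualified_column_list(IsOptional::Mandatory, false)
    ///     .unwrap();
    /// assert_eq!(cols[0].to_string(), "db1.sc1.tbl1.col1");
    /// ```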
12431    pub fn parse_parenthesized_qualified_column_list(
12432        &mut self,
12433        optional: IsOptional,
12434        allow_empty: bool,
12435    ) -> Result<Vec<ObjectName>, ParserError> {
12436        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
12437            p.parse_object_name(true)
12438        })
12439    }
12440
12441    /// Parses a parenthesized comma-separated list of columns using
12442    /// the provided function to parse each element.
12443    fn parse_parenthesized_column_list_inner<F, T>(
12444        &mut self,
12445        optional: IsOptional,
12446        allow_empty: bool,
12447        mut f: F,
12448    ) -> Result<Vec<T>, ParserError>
12449    where
12450        F: FnMut(&mut Parser) -> Result<T, ParserError>,
12451    {
12452        if self.consume_token(&Token::LParen) {
12453            if allow_empty && self.peek_token().token == Token::RParen {
12454                self.next_token();
12455                Ok(vec![])
12456            } else {
12457                let cols = self.parse_comma_separated(|p| f(p))?;
12458                self.expect_token(&Token::RParen)?;
12459                Ok(cols)
12460            }
12461        } else if optional == Optional {
12462            Ok(vec![])
12463        } else {
12464            self.expected("a list of columns in parentheses", self.peek_token())
12465        }
12466    }
12467
12468    /// Parses a parenthesized comma-separated list of table alias column definitions.
12469    fn parse_table_alias_column_defs(&mut self) -> Result<Vec<TableAliasColumnDef>, ParserError> {
12470        if self.consume_token(&Token::LParen) {
12471            let cols = self.parse_comma_separated(|p| {
12472                let name = p.parse_identifier()?;
12473                let data_type = p.maybe_parse(|p| p.parse_data_type())?;
12474                Ok(TableAliasColumnDef { name, data_type })
12475            })?;
12476            self.expect_token(&Token::RParen)?;
12477            Ok(cols)
12478        } else {
12479            Ok(vec![])
12480        }
12481    }
12482
12483    pub fn parse_precision(&mut self) -> Result<u64, ParserError> {
12484        self.expect_token(&Token::LParen)?;
12485        let n = self.parse_literal_uint()?;
12486        self.expect_token(&Token::RParen)?;
12487        Ok(n)
12488    }
12489
12490    pub fn parse_optional_precision(&mut self) -> Result<Option<u64>, ParserError> {
12491        if self.consume_token(&Token::LParen) {
12492            let n = self.parse_literal_uint()?;
12493            self.expect_token(&Token::RParen)?;
12494            Ok(Some(n))
12495        } else {
12496            Ok(None)
12497        }
12498    }
12499
12500    fn maybe_parse_optional_interval_fields(
12501        &mut self,
12502    ) -> Result<Option<IntervalFields>, ParserError> {
12503        match self.parse_one_of_keywords(&[
12504            // Can be followed by `TO` option
12505            Keyword::YEAR,
12506            Keyword::DAY,
12507            Keyword::HOUR,
12508            Keyword::MINUTE,
12509            // No `TO` option
12510            Keyword::MONTH,
12511            Keyword::SECOND,
12512        ]) {
12513            Some(Keyword::YEAR) => {
12514                if self.peek_keyword(Keyword::TO) {
12515                    self.expect_keyword(Keyword::TO)?;
12516                    self.expect_keyword(Keyword::MONTH)?;
12517                    Ok(Some(IntervalFields::YearToMonth))
12518                } else {
12519                    Ok(Some(IntervalFields::Year))
12520                }
12521            }
12522            Some(Keyword::DAY) => {
12523                if self.peek_keyword(Keyword::TO) {
12524                    self.expect_keyword(Keyword::TO)?;
12525                    match self.expect_one_of_keywords(&[
12526                        Keyword::HOUR,
12527                        Keyword::MINUTE,
12528                        Keyword::SECOND,
12529                    ])? {
12530                        Keyword::HOUR => Ok(Some(IntervalFields::DayToHour)),
12531                        Keyword::MINUTE => Ok(Some(IntervalFields::DayToMinute)),
12532                        Keyword::SECOND => Ok(Some(IntervalFields::DayToSecond)),
12533                        _ => {
12534                            self.prev_token();
12535                            self.expected("HOUR, MINUTE, or SECOND", self.peek_token())
12536                        }
12537                    }
12538                } else {
12539                    Ok(Some(IntervalFields::Day))
12540                }
12541            }
12542            Some(Keyword::HOUR) => {
12543                if self.peek_keyword(Keyword::TO) {
12544                    self.expect_keyword(Keyword::TO)?;
12545                    match self.expect_one_of_keywords(&[Keyword::MINUTE, Keyword::SECOND])? {
12546                        Keyword::MINUTE => Ok(Some(IntervalFields::HourToMinute)),
12547                        Keyword::SECOND => Ok(Some(IntervalFields::HourToSecond)),
12548                        _ => {
12549                            self.prev_token();
12550                            self.expected("MINUTE or SECOND", self.peek_token())
12551                        }
12552                    }
12553                } else {
12554                    Ok(Some(IntervalFields::Hour))
12555                }
12556            }
12557            Some(Keyword::MINUTE) => {
12558                if self.peek_keyword(Keyword::TO) {
12559                    self.expect_keyword(Keyword::TO)?;
12560                    self.expect_keyword(Keyword::SECOND)?;
12561                    Ok(Some(IntervalFields::MinuteToSecond))
12562                } else {
12563                    Ok(Some(IntervalFields::Minute))
12564                }
12565            }
12566            Some(Keyword::MONTH) => Ok(Some(IntervalFields::Month)),
12567            Some(Keyword::SECOND) => Ok(Some(IntervalFields::Second)),
12568            Some(_) => {
12569                self.prev_token();
12570                self.expected(
12571                    "YEAR, MONTH, DAY, HOUR, MINUTE, or SECOND",
12572                    self.peek_token(),
12573                )
12574            }
12575            None => Ok(None),
12576        }
12577    }
12578
12579    /// Parse datetime64 [1]
12580    /// Syntax
12581    /// ```sql
12582    /// DateTime64(precision[, timezone])
12583    /// ```
12584    ///
12585    /// [1]: https://clickhouse.com/docs/en/sql-reference/data-types/datetime64
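    ///
    /// A minimal usage sketch (illustrative; the type string is an assumption for
    /// demonstration, using the ClickHouse dialect this type comes from):
    ///
    /// ```rust
    /// use sqlparser::dialect::ClickHouseDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = ClickHouseDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("DateTime64(3, 'UTC')").unwrap();
    /// let (precision, time_zone) = parser.parse_datetime_64().unwrap();
    /// assert_eq!(precision, 3);
    /// assert_eq!(time_zone.as_deref(), Some("UTC"));
    /// ```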
12586    pub fn parse_datetime_64(&mut self) -> Result<(u64, Option<String>), ParserError> {
12587        self.expect_keyword_is(Keyword::DATETIME64)?;
12588        self.expect_token(&Token::LParen)?;
12589        let precision = self.parse_literal_uint()?;
12590        let time_zone = if self.consume_token(&Token::Comma) {
12591            Some(self.parse_literal_string()?)
12592        } else {
12593            None
12594        };
12595        self.expect_token(&Token::RParen)?;
12596        Ok((precision, time_zone))
12597    }
12598
12599    pub fn parse_optional_character_length(
12600        &mut self,
12601    ) -> Result<Option<CharacterLength>, ParserError> {
12602        if self.consume_token(&Token::LParen) {
12603            let character_length = self.parse_character_length()?;
12604            self.expect_token(&Token::RParen)?;
12605            Ok(Some(character_length))
12606        } else {
12607            Ok(None)
12608        }
12609    }
12610
12611    pub fn parse_optional_binary_length(&mut self) -> Result<Option<BinaryLength>, ParserError> {
12612        if self.consume_token(&Token::LParen) {
12613            let binary_length = self.parse_binary_length()?;
12614            self.expect_token(&Token::RParen)?;
12615            Ok(Some(binary_length))
12616        } else {
12617            Ok(None)
12618        }
12619    }
12620
12621    pub fn parse_character_length(&mut self) -> Result<CharacterLength, ParserError> {
12622        if self.parse_keyword(Keyword::MAX) {
12623            return Ok(CharacterLength::Max);
12624        }
12625        let length = self.parse_literal_uint()?;
12626        let unit = if self.parse_keyword(Keyword::CHARACTERS) {
12627            Some(CharLengthUnits::Characters)
12628        } else if self.parse_keyword(Keyword::OCTETS) {
12629            Some(CharLengthUnits::Octets)
12630        } else {
12631            None
12632        };
12633        Ok(CharacterLength::IntegerLength { length, unit })
12634    }
12635
12636    pub fn parse_binary_length(&mut self) -> Result<BinaryLength, ParserError> {
12637        if self.parse_keyword(Keyword::MAX) {
12638            return Ok(BinaryLength::Max);
12639        }
12640        let length = self.parse_literal_uint()?;
12641        Ok(BinaryLength::IntegerLength { length })
12642    }
12643
12644    pub fn parse_optional_precision_scale(
12645        &mut self,
12646    ) -> Result<(Option<u64>, Option<u64>), ParserError> {
12647        if self.consume_token(&Token::LParen) {
12648            let n = self.parse_literal_uint()?;
12649            let scale = if self.consume_token(&Token::Comma) {
12650                Some(self.parse_literal_uint()?)
12651            } else {
12652                None
12653            };
12654            self.expect_token(&Token::RParen)?;
12655            Ok((Some(n), scale))
12656        } else {
12657            Ok((None, None))
12658        }
12659    }
12660
12661    pub fn parse_exact_number_optional_precision_scale(
12662        &mut self,
12663    ) -> Result<ExactNumberInfo, ParserError> {
12664        if self.consume_token(&Token::LParen) {
12665            let precision = self.parse_literal_uint()?;
12666            let scale = if self.consume_token(&Token::Comma) {
12667                Some(self.parse_signed_integer()?)
12668            } else {
12669                None
12670            };
12671
12672            self.expect_token(&Token::RParen)?;
12673
12674            match scale {
12675                None => Ok(ExactNumberInfo::Precision(precision)),
12676                Some(scale) => Ok(ExactNumberInfo::PrecisionAndScale(precision, scale)),
12677            }
12678        } else {
12679            Ok(ExactNumberInfo::None)
12680        }
12681    }
12682
12683    /// Parse an optionally signed integer literal.
12684    fn parse_signed_integer(&mut self) -> Result<i64, ParserError> {
12685        let is_negative = self.consume_token(&Token::Minus);
12686
12687        if !is_negative {
12688            let _ = self.consume_token(&Token::Plus);
12689        }
12690
12691        let current_token = self.peek_token_ref();
12692        match &current_token.token {
12693            Token::Number(s, _) => {
12694                let s = s.clone();
12695                let span_start = current_token.span.start;
12696                self.advance_token();
12697                let value = Self::parse::<i64>(s, span_start)?;
12698                Ok(if is_negative { -value } else { value })
12699            }
12700            _ => self.expected_ref("number", current_token),
12701        }
12702    }
12703
12704    pub fn parse_optional_type_modifiers(&mut self) -> Result<Option<Vec<String>>, ParserError> {
12705        if self.consume_token(&Token::LParen) {
12706            let mut modifiers = Vec::new();
12707            loop {
12708                let next_token = self.next_token();
12709                match next_token.token {
12710                    Token::Word(w) => modifiers.push(w.to_string()),
12711                    Token::Number(n, _) => modifiers.push(n),
12712                    Token::SingleQuotedString(s) => modifiers.push(s),
12713
12714                    Token::Comma => {
12715                        continue;
12716                    }
12717                    Token::RParen => {
12718                        break;
12719                    }
12720                    _ => self.expected("type modifiers", next_token)?,
12721                }
12722            }
12723
12724            Ok(Some(modifiers))
12725        } else {
12726            Ok(None)
12727        }
12728    }
12729
12730    /// Parse a parenthesized sub data type
12731    fn parse_sub_type<F>(&mut self, parent_type: F) -> Result<DataType, ParserError>
12732    where
12733        F: FnOnce(Box<DataType>) -> DataType,
12734    {
12735        self.expect_token(&Token::LParen)?;
12736        let inside_type = self.parse_data_type()?;
12737        self.expect_token(&Token::RParen)?;
12738        Ok(parent_type(inside_type.into()))
12739    }
12740
12741    /// Parse a DELETE statement, returning a `Box`ed SetExpr
12742    ///
12743    /// This is used to reduce the size of the stack frames in debug builds
12744    fn parse_delete_setexpr_boxed(
12745        &mut self,
12746        delete_token: TokenWithSpan,
12747    ) -> Result<Box<SetExpr>, ParserError> {
12748        Ok(Box::new(SetExpr::Delete(self.parse_delete(delete_token)?)))
12749    }
12750
12751    pub fn parse_delete(&mut self, delete_token: TokenWithSpan) -> Result<Statement, ParserError> {
12752        let (tables, with_from_keyword) = if !self.parse_keyword(Keyword::FROM) {
12753            // `FROM` keyword is optional in BigQuery SQL.
12754            // https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax#delete_statement
12755            if dialect_of!(self is BigQueryDialect | OracleDialect | GenericDialect) {
12756                (vec![], false)
12757            } else {
12758                let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
12759                self.expect_keyword_is(Keyword::FROM)?;
12760                (tables, true)
12761            }
12762        } else {
12763            (vec![], true)
12764        };
12765
12766        let from = self.parse_comma_separated(Parser::parse_table_and_joins)?;
12767        let using = if self.parse_keyword(Keyword::USING) {
12768            Some(self.parse_comma_separated(Parser::parse_table_and_joins)?)
12769        } else {
12770            None
12771        };
12772        let selection = if self.parse_keyword(Keyword::WHERE) {
12773            Some(self.parse_expr()?)
12774        } else {
12775            None
12776        };
12777        let returning = if self.parse_keyword(Keyword::RETURNING) {
12778            Some(self.parse_comma_separated(Parser::parse_select_item)?)
12779        } else {
12780            None
12781        };
12782        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
12783            self.parse_comma_separated(Parser::parse_order_by_expr)?
12784        } else {
12785            vec![]
12786        };
12787        let limit = if self.parse_keyword(Keyword::LIMIT) {
12788            self.parse_limit()?
12789        } else {
12790            None
12791        };
12792
12793        Ok(Statement::Delete(Delete {
12794            delete_token: delete_token.into(),
12795            tables,
12796            from: if with_from_keyword {
12797                FromTable::WithFromKeyword(from)
12798            } else {
12799                FromTable::WithoutKeyword(from)
12800            },
12801            using,
12802            selection,
12803            returning,
12804            order_by,
12805            limit,
12806        }))
12807    }
12808
12809    // KILL [CONNECTION | QUERY | MUTATION] processlist_id
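    /// A minimal end-to-end sketch (illustrative; the statement text and `GenericDialect`
    /// are assumptions for demonstration):
    ///
    /// ```rust
    /// use sqlparser::ast::Statement;
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// // the KILL keyword itself is consumed by `parse_statement` before this method runs
    /// let statements = Parser::parse_sql(&GenericDialect {}, "KILL QUERY 42").unwrap();
    /// assert!(matches!(statements[0], Statement::Kill { .. }));
    /// ```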
12810    pub fn parse_kill(&mut self) -> Result<Statement, ParserError> {
12811        let modifier_keyword =
12812            self.parse_one_of_keywords(&[Keyword::CONNECTION, Keyword::QUERY, Keyword::MUTATION]);
12813
12814        let id = self.parse_literal_uint()?;
12815
12816        let modifier = match modifier_keyword {
12817            Some(Keyword::CONNECTION) => Some(KillType::Connection),
12818            Some(Keyword::QUERY) => Some(KillType::Query),
12819            Some(Keyword::MUTATION) => {
12820                if dialect_of!(self is ClickHouseDialect | GenericDialect) {
12821                    Some(KillType::Mutation)
12822                } else {
12823                    self.expected(
12824                        "Unsupported type for KILL, allowed: CONNECTION | QUERY",
12825                        self.peek_token(),
12826                    )?
12827                }
12828            }
12829            _ => None,
12830        };
12831
12832        Ok(Statement::Kill { modifier, id })
12833    }
12834
12835    pub fn parse_explain(
12836        &mut self,
12837        describe_alias: DescribeAlias,
12838    ) -> Result<Statement, ParserError> {
12839        let mut analyze = false;
12840        let mut verbose = false;
12841        let mut query_plan = false;
12842        let mut estimate = false;
12843        let mut format = None;
12844        let mut options = None;
12845
12846        // Note: DuckDB is compatible with PostgreSQL syntax for this statement,
12847        // although not all features may be implemented.
12848        if describe_alias == DescribeAlias::Explain
12849            && self.dialect.supports_explain_with_utility_options()
12850            && self.peek_token().token == Token::LParen
12851        {
12852            options = Some(self.parse_utility_options()?)
12853        } else if self.parse_keywords(&[Keyword::QUERY, Keyword::PLAN]) {
12854            query_plan = true;
12855        } else if self.parse_keyword(Keyword::ESTIMATE) {
12856            estimate = true;
12857        } else {
12858            analyze = self.parse_keyword(Keyword::ANALYZE);
12859            verbose = self.parse_keyword(Keyword::VERBOSE);
12860            if self.parse_keyword(Keyword::FORMAT) {
12861                format = Some(self.parse_analyze_format_kind()?);
12862            }
12863        }
12864
12865        match self.maybe_parse(|parser| parser.parse_statement())? {
12866            Some(Statement::Explain { .. }) | Some(Statement::ExplainTable { .. }) => Err(
12867                ParserError::ParserError("Explain must be root of the plan".to_string()),
12868            ),
12869            Some(statement) => Ok(Statement::Explain {
12870                describe_alias,
12871                analyze,
12872                verbose,
12873                query_plan,
12874                estimate,
12875                statement: Box::new(statement),
12876                format,
12877                options,
12878            }),
12879            _ => {
12880                let hive_format =
12881                    match self.parse_one_of_keywords(&[Keyword::EXTENDED, Keyword::FORMATTED]) {
12882                        Some(Keyword::EXTENDED) => Some(HiveDescribeFormat::Extended),
12883                        Some(Keyword::FORMATTED) => Some(HiveDescribeFormat::Formatted),
12884                        _ => None,
12885                    };
12886
12887                let has_table_keyword = if self.dialect.describe_requires_table_keyword() {
12888                    // only allow the TABLE keyword for DESC|DESCRIBE statements
12889                    self.parse_keyword(Keyword::TABLE)
12890                } else {
12891                    false
12892                };
12893
12894                let table_name = self.parse_object_name(false)?;
12895                Ok(Statement::ExplainTable {
12896                    describe_alias,
12897                    hive_format,
12898                    has_table_keyword,
12899                    table_name,
12900                })
12901            }
12902        }
12903    }
12904
12905    /// Parse a query expression, i.e. a `SELECT` statement optionally
12906    /// preceded by some `WITH` CTE declarations and optionally followed
12907    /// by `ORDER BY`. Unlike some other parse_... methods, this one doesn't
12908    /// expect the initial keyword to have already been consumed.
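    ///
    /// A minimal usage sketch (illustrative; the query text and `GenericDialect` are
    /// assumptions for demonstration):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let sql = "WITH t AS (SELECT 1 AS x) SELECT x FROM t ORDER BY x";
    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
    /// let query = parser.parse_query().unwrap();
    /// assert!(query.with.is_some());
    /// assert!(query.order_by.is_some());
    /// ```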
12909    pub fn parse_query(&mut self) -> Result<Box<Query>, ParserError> {
12910        let _guard = self.recursion_counter.try_decrease()?;
12911        let with = if self.parse_keyword(Keyword::WITH) {
12912            let with_token = self.get_current_token();
12913            Some(With {
12914                with_token: with_token.clone().into(),
12915                recursive: self.parse_keyword(Keyword::RECURSIVE),
12916                cte_tables: self.parse_comma_separated(Parser::parse_cte)?,
12917            })
12918        } else {
12919            None
12920        };
12921        if self.parse_keyword(Keyword::INSERT) {
12922            Ok(Query {
12923                with,
12924                body: self.parse_insert_setexpr_boxed(self.get_current_token().clone())?,
12925                order_by: None,
12926                limit_clause: None,
12927                fetch: None,
12928                locks: vec![],
12929                for_clause: None,
12930                settings: None,
12931                format_clause: None,
12932                pipe_operators: vec![],
12933            }
12934            .into())
12935        } else if self.parse_keyword(Keyword::UPDATE) {
12936            Ok(Query {
12937                with,
12938                body: self.parse_update_setexpr_boxed(self.get_current_token().clone())?,
12939                order_by: None,
12940                limit_clause: None,
12941                fetch: None,
12942                locks: vec![],
12943                for_clause: None,
12944                settings: None,
12945                format_clause: None,
12946                pipe_operators: vec![],
12947            }
12948            .into())
12949        } else if self.parse_keyword(Keyword::DELETE) {
12950            Ok(Query {
12951                with,
12952                body: self.parse_delete_setexpr_boxed(self.get_current_token().clone())?,
12953                limit_clause: None,
12954                order_by: None,
12955                fetch: None,
12956                locks: vec![],
12957                for_clause: None,
12958                settings: None,
12959                format_clause: None,
12960                pipe_operators: vec![],
12961            }
12962            .into())
12963        } else if self.parse_keyword(Keyword::MERGE) {
12964            Ok(Query {
12965                with,
12966                body: self.parse_merge_setexpr_boxed(self.get_current_token().clone())?,
12967                limit_clause: None,
12968                order_by: None,
12969                fetch: None,
12970                locks: vec![],
12971                for_clause: None,
12972                settings: None,
12973                format_clause: None,
12974                pipe_operators: vec![],
12975            }
12976            .into())
12977        } else {
12978            let body = self.parse_query_body(self.dialect.prec_unknown())?;
12979
12980            let order_by = self.parse_optional_order_by()?;
12981
12982            let limit_clause = self.parse_optional_limit_clause()?;
12983
12984            let settings = self.parse_settings()?;
12985
12986            let fetch = if self.parse_keyword(Keyword::FETCH) {
12987                Some(self.parse_fetch()?)
12988            } else {
12989                None
12990            };
12991
12992            let mut for_clause = None;
12993            let mut locks = Vec::new();
12994            while self.parse_keyword(Keyword::FOR) {
12995                if let Some(parsed_for_clause) = self.parse_for_clause()? {
12996                    for_clause = Some(parsed_for_clause);
12997                    break;
12998                } else {
12999                    locks.push(self.parse_lock()?);
13000                }
13001            }
13002            let format_clause = if dialect_of!(self is ClickHouseDialect | GenericDialect)
13003                && self.parse_keyword(Keyword::FORMAT)
13004            {
13005                if self.parse_keyword(Keyword::NULL) {
13006                    Some(FormatClause::Null)
13007                } else {
13008                    let ident = self.parse_identifier()?;
13009                    Some(FormatClause::Identifier(ident))
13010                }
13011            } else {
13012                None
13013            };
13014
13015            let pipe_operators = if self.dialect.supports_pipe_operator() {
13016                self.parse_pipe_operators()?
13017            } else {
13018                Vec::new()
13019            };
13020
13021            Ok(Query {
13022                with,
13023                body,
13024                order_by,
13025                limit_clause,
13026                fetch,
13027                locks,
13028                for_clause,
13029                settings,
13030                format_clause,
13031                pipe_operators,
13032            }
13033            .into())
13034        }
13035    }
13036
13037    fn parse_pipe_operators(&mut self) -> Result<Vec<PipeOperator>, ParserError> {
13038        let mut pipe_operators = Vec::new();
13039
13040        while self.consume_token(&Token::VerticalBarRightAngleBracket) {
13041            let kw = self.expect_one_of_keywords(&[
13042                Keyword::SELECT,
13043                Keyword::EXTEND,
13044                Keyword::SET,
13045                Keyword::DROP,
13046                Keyword::AS,
13047                Keyword::WHERE,
13048                Keyword::LIMIT,
13049                Keyword::AGGREGATE,
13050                Keyword::ORDER,
13051                Keyword::TABLESAMPLE,
13052                Keyword::RENAME,
13053                Keyword::UNION,
13054                Keyword::INTERSECT,
13055                Keyword::EXCEPT,
13056                Keyword::CALL,
13057                Keyword::PIVOT,
13058                Keyword::UNPIVOT,
13059                Keyword::JOIN,
13060                Keyword::INNER,
13061                Keyword::LEFT,
13062                Keyword::RIGHT,
13063                Keyword::FULL,
13064                Keyword::CROSS,
13065            ])?;
13066            match kw {
13067                Keyword::SELECT => {
13068                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
13069                    pipe_operators.push(PipeOperator::Select { exprs })
13070                }
13071                Keyword::EXTEND => {
13072                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
13073                    pipe_operators.push(PipeOperator::Extend { exprs })
13074                }
13075                Keyword::SET => {
13076                    let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
13077                    pipe_operators.push(PipeOperator::Set { assignments })
13078                }
13079                Keyword::DROP => {
13080                    let columns = self.parse_identifiers()?;
13081                    pipe_operators.push(PipeOperator::Drop { columns })
13082                }
13083                Keyword::AS => {
13084                    let alias = self.parse_identifier()?;
13085                    pipe_operators.push(PipeOperator::As { alias })
13086                }
13087                Keyword::WHERE => {
13088                    let expr = self.parse_expr()?;
13089                    pipe_operators.push(PipeOperator::Where { expr })
13090                }
13091                Keyword::LIMIT => {
13092                    let expr = self.parse_expr()?;
13093                    let offset = if self.parse_keyword(Keyword::OFFSET) {
13094                        Some(self.parse_expr()?)
13095                    } else {
13096                        None
13097                    };
13098                    pipe_operators.push(PipeOperator::Limit { expr, offset })
13099                }
13100                Keyword::AGGREGATE => {
13101                    let full_table_exprs = if self.peek_keyword(Keyword::GROUP) {
13102                        vec![]
13103                    } else {
13104                        self.parse_comma_separated(|parser| {
13105                            parser.parse_expr_with_alias_and_order_by()
13106                        })?
13107                    };
13108
13109                    let group_by_expr = if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
13110                        self.parse_comma_separated(|parser| {
13111                            parser.parse_expr_with_alias_and_order_by()
13112                        })?
13113                    } else {
13114                        vec![]
13115                    };
13116
13117                    pipe_operators.push(PipeOperator::Aggregate {
13118                        full_table_exprs,
13119                        group_by_expr,
13120                    })
13121                }
13122                Keyword::ORDER => {
13123                    self.expect_one_of_keywords(&[Keyword::BY])?;
13124                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
13125                    pipe_operators.push(PipeOperator::OrderBy { exprs })
13126                }
13127                Keyword::TABLESAMPLE => {
13128                    let sample = self.parse_table_sample(TableSampleModifier::TableSample)?;
13129                    pipe_operators.push(PipeOperator::TableSample { sample });
13130                }
13131                Keyword::RENAME => {
13132                    let mappings =
13133                        self.parse_comma_separated(Parser::parse_identifier_with_optional_alias)?;
13134                    pipe_operators.push(PipeOperator::Rename { mappings });
13135                }
13136                Keyword::UNION => {
13137                    let set_quantifier = self.parse_set_quantifier(&Some(SetOperator::Union));
13138                    let queries = self.parse_pipe_operator_queries()?;
13139                    pipe_operators.push(PipeOperator::Union {
13140                        set_quantifier,
13141                        queries,
13142                    });
13143                }
13144                Keyword::INTERSECT => {
13145                    let set_quantifier =
13146                        self.parse_distinct_required_set_quantifier("INTERSECT")?;
13147                    let queries = self.parse_pipe_operator_queries()?;
13148                    pipe_operators.push(PipeOperator::Intersect {
13149                        set_quantifier,
13150                        queries,
13151                    });
13152                }
13153                Keyword::EXCEPT => {
13154                    let set_quantifier = self.parse_distinct_required_set_quantifier("EXCEPT")?;
13155                    let queries = self.parse_pipe_operator_queries()?;
13156                    pipe_operators.push(PipeOperator::Except {
13157                        set_quantifier,
13158                        queries,
13159                    });
13160                }
13161                Keyword::CALL => {
13162                    let function_name = self.parse_object_name(false)?;
13163                    let function_expr = self.parse_function(function_name)?;
13164                    if let Expr::Function(function) = function_expr {
13165                        let alias = self.parse_identifier_optional_alias()?;
13166                        pipe_operators.push(PipeOperator::Call { function, alias });
13167                    } else {
13168                        return Err(ParserError::ParserError(
13169                            "Expected function call after CALL".to_string(),
13170                        ));
13171                    }
13172                }
13173                Keyword::PIVOT => {
13174                    self.expect_token(&Token::LParen)?;
13175                    let aggregate_functions =
13176                        self.parse_comma_separated(Self::parse_aliased_function_call)?;
13177                    self.expect_keyword_is(Keyword::FOR)?;
13178                    let value_column = self.parse_period_separated(|p| p.parse_identifier())?;
13179                    self.expect_keyword_is(Keyword::IN)?;
13180
13181                    self.expect_token(&Token::LParen)?;
13182                    let value_source = if self.parse_keyword(Keyword::ANY) {
13183                        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
13184                            self.parse_comma_separated(Parser::parse_order_by_expr)?
13185                        } else {
13186                            vec![]
13187                        };
13188                        PivotValueSource::Any(order_by)
13189                    } else if self.peek_sub_query() {
13190                        PivotValueSource::Subquery(self.parse_query()?)
13191                    } else {
13192                        PivotValueSource::List(
13193                            self.parse_comma_separated(Self::parse_expr_with_alias)?,
13194                        )
13195                    };
13196                    self.expect_token(&Token::RParen)?;
13197                    self.expect_token(&Token::RParen)?;
13198
13199                    let alias = self.parse_identifier_optional_alias()?;
13200
13201                    pipe_operators.push(PipeOperator::Pivot {
13202                        aggregate_functions,
13203                        value_column,
13204                        value_source,
13205                        alias,
13206                    });
13207                }
13208                Keyword::UNPIVOT => {
13209                    self.expect_token(&Token::LParen)?;
13210                    let value_column = self.parse_identifier()?;
13211                    self.expect_keyword(Keyword::FOR)?;
13212                    let name_column = self.parse_identifier()?;
13213                    self.expect_keyword(Keyword::IN)?;
13214
13215                    self.expect_token(&Token::LParen)?;
13216                    let unpivot_columns = self.parse_comma_separated(Parser::parse_identifier)?;
13217                    self.expect_token(&Token::RParen)?;
13218
13219                    self.expect_token(&Token::RParen)?;
13220
13221                    let alias = self.parse_identifier_optional_alias()?;
13222
13223                    pipe_operators.push(PipeOperator::Unpivot {
13224                        value_column,
13225                        name_column,
13226                        unpivot_columns,
13227                        alias,
13228                    });
13229                }
13230                Keyword::JOIN
13231                | Keyword::INNER
13232                | Keyword::LEFT
13233                | Keyword::RIGHT
13234                | Keyword::FULL
13235                | Keyword::CROSS => {
13236                    self.prev_token();
13237                    let mut joins = self.parse_joins()?;
13238                    if joins.len() != 1 {
13239                        return Err(ParserError::ParserError(
13240                            "Join pipe operator must have a single join".to_string(),
13241                        ));
13242                    }
13243                    let join = joins.swap_remove(0);
13244                    pipe_operators.push(PipeOperator::Join(join))
13245                }
13246                unhandled => {
13247                    return Err(ParserError::ParserError(format!(
13248                    "`expect_one_of_keywords` further up allowed unhandled keyword: {unhandled:?}"
13249                )))
13250                }
13251            }
13252        }
13253        Ok(pipe_operators)
13254    }
13255
13256    fn parse_settings(&mut self) -> Result<Option<Vec<Setting>>, ParserError> {
13257        let settings = if dialect_of!(self is ClickHouseDialect|GenericDialect)
13258            && self.parse_keyword(Keyword::SETTINGS)
13259        {
13260            let key_values = self.parse_comma_separated(|p| {
13261                let key = p.parse_identifier()?;
13262                p.expect_token(&Token::Eq)?;
13263                let value = p.parse_expr()?;
13264                Ok(Setting { key, value })
13265            })?;
13266            Some(key_values)
13267        } else {
13268            None
13269        };
13270        Ok(settings)
13271    }
13272
13273    /// Parse an MSSQL `FOR [XML | JSON | BROWSE]` clause
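    ///
    /// For example (an illustrative sketch; the XML and JSON variants are
    /// detailed in [`Self::parse_for_xml`] and [`Self::parse_for_json`]):
    ///
    /// ```sql
    /// SELECT a, b FROM t FOR BROWSE
    /// ```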
13274    pub fn parse_for_clause(&mut self) -> Result<Option<ForClause>, ParserError> {
13275        if self.parse_keyword(Keyword::XML) {
13276            Ok(Some(self.parse_for_xml()?))
13277        } else if self.parse_keyword(Keyword::JSON) {
13278            Ok(Some(self.parse_for_json()?))
13279        } else if self.parse_keyword(Keyword::BROWSE) {
13280            Ok(Some(ForClause::Browse))
13281        } else {
13282            Ok(None)
13283        }
13284    }
13285
13286    /// Parse an MSSQL `FOR XML` clause
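    ///
    /// For illustration, the options recognized below can be combined like
    /// this (a sketch, not authoritative MSSQL documentation):
    ///
    /// ```sql
    /// SELECT a, b FROM t
    /// FOR XML PATH('row'), ELEMENTS, BINARY BASE64, ROOT('rows'), TYPE
    /// ```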
13287    pub fn parse_for_xml(&mut self) -> Result<ForClause, ParserError> {
13288        let for_xml = if self.parse_keyword(Keyword::RAW) {
13289            let mut element_name = None;
13290            if self.peek_token().token == Token::LParen {
13291                self.expect_token(&Token::LParen)?;
13292                element_name = Some(self.parse_literal_string()?);
13293                self.expect_token(&Token::RParen)?;
13294            }
13295            ForXml::Raw(element_name)
13296        } else if self.parse_keyword(Keyword::AUTO) {
13297            ForXml::Auto
13298        } else if self.parse_keyword(Keyword::EXPLICIT) {
13299            ForXml::Explicit
13300        } else if self.parse_keyword(Keyword::PATH) {
13301            let mut element_name = None;
13302            if self.peek_token().token == Token::LParen {
13303                self.expect_token(&Token::LParen)?;
13304                element_name = Some(self.parse_literal_string()?);
13305                self.expect_token(&Token::RParen)?;
13306            }
13307            ForXml::Path(element_name)
13308        } else {
13309            return Err(ParserError::ParserError(
13310                "Expected FOR XML [RAW | AUTO | EXPLICIT | PATH]".to_string(),
13311            ));
13312        };
13313        let mut elements = false;
13314        let mut binary_base64 = false;
13315        let mut root = None;
13316        let mut r#type = false;
13317        while self.peek_token().token == Token::Comma {
13318            self.next_token();
13319            if self.parse_keyword(Keyword::ELEMENTS) {
13320                elements = true;
13321            } else if self.parse_keyword(Keyword::BINARY) {
13322                self.expect_keyword_is(Keyword::BASE64)?;
13323                binary_base64 = true;
13324            } else if self.parse_keyword(Keyword::ROOT) {
13325                self.expect_token(&Token::LParen)?;
13326                root = Some(self.parse_literal_string()?);
13327                self.expect_token(&Token::RParen)?;
13328            } else if self.parse_keyword(Keyword::TYPE) {
13329                r#type = true;
13330            }
13331        }
13332        Ok(ForClause::Xml {
13333            for_xml,
13334            elements,
13335            binary_base64,
13336            root,
13337            r#type,
13338        })
13339    }
13340
13341    /// Parse an MSSQL `FOR JSON` clause
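    ///
    /// For illustration, the options recognized below can be combined like
    /// this (a sketch):
    ///
    /// ```sql
    /// SELECT a, b FROM t
    /// FOR JSON PATH, ROOT('data'), INCLUDE_NULL_VALUES, WITHOUT_ARRAY_WRAPPER
    /// ```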
13342    pub fn parse_for_json(&mut self) -> Result<ForClause, ParserError> {
13343        let for_json = if self.parse_keyword(Keyword::AUTO) {
13344            ForJson::Auto
13345        } else if self.parse_keyword(Keyword::PATH) {
13346            ForJson::Path
13347        } else {
13348            return Err(ParserError::ParserError(
13349                "Expected FOR JSON [AUTO | PATH]".to_string(),
13350            ));
13351        };
13352        let mut root = None;
13353        let mut include_null_values = false;
13354        let mut without_array_wrapper = false;
13355        while self.peek_token().token == Token::Comma {
13356            self.next_token();
13357            if self.parse_keyword(Keyword::ROOT) {
13358                self.expect_token(&Token::LParen)?;
13359                root = Some(self.parse_literal_string()?);
13360                self.expect_token(&Token::RParen)?;
13361            } else if self.parse_keyword(Keyword::INCLUDE_NULL_VALUES) {
13362                include_null_values = true;
13363            } else if self.parse_keyword(Keyword::WITHOUT_ARRAY_WRAPPER) {
13364                without_array_wrapper = true;
13365            }
13366        }
13367        Ok(ForClause::Json {
13368            for_json,
13369            root,
13370            include_null_values,
13371            without_array_wrapper,
13372        })
13373    }
13374
13375    /// Parse a CTE (`alias [( col1, col2, ... )] AS (subquery)`)
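    ///
    /// For illustration, each of the following is a single CTE in the sense
    /// of this method (a sketch; `MATERIALIZED` is only parsed for Postgres):
    ///
    /// ```sql
    /// cte_a AS (SELECT 1)
    /// cte_b (x, y) AS (SELECT 1, 2)
    /// cte_c AS MATERIALIZED (SELECT 3)
    /// ```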
13376    pub fn parse_cte(&mut self) -> Result<Cte, ParserError> {
13377        let name = self.parse_identifier()?;
13378
13379        let mut cte = if self.parse_keyword(Keyword::AS) {
13380            let mut is_materialized = None;
13381            if dialect_of!(self is PostgreSqlDialect) {
13382                if self.parse_keyword(Keyword::MATERIALIZED) {
13383                    is_materialized = Some(CteAsMaterialized::Materialized);
13384                } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
13385                    is_materialized = Some(CteAsMaterialized::NotMaterialized);
13386                }
13387            }
13388            self.expect_token(&Token::LParen)?;
13389
13390            let query = self.parse_query()?;
13391            let closing_paren_token = self.expect_token(&Token::RParen)?;
13392
13393            let alias = TableAlias {
13394                explicit: false,
13395                name,
13396                columns: vec![],
13397            };
13398            Cte {
13399                alias,
13400                query,
13401                from: None,
13402                materialized: is_materialized,
13403                closing_paren_token: closing_paren_token.into(),
13404            }
13405        } else {
13406            let columns = self.parse_table_alias_column_defs()?;
13407            self.expect_keyword_is(Keyword::AS)?;
13408            let mut is_materialized = None;
13409            if dialect_of!(self is PostgreSqlDialect) {
13410                if self.parse_keyword(Keyword::MATERIALIZED) {
13411                    is_materialized = Some(CteAsMaterialized::Materialized);
13412                } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
13413                    is_materialized = Some(CteAsMaterialized::NotMaterialized);
13414                }
13415            }
13416            self.expect_token(&Token::LParen)?;
13417
13418            let query = self.parse_query()?;
13419            let closing_paren_token = self.expect_token(&Token::RParen)?;
13420
13421            let alias = TableAlias {
13422                explicit: false,
13423                name,
13424                columns,
13425            };
13426            Cte {
13427                alias,
13428                query,
13429                from: None,
13430                materialized: is_materialized,
13431                closing_paren_token: closing_paren_token.into(),
13432            }
13433        };
13434        if self.parse_keyword(Keyword::FROM) {
13435            cte.from = Some(self.parse_identifier()?);
13436        }
13437        Ok(cte)
13438    }
13439
13440    /// Parse a "query body", which is an expression with roughly the
13441    /// following grammar:
13442    /// ```sql
13443    ///   query_body ::= restricted_select | '(' subquery ')' | set_operation
13444    ///   restricted_select ::= 'SELECT' [expr_list] [ from ] [ where ] [ groupby_having ]
13445    ///   subquery ::= query_body [ order_by_limit ]
13446    ///   set_operation ::= query_body { 'UNION' | 'EXCEPT' | 'INTERSECT' } [ 'ALL' ] query_body
13447    /// ```
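    ///
    /// As a sketch of the set-operator precedence implemented below,
    /// `INTERSECT` binds tighter than `UNION`/`EXCEPT`, so:
    ///
    /// ```sql
    /// SELECT 1 UNION SELECT 2 INTERSECT SELECT 3
    /// -- is parsed as
    /// SELECT 1 UNION (SELECT 2 INTERSECT SELECT 3)
    /// ```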
13448    pub fn parse_query_body(&mut self, precedence: u8) -> Result<Box<SetExpr>, ParserError> {
13449        // We parse the expression using a Pratt parser, as in `parse_expr()`.
13450        // Start by parsing a restricted SELECT or a `(subquery)`:
13451        let expr = if self.peek_keyword(Keyword::SELECT)
13452            || (self.peek_keyword(Keyword::FROM) && self.dialect.supports_from_first_select())
13453        {
13454            SetExpr::Select(self.parse_select().map(Box::new)?)
13455        } else if self.consume_token(&Token::LParen) {
13456            // CTEs are not allowed here, but the parser currently accepts them
13457            let subquery = self.parse_query()?;
13458            self.expect_token(&Token::RParen)?;
13459            SetExpr::Query(subquery)
13460        } else if self.parse_keyword(Keyword::VALUES) {
13461            let is_mysql = dialect_of!(self is MySqlDialect);
13462            SetExpr::Values(self.parse_values(is_mysql, false)?)
13463        } else if self.parse_keyword(Keyword::VALUE) {
13464            let is_mysql = dialect_of!(self is MySqlDialect);
13465            SetExpr::Values(self.parse_values(is_mysql, true)?)
13466        } else if self.parse_keyword(Keyword::TABLE) {
13467            SetExpr::Table(Box::new(self.parse_as_table()?))
13468        } else {
13469            return self.expected(
13470                "SELECT, VALUES, or a subquery in the query body",
13471                self.peek_token(),
13472            );
13473        };
13474
13475        self.parse_remaining_set_exprs(expr, precedence)
13476    }
13477
13478    /// Parse any extra set expressions that may be present in a query body
13479    ///
13480    /// (this is its own function to reduce required stack size in debug builds)
13481    fn parse_remaining_set_exprs(
13482        &mut self,
13483        mut expr: SetExpr,
13484        precedence: u8,
13485    ) -> Result<Box<SetExpr>, ParserError> {
13486        loop {
13487            // The query can be optionally followed by a set operator:
13488            let op = self.parse_set_operator(&self.peek_token().token);
13489            let next_precedence = match op {
13490                // UNION and EXCEPT have the same binding power and evaluate left-to-right
13491                Some(SetOperator::Union) | Some(SetOperator::Except) | Some(SetOperator::Minus) => {
13492                    10
13493                }
13494                // INTERSECT has higher precedence than UNION/EXCEPT
13495                Some(SetOperator::Intersect) => 20,
13496                // Unexpected token or EOF => stop parsing the query body
13497                None => break,
13498            };
13499            if precedence >= next_precedence {
13500                break;
13501            }
13502            self.next_token(); // skip past the set operator
13503            let set_quantifier = self.parse_set_quantifier(&op);
13504            expr = SetExpr::SetOperation {
13505                left: Box::new(expr),
13506                op: op.unwrap(),
13507                set_quantifier,
13508                right: self.parse_query_body(next_precedence)?,
13509            };
13510        }
13511
13512        Ok(expr.into())
13513    }
13514
13515    pub fn parse_set_operator(&mut self, token: &Token) -> Option<SetOperator> {
13516        match token {
13517            Token::Word(w) if w.keyword == Keyword::UNION => Some(SetOperator::Union),
13518            Token::Word(w) if w.keyword == Keyword::EXCEPT => Some(SetOperator::Except),
13519            Token::Word(w) if w.keyword == Keyword::INTERSECT => Some(SetOperator::Intersect),
13520            Token::Word(w) if w.keyword == Keyword::MINUS => Some(SetOperator::Minus),
13521            _ => None,
13522        }
13523    }
13524
13525    pub fn parse_set_quantifier(&mut self, op: &Option<SetOperator>) -> SetQuantifier {
13526        match op {
13527            Some(
13528                SetOperator::Except
13529                | SetOperator::Intersect
13530                | SetOperator::Union
13531                | SetOperator::Minus,
13532            ) => {
13533                if self.parse_keywords(&[Keyword::DISTINCT, Keyword::BY, Keyword::NAME]) {
13534                    SetQuantifier::DistinctByName
13535                } else if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
13536                    SetQuantifier::ByName
13537                } else if self.parse_keyword(Keyword::ALL) {
13538                    if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
13539                        SetQuantifier::AllByName
13540                    } else {
13541                        SetQuantifier::All
13542                    }
13543                } else if self.parse_keyword(Keyword::DISTINCT) {
13544                    SetQuantifier::Distinct
13545                } else {
13546                    SetQuantifier::None
13547                }
13548            }
13549            _ => SetQuantifier::None,
13550        }
13551    }
13552
13553    /// Parse a restricted `SELECT` statement (no CTEs / `UNION` / `ORDER BY`)
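    ///
    /// For illustration, a restricted `SELECT` of roughly this shape is
    /// handled here (a sketch; several of the clauses are dialect-gated):
    ///
    /// ```sql
    /// SELECT DISTINCT a, count(*)
    /// FROM t
    /// WHERE a > 1
    /// GROUP BY a
    /// HAVING count(*) > 1
    /// ```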
13554    pub fn parse_select(&mut self) -> Result<Select, ParserError> {
13555        let mut from_first = None;
13556
13557        if self.dialect.supports_from_first_select() && self.peek_keyword(Keyword::FROM) {
13558            let from_token = self.expect_keyword(Keyword::FROM)?;
13559            let from = self.parse_table_with_joins()?;
13560            if !self.peek_keyword(Keyword::SELECT) {
13561                return Ok(Select {
13562                    select_token: AttachedToken(from_token),
13563                    distinct: None,
13564                    top: None,
13565                    top_before_distinct: false,
13566                    projection: vec![],
13567                    exclude: None,
13568                    into: None,
13569                    from,
13570                    lateral_views: vec![],
13571                    prewhere: None,
13572                    selection: None,
13573                    group_by: GroupByExpr::Expressions(vec![], vec![]),
13574                    cluster_by: vec![],
13575                    distribute_by: vec![],
13576                    sort_by: vec![],
13577                    having: None,
13578                    named_window: vec![],
13579                    window_before_qualify: false,
13580                    qualify: None,
13581                    value_table_mode: None,
13582                    connect_by: None,
13583                    flavor: SelectFlavor::FromFirstNoSelect,
13584                });
13585            }
13586            from_first = Some(from);
13587        }
13588
13589        let select_token = self.expect_keyword(Keyword::SELECT)?;
13590        let value_table_mode = self.parse_value_table_mode()?;
13591
13592        let mut top_before_distinct = false;
13593        let mut top = None;
13594        if self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
13595            top = Some(self.parse_top()?);
13596            top_before_distinct = true;
13597        }
13598        let distinct = self.parse_all_or_distinct()?;
13599        if !self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
13600            top = Some(self.parse_top()?);
13601        }
13602
13603        let projection =
13604            if self.dialect.supports_empty_projections() && self.peek_keyword(Keyword::FROM) {
13605                vec![]
13606            } else {
13607                self.parse_projection()?
13608            };
13609
13610        let exclude = if self.dialect.supports_select_exclude() {
13611            self.parse_optional_select_item_exclude()?
13612        } else {
13613            None
13614        };
13615
13616        let into = if self.parse_keyword(Keyword::INTO) {
13617            Some(self.parse_select_into()?)
13618        } else {
13619            None
13620        };
13621
13622        // Note that for keywords to be properly handled here, they need to be
13623        // added to `RESERVED_FOR_COLUMN_ALIAS` / `RESERVED_FOR_TABLE_ALIAS`,
13624        // otherwise they may be parsed as an alias as part of the `projection`
13625        // or `from`.
13626
13627        let (from, from_first) = if let Some(from) = from_first.take() {
13628            (from, true)
13629        } else if self.parse_keyword(Keyword::FROM) {
13630            (self.parse_table_with_joins()?, false)
13631        } else {
13632            (vec![], false)
13633        };
13634
13635        let mut lateral_views = vec![];
13636        loop {
13637            if self.parse_keywords(&[Keyword::LATERAL, Keyword::VIEW]) {
13638                let outer = self.parse_keyword(Keyword::OUTER);
13639                let lateral_view = self.parse_expr()?;
13640                let lateral_view_name = self.parse_object_name(false)?;
13641                let lateral_col_alias = self
13642                    .parse_comma_separated(|parser| {
13643                        parser.parse_optional_alias(&[
13644                            Keyword::WHERE,
13645                            Keyword::GROUP,
13646                            Keyword::CLUSTER,
13647                            Keyword::HAVING,
13648                            Keyword::LATERAL,
13649                        ]) // This couldn't possibly be a bad idea
13650                    })?
13651                    .into_iter()
13652                    .flatten()
13653                    .collect();
13654
13655                lateral_views.push(LateralView {
13656                    lateral_view,
13657                    lateral_view_name,
13658                    lateral_col_alias,
13659                    outer,
13660                });
13661            } else {
13662                break;
13663            }
13664        }
13665
13666        let prewhere = if dialect_of!(self is ClickHouseDialect|GenericDialect)
13667            && self.parse_keyword(Keyword::PREWHERE)
13668        {
13669            Some(self.parse_expr()?)
13670        } else {
13671            None
13672        };
13673
13674        let selection = if self.parse_keyword(Keyword::WHERE) {
13675            Some(self.parse_expr()?)
13676        } else {
13677            None
13678        };
13679
13680        let group_by = self
13681            .parse_optional_group_by()?
13682            .unwrap_or_else(|| GroupByExpr::Expressions(vec![], vec![]));
13683
13684        let cluster_by = if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
13685            self.parse_comma_separated(Parser::parse_expr)?
13686        } else {
13687            vec![]
13688        };
13689
13690        let distribute_by = if self.parse_keywords(&[Keyword::DISTRIBUTE, Keyword::BY]) {
13691            self.parse_comma_separated(Parser::parse_expr)?
13692        } else {
13693            vec![]
13694        };
13695
13696        let sort_by = if self.parse_keywords(&[Keyword::SORT, Keyword::BY]) {
13697            self.parse_comma_separated(Parser::parse_order_by_expr)?
13698        } else {
13699            vec![]
13700        };
13701
13702        let having = if self.parse_keyword(Keyword::HAVING) {
13703            Some(self.parse_expr()?)
13704        } else {
13705            None
13706        };
13707
13708        // Accept QUALIFY and WINDOW in any order and flag accordingly.
13709        let (named_windows, qualify, window_before_qualify) = if self.parse_keyword(Keyword::WINDOW)
13710        {
13711            let named_windows = self.parse_comma_separated(Parser::parse_named_window)?;
13712            if self.parse_keyword(Keyword::QUALIFY) {
13713                (named_windows, Some(self.parse_expr()?), true)
13714            } else {
13715                (named_windows, None, true)
13716            }
13717        } else if self.parse_keyword(Keyword::QUALIFY) {
13718            let qualify = Some(self.parse_expr()?);
13719            if self.parse_keyword(Keyword::WINDOW) {
13720                (
13721                    self.parse_comma_separated(Parser::parse_named_window)?,
13722                    qualify,
13723                    false,
13724                )
13725            } else {
13726                (Default::default(), qualify, false)
13727            }
13728        } else {
13729            Default::default()
13730        };
13731
13732        let connect_by = if self.dialect.supports_connect_by()
13733            && self
13734                .parse_one_of_keywords(&[Keyword::START, Keyword::CONNECT])
13735                .is_some()
13736        {
13737            self.prev_token();
13738            Some(self.parse_connect_by()?)
13739        } else {
13740            None
13741        };
13742
13743        Ok(Select {
13744            select_token: AttachedToken(select_token),
13745            distinct,
13746            top,
13747            top_before_distinct,
13748            projection,
13749            exclude,
13750            into,
13751            from,
13752            lateral_views,
13753            prewhere,
13754            selection,
13755            group_by,
13756            cluster_by,
13757            distribute_by,
13758            sort_by,
13759            having,
13760            named_window: named_windows,
13761            window_before_qualify,
13762            qualify,
13763            value_table_mode,
13764            connect_by,
13765            flavor: if from_first {
13766                SelectFlavor::FromFirst
13767            } else {
13768                SelectFlavor::Standard
13769            },
13770        })
13771    }
13772
13773    fn parse_value_table_mode(&mut self) -> Result<Option<ValueTableMode>, ParserError> {
13774        if !dialect_of!(self is BigQueryDialect) {
13775            return Ok(None);
13776        }
13777
13778        let mode = if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::VALUE]) {
13779            Some(ValueTableMode::DistinctAsValue)
13780        } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::STRUCT]) {
13781            Some(ValueTableMode::DistinctAsStruct)
13782        } else if self.parse_keywords(&[Keyword::AS, Keyword::VALUE])
13783            || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::VALUE])
13784        {
13785            Some(ValueTableMode::AsValue)
13786        } else if self.parse_keywords(&[Keyword::AS, Keyword::STRUCT])
13787            || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::STRUCT])
13788        {
13789            Some(ValueTableMode::AsStruct)
13790        } else if self.parse_keyword(Keyword::AS) {
13791            self.expected("VALUE or STRUCT", self.peek_token())?
13792        } else {
13793            None
13794        };
13795
13796        Ok(mode)
13797    }
13798
13799    /// Invoke `f` after first setting the parser's `ParserState` to `state`.
13800    ///
13801    /// Upon return, restores the parser's state to its previous value.
13802    fn with_state<T, F>(&mut self, state: ParserState, mut f: F) -> Result<T, ParserError>
13803    where
13804        F: FnMut(&mut Parser) -> Result<T, ParserError>,
13805    {
13806        let current_state = self.state;
13807        self.state = state;
13808        let res = f(self);
13809        self.state = current_state;
13810        res
13811    }
13812
13813    pub fn parse_connect_by(&mut self) -> Result<ConnectBy, ParserError> {
13814        let (condition, relationships) = if self.parse_keywords(&[Keyword::CONNECT, Keyword::BY]) {
13815            let relationships = self.with_state(ParserState::ConnectBy, |parser| {
13816                parser.parse_comma_separated(Parser::parse_expr)
13817            })?;
13818            self.expect_keywords(&[Keyword::START, Keyword::WITH])?;
13819            let condition = self.parse_expr()?;
13820            (condition, relationships)
13821        } else {
13822            self.expect_keywords(&[Keyword::START, Keyword::WITH])?;
13823            let condition = self.parse_expr()?;
13824            self.expect_keywords(&[Keyword::CONNECT, Keyword::BY])?;
13825            let relationships = self.with_state(ParserState::ConnectBy, |parser| {
13826                parser.parse_comma_separated(Parser::parse_expr)
13827            })?;
13828            (condition, relationships)
13829        };
13830        Ok(ConnectBy {
13831            condition,
13832            relationships,
13833        })
13834    }
13835
13836    /// Parse `CREATE TABLE x AS TABLE y`
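    ///
    /// For illustration, this parses the `TABLE ...` part of statements such
    /// as the following (a sketch), with an optional `schema.` qualifier:
    ///
    /// ```sql
    /// CREATE TABLE new_t AS TABLE s.old_t
    /// ```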
13837    pub fn parse_as_table(&mut self) -> Result<Table, ParserError> {
13838        let token1 = self.next_token();
13839        let token2 = self.next_token();
13840        let token3 = self.next_token();
13841
13842        let table_name;
13843        let schema_name;
13844        if token2 == Token::Period {
13845            match token1.token {
13846                Token::Word(w) => {
13847                    schema_name = w.value;
13848                }
13849                _ => {
13850                    return self.expected("Schema name", token1);
13851                }
13852            }
13853            match token3.token {
13854                Token::Word(w) => {
13855                    table_name = w.value;
13856                }
13857                _ => {
13858                    return self.expected("Table name", token3);
13859                }
13860            }
13861            Ok(Table {
13862                table_name: Some(table_name),
13863                schema_name: Some(schema_name),
13864            })
13865        } else {
13866            match token1.token {
13867                Token::Word(w) => {
13868                    table_name = w.value;
13869                }
13870                _ => {
13871                    return self.expected("Table name", token1);
13872                }
13873            }
13874            Ok(Table {
13875                table_name: Some(table_name),
13876                schema_name: None,
13877            })
13878        }
13879    }
13880
13881    /// Parse a `SET ROLE` statement. Expects SET to be consumed already.
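    ///
    /// For illustration (a sketch; the optional `SESSION`/`LOCAL` modifier is
    /// consumed by the caller):
    ///
    /// ```sql
    /// SET SESSION ROLE my_role
    /// SET ROLE NONE
    /// ```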
13882    fn parse_set_role(
13883        &mut self,
13884        modifier: Option<ContextModifier>,
13885    ) -> Result<Statement, ParserError> {
13886        self.expect_keyword_is(Keyword::ROLE)?;
13887
13888        let role_name = if self.parse_keyword(Keyword::NONE) {
13889            None
13890        } else {
13891            Some(self.parse_identifier()?)
13892        };
13893        Ok(Statement::Set(Set::SetRole {
13894            context_modifier: modifier,
13895            role_name,
13896        }))
13897    }
13898
13899    fn parse_set_values(
13900        &mut self,
13901        parenthesized_assignment: bool,
13902    ) -> Result<Vec<Expr>, ParserError> {
13903        let mut values = vec![];
13904
13905        if parenthesized_assignment {
13906            self.expect_token(&Token::LParen)?;
13907        }
13908
13909        loop {
13910            let value = if let Some(expr) = self.try_parse_expr_sub_query()? {
13911                expr
13912            } else if let Ok(expr) = self.parse_expr() {
13913                expr
13914            } else {
13915                self.expected("variable value", self.peek_token())?
13916            };
13917
13918            values.push(value);
13919            if self.consume_token(&Token::Comma) {
13920                continue;
13921            }
13922
13923            if parenthesized_assignment {
13924                self.expect_token(&Token::RParen)?;
13925            }
13926            return Ok(values);
13927        }
13928    }
13929
13930    fn parse_context_modifier(&mut self) -> Option<ContextModifier> {
13931        let modifier =
13932            self.parse_one_of_keywords(&[Keyword::SESSION, Keyword::LOCAL, Keyword::GLOBAL])?;
13933
13934        Self::keyword_to_modifier(modifier)
13935    }
13936
13937    /// Parse a single SET statement assignment `var = expr`.
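    ///
    /// For illustration, each of the following is one assignment in the sense
    /// of this method (a sketch; `TO` is accepted as well as `=`):
    ///
    /// ```sql
    /// some_var = 42
    /// SESSION some_var TO 'on'
    /// ```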
13938    fn parse_set_assignment(&mut self) -> Result<SetAssignment, ParserError> {
13939        let scope = self.parse_context_modifier();
13940
13941        let name = if self.dialect.supports_parenthesized_set_variables()
13942            && self.consume_token(&Token::LParen)
13943        {
13944            // Parenthesized assignments are handled in the `parse_set` function after
13945            // trying to parse a list of assignments using this function.
13946            // If a dialect supports both, and we find a LParen, we exit early from this function.
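            // (illustrative case: `SET (a, b) = (1, 2)` is such a parenthesized assignment)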
13947            self.expected("Unparenthesized assignment", self.peek_token())?
13948        } else {
13949            self.parse_object_name(false)?
13950        };
13951
13952        if !(self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO)) {
13953            return self.expected("assignment operator", self.peek_token());
13954        }
13955
13956        let value = self.parse_expr()?;
13957
13958        Ok(SetAssignment { scope, name, value })
13959    }
13960
13961    fn parse_set(&mut self) -> Result<Statement, ParserError> {
13962        let hivevar = self.parse_keyword(Keyword::HIVEVAR);
13963
13964        // Modifier is either HIVEVAR: or a ContextModifier (LOCAL, SESSION, etc), not both
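        // e.g. `SET HIVEVAR:foo = 'bar'` vs. `SET SESSION foo = 'bar'` (illustrative; `foo` is a placeholder)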
13965        let scope = if !hivevar {
13966            self.parse_context_modifier()
13967        } else {
13968            None
13969        };
13970
13971        if hivevar {
13972            self.expect_token(&Token::Colon)?;
13973        }
13974
13975        if let Some(set_role_stmt) = self.maybe_parse(|parser| parser.parse_set_role(scope))? {
13976            return Ok(set_role_stmt);
13977        }
13978
13979        // Handle special cases first
13980        if self.parse_keywords(&[Keyword::TIME, Keyword::ZONE])
13981            || self.parse_keyword(Keyword::TIMEZONE)
13982        {
13983            if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
13984                return Ok(Set::SingleAssignment {
13985                    scope,
13986                    hivevar,
13987                    variable: ObjectName::from(vec!["TIMEZONE".into()]),
13988                    values: self.parse_set_values(false)?,
13989                }
13990                .into());
13991            } else {
13992                // A shorthand alias for SET TIME ZONE that doesn't require
13993                // the assignment operator. It is originally PostgreSQL-specific,
13994                // but we allow it for all dialects.
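                // e.g. `SET TIME ZONE 'UTC'` takes this branch, while `SET TIME ZONE = 'UTC'` takes the branch above (illustrative)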
13995                return Ok(Set::SetTimeZone {
13996                    local: scope == Some(ContextModifier::Local),
13997                    value: self.parse_expr()?,
13998                }
13999                .into());
14000            }
14001        } else if self.dialect.supports_set_names() && self.parse_keyword(Keyword::NAMES) {
14002            if self.parse_keyword(Keyword::DEFAULT) {
14003                return Ok(Set::SetNamesDefault {}.into());
14004            }
14005            let charset_name = self.parse_identifier()?;
14006            let collation_name = if self.parse_one_of_keywords(&[Keyword::COLLATE]).is_some() {
14007                Some(self.parse_literal_string()?)
14008            } else {
14009                None
14010            };
14011
14012            return Ok(Set::SetNames {
14013                charset_name,
14014                collation_name,
14015            }
14016            .into());
14017        } else if self.parse_keyword(Keyword::CHARACTERISTICS) {
14018            self.expect_keywords(&[Keyword::AS, Keyword::TRANSACTION])?;
14019            return Ok(Set::SetTransaction {
14020                modes: self.parse_transaction_modes()?,
14021                snapshot: None,
14022                session: true,
14023            }
14024            .into());
14025        } else if self.parse_keyword(Keyword::TRANSACTION) {
14026            if self.parse_keyword(Keyword::SNAPSHOT) {
14027                let snapshot_id = self.parse_value()?.value;
14028                return Ok(Set::SetTransaction {
14029                    modes: vec![],
14030                    snapshot: Some(snapshot_id),
14031                    session: false,
14032                }
14033                .into());
14034            }
14035            return Ok(Set::SetTransaction {
14036                modes: self.parse_transaction_modes()?,
14037                snapshot: None,
14038                session: false,
14039            }
14040            .into());
14041        } else if self.parse_keyword(Keyword::AUTHORIZATION) {
14042            let auth_value = if self.parse_keyword(Keyword::DEFAULT) {
14043                SetSessionAuthorizationParamKind::Default
14044            } else {
14045                let value = self.parse_identifier()?;
14046                SetSessionAuthorizationParamKind::User(value)
14047            };
14048            return Ok(Set::SetSessionAuthorization(SetSessionAuthorizationParam {
14049                scope: scope.expect("SET ... AUTHORIZATION must have a scope"),
14050                kind: auth_value,
14051            })
14052            .into());
14053        }
14054
14055        if self.dialect.supports_comma_separated_set_assignments() {
14056            if scope.is_some() {
14057                self.prev_token();
14058            }
14059
14060            if let Some(assignments) = self
14061                .maybe_parse(|parser| parser.parse_comma_separated(Parser::parse_set_assignment))?
14062            {
14063                return if assignments.len() > 1 {
14064                    Ok(Set::MultipleAssignments { assignments }.into())
14065                } else {
14066                    let SetAssignment { scope, name, value } =
14067                        assignments.into_iter().next().ok_or_else(|| {
14068                            ParserError::ParserError("Expected at least one assignment".to_string())
14069                        })?;
14070
14071                    Ok(Set::SingleAssignment {
14072                        scope,
14073                        hivevar,
14074                        variable: name,
14075                        values: vec![value],
14076                    }
14077                    .into())
14078                };
14079            }
14080        }
14081
14082        let variables = if self.dialect.supports_parenthesized_set_variables()
14083            && self.consume_token(&Token::LParen)
14084        {
14085            let vars = OneOrManyWithParens::Many(
14086                self.parse_comma_separated(|parser: &mut Parser<'a>| parser.parse_identifier())?
14087                    .into_iter()
14088                    .map(|ident| ObjectName::from(vec![ident]))
14089                    .collect(),
14090            );
14091            self.expect_token(&Token::RParen)?;
14092            vars
14093        } else {
14094            OneOrManyWithParens::One(self.parse_object_name(false)?)
14095        };
14096
14097        if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
14098            let stmt = match variables {
14099                OneOrManyWithParens::One(var) => Set::SingleAssignment {
14100                    scope,
14101                    hivevar,
14102                    variable: var,
14103                    values: self.parse_set_values(false)?,
14104                },
14105                OneOrManyWithParens::Many(vars) => Set::ParenthesizedAssignments {
14106                    variables: vars,
14107                    values: self.parse_set_values(true)?,
14108                },
14109            };
14110
14111            return Ok(stmt.into());
14112        }
14113
14114        if self.dialect.supports_set_stmt_without_operator() {
14115            self.prev_token();
14116            return self.parse_set_session_params();
14117        };
14118
14119        self.expected("equals sign or TO", self.peek_token())
14120    }
14121
14122    pub fn parse_set_session_params(&mut self) -> Result<Statement, ParserError> {
14123        if self.parse_keyword(Keyword::STATISTICS) {
14124            let topic = match self.parse_one_of_keywords(&[
14125                Keyword::IO,
14126                Keyword::PROFILE,
14127                Keyword::TIME,
14128                Keyword::XML,
14129            ]) {
14130                Some(Keyword::IO) => SessionParamStatsTopic::IO,
14131                Some(Keyword::PROFILE) => SessionParamStatsTopic::Profile,
14132                Some(Keyword::TIME) => SessionParamStatsTopic::Time,
14133                Some(Keyword::XML) => SessionParamStatsTopic::Xml,
14134                _ => return self.expected("IO, PROFILE, TIME or XML", self.peek_token()),
14135            };
14136            let value = self.parse_session_param_value()?;
14137            Ok(
14138                Set::SetSessionParam(SetSessionParamKind::Statistics(SetSessionParamStatistics {
14139                    topic,
14140                    value,
14141                }))
14142                .into(),
14143            )
14144        } else if self.parse_keyword(Keyword::IDENTITY_INSERT) {
14145            let obj = self.parse_object_name(false)?;
14146            let value = self.parse_session_param_value()?;
14147            Ok(Set::SetSessionParam(SetSessionParamKind::IdentityInsert(
14148                SetSessionParamIdentityInsert { obj, value },
14149            ))
14150            .into())
14151        } else if self.parse_keyword(Keyword::OFFSETS) {
14152            let keywords = self.parse_comma_separated(|parser| {
14153                let next_token = parser.next_token();
14154                match &next_token.token {
14155                    Token::Word(w) => Ok(w.to_string()),
14156                    _ => parser.expected("SQL keyword", next_token),
14157                }
14158            })?;
14159            let value = self.parse_session_param_value()?;
14160            Ok(
14161                Set::SetSessionParam(SetSessionParamKind::Offsets(SetSessionParamOffsets {
14162                    keywords,
14163                    value,
14164                }))
14165                .into(),
14166            )
14167        } else {
14168            let names = self.parse_comma_separated(|parser| {
14169                let next_token = parser.next_token();
14170                match next_token.token {
14171                    Token::Word(w) => Ok(w.to_string()),
14172                    _ => parser.expected("Session param name", next_token),
14173                }
14174            })?;
14175            let value = self.parse_expr()?.to_string();
14176            Ok(
14177                Set::SetSessionParam(SetSessionParamKind::Generic(SetSessionParamGeneric {
14178                    names,
14179                    value,
14180                }))
14181                .into(),
14182            )
14183        }
14184    }
14185
14186    fn parse_session_param_value(&mut self) -> Result<SessionParamValue, ParserError> {
14187        if self.parse_keyword(Keyword::ON) {
14188            Ok(SessionParamValue::On)
14189        } else if self.parse_keyword(Keyword::OFF) {
14190            Ok(SessionParamValue::Off)
14191        } else {
14192            self.expected("ON or OFF", self.peek_token())
14193        }
14194    }
14195
14196    pub fn parse_show(&mut self) -> Result<Statement, ParserError> {
14197        let terse = self.parse_keyword(Keyword::TERSE);
14198        let extended = self.parse_keyword(Keyword::EXTENDED);
14199        let full = self.parse_keyword(Keyword::FULL);
14200        let session = self.parse_keyword(Keyword::SESSION);
14201        let global = self.parse_keyword(Keyword::GLOBAL);
14202        let external = self.parse_keyword(Keyword::EXTERNAL);
14203        if self
14204            .parse_one_of_keywords(&[Keyword::COLUMNS, Keyword::FIELDS])
14205            .is_some()
14206        {
14207            Ok(self.parse_show_columns(extended, full)?)
14208        } else if self.parse_keyword(Keyword::TABLES) {
14209            Ok(self.parse_show_tables(terse, extended, full, external)?)
14210        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEWS]) {
14211            Ok(self.parse_show_views(terse, true)?)
14212        } else if self.parse_keyword(Keyword::VIEWS) {
14213            Ok(self.parse_show_views(terse, false)?)
14214        } else if self.parse_keyword(Keyword::FUNCTIONS) {
14215            Ok(self.parse_show_functions()?)
14216        } else if extended || full {
14217            Err(ParserError::ParserError(
14218                "EXTENDED/FULL are not supported with this type of SHOW query".to_string(),
14219            ))
14220        } else if self.parse_one_of_keywords(&[Keyword::CREATE]).is_some() {
14221            Ok(self.parse_show_create()?)
14222        } else if self.parse_keyword(Keyword::COLLATION) {
14223            Ok(self.parse_show_collation()?)
14224        } else if self.parse_keyword(Keyword::VARIABLES)
14225            && dialect_of!(self is MySqlDialect | GenericDialect)
14226        {
14227            Ok(Statement::ShowVariables {
14228                filter: self.parse_show_statement_filter()?,
14229                session,
14230                global,
14231            })
14232        } else if self.parse_keyword(Keyword::STATUS)
14233            && dialect_of!(self is MySqlDialect | GenericDialect)
14234        {
14235            Ok(Statement::ShowStatus {
14236                filter: self.parse_show_statement_filter()?,
14237                session,
14238                global,
14239            })
14240        } else if self.parse_keyword(Keyword::DATABASES) {
14241            self.parse_show_databases(terse)
14242        } else if self.parse_keyword(Keyword::SCHEMAS) {
14243            self.parse_show_schemas(terse)
14244        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
14245            self.parse_show_charset(false)
14246        } else if self.parse_keyword(Keyword::CHARSET) {
14247            self.parse_show_charset(true)
14248        } else {
14249            Ok(Statement::ShowVariable {
14250                variable: self.parse_identifiers()?,
14251            })
14252        }
14253    }
14254
14255    fn parse_show_charset(&mut self, is_shorthand: bool) -> Result<Statement, ParserError> {
14256        // `CHARACTER SET`/`CHARSET` has already been consumed by the caller; parse the optional filter
14257        Ok(Statement::ShowCharset(ShowCharset {
14258            is_shorthand,
14259            filter: self.parse_show_statement_filter()?,
14260        }))
14261    }
14262
14263    fn parse_show_databases(&mut self, terse: bool) -> Result<Statement, ParserError> {
14264        let history = self.parse_keyword(Keyword::HISTORY);
14265        let show_options = self.parse_show_stmt_options()?;
14266        Ok(Statement::ShowDatabases {
14267            terse,
14268            history,
14269            show_options,
14270        })
14271    }
14272
14273    fn parse_show_schemas(&mut self, terse: bool) -> Result<Statement, ParserError> {
14274        let history = self.parse_keyword(Keyword::HISTORY);
14275        let show_options = self.parse_show_stmt_options()?;
14276        Ok(Statement::ShowSchemas {
14277            terse,
14278            history,
14279            show_options,
14280        })
14281    }
14282
14283    pub fn parse_show_create(&mut self) -> Result<Statement, ParserError> {
14284        let obj_type = match self.expect_one_of_keywords(&[
14285            Keyword::TABLE,
14286            Keyword::TRIGGER,
14287            Keyword::FUNCTION,
14288            Keyword::PROCEDURE,
14289            Keyword::EVENT,
14290            Keyword::VIEW,
14291        ])? {
14292            Keyword::TABLE => Ok(ShowCreateObject::Table),
14293            Keyword::TRIGGER => Ok(ShowCreateObject::Trigger),
14294            Keyword::FUNCTION => Ok(ShowCreateObject::Function),
14295            Keyword::PROCEDURE => Ok(ShowCreateObject::Procedure),
14296            Keyword::EVENT => Ok(ShowCreateObject::Event),
14297            Keyword::VIEW => Ok(ShowCreateObject::View),
14298            keyword => Err(ParserError::ParserError(format!(
14299                "Unable to map keyword to ShowCreateObject: {keyword:?}"
14300            ))),
14301        }?;
14302
14303        let obj_name = self.parse_object_name(false)?;
14304
14305        Ok(Statement::ShowCreate { obj_type, obj_name })
14306    }
14307
14308    pub fn parse_show_columns(
14309        &mut self,
14310        extended: bool,
14311        full: bool,
14312    ) -> Result<Statement, ParserError> {
14313        let show_options = self.parse_show_stmt_options()?;
14314        Ok(Statement::ShowColumns {
14315            extended,
14316            full,
14317            show_options,
14318        })
14319    }
14320
14321    fn parse_show_tables(
14322        &mut self,
14323        terse: bool,
14324        extended: bool,
14325        full: bool,
14326        external: bool,
14327    ) -> Result<Statement, ParserError> {
14328        let history = !external && self.parse_keyword(Keyword::HISTORY);
14329        let show_options = self.parse_show_stmt_options()?;
14330        Ok(Statement::ShowTables {
14331            terse,
14332            history,
14333            extended,
14334            full,
14335            external,
14336            show_options,
14337        })
14338    }
14339
14340    fn parse_show_views(
14341        &mut self,
14342        terse: bool,
14343        materialized: bool,
14344    ) -> Result<Statement, ParserError> {
14345        let show_options = self.parse_show_stmt_options()?;
14346        Ok(Statement::ShowViews {
14347            materialized,
14348            terse,
14349            show_options,
14350        })
14351    }
14352
14353    pub fn parse_show_functions(&mut self) -> Result<Statement, ParserError> {
14354        let filter = self.parse_show_statement_filter()?;
14355        Ok(Statement::ShowFunctions { filter })
14356    }
14357
14358    pub fn parse_show_collation(&mut self) -> Result<Statement, ParserError> {
14359        let filter = self.parse_show_statement_filter()?;
14360        Ok(Statement::ShowCollation { filter })
14361    }
14362
14363    pub fn parse_show_statement_filter(
14364        &mut self,
14365    ) -> Result<Option<ShowStatementFilter>, ParserError> {
14366        if self.parse_keyword(Keyword::LIKE) {
14367            Ok(Some(ShowStatementFilter::Like(
14368                self.parse_literal_string()?,
14369            )))
14370        } else if self.parse_keyword(Keyword::ILIKE) {
14371            Ok(Some(ShowStatementFilter::ILike(
14372                self.parse_literal_string()?,
14373            )))
14374        } else if self.parse_keyword(Keyword::WHERE) {
14375            Ok(Some(ShowStatementFilter::Where(self.parse_expr()?)))
14376        } else {
14377            self.maybe_parse(|parser| -> Result<String, ParserError> {
14378                parser.parse_literal_string()
14379            })?
14380            .map_or(Ok(None), |filter| {
14381                Ok(Some(ShowStatementFilter::NoKeyword(filter)))
14382            })
14383        }
14384    }
14385
14386    pub fn parse_use(&mut self) -> Result<Statement, ParserError> {
14387        // Determine which keywords are recognized by the current dialect
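        // e.g. Snowflake accepts `USE WAREHOUSE my_wh` and Databricks accepts
        // `USE CATALOG my_catalog` (illustrative object names)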
14388        let parsed_keyword = if dialect_of!(self is HiveDialect) {
14389            // HiveDialect accepts a `USE DEFAULT` statement without any database specified
14390            if self.parse_keyword(Keyword::DEFAULT) {
14391                return Ok(Statement::Use(Use::Default));
14392            }
14393            None // HiveDialect doesn't expect any other specific keyword after `USE`
14394        } else if dialect_of!(self is DatabricksDialect) {
14395            self.parse_one_of_keywords(&[Keyword::CATALOG, Keyword::DATABASE, Keyword::SCHEMA])
14396        } else if dialect_of!(self is SnowflakeDialect) {
14397            self.parse_one_of_keywords(&[
14398                Keyword::DATABASE,
14399                Keyword::SCHEMA,
14400                Keyword::WAREHOUSE,
14401                Keyword::ROLE,
14402                Keyword::SECONDARY,
14403            ])
14404        } else {
14405            None // No specific keywords for other dialects, including GenericDialect
14406        };
14407
14408        let result = if matches!(parsed_keyword, Some(Keyword::SECONDARY)) {
14409            self.parse_secondary_roles()?
14410        } else {
14411            let obj_name = self.parse_object_name(false)?;
14412            match parsed_keyword {
14413                Some(Keyword::CATALOG) => Use::Catalog(obj_name),
14414                Some(Keyword::DATABASE) => Use::Database(obj_name),
14415                Some(Keyword::SCHEMA) => Use::Schema(obj_name),
14416                Some(Keyword::WAREHOUSE) => Use::Warehouse(obj_name),
14417                Some(Keyword::ROLE) => Use::Role(obj_name),
14418                _ => Use::Object(obj_name),
14419            }
14420        };
14421
14422        Ok(Statement::Use(result))
14423    }
14424
14425    fn parse_secondary_roles(&mut self) -> Result<Use, ParserError> {
14426        self.expect_one_of_keywords(&[Keyword::ROLES, Keyword::ROLE])?;
14427        if self.parse_keyword(Keyword::NONE) {
14428            Ok(Use::SecondaryRoles(SecondaryRoles::None))
14429        } else if self.parse_keyword(Keyword::ALL) {
14430            Ok(Use::SecondaryRoles(SecondaryRoles::All))
14431        } else {
14432            let roles = self.parse_comma_separated(|parser| parser.parse_identifier())?;
14433            Ok(Use::SecondaryRoles(SecondaryRoles::List(roles)))
14434        }
14435    }
14436
14437    pub fn parse_table_and_joins(&mut self) -> Result<TableWithJoins, ParserError> {
14438        let relation = self.parse_table_factor()?;
14439        // Note that for keywords to be properly handled here, they need to be
14440        // added to `RESERVED_FOR_TABLE_ALIAS`, otherwise they may be parsed as
14441        // a table alias.
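        // For example, in `SELECT * FROM t1 LEFT JOIN t2 ON t1.id = t2.id`,
        // the `LEFT` keyword must not be taken as an alias for `t1`.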
14442        let joins = self.parse_joins()?;
14443        Ok(TableWithJoins { relation, joins })
14444    }
14445
14446    fn parse_joins(&mut self) -> Result<Vec<Join>, ParserError> {
14447        let mut joins = vec![];
14448        loop {
14449            let global = self.parse_keyword(Keyword::GLOBAL);
14450            let join = if self.parse_keyword(Keyword::CROSS) {
14451                let join_operator = if self.parse_keyword(Keyword::JOIN) {
14452                    JoinOperator::CrossJoin(JoinConstraint::None)
14453                } else if self.parse_keyword(Keyword::APPLY) {
14454                    // MSSQL extension, similar to CROSS JOIN LATERAL
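                    // e.g. `FROM t CROSS APPLY some_tvf(t.id)` (illustrative function name)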
14455                    JoinOperator::CrossApply
14456                } else {
14457                    return self.expected("JOIN or APPLY after CROSS", self.peek_token());
14458                };
14459                let relation = self.parse_table_factor()?;
14460                let join_operator = if matches!(join_operator, JoinOperator::CrossJoin(_))
14461                    && self.dialect.supports_cross_join_constraint()
14462                {
14463                    let constraint = self.parse_join_constraint(false)?;
14464                    JoinOperator::CrossJoin(constraint)
14465                } else {
14466                    join_operator
14467                };
14468                Join {
14469                    relation,
14470                    global,
14471                    join_operator,
14472                }
14473            } else if self.parse_keyword(Keyword::OUTER) {
14474                // MSSQL extension, similar to LEFT JOIN LATERAL .. ON 1=1
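                // e.g. `FROM t OUTER APPLY some_tvf(t.id)` (illustrative function name)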
14475                self.expect_keyword_is(Keyword::APPLY)?;
14476                Join {
14477                    relation: self.parse_table_factor()?,
14478                    global,
14479                    join_operator: JoinOperator::OuterApply,
14480                }
14481            } else if self.parse_keyword(Keyword::ASOF) {
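                // Snowflake-style ASOF join, e.g. (illustrative):
                // `FROM t1 ASOF JOIN t2 MATCH_CONDITION (t1.ts >= t2.ts) ON t1.id = t2.id`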
14482                self.expect_keyword_is(Keyword::JOIN)?;
14483                let relation = self.parse_table_factor()?;
14484                self.expect_keyword_is(Keyword::MATCH_CONDITION)?;
14485                let match_condition = self.parse_parenthesized(Self::parse_expr)?;
14486                Join {
14487                    relation,
14488                    global,
14489                    join_operator: JoinOperator::AsOf {
14490                        match_condition,
14491                        constraint: self.parse_join_constraint(false)?,
14492                    },
14493                }
14494            } else {
14495                let natural = self.parse_keyword(Keyword::NATURAL);
14496                let peek_keyword = if let Token::Word(w) = self.peek_token().token {
14497                    w.keyword
14498                } else {
14499                    Keyword::NoKeyword
14500                };
14501
14502                let join_operator_type = match peek_keyword {
14503                    Keyword::INNER | Keyword::JOIN => {
14504                        let inner = self.parse_keyword(Keyword::INNER); // [ INNER ]
14505                        self.expect_keyword_is(Keyword::JOIN)?;
14506                        if inner {
14507                            JoinOperator::Inner
14508                        } else {
14509                            JoinOperator::Join
14510                        }
14511                    }
14512                    kw @ Keyword::LEFT | kw @ Keyword::RIGHT => {
14513                        let _ = self.next_token(); // consume LEFT/RIGHT
14514                        let is_left = kw == Keyword::LEFT;
14515                        let join_type = self.parse_one_of_keywords(&[
14516                            Keyword::OUTER,
14517                            Keyword::SEMI,
14518                            Keyword::ANTI,
14519                            Keyword::JOIN,
14520                        ]);
14521                        match join_type {
14522                            Some(Keyword::OUTER) => {
14523                                self.expect_keyword_is(Keyword::JOIN)?;
14524                                if is_left {
14525                                    JoinOperator::LeftOuter
14526                                } else {
14527                                    JoinOperator::RightOuter
14528                                }
14529                            }
14530                            Some(Keyword::SEMI) => {
14531                                self.expect_keyword_is(Keyword::JOIN)?;
14532                                if is_left {
14533                                    JoinOperator::LeftSemi
14534                                } else {
14535                                    JoinOperator::RightSemi
14536                                }
14537                            }
14538                            Some(Keyword::ANTI) => {
14539                                self.expect_keyword_is(Keyword::JOIN)?;
14540                                if is_left {
14541                                    JoinOperator::LeftAnti
14542                                } else {
14543                                    JoinOperator::RightAnti
14544                                }
14545                            }
14546                            Some(Keyword::JOIN) => {
14547                                if is_left {
14548                                    JoinOperator::Left
14549                                } else {
14550                                    JoinOperator::Right
14551                                }
14552                            }
14553                            _ => {
14554                                return Err(ParserError::ParserError(format!(
14555                                    "expected OUTER, SEMI, ANTI or JOIN after {kw:?}"
14556                                )))
14557                            }
14558                        }
14559                    }
14560                    Keyword::ANTI => {
14561                        let _ = self.next_token(); // consume ANTI
14562                        self.expect_keyword_is(Keyword::JOIN)?;
14563                        JoinOperator::Anti
14564                    }
14565                    Keyword::SEMI => {
14566                        let _ = self.next_token(); // consume SEMI
14567                        self.expect_keyword_is(Keyword::JOIN)?;
14568                        JoinOperator::Semi
14569                    }
14570                    Keyword::FULL => {
14571                        let _ = self.next_token(); // consume FULL
14572                        let _ = self.parse_keyword(Keyword::OUTER); // [ OUTER ]
14573                        self.expect_keyword_is(Keyword::JOIN)?;
14574                        JoinOperator::FullOuter
14575                    }
14576                    Keyword::OUTER => {
14577                        return self.expected("LEFT, RIGHT, or FULL", self.peek_token());
14578                    }
14579                    Keyword::STRAIGHT_JOIN => {
14580                        let _ = self.next_token(); // consume STRAIGHT_JOIN
14581                        JoinOperator::StraightJoin
14582                    }
14583                    _ if natural => {
14584                        return self.expected("a join type after NATURAL", self.peek_token());
14585                    }
14586                    _ => break,
14587                };
14588                let mut relation = self.parse_table_factor()?;
14589
14590                if !self
14591                    .dialect
14592                    .supports_left_associative_joins_without_parens()
14593                    && self.peek_parens_less_nested_join()
14594                {
14595                    let joins = self.parse_joins()?;
14596                    relation = TableFactor::NestedJoin {
14597                        table_with_joins: Box::new(TableWithJoins { relation, joins }),
14598                        alias: None,
14599                    };
14600                }
14601
14602                let join_constraint = self.parse_join_constraint(natural)?;
14603                Join {
14604                    relation,
14605                    global,
14606                    join_operator: join_operator_type(join_constraint),
14607                }
14608            };
14609            joins.push(join);
14610        }
14611        Ok(joins)
14612    }
14613
14614    fn peek_parens_less_nested_join(&self) -> bool {
14615        matches!(
14616            self.peek_token_ref().token,
14617            Token::Word(Word {
14618                keyword: Keyword::JOIN
14619                    | Keyword::INNER
14620                    | Keyword::LEFT
14621                    | Keyword::RIGHT
14622                    | Keyword::FULL,
14623                ..
14624            })
14625        )
14626    }
14627
14628    /// A table name or a parenthesized subquery, followed by optional `[AS] alias`
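    ///
    /// For example, each of the following is parsed as a single table factor
    /// (illustrative names):
    ///
    /// ```sql
    /// t1
    /// t1 AS a
    /// (SELECT 1) AS a
    /// ```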
14629    pub fn parse_table_factor(&mut self) -> Result<TableFactor, ParserError> {
14630        if self.parse_keyword(Keyword::LATERAL) {
14631            // LATERAL must always be followed by a subquery or table function.
14632            if self.consume_token(&Token::LParen) {
14633                self.parse_derived_table_factor(Lateral)
14634            } else {
14635                let name = self.parse_object_name(false)?;
14636                self.expect_token(&Token::LParen)?;
14637                let args = self.parse_optional_args()?;
14638                let alias = self.maybe_parse_table_alias()?;
14639                Ok(TableFactor::Function {
14640                    lateral: true,
14641                    name,
14642                    args,
14643                    alias,
14644                })
14645            }
14646        } else if self.parse_keyword(Keyword::TABLE) {
14647            // parse table function (SELECT * FROM TABLE (<expr>) [ AS <alias> ])
14648            self.expect_token(&Token::LParen)?;
14649            let expr = self.parse_expr()?;
14650            self.expect_token(&Token::RParen)?;
14651            let alias = self.maybe_parse_table_alias()?;
14652            Ok(TableFactor::TableFunction { expr, alias })
14653        } else if self.consume_token(&Token::LParen) {
14654            // A left paren introduces either a derived table (i.e., a subquery)
14655            // or a nested join. It's nearly impossible to determine ahead of
14656            // time which it is... so we just try to parse both.
14657            //
14658            // Here's an example that demonstrates the complexity:
14659            //                     /-------------------------------------------------------\
14660            //                     | /-----------------------------------\                 |
14661            //     SELECT * FROM ( ( ( (SELECT 1) UNION (SELECT 2) ) AS t1 NATURAL JOIN t2 ) )
14662            //                   ^ ^ ^ ^
14663            //                   | | | |
14664            //                   | | | |
14665            //                   | | | (4) belongs to a SetExpr::Query inside the subquery
14666            //                   | | (3) starts a derived table (subquery)
14667            //                   | (2) starts a nested join
14668            //                   (1) an additional set of parens around a nested join
14669            //
14670
14671            // If the recently consumed '(' starts a derived table, the call to
14672            // `parse_derived_table_factor` below will return success after parsing the
14673            // subquery, followed by the closing ')', and the alias of the derived table.
14674            // In the example above this is case (3).
14675            if let Some(mut table) =
14676                self.maybe_parse(|parser| parser.parse_derived_table_factor(NotLateral))?
14677            {
14678                while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT])
14679                {
14680                    table = match kw {
14681                        Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
14682                        Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
14683                        unexpected_keyword => return Err(ParserError::ParserError(
14684                            format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in pivot/unpivot"),
14685                        )),
14686                    }
14687                }
14688                return Ok(table);
14689            }
14690
14691            // A parsing error from `parse_derived_table_factor` indicates that the '(' we've
14692            // recently consumed does not start a derived table (cases 1, 2, or 4).
14693            // `maybe_parse` will ignore such an error and rewind to be after the opening '('.
14694
14695            // Inside the parentheses we expect to find an (A) table factor
14696            // followed by some joins or (B) another level of nesting.
14697            let mut table_and_joins = self.parse_table_and_joins()?;
14698
14699            #[allow(clippy::if_same_then_else)]
14700            if !table_and_joins.joins.is_empty() {
14701                self.expect_token(&Token::RParen)?;
14702                let alias = self.maybe_parse_table_alias()?;
14703                Ok(TableFactor::NestedJoin {
14704                    table_with_joins: Box::new(table_and_joins),
14705                    alias,
14706                }) // (A)
14707            } else if let TableFactor::NestedJoin {
14708                table_with_joins: _,
14709                alias: _,
14710            } = &table_and_joins.relation
14711            {
14712                // (B): `table_and_joins` (what we found inside the parentheses)
14713                // is a nested join `(foo JOIN bar)`, not followed by other joins.
14714                self.expect_token(&Token::RParen)?;
14715                let alias = self.maybe_parse_table_alias()?;
14716                Ok(TableFactor::NestedJoin {
14717                    table_with_joins: Box::new(table_and_joins),
14718                    alias,
14719                })
14720            } else if dialect_of!(self is SnowflakeDialect | GenericDialect) {
14721                // Dialect-specific behavior: Snowflake diverges from the
14722                // standard and from most of the other implementations by
14723                // allowing extra parentheses not only around a join (B), but
14724                // around lone table names (e.g. `FROM (mytable [AS alias])`)
14725                // and around derived tables (e.g. `FROM ((SELECT ...)
14726                // [AS alias])`) as well.
14727                self.expect_token(&Token::RParen)?;
14728
14729                if let Some(outer_alias) = self.maybe_parse_table_alias()? {
14730                    // Snowflake also allows specifying an alias *after* parens
14731                    // e.g. `FROM (mytable) AS alias`
14732                    match &mut table_and_joins.relation {
14733                        TableFactor::Derived { alias, .. }
14734                        | TableFactor::Table { alias, .. }
14735                        | TableFactor::Function { alias, .. }
14736                        | TableFactor::UNNEST { alias, .. }
14737                        | TableFactor::JsonTable { alias, .. }
14738                        | TableFactor::XmlTable { alias, .. }
14739                        | TableFactor::OpenJsonTable { alias, .. }
14740                        | TableFactor::TableFunction { alias, .. }
14741                        | TableFactor::Pivot { alias, .. }
14742                        | TableFactor::Unpivot { alias, .. }
14743                        | TableFactor::MatchRecognize { alias, .. }
14744                        | TableFactor::SemanticView { alias, .. }
14745                        | TableFactor::NestedJoin { alias, .. } => {
14746                            // but not `FROM (mytable AS alias1) AS alias2`.
14747                            if let Some(inner_alias) = alias {
14748                                return Err(ParserError::ParserError(format!(
14749                                    "duplicate alias {inner_alias}"
14750                                )));
14751                            }
14752                            // Act as if the alias was specified normally next
14753                            // to the table name: `(mytable) AS alias` ->
14754                            // `(mytable AS alias)`
14755                            alias.replace(outer_alias);
14756                        }
14757                    };
14758                }
14759                // Do not store the extra set of parens in the AST
14760                Ok(table_and_joins.relation)
14761            } else {
14762                // The SQL spec prohibits derived tables and bare tables from
14763                // appearing alone in parentheses (e.g. `FROM (mytable)`)
14764                self.expected("joined table", self.peek_token())
14765            }
14766        } else if dialect_of!(self is SnowflakeDialect | DatabricksDialect | GenericDialect)
14767            && matches!(
14768                self.peek_tokens(),
14769                [
14770                    Token::Word(Word {
14771                        keyword: Keyword::VALUES,
14772                        ..
14773                    }),
14774                    Token::LParen
14775                ]
14776            )
14777        {
14778            self.expect_keyword_is(Keyword::VALUES)?;
14779
14780            // Snowflake and Databricks allow syntax like below:
14781            // SELECT * FROM VALUES (1, 'a'), (2, 'b') AS t (col1, col2)
14782            // where there are no parentheses around the VALUES clause.
14783            let values = SetExpr::Values(self.parse_values(false, false)?);
14784            let alias = self.maybe_parse_table_alias()?;
14785            Ok(TableFactor::Derived {
14786                lateral: false,
14787                subquery: Box::new(Query {
14788                    with: None,
14789                    body: Box::new(values),
14790                    order_by: None,
14791                    limit_clause: None,
14792                    fetch: None,
14793                    locks: vec![],
14794                    for_clause: None,
14795                    settings: None,
14796                    format_clause: None,
14797                    pipe_operators: vec![],
14798                }),
14799                alias,
14800            })
14801        } else if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
14802            && self.parse_keyword(Keyword::UNNEST)
14803        {
14804            self.expect_token(&Token::LParen)?;
14805            let array_exprs = self.parse_comma_separated(Parser::parse_expr)?;
14806            self.expect_token(&Token::RParen)?;
14807
14808            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
14809            let alias = self.maybe_parse_table_alias()?;
14814
14815            let with_offset = self.expect_keywords(&[Keyword::WITH, Keyword::OFFSET]).is_ok();
14819
14820            let with_offset_alias = if with_offset {
14821                self.parse_optional_alias(keywords::RESERVED_FOR_COLUMN_ALIAS)?
14826            } else {
14827                None
14828            };
14829
14830            Ok(TableFactor::UNNEST {
14831                alias,
14832                array_exprs,
14833                with_offset,
14834                with_offset_alias,
14835                with_ordinality,
14836            })
14837        } else if self.parse_keyword_with_tokens(Keyword::JSON_TABLE, &[Token::LParen]) {
14838            let json_expr = self.parse_expr()?;
14839            self.expect_token(&Token::Comma)?;
14840            let json_path = self.parse_value()?.value;
14841            self.expect_keyword_is(Keyword::COLUMNS)?;
14842            self.expect_token(&Token::LParen)?;
14843            let columns = self.parse_comma_separated(Parser::parse_json_table_column_def)?;
14844            self.expect_token(&Token::RParen)?;
14845            self.expect_token(&Token::RParen)?;
14846            let alias = self.maybe_parse_table_alias()?;
14847            Ok(TableFactor::JsonTable {
14848                json_expr,
14849                json_path,
14850                columns,
14851                alias,
14852            })
14853        } else if self.parse_keyword_with_tokens(Keyword::OPENJSON, &[Token::LParen]) {
14854            self.prev_token();
14855            self.parse_open_json_table_factor()
14856        } else if self.parse_keyword_with_tokens(Keyword::XMLTABLE, &[Token::LParen]) {
14857            self.prev_token();
14858            self.parse_xml_table_factor()
14859        } else if self.dialect.supports_semantic_view_table_factor()
14860            && self.peek_keyword_with_tokens(Keyword::SEMANTIC_VIEW, &[Token::LParen])
14861        {
14862            self.parse_semantic_view_table_factor()
14863        } else {
14864            let name = self.parse_object_name(true)?;
14865
14866            let json_path = match self.peek_token().token {
14867                Token::LBracket if self.dialect.supports_partiql() => Some(self.parse_json_path()?),
14868                _ => None,
14869            };
14870
14871            let partitions: Vec<Ident> = if dialect_of!(self is MySqlDialect | GenericDialect)
14872                && self.parse_keyword(Keyword::PARTITION)
14873            {
14874                self.parse_parenthesized_identifiers()?
14875            } else {
14876                vec![]
14877            };
14878
14879            // Parse potential version qualifier
14880            let version = self.maybe_parse_table_version()?;
14881
14882            // Postgres, MSSQL, ClickHouse: table-valued functions:
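            // e.g. `FROM generate_series(1, 10)` in Postgres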
14883            let args = if self.consume_token(&Token::LParen) {
14884                Some(self.parse_table_function_args()?)
14885            } else {
14886                None
14887            };
14888
14889            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
14890
14891            let mut sample = None;
14892            if self.dialect.supports_table_sample_before_alias() {
14893                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
14894                    sample = Some(TableSampleKind::BeforeTableAlias(parsed_sample));
14895                }
14896            }
14897
14898            let alias = self.maybe_parse_table_alias()?;
14899
14900            // MYSQL-specific table hints:
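            // e.g. `FROM t USE INDEX (idx_name)` (illustrative index name)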
14901            let index_hints = if self.dialect.supports_table_hints() {
14902                self.maybe_parse(|p| p.parse_table_index_hints())?
14903                    .unwrap_or_default()
14904            } else {
14905                vec![]
14906            };
14907
14908            // MSSQL-specific table hints:
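            // e.g. `FROM t WITH (NOLOCK)`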
14909            let mut with_hints = vec![];
14910            if self.parse_keyword(Keyword::WITH) {
14911                if self.consume_token(&Token::LParen) {
14912                    with_hints = self.parse_comma_separated(Parser::parse_expr)?;
14913                    self.expect_token(&Token::RParen)?;
14914                } else {
14915                    // rewind, as WITH may belong to the next statement's CTE
14916                    self.prev_token();
14917                }
14918            };
14919
14920            if !self.dialect.supports_table_sample_before_alias() {
14921                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
14922                    sample = Some(TableSampleKind::AfterTableAlias(parsed_sample));
14923                }
14924            }
14925
14926            let mut table = TableFactor::Table {
14927                name,
14928                alias,
14929                args,
14930                with_hints,
14931                version,
14932                partitions,
14933                with_ordinality,
14934                json_path,
14935                sample,
14936                index_hints,
14937            };
14938
14939            while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT]) {
14940                table = match kw {
14941                    Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
14942                    Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
14943                    unexpected_keyword => return Err(ParserError::ParserError(
14944                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in pivot/unpivot"),
14945                    )),
14946                }
14947            }
14948
14949            if self.dialect.supports_match_recognize()
14950                && self.parse_keyword(Keyword::MATCH_RECOGNIZE)
14951            {
14952                table = self.parse_match_recognize(table)?;
14953            }
14954
14955            Ok(table)
14956        }
14957    }
14958
14959    fn maybe_parse_table_sample(&mut self) -> Result<Option<Box<TableSample>>, ParserError> {
14960        let modifier = if self.parse_keyword(Keyword::TABLESAMPLE) {
14961            TableSampleModifier::TableSample
14962        } else if self.parse_keyword(Keyword::SAMPLE) {
14963            TableSampleModifier::Sample
14964        } else {
14965            return Ok(None);
14966        };
14967        self.parse_table_sample(modifier).map(Some)
14968    }
14969
14970    fn parse_table_sample(
14971        &mut self,
14972        modifier: TableSampleModifier,
14973    ) -> Result<Box<TableSample>, ParserError> {
14974        let name = match self.parse_one_of_keywords(&[
14975            Keyword::BERNOULLI,
14976            Keyword::ROW,
14977            Keyword::SYSTEM,
14978            Keyword::BLOCK,
14979        ]) {
14980            Some(Keyword::BERNOULLI) => Some(TableSampleMethod::Bernoulli),
14981            Some(Keyword::ROW) => Some(TableSampleMethod::Row),
14982            Some(Keyword::SYSTEM) => Some(TableSampleMethod::System),
14983            Some(Keyword::BLOCK) => Some(TableSampleMethod::Block),
14984            _ => None,
14985        };
14986
14987        let parenthesized = self.consume_token(&Token::LParen);
14988
14989        let (quantity, bucket) = if parenthesized && self.parse_keyword(Keyword::BUCKET) {
14990            let selected_bucket = self.parse_number_value()?.value;
14991            self.expect_keywords(&[Keyword::OUT, Keyword::OF])?;
14992            let total = self.parse_number_value()?.value;
14993            let on = if self.parse_keyword(Keyword::ON) {
14994                Some(self.parse_expr()?)
14995            } else {
14996                None
14997            };
14998            (
14999                None,
15000                Some(TableSampleBucket {
15001                    bucket: selected_bucket,
15002                    total,
15003                    on,
15004                }),
15005            )
15006        } else {
15007            let value = match self.maybe_parse(|p| p.parse_expr())? {
15008                Some(num) => num,
15009                None => {
15010                    let next_token = self.next_token();
15011                    if let Token::Word(w) = next_token.token {
15012                        Expr::Value(Value::Placeholder(w.value).with_span(next_token.span))
15013                    } else {
15014                        return parser_err!(
15015                            "Expecting number or byte length e.g. 100M",
15016                            self.peek_token().span.start
15017                        );
15018                    }
15019                }
15020            };
15021            let unit = if self.parse_keyword(Keyword::ROWS) {
15022                Some(TableSampleUnit::Rows)
15023            } else if self.parse_keyword(Keyword::PERCENT) {
15024                Some(TableSampleUnit::Percent)
15025            } else {
15026                None
15027            };
15028            (
15029                Some(TableSampleQuantity {
15030                    parenthesized,
15031                    value,
15032                    unit,
15033                }),
15034                None,
15035            )
15036        };
15037        if parenthesized {
15038            self.expect_token(&Token::RParen)?;
15039        }
15040
15041        let seed = if self.parse_keyword(Keyword::REPEATABLE) {
15042            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Repeatable)?)
15043        } else if self.parse_keyword(Keyword::SEED) {
15044            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Seed)?)
15045        } else {
15046            None
15047        };
15048
15049        let offset = if self.parse_keyword(Keyword::OFFSET) {
15050            Some(self.parse_expr()?)
15051        } else {
15052            None
15053        };
15054
15055        Ok(Box::new(TableSample {
15056            modifier,
15057            name,
15058            quantity,
15059            seed,
15060            bucket,
15061            offset,
15062        }))
15063    }
15064
15065    fn parse_table_sample_seed(
15066        &mut self,
15067        modifier: TableSampleSeedModifier,
15068    ) -> Result<TableSampleSeed, ParserError> {
15069        self.expect_token(&Token::LParen)?;
15070        let value = self.parse_number_value()?.value;
15071        self.expect_token(&Token::RParen)?;
15072        Ok(TableSampleSeed { modifier, value })
15073    }
15074
15075    /// Parses `OPENJSON( jsonExpression [ , path ] )  [ <with_clause> ]` clause,
15076    /// assuming the `OPENJSON` keyword was already consumed.
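    ///
    /// A minimal example of the clause being parsed (illustrative identifiers):
    ///
    /// ```sql
    /// OPENJSON(@json, '$.items') WITH (id INT '$.id')
    /// ```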
15077    fn parse_open_json_table_factor(&mut self) -> Result<TableFactor, ParserError> {
15078        self.expect_token(&Token::LParen)?;
15079        let json_expr = self.parse_expr()?;
15080        let json_path = if self.consume_token(&Token::Comma) {
15081            Some(self.parse_value()?.value)
15082        } else {
15083            None
15084        };
15085        self.expect_token(&Token::RParen)?;
15086        let columns = if self.parse_keyword(Keyword::WITH) {
15087            self.expect_token(&Token::LParen)?;
15088            let columns = self.parse_comma_separated(Parser::parse_openjson_table_column_def)?;
15089            self.expect_token(&Token::RParen)?;
15090            columns
15091        } else {
15092            Vec::new()
15093        };
15094        let alias = self.maybe_parse_table_alias()?;
15095        Ok(TableFactor::OpenJsonTable {
15096            json_expr,
15097            json_path,
15098            columns,
15099            alias,
15100        })
15101    }
15102
15103    fn parse_xml_table_factor(&mut self) -> Result<TableFactor, ParserError> {
15104        self.expect_token(&Token::LParen)?;
15105        let namespaces = if self.parse_keyword(Keyword::XMLNAMESPACES) {
15106            self.expect_token(&Token::LParen)?;
15107            let namespaces = self.parse_comma_separated(Parser::parse_xml_namespace_definition)?;
15108            self.expect_token(&Token::RParen)?;
15109            self.expect_token(&Token::Comma)?;
15110            namespaces
15111        } else {
15112            vec![]
15113        };
15114        let row_expression = self.parse_expr()?;
15115        let passing = self.parse_xml_passing_clause()?;
15116        self.expect_keyword_is(Keyword::COLUMNS)?;
15117        let columns = self.parse_comma_separated(Parser::parse_xml_table_column)?;
15118        self.expect_token(&Token::RParen)?;
15119        let alias = self.maybe_parse_table_alias()?;
15120        Ok(TableFactor::XmlTable {
15121            namespaces,
15122            row_expression,
15123            passing,
15124            columns,
15125            alias,
15126        })
15127    }
15128
15129    fn parse_xml_namespace_definition(&mut self) -> Result<XmlNamespaceDefinition, ParserError> {
15130        let uri = self.parse_expr()?;
15131        self.expect_keyword_is(Keyword::AS)?;
15132        let name = self.parse_identifier()?;
15133        Ok(XmlNamespaceDefinition { uri, name })
15134    }
15135
15136    fn parse_xml_table_column(&mut self) -> Result<XmlTableColumn, ParserError> {
15137        let name = self.parse_identifier()?;
15138
15139        let option = if self.parse_keyword(Keyword::FOR) {
15140            self.expect_keyword(Keyword::ORDINALITY)?;
15141            XmlTableColumnOption::ForOrdinality
15142        } else {
15143            let r#type = self.parse_data_type()?;
15144            let mut path = None;
15145            let mut default = None;
15146
15147            if self.parse_keyword(Keyword::PATH) {
15148                path = Some(self.parse_expr()?);
15149            }
15150
15151            if self.parse_keyword(Keyword::DEFAULT) {
15152                default = Some(self.parse_expr()?);
15153            }
15154
15155            let not_null = self.parse_keywords(&[Keyword::NOT, Keyword::NULL]);
15156            if !not_null {
15157                // NULL is the default but can be specified explicitly
15158                let _ = self.parse_keyword(Keyword::NULL);
15159            }
15160
15161            XmlTableColumnOption::NamedInfo {
15162                r#type,
15163                path,
15164                default,
15165                nullable: !not_null,
15166            }
15167        };
15168        Ok(XmlTableColumn { name, option })
15169    }
15170
15171    fn parse_xml_passing_clause(&mut self) -> Result<XmlPassingClause, ParserError> {
15172        let mut arguments = vec![];
15173        if self.parse_keyword(Keyword::PASSING) {
15174            loop {
15175                let by_value =
15176                    self.parse_keyword(Keyword::BY) && self.expect_keyword(Keyword::VALUE).is_ok();
15177                let expr = self.parse_expr()?;
15178                let alias = if self.parse_keyword(Keyword::AS) {
15179                    Some(self.parse_identifier()?)
15180                } else {
15181                    None
15182                };
15183                arguments.push(XmlPassingArgument {
15184                    expr,
15185                    alias,
15186                    by_value,
15187                });
15188                if !self.consume_token(&Token::Comma) {
15189                    break;
15190                }
15191            }
15192        }
15193        Ok(XmlPassingClause { arguments })
15194    }
15195
15196    /// Parse a [TableFactor::SemanticView]
15197    fn parse_semantic_view_table_factor(&mut self) -> Result<TableFactor, ParserError> {
15198        self.expect_keyword(Keyword::SEMANTIC_VIEW)?;
15199        self.expect_token(&Token::LParen)?;
15200
15201        let name = self.parse_object_name(true)?;
15202
15203        // Parse DIMENSIONS, METRICS, FACTS and WHERE clauses in flexible order
15204        let mut dimensions = Vec::new();
15205        let mut metrics = Vec::new();
15206        let mut facts = Vec::new();
15207        let mut where_clause = None;
15208
15209        while self.peek_token().token != Token::RParen {
15210            if self.parse_keyword(Keyword::DIMENSIONS) {
15211                if !dimensions.is_empty() {
15212                    return Err(ParserError::ParserError(
15213                        "DIMENSIONS clause can only be specified once".to_string(),
15214                    ));
15215                }
15216                dimensions = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
15217            } else if self.parse_keyword(Keyword::METRICS) {
15218                if !metrics.is_empty() {
15219                    return Err(ParserError::ParserError(
15220                        "METRICS clause can only be specified once".to_string(),
15221                    ));
15222                }
15223                metrics = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
15224            } else if self.parse_keyword(Keyword::FACTS) {
15225                if !facts.is_empty() {
15226                    return Err(ParserError::ParserError(
15227                        "FACTS clause can only be specified once".to_string(),
15228                    ));
15229                }
15230                facts = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
15231            } else if self.parse_keyword(Keyword::WHERE) {
15232                if where_clause.is_some() {
15233                    return Err(ParserError::ParserError(
15234                        "WHERE clause can only be specified once".to_string(),
15235                    ));
15236                }
15237                where_clause = Some(self.parse_expr()?);
15238            } else {
15239                return parser_err!(
15240                    format!(
15241                        "Expected one of DIMENSIONS, METRICS, FACTS or WHERE, got {}",
15242                        self.peek_token().token
15243                    ),
15244                    self.peek_token().span.start
15245                )?;
15246            }
15247        }
15248
15249        self.expect_token(&Token::RParen)?;
15250
15251        let alias = self.maybe_parse_table_alias()?;
15252
15253        Ok(TableFactor::SemanticView {
15254            name,
15255            dimensions,
15256            metrics,
15257            facts,
15258            where_clause,
15259            alias,
15260        })
15261    }
15262
15263    fn parse_match_recognize(&mut self, table: TableFactor) -> Result<TableFactor, ParserError> {
15264        self.expect_token(&Token::LParen)?;
15265
15266        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
15267            self.parse_comma_separated(Parser::parse_expr)?
15268        } else {
15269            vec![]
15270        };
15271
15272        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
15273            self.parse_comma_separated(Parser::parse_order_by_expr)?
15274        } else {
15275            vec![]
15276        };
15277
15278        let measures = if self.parse_keyword(Keyword::MEASURES) {
15279            self.parse_comma_separated(|p| {
15280                let expr = p.parse_expr()?;
15281                let _ = p.parse_keyword(Keyword::AS);
15282                let alias = p.parse_identifier()?;
15283                Ok(Measure { expr, alias })
15284            })?
15285        } else {
15286            vec![]
15287        };
15288
15289        let rows_per_match =
15290            if self.parse_keywords(&[Keyword::ONE, Keyword::ROW, Keyword::PER, Keyword::MATCH]) {
15291                Some(RowsPerMatch::OneRow)
15292            } else if self.parse_keywords(&[
15293                Keyword::ALL,
15294                Keyword::ROWS,
15295                Keyword::PER,
15296                Keyword::MATCH,
15297            ]) {
15298                Some(RowsPerMatch::AllRows(
15299                    if self.parse_keywords(&[Keyword::SHOW, Keyword::EMPTY, Keyword::MATCHES]) {
15300                        Some(EmptyMatchesMode::Show)
15301                    } else if self.parse_keywords(&[
15302                        Keyword::OMIT,
15303                        Keyword::EMPTY,
15304                        Keyword::MATCHES,
15305                    ]) {
15306                        Some(EmptyMatchesMode::Omit)
15307                    } else if self.parse_keywords(&[
15308                        Keyword::WITH,
15309                        Keyword::UNMATCHED,
15310                        Keyword::ROWS,
15311                    ]) {
15312                        Some(EmptyMatchesMode::WithUnmatched)
15313                    } else {
15314                        None
15315                    },
15316                ))
15317            } else {
15318                None
15319            };
15320
15321        let after_match_skip =
15322            if self.parse_keywords(&[Keyword::AFTER, Keyword::MATCH, Keyword::SKIP]) {
15323                if self.parse_keywords(&[Keyword::PAST, Keyword::LAST, Keyword::ROW]) {
15324                    Some(AfterMatchSkip::PastLastRow)
15325                } else if self.parse_keywords(&[Keyword::TO, Keyword::NEXT, Keyword::ROW]) {
15326                    Some(AfterMatchSkip::ToNextRow)
15327                } else if self.parse_keywords(&[Keyword::TO, Keyword::FIRST]) {
15328                    Some(AfterMatchSkip::ToFirst(self.parse_identifier()?))
15329                } else if self.parse_keywords(&[Keyword::TO, Keyword::LAST]) {
15330                    Some(AfterMatchSkip::ToLast(self.parse_identifier()?))
15331                } else {
15332                    let found = self.next_token();
15333                    return self.expected("after match skip option", found);
15334                }
15335            } else {
15336                None
15337            };
15338
15339        self.expect_keyword_is(Keyword::PATTERN)?;
15340        let pattern = self.parse_parenthesized(Self::parse_pattern)?;
15341
15342        self.expect_keyword_is(Keyword::DEFINE)?;
15343
15344        let symbols = self.parse_comma_separated(|p| {
15345            let symbol = p.parse_identifier()?;
15346            p.expect_keyword_is(Keyword::AS)?;
15347            let definition = p.parse_expr()?;
15348            Ok(SymbolDefinition { symbol, definition })
15349        })?;
15350
15351        self.expect_token(&Token::RParen)?;
15352
15353        let alias = self.maybe_parse_table_alias()?;
15354
15355        Ok(TableFactor::MatchRecognize {
15356            table: Box::new(table),
15357            partition_by,
15358            order_by,
15359            measures,
15360            rows_per_match,
15361            after_match_skip,
15362            pattern,
15363            symbols,
15364            alias,
15365        })
15366    }
15367
15368    fn parse_base_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
15369        match self.next_token().token {
15370            Token::Caret => Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::Start)),
15371            Token::Placeholder(s) if s == "$" => {
15372                Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::End))
15373            }
15374            Token::LBrace => {
15375                self.expect_token(&Token::Minus)?;
15376                let symbol = self.parse_identifier().map(MatchRecognizeSymbol::Named)?;
15377                self.expect_token(&Token::Minus)?;
15378                self.expect_token(&Token::RBrace)?;
15379                Ok(MatchRecognizePattern::Exclude(symbol))
15380            }
15381            Token::Word(Word {
15382                value,
15383                quote_style: None,
15384                ..
15385            }) if value == "PERMUTE" => {
15386                self.expect_token(&Token::LParen)?;
15387                let symbols = self.parse_comma_separated(|p| {
15388                    p.parse_identifier().map(MatchRecognizeSymbol::Named)
15389                })?;
15390                self.expect_token(&Token::RParen)?;
15391                Ok(MatchRecognizePattern::Permute(symbols))
15392            }
15393            Token::LParen => {
15394                let pattern = self.parse_pattern()?;
15395                self.expect_token(&Token::RParen)?;
15396                Ok(MatchRecognizePattern::Group(Box::new(pattern)))
15397            }
15398            _ => {
15399                self.prev_token();
15400                self.parse_identifier()
15401                    .map(MatchRecognizeSymbol::Named)
15402                    .map(MatchRecognizePattern::Symbol)
15403            }
15404        }
15405    }
15406
15407    fn parse_repetition_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
15408        let mut pattern = self.parse_base_pattern()?;
15409        loop {
15410            let token = self.next_token();
15411            let quantifier = match token.token {
15412                Token::Mul => RepetitionQuantifier::ZeroOrMore,
15413                Token::Plus => RepetitionQuantifier::OneOrMore,
15414                Token::Placeholder(s) if s == "?" => RepetitionQuantifier::AtMostOne,
15415                Token::LBrace => {
15416                    // quantifier is a range like {n} or {n,} or {,m} or {n,m}
15417                    let token = self.next_token();
15418                    match token.token {
15419                        Token::Comma => {
15420                            let next_token = self.next_token();
15421                            let Token::Number(n, _) = next_token.token else {
15422                                return self.expected("literal number", next_token);
15423                            };
15424                            self.expect_token(&Token::RBrace)?;
15425                            RepetitionQuantifier::AtMost(Self::parse(n, token.span.start)?)
15426                        }
15427                        Token::Number(n, _) if self.consume_token(&Token::Comma) => {
15428                            let next_token = self.next_token();
15429                            match next_token.token {
15430                                Token::Number(m, _) => {
15431                                    self.expect_token(&Token::RBrace)?;
15432                                    RepetitionQuantifier::Range(
15433                                        Self::parse(n, token.span.start)?,
15434                                        Self::parse(m, token.span.start)?,
15435                                    )
15436                                }
15437                                Token::RBrace => {
15438                                    RepetitionQuantifier::AtLeast(Self::parse(n, token.span.start)?)
15439                                }
15440                                _ => {
15441                                    return self.expected("} or upper bound", next_token);
15442                                }
15443                            }
15444                        }
15445                        Token::Number(n, _) => {
15446                            self.expect_token(&Token::RBrace)?;
15447                            RepetitionQuantifier::Exactly(Self::parse(n, token.span.start)?)
15448                        }
15449                        _ => return self.expected("quantifier range", token),
15450                    }
15451                }
15452                _ => {
15453                    self.prev_token();
15454                    break;
15455                }
15456            };
15457            pattern = MatchRecognizePattern::Repetition(Box::new(pattern), quantifier);
15458        }
15459        Ok(pattern)
15460    }
15461
15462    fn parse_concat_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
15463        let mut patterns = vec![self.parse_repetition_pattern()?];
15464        while !matches!(self.peek_token().token, Token::RParen | Token::Pipe) {
15465            patterns.push(self.parse_repetition_pattern()?);
15466        }
15467        match <[MatchRecognizePattern; 1]>::try_from(patterns) {
15468            Ok([pattern]) => Ok(pattern),
15469            Err(patterns) => Ok(MatchRecognizePattern::Concat(patterns)),
15470        }
15471    }
15472
15473    fn parse_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
15474        let pattern = self.parse_concat_pattern()?;
15475        if self.consume_token(&Token::Pipe) {
15476            match self.parse_pattern()? {
15477                // flatten nested alternations
15478                MatchRecognizePattern::Alternation(mut patterns) => {
15479                    patterns.insert(0, pattern);
15480                    Ok(MatchRecognizePattern::Alternation(patterns))
15481                }
15482                next => Ok(MatchRecognizePattern::Alternation(vec![pattern, next])),
15483            }
15484        } else {
15485            Ok(pattern)
15486        }
15487    }
15488
14489    /// Parses the timestamp version specifier (i.e. to query historical data)
15490    pub fn maybe_parse_table_version(&mut self) -> Result<Option<TableVersion>, ParserError> {
15491        if self.dialect.supports_timestamp_versioning() {
15492            if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
15493            {
15494                let expr = self.parse_expr()?;
15495                return Ok(Some(TableVersion::ForSystemTimeAsOf(expr)));
15496            } else if self.peek_keyword(Keyword::AT) || self.peek_keyword(Keyword::BEFORE) {
15497                let func_name = self.parse_object_name(true)?;
15498                let func = self.parse_function(func_name)?;
15499                return Ok(Some(TableVersion::Function(func)));
15500            }
15501        }
15502        Ok(None)
15503    }
15504
15505    /// Parses MySQL's JSON_TABLE column definition.
15506    /// For example: `id INT EXISTS PATH '$' DEFAULT '0' ON EMPTY ERROR ON ERROR`
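    ///
    /// A `NESTED` column definition is also accepted, e.g. (illustrative):
    /// `NESTED PATH '$.items[*]' COLUMNS (price INT PATH '$.price')`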
15507    pub fn parse_json_table_column_def(&mut self) -> Result<JsonTableColumn, ParserError> {
15508        if self.parse_keyword(Keyword::NESTED) {
15509            let _has_path_keyword = self.parse_keyword(Keyword::PATH);
15510            let path = self.parse_value()?.value;
15511            self.expect_keyword_is(Keyword::COLUMNS)?;
15512            let columns = self.parse_parenthesized(|p| {
15513                p.parse_comma_separated(Self::parse_json_table_column_def)
15514            })?;
15515            return Ok(JsonTableColumn::Nested(JsonTableNestedColumn {
15516                path,
15517                columns,
15518            }));
15519        }
15520        let name = self.parse_identifier()?;
15521        if self.parse_keyword(Keyword::FOR) {
15522            self.expect_keyword_is(Keyword::ORDINALITY)?;
15523            return Ok(JsonTableColumn::ForOrdinality(name));
15524        }
15525        let r#type = self.parse_data_type()?;
15526        let exists = self.parse_keyword(Keyword::EXISTS);
15527        self.expect_keyword_is(Keyword::PATH)?;
15528        let path = self.parse_value()?.value;
15529        let mut on_empty = None;
15530        let mut on_error = None;
15531        while let Some(error_handling) = self.parse_json_table_column_error_handling()? {
15532            if self.parse_keyword(Keyword::EMPTY) {
15533                on_empty = Some(error_handling);
15534            } else {
15535                self.expect_keyword_is(Keyword::ERROR)?;
15536                on_error = Some(error_handling);
15537            }
15538        }
15539        Ok(JsonTableColumn::Named(JsonTableNamedColumn {
15540            name,
15541            r#type,
15542            path,
15543            exists,
15544            on_empty,
15545            on_error,
15546        }))
15547    }
15548
15549    /// Parses MSSQL's `OPENJSON WITH` column definition.
15550    ///
15551    /// ```sql
15552    /// colName type [ column_path ] [ AS JSON ]
15553    /// ```
15554    ///
15555    /// Reference: <https://learn.microsoft.com/en-us/sql/t-sql/functions/openjson-transact-sql?view=sql-server-ver16#syntax>
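    ///
    /// For example (illustrative): `contact NVARCHAR(200) '$.contact.phone' AS JSON`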
15556    pub fn parse_openjson_table_column_def(&mut self) -> Result<OpenJsonTableColumn, ParserError> {
15557        let name = self.parse_identifier()?;
15558        let r#type = self.parse_data_type()?;
15559        let path = if let Token::SingleQuotedString(path) = self.peek_token().token {
15560            self.next_token();
15561            Some(path)
15562        } else {
15563            None
15564        };
15565        let as_json = self.parse_keyword(Keyword::AS);
15566        if as_json {
15567            self.expect_keyword_is(Keyword::JSON)?;
15568        }
15569        Ok(OpenJsonTableColumn {
15570            name,
15571            r#type,
15572            path,
15573            as_json,
15574        })
15575    }
15576
15577    fn parse_json_table_column_error_handling(
15578        &mut self,
15579    ) -> Result<Option<JsonTableColumnErrorHandling>, ParserError> {
15580        let res = if self.parse_keyword(Keyword::NULL) {
15581            JsonTableColumnErrorHandling::Null
15582        } else if self.parse_keyword(Keyword::ERROR) {
15583            JsonTableColumnErrorHandling::Error
15584        } else if self.parse_keyword(Keyword::DEFAULT) {
15585            JsonTableColumnErrorHandling::Default(self.parse_value()?.value)
15586        } else {
15587            return Ok(None);
15588        };
15589        self.expect_keyword_is(Keyword::ON)?;
15590        Ok(Some(res))
15591    }
15592
15593    pub fn parse_derived_table_factor(
15594        &mut self,
15595        lateral: IsLateral,
15596    ) -> Result<TableFactor, ParserError> {
15597        let subquery = self.parse_query()?;
15598        self.expect_token(&Token::RParen)?;
15599        let alias = self.maybe_parse_table_alias()?;
15600        Ok(TableFactor::Derived {
15601            lateral: match lateral {
15602                Lateral => true,
15603                NotLateral => false,
15604            },
15605            subquery,
15606            alias,
15607        })
15608    }
15609
15610    fn parse_aliased_function_call(&mut self) -> Result<ExprWithAlias, ParserError> {
15611        let function_name = match self.next_token().token {
15612            Token::Word(w) => Ok(w.value),
15613            _ => self.expected("a function identifier", self.peek_token()),
15614        }?;
15615        let expr = self.parse_function(ObjectName::from(vec![Ident::new(function_name)]))?;
15616        let alias = if self.parse_keyword(Keyword::AS) {
15617            Some(self.parse_identifier()?)
15618        } else {
15619            None
15620        };
15621
15622        Ok(ExprWithAlias { expr, alias })
15623    }
15624    /// Parses an expression with an optional alias
15625    ///
15626    /// Examples:
15627    ///
15628    /// ```sql
15629    /// SUM(price) AS total_price
15630    /// ```
15631    /// ```sql
15632    /// SUM(price)
15633    /// ```
15634    ///
15635    /// Example
15636    /// ```
15637    /// # use sqlparser::parser::{Parser, ParserError};
15638    /// # use sqlparser::dialect::GenericDialect;
15639    /// # fn main() ->Result<(), ParserError> {
15640    /// let sql = r#"SUM("a") as "b""#;
15641    /// let mut parser = Parser::new(&GenericDialect).try_with_sql(sql)?;
15642    /// let expr_with_alias = parser.parse_expr_with_alias()?;
15643    /// assert_eq!(Some("b".to_string()), expr_with_alias.alias.map(|x|x.value));
15644    /// # Ok(())
15645    /// # }
    /// ```
15646    pub fn parse_expr_with_alias(&mut self) -> Result<ExprWithAlias, ParserError> {
15647        let expr = self.parse_expr()?;
15648        let alias = if self.parse_keyword(Keyword::AS) {
15649            Some(self.parse_identifier()?)
15650        } else {
15651            None
15652        };
15653
15654        Ok(ExprWithAlias { expr, alias })
15655    }
15656
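    /// Parses the parenthesized body of a `PIVOT` table factor; the `PIVOT`
    /// keyword has already been consumed by the caller. An illustrative,
    /// Snowflake-style query using this clause (names are made up):
    ///
    /// ```sql
    /// SELECT * FROM monthly_sales
    ///     PIVOT(SUM(amount) FOR month IN ('JAN', 'FEB', 'MAR')) AS p
    /// ```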
15657    pub fn parse_pivot_table_factor(
15658        &mut self,
15659        table: TableFactor,
15660    ) -> Result<TableFactor, ParserError> {
15661        self.expect_token(&Token::LParen)?;
15662        let aggregate_functions = self.parse_comma_separated(Self::parse_aliased_function_call)?;
15663        self.expect_keyword_is(Keyword::FOR)?;
15664        let value_column = if self.peek_token_ref().token == Token::LParen {
15665            self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
15666                p.parse_subexpr(self.dialect.prec_value(Precedence::Between))
15667            })?
15668        } else {
15669            vec![self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?]
15670        };
15671        self.expect_keyword_is(Keyword::IN)?;
15672
15673        self.expect_token(&Token::LParen)?;
15674        let value_source = if self.parse_keyword(Keyword::ANY) {
15675            let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
15676                self.parse_comma_separated(Parser::parse_order_by_expr)?
15677            } else {
15678                vec![]
15679            };
15680            PivotValueSource::Any(order_by)
15681        } else if self.peek_sub_query() {
15682            PivotValueSource::Subquery(self.parse_query()?)
15683        } else {
15684            PivotValueSource::List(self.parse_comma_separated(Self::parse_expr_with_alias)?)
15685        };
15686        self.expect_token(&Token::RParen)?;
15687
15688        let default_on_null =
15689            if self.parse_keywords(&[Keyword::DEFAULT, Keyword::ON, Keyword::NULL]) {
15690                self.expect_token(&Token::LParen)?;
15691                let expr = self.parse_expr()?;
15692                self.expect_token(&Token::RParen)?;
15693                Some(expr)
15694            } else {
15695                None
15696            };
15697
15698        self.expect_token(&Token::RParen)?;
15699        let alias = self.maybe_parse_table_alias()?;
15700        Ok(TableFactor::Pivot {
15701            table: Box::new(table),
15702            aggregate_functions,
15703            value_column,
15704            value_source,
15705            default_on_null,
15706            alias,
15707        })
15708    }
15709
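    /// Parses the body of an `UNPIVOT` table factor; the `UNPIVOT` keyword has
    /// already been consumed by the caller. Illustrative usage (names are made
    /// up):
    ///
    /// ```sql
    /// SELECT * FROM sales
    ///     UNPIVOT INCLUDE NULLS (amount FOR month IN (jan, feb, mar)) AS u
    /// ```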
15710    pub fn parse_unpivot_table_factor(
15711        &mut self,
15712        table: TableFactor,
15713    ) -> Result<TableFactor, ParserError> {
15714        let null_inclusion = if self.parse_keyword(Keyword::INCLUDE) {
15715            self.expect_keyword_is(Keyword::NULLS)?;
15716            Some(NullInclusion::IncludeNulls)
15717        } else if self.parse_keyword(Keyword::EXCLUDE) {
15718            self.expect_keyword_is(Keyword::NULLS)?;
15719            Some(NullInclusion::ExcludeNulls)
15720        } else {
15721            None
15722        };
15723        self.expect_token(&Token::LParen)?;
15724        let value = self.parse_expr()?;
15725        self.expect_keyword_is(Keyword::FOR)?;
15726        let name = self.parse_identifier()?;
15727        self.expect_keyword_is(Keyword::IN)?;
15728        let columns = self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
15729            p.parse_expr_with_alias()
15730        })?;
15731        self.expect_token(&Token::RParen)?;
15732        let alias = self.maybe_parse_table_alias()?;
15733        Ok(TableFactor::Unpivot {
15734            table: Box::new(table),
15735            value,
15736            null_inclusion,
15737            name,
15738            columns,
15739            alias,
15740        })
15741    }
15742
15743    pub fn parse_join_constraint(&mut self, natural: bool) -> Result<JoinConstraint, ParserError> {
15744        if natural {
15745            Ok(JoinConstraint::Natural)
15746        } else if self.parse_keyword(Keyword::ON) {
15747            let constraint = self.parse_expr()?;
15748            Ok(JoinConstraint::On(constraint))
15749        } else if self.parse_keyword(Keyword::USING) {
15750            let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
15751            Ok(JoinConstraint::Using(columns))
15752        } else {
15753            Ok(JoinConstraint::None)
15754            //self.expected("ON, or USING after JOIN", self.peek_token())
15755        }
15756    }
15757
15758    /// Parse a GRANT statement.
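    ///
    /// A minimal example (illustrative; which privilege and object forms parse
    /// depends on the statement and dialect):
    ///
    /// ```
    /// # use sqlparser::parser::{Parser, ParserError};
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::ast::Statement;
    /// # fn main() -> Result<(), ParserError> {
    /// let sql = "GRANT SELECT, INSERT ON my_table TO my_role WITH GRANT OPTION";
    /// let mut parser = Parser::new(&GenericDialect).try_with_sql(sql)?;
    /// let stmt = parser.parse_statement()?;
    /// assert!(matches!(stmt, Statement::Grant { .. }));
    /// # Ok(())
    /// # }
    /// ```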
15759    pub fn parse_grant(&mut self) -> Result<Statement, ParserError> {
15760        let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
15761
15762        self.expect_keyword_is(Keyword::TO)?;
15763        let grantees = self.parse_grantees()?;
15764
15765        let with_grant_option =
15766            self.parse_keywords(&[Keyword::WITH, Keyword::GRANT, Keyword::OPTION]);
15767
15768        let current_grants =
15769            if self.parse_keywords(&[Keyword::COPY, Keyword::CURRENT, Keyword::GRANTS]) {
15770                Some(CurrentGrantsKind::CopyCurrentGrants)
15771            } else if self.parse_keywords(&[Keyword::REVOKE, Keyword::CURRENT, Keyword::GRANTS]) {
15772                Some(CurrentGrantsKind::RevokeCurrentGrants)
15773            } else {
15774                None
15775            };
15776
15777        let as_grantor = if self.parse_keywords(&[Keyword::AS]) {
15778            Some(self.parse_identifier()?)
15779        } else {
15780            None
15781        };
15782
15783        let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
15784            Some(self.parse_identifier()?)
15785        } else {
15786            None
15787        };
15788
15789        Ok(Statement::Grant {
15790            privileges,
15791            objects,
15792            grantees,
15793            with_grant_option,
15794            as_grantor,
15795            granted_by,
15796            current_grants,
15797        })
15798    }
15799
15800    fn parse_grantees(&mut self) -> Result<Vec<Grantee>, ParserError> {
15801        let mut values = vec![];
15802        let mut grantee_type = GranteesType::None;
15803        loop {
15804            let new_grantee_type = if self.parse_keyword(Keyword::ROLE) {
15805                GranteesType::Role
15806            } else if self.parse_keyword(Keyword::USER) {
15807                GranteesType::User
15808            } else if self.parse_keyword(Keyword::SHARE) {
15809                GranteesType::Share
15810            } else if self.parse_keyword(Keyword::GROUP) {
15811                GranteesType::Group
15812            } else if self.parse_keyword(Keyword::PUBLIC) {
15813                GranteesType::Public
15814            } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
15815                GranteesType::DatabaseRole
15816            } else if self.parse_keywords(&[Keyword::APPLICATION, Keyword::ROLE]) {
15817                GranteesType::ApplicationRole
15818            } else if self.parse_keyword(Keyword::APPLICATION) {
15819                GranteesType::Application
15820            } else {
15821                grantee_type.clone() // keep from previous iteration, if not specified
15822            };
15823
15824            if self
15825                .dialect
15826                .get_reserved_grantees_types()
15827                .contains(&new_grantee_type)
15828            {
15829                self.prev_token();
15830            } else {
15831                grantee_type = new_grantee_type;
15832            }
15833
15834            let grantee = if grantee_type == GranteesType::Public {
15835                Grantee {
15836                    grantee_type: grantee_type.clone(),
15837                    name: None,
15838                }
15839            } else {
15840                let mut name = self.parse_grantee_name()?;
15841                if self.consume_token(&Token::Colon) {
15842                    // Redshift supports namespace prefix for external users and groups:
15843                    // <Namespace>:<GroupName> or <Namespace>:<UserName>
15844                    // https://docs.aws.amazon.com/redshift/latest/mgmt/redshift-iam-access-control-native-idp.html
15845                    let ident = self.parse_identifier()?;
15846                    if let GranteeName::ObjectName(namespace) = name {
15847                        name = GranteeName::ObjectName(ObjectName::from(vec![Ident::new(
15848                            format!("{namespace}:{ident}"),
15849                        )]));
15850                    };
15851                }
15852                Grantee {
15853                    grantee_type: grantee_type.clone(),
15854                    name: Some(name),
15855                }
15856            };
15857
15858            values.push(grantee);
15859
15860            if !self.consume_token(&Token::Comma) {
15861                break;
15862            }
15863        }
15864
15865        Ok(values)
15866    }
15867
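    /// Parses the `<privileges> [ON <objects>]` portion shared by `GRANT`,
    /// `DENY` and `REVOKE`. Illustrative inputs (object support is
    /// dialect-specific):
    ///
    /// ```sql
    /// ALL PRIVILEGES ON TABLE orders, customers
    /// ```
    /// ```sql
    /// SELECT ON ALL TABLES IN SCHEMA reporting
    /// ```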
15868    pub fn parse_grant_deny_revoke_privileges_objects(
15869        &mut self,
15870    ) -> Result<(Privileges, Option<GrantObjects>), ParserError> {
15871        let privileges = if self.parse_keyword(Keyword::ALL) {
15872            Privileges::All {
15873                with_privileges_keyword: self.parse_keyword(Keyword::PRIVILEGES),
15874            }
15875        } else {
15876            let actions = self.parse_actions_list()?;
15877            Privileges::Actions(actions)
15878        };
15879
15880        let objects = if self.parse_keyword(Keyword::ON) {
15881            if self.parse_keywords(&[Keyword::ALL, Keyword::TABLES, Keyword::IN, Keyword::SCHEMA]) {
15882                Some(GrantObjects::AllTablesInSchema {
15883                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15884                })
15885            } else if self.parse_keywords(&[
15886                Keyword::ALL,
15887                Keyword::EXTERNAL,
15888                Keyword::TABLES,
15889                Keyword::IN,
15890                Keyword::SCHEMA,
15891            ]) {
15892                Some(GrantObjects::AllExternalTablesInSchema {
15893                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15894                })
15895            } else if self.parse_keywords(&[
15896                Keyword::ALL,
15897                Keyword::VIEWS,
15898                Keyword::IN,
15899                Keyword::SCHEMA,
15900            ]) {
15901                Some(GrantObjects::AllViewsInSchema {
15902                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15903                })
15904            } else if self.parse_keywords(&[
15905                Keyword::ALL,
15906                Keyword::MATERIALIZED,
15907                Keyword::VIEWS,
15908                Keyword::IN,
15909                Keyword::SCHEMA,
15910            ]) {
15911                Some(GrantObjects::AllMaterializedViewsInSchema {
15912                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15913                })
15914            } else if self.parse_keywords(&[
15915                Keyword::ALL,
15916                Keyword::FUNCTIONS,
15917                Keyword::IN,
15918                Keyword::SCHEMA,
15919            ]) {
15920                Some(GrantObjects::AllFunctionsInSchema {
15921                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15922                })
15923            } else if self.parse_keywords(&[
15924                Keyword::FUTURE,
15925                Keyword::SCHEMAS,
15926                Keyword::IN,
15927                Keyword::DATABASE,
15928            ]) {
15929                Some(GrantObjects::FutureSchemasInDatabase {
15930                    databases: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15931                })
15932            } else if self.parse_keywords(&[
15933                Keyword::FUTURE,
15934                Keyword::TABLES,
15935                Keyword::IN,
15936                Keyword::SCHEMA,
15937            ]) {
15938                Some(GrantObjects::FutureTablesInSchema {
15939                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15940                })
15941            } else if self.parse_keywords(&[
15942                Keyword::FUTURE,
15943                Keyword::EXTERNAL,
15944                Keyword::TABLES,
15945                Keyword::IN,
15946                Keyword::SCHEMA,
15947            ]) {
15948                Some(GrantObjects::FutureExternalTablesInSchema {
15949                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15950                })
15951            } else if self.parse_keywords(&[
15952                Keyword::FUTURE,
15953                Keyword::VIEWS,
15954                Keyword::IN,
15955                Keyword::SCHEMA,
15956            ]) {
15957                Some(GrantObjects::FutureViewsInSchema {
15958                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15959                })
15960            } else if self.parse_keywords(&[
15961                Keyword::FUTURE,
15962                Keyword::MATERIALIZED,
15963                Keyword::VIEWS,
15964                Keyword::IN,
15965                Keyword::SCHEMA,
15966            ]) {
15967                Some(GrantObjects::FutureMaterializedViewsInSchema {
15968                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15969                })
15970            } else if self.parse_keywords(&[
15971                Keyword::ALL,
15972                Keyword::SEQUENCES,
15973                Keyword::IN,
15974                Keyword::SCHEMA,
15975            ]) {
15976                Some(GrantObjects::AllSequencesInSchema {
15977                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15978                })
15979            } else if self.parse_keywords(&[
15980                Keyword::FUTURE,
15981                Keyword::SEQUENCES,
15982                Keyword::IN,
15983                Keyword::SCHEMA,
15984            ]) {
15985                Some(GrantObjects::FutureSequencesInSchema {
15986                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15987                })
15988            } else if self.parse_keywords(&[Keyword::RESOURCE, Keyword::MONITOR]) {
15989                Some(GrantObjects::ResourceMonitors(
15990                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
15991                ))
15992            } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
15993                Some(GrantObjects::ComputePools(
15994                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
15995                ))
15996            } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
15997                Some(GrantObjects::FailoverGroup(
15998                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
15999                ))
16000            } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
16001                Some(GrantObjects::ReplicationGroup(
16002                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
16003                ))
16004            } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
16005                Some(GrantObjects::ExternalVolumes(
16006                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
16007                ))
16008            } else {
16009                let object_type = self.parse_one_of_keywords(&[
16010                    Keyword::SEQUENCE,
16011                    Keyword::DATABASE,
16012                    Keyword::SCHEMA,
16013                    Keyword::TABLE,
16014                    Keyword::VIEW,
16015                    Keyword::WAREHOUSE,
16016                    Keyword::INTEGRATION,
16020                    Keyword::USER,
16021                    Keyword::CONNECTION,
16022                    Keyword::PROCEDURE,
16023                    Keyword::FUNCTION,
16024                ]);
16025                let objects =
16026                    self.parse_comma_separated(|p| p.parse_object_name_inner(false, true));
16027                match object_type {
16028                    Some(Keyword::DATABASE) => Some(GrantObjects::Databases(objects?)),
16029                    Some(Keyword::SCHEMA) => Some(GrantObjects::Schemas(objects?)),
16030                    Some(Keyword::SEQUENCE) => Some(GrantObjects::Sequences(objects?)),
16031                    Some(Keyword::WAREHOUSE) => Some(GrantObjects::Warehouses(objects?)),
16032                    Some(Keyword::INTEGRATION) => Some(GrantObjects::Integrations(objects?)),
16033                    Some(Keyword::VIEW) => Some(GrantObjects::Views(objects?)),
16034                    Some(Keyword::USER) => Some(GrantObjects::Users(objects?)),
16035                    Some(Keyword::CONNECTION) => Some(GrantObjects::Connections(objects?)),
16036                    kw @ (Some(Keyword::PROCEDURE) | Some(Keyword::FUNCTION)) => {
16037                        if let Some(name) = objects?.first() {
16038                            self.parse_grant_procedure_or_function(name, &kw)?
16039                        } else {
16040                            self.expected("procedure or function name", self.peek_token())?
16041                        }
16042                    }
16043                    Some(Keyword::TABLE) | None => Some(GrantObjects::Tables(objects?)),
16044                    Some(unexpected_keyword) => return Err(ParserError::ParserError(
16045                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in grant objects"),
16046                    )),
16047                }
16048            }
16049        } else {
16050            None
16051        };
16052
16053        Ok((privileges, objects))
16054    }
16055
16056    fn parse_grant_procedure_or_function(
16057        &mut self,
16058        name: &ObjectName,
16059        kw: &Option<Keyword>,
16060    ) -> Result<Option<GrantObjects>, ParserError> {
16061        let arg_types = if self.consume_token(&Token::LParen) {
16062            let list = self.parse_comma_separated0(Self::parse_data_type, Token::RParen)?;
16063            self.expect_token(&Token::RParen)?;
16064            list
16065        } else {
16066            vec![]
16067        };
16068        match kw {
16069            Some(Keyword::PROCEDURE) => Ok(Some(GrantObjects::Procedure {
16070                name: name.clone(),
16071                arg_types,
16072            })),
16073            Some(Keyword::FUNCTION) => Ok(Some(GrantObjects::Function {
16074                name: name.clone(),
16075                arg_types,
16076            })),
16077            _ => self.expected("procedure or function keywords", self.peek_token())?,
16078        }
16079    }
16080
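    /// Parses a single privilege in a `GRANT`/`DENY`/`REVOKE` privilege list,
    /// e.g. `SELECT (col1, col2)`, `IMPORTED PRIVILEGES`, or `CREATE SCHEMA`
    /// (illustrative; the accepted set of privileges is dialect-specific).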
16081    pub fn parse_grant_permission(&mut self) -> Result<Action, ParserError> {
16082        fn parse_columns(parser: &mut Parser) -> Result<Option<Vec<Ident>>, ParserError> {
16083            let columns = parser.parse_parenthesized_column_list(Optional, false)?;
16084            if columns.is_empty() {
16085                Ok(None)
16086            } else {
16087                Ok(Some(columns))
16088            }
16089        }
16090
16091        // Multi-word privileges
16092        if self.parse_keywords(&[Keyword::IMPORTED, Keyword::PRIVILEGES]) {
16093            Ok(Action::ImportedPrivileges)
16094        } else if self.parse_keywords(&[Keyword::ADD, Keyword::SEARCH, Keyword::OPTIMIZATION]) {
16095            Ok(Action::AddSearchOptimization)
16096        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::LISTING]) {
16097            Ok(Action::AttachListing)
16098        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::POLICY]) {
16099            Ok(Action::AttachPolicy)
16100        } else if self.parse_keywords(&[Keyword::BIND, Keyword::SERVICE, Keyword::ENDPOINT]) {
16101            Ok(Action::BindServiceEndpoint)
16102        } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
16103            let role = self.parse_object_name(false)?;
16104            Ok(Action::DatabaseRole { role })
16105        } else if self.parse_keywords(&[Keyword::EVOLVE, Keyword::SCHEMA]) {
16106            Ok(Action::EvolveSchema)
16107        } else if self.parse_keywords(&[Keyword::IMPORT, Keyword::SHARE]) {
16108            Ok(Action::ImportShare)
16109        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::VERSIONS]) {
16110            Ok(Action::ManageVersions)
16111        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::RELEASES]) {
16112            Ok(Action::ManageReleases)
16113        } else if self.parse_keywords(&[Keyword::OVERRIDE, Keyword::SHARE, Keyword::RESTRICTIONS]) {
16114            Ok(Action::OverrideShareRestrictions)
16115        } else if self.parse_keywords(&[
16116            Keyword::PURCHASE,
16117            Keyword::DATA,
16118            Keyword::EXCHANGE,
16119            Keyword::LISTING,
16120        ]) {
16121            Ok(Action::PurchaseDataExchangeListing)
16122        } else if self.parse_keywords(&[Keyword::RESOLVE, Keyword::ALL]) {
16123            Ok(Action::ResolveAll)
16124        } else if self.parse_keywords(&[Keyword::READ, Keyword::SESSION]) {
16125            Ok(Action::ReadSession)
16126
16127        // Single-word privileges
16128        } else if self.parse_keyword(Keyword::APPLY) {
16129            let apply_type = self.parse_action_apply_type()?;
16130            Ok(Action::Apply { apply_type })
16131        } else if self.parse_keyword(Keyword::APPLYBUDGET) {
16132            Ok(Action::ApplyBudget)
16133        } else if self.parse_keyword(Keyword::AUDIT) {
16134            Ok(Action::Audit)
16135        } else if self.parse_keyword(Keyword::CONNECT) {
16136            Ok(Action::Connect)
16137        } else if self.parse_keyword(Keyword::CREATE) {
16138            let obj_type = self.maybe_parse_action_create_object_type();
16139            Ok(Action::Create { obj_type })
16140        } else if self.parse_keyword(Keyword::DELETE) {
16141            Ok(Action::Delete)
16142        } else if self.parse_keyword(Keyword::EXEC) {
16143            let obj_type = self.maybe_parse_action_execute_obj_type();
16144            Ok(Action::Exec { obj_type })
16145        } else if self.parse_keyword(Keyword::EXECUTE) {
16146            let obj_type = self.maybe_parse_action_execute_obj_type();
16147            Ok(Action::Execute { obj_type })
16148        } else if self.parse_keyword(Keyword::FAILOVER) {
16149            Ok(Action::Failover)
16150        } else if self.parse_keyword(Keyword::INSERT) {
16151            Ok(Action::Insert {
16152                columns: parse_columns(self)?,
16153            })
16154        } else if self.parse_keyword(Keyword::MANAGE) {
16155            let manage_type = self.parse_action_manage_type()?;
16156            Ok(Action::Manage { manage_type })
16157        } else if self.parse_keyword(Keyword::MODIFY) {
16158            let modify_type = self.parse_action_modify_type();
16159            Ok(Action::Modify { modify_type })
16160        } else if self.parse_keyword(Keyword::MONITOR) {
16161            let monitor_type = self.parse_action_monitor_type();
16162            Ok(Action::Monitor { monitor_type })
16163        } else if self.parse_keyword(Keyword::OPERATE) {
16164            Ok(Action::Operate)
16165        } else if self.parse_keyword(Keyword::REFERENCES) {
16166            Ok(Action::References {
16167                columns: parse_columns(self)?,
16168            })
16169        } else if self.parse_keyword(Keyword::READ) {
16170            Ok(Action::Read)
16171        } else if self.parse_keyword(Keyword::REPLICATE) {
16172            Ok(Action::Replicate)
16173        } else if self.parse_keyword(Keyword::ROLE) {
16174            let role = self.parse_object_name(false)?;
16175            Ok(Action::Role { role })
16176        } else if self.parse_keyword(Keyword::SELECT) {
16177            Ok(Action::Select {
16178                columns: parse_columns(self)?,
16179            })
16180        } else if self.parse_keyword(Keyword::TEMPORARY) {
16181            Ok(Action::Temporary)
16182        } else if self.parse_keyword(Keyword::TRIGGER) {
16183            Ok(Action::Trigger)
16184        } else if self.parse_keyword(Keyword::TRUNCATE) {
16185            Ok(Action::Truncate)
16186        } else if self.parse_keyword(Keyword::UPDATE) {
16187            Ok(Action::Update {
16188                columns: parse_columns(self)?,
16189            })
16190        } else if self.parse_keyword(Keyword::USAGE) {
16191            Ok(Action::Usage)
16192        } else if self.parse_keyword(Keyword::OWNERSHIP) {
16193            Ok(Action::Ownership)
16194        } else if self.parse_keyword(Keyword::DROP) {
16195            Ok(Action::Drop)
16196        } else {
16197            self.expected("a privilege keyword", self.peek_token())?
16198        }
16199    }
16200
16201    fn maybe_parse_action_create_object_type(&mut self) -> Option<ActionCreateObjectType> {
16202        // Multi-word object types
16203        if self.parse_keywords(&[Keyword::APPLICATION, Keyword::PACKAGE]) {
16204            Some(ActionCreateObjectType::ApplicationPackage)
16205        } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
16206            Some(ActionCreateObjectType::ComputePool)
16207        } else if self.parse_keywords(&[Keyword::DATA, Keyword::EXCHANGE, Keyword::LISTING]) {
16208            Some(ActionCreateObjectType::DataExchangeListing)
16209        } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
16210            Some(ActionCreateObjectType::ExternalVolume)
16211        } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
16212            Some(ActionCreateObjectType::FailoverGroup)
16213        } else if self.parse_keywords(&[Keyword::NETWORK, Keyword::POLICY]) {
16214            Some(ActionCreateObjectType::NetworkPolicy)
16215        } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::LISTING]) {
16216            Some(ActionCreateObjectType::OrganiationListing)
16217        } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
16218            Some(ActionCreateObjectType::ReplicationGroup)
16219        }
16220        // Single-word object types
16221        else if self.parse_keyword(Keyword::ACCOUNT) {
16222            Some(ActionCreateObjectType::Account)
16223        } else if self.parse_keyword(Keyword::APPLICATION) {
16224            Some(ActionCreateObjectType::Application)
16225        } else if self.parse_keyword(Keyword::DATABASE) {
16226            Some(ActionCreateObjectType::Database)
16227        } else if self.parse_keyword(Keyword::INTEGRATION) {
16228            Some(ActionCreateObjectType::Integration)
16229        } else if self.parse_keyword(Keyword::ROLE) {
16230            Some(ActionCreateObjectType::Role)
16231        } else if self.parse_keyword(Keyword::SCHEMA) {
16232            Some(ActionCreateObjectType::Schema)
16233        } else if self.parse_keyword(Keyword::SHARE) {
16234            Some(ActionCreateObjectType::Share)
16235        } else if self.parse_keyword(Keyword::USER) {
16236            Some(ActionCreateObjectType::User)
16237        } else if self.parse_keyword(Keyword::WAREHOUSE) {
16238            Some(ActionCreateObjectType::Warehouse)
16239        } else {
16240            None
16241        }
16242    }
16243
16244    fn parse_action_apply_type(&mut self) -> Result<ActionApplyType, ParserError> {
16245        if self.parse_keywords(&[Keyword::AGGREGATION, Keyword::POLICY]) {
16246            Ok(ActionApplyType::AggregationPolicy)
16247        } else if self.parse_keywords(&[Keyword::AUTHENTICATION, Keyword::POLICY]) {
16248            Ok(ActionApplyType::AuthenticationPolicy)
16249        } else if self.parse_keywords(&[Keyword::JOIN, Keyword::POLICY]) {
16250            Ok(ActionApplyType::JoinPolicy)
16251        } else if self.parse_keywords(&[Keyword::MASKING, Keyword::POLICY]) {
16252            Ok(ActionApplyType::MaskingPolicy)
16253        } else if self.parse_keywords(&[Keyword::PACKAGES, Keyword::POLICY]) {
16254            Ok(ActionApplyType::PackagesPolicy)
16255        } else if self.parse_keywords(&[Keyword::PASSWORD, Keyword::POLICY]) {
16256            Ok(ActionApplyType::PasswordPolicy)
16257        } else if self.parse_keywords(&[Keyword::PROJECTION, Keyword::POLICY]) {
16258            Ok(ActionApplyType::ProjectionPolicy)
16259        } else if self.parse_keywords(&[Keyword::ROW, Keyword::ACCESS, Keyword::POLICY]) {
16260            Ok(ActionApplyType::RowAccessPolicy)
16261        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::POLICY]) {
16262            Ok(ActionApplyType::SessionPolicy)
16263        } else if self.parse_keyword(Keyword::TAG) {
16264            Ok(ActionApplyType::Tag)
16265        } else {
16266            self.expected("GRANT APPLY type", self.peek_token())
16267        }
16268    }
16269
16270    fn maybe_parse_action_execute_obj_type(&mut self) -> Option<ActionExecuteObjectType> {
16271        if self.parse_keywords(&[Keyword::DATA, Keyword::METRIC, Keyword::FUNCTION]) {
16272            Some(ActionExecuteObjectType::DataMetricFunction)
16273        } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::ALERT]) {
16274            Some(ActionExecuteObjectType::ManagedAlert)
16275        } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::TASK]) {
16276            Some(ActionExecuteObjectType::ManagedTask)
16277        } else if self.parse_keyword(Keyword::ALERT) {
16278            Some(ActionExecuteObjectType::Alert)
16279        } else if self.parse_keyword(Keyword::TASK) {
16280            Some(ActionExecuteObjectType::Task)
16281        } else {
16282            None
16283        }
16284    }
16285
16286    fn parse_action_manage_type(&mut self) -> Result<ActionManageType, ParserError> {
16287        if self.parse_keywords(&[Keyword::ACCOUNT, Keyword::SUPPORT, Keyword::CASES]) {
16288            Ok(ActionManageType::AccountSupportCases)
16289        } else if self.parse_keywords(&[Keyword::EVENT, Keyword::SHARING]) {
16290            Ok(ActionManageType::EventSharing)
16291        } else if self.parse_keywords(&[Keyword::LISTING, Keyword::AUTO, Keyword::FULFILLMENT]) {
16292            Ok(ActionManageType::ListingAutoFulfillment)
16293        } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::SUPPORT, Keyword::CASES]) {
16294            Ok(ActionManageType::OrganizationSupportCases)
16295        } else if self.parse_keywords(&[Keyword::USER, Keyword::SUPPORT, Keyword::CASES]) {
16296            Ok(ActionManageType::UserSupportCases)
16297        } else if self.parse_keyword(Keyword::GRANTS) {
16298            Ok(ActionManageType::Grants)
16299        } else if self.parse_keyword(Keyword::WAREHOUSES) {
16300            Ok(ActionManageType::Warehouses)
16301        } else {
16302            self.expected("GRANT MANAGE type", self.peek_token())
16303        }
16304    }
16305
16306    fn parse_action_modify_type(&mut self) -> Option<ActionModifyType> {
16307        if self.parse_keywords(&[Keyword::LOG, Keyword::LEVEL]) {
16308            Some(ActionModifyType::LogLevel)
16309        } else if self.parse_keywords(&[Keyword::TRACE, Keyword::LEVEL]) {
16310            Some(ActionModifyType::TraceLevel)
16311        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::LOG, Keyword::LEVEL]) {
16312            Some(ActionModifyType::SessionLogLevel)
16313        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::TRACE, Keyword::LEVEL]) {
16314            Some(ActionModifyType::SessionTraceLevel)
16315        } else {
16316            None
16317        }
16318    }
16319
16320    fn parse_action_monitor_type(&mut self) -> Option<ActionMonitorType> {
16321        if self.parse_keyword(Keyword::EXECUTION) {
16322            Some(ActionMonitorType::Execution)
16323        } else if self.parse_keyword(Keyword::SECURITY) {
16324            Some(ActionMonitorType::Security)
16325        } else if self.parse_keyword(Keyword::USAGE) {
16326            Some(ActionMonitorType::Usage)
16327        } else {
16328            None
16329        }
16330    }
16331
16332    pub fn parse_grantee_name(&mut self) -> Result<GranteeName, ParserError> {
16333        let mut name = self.parse_object_name(false)?;
16334        if self.dialect.supports_user_host_grantee()
16335            && name.0.len() == 1
16336            && name.0[0].as_ident().is_some()
16337            && self.consume_token(&Token::AtSign)
16338        {
16339            let user = name.0.pop().unwrap().as_ident().unwrap().clone();
16340            let host = self.parse_identifier()?;
16341            Ok(GranteeName::UserHost { user, host })
16342        } else {
16343            Ok(GranteeName::ObjectName(name))
16344        }
16345    }
16346
16347    /// Parse [`Statement::Deny`]
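    ///
    /// For example (T-SQL-style, illustrative):
    ///
    /// ```sql
    /// DENY SELECT ON users TO analyst_role CASCADE
    /// ```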
16348    pub fn parse_deny(&mut self) -> Result<Statement, ParserError> {
16349        self.expect_keyword(Keyword::DENY)?;
16350
16351        let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
16352        let objects = match objects {
16353            Some(o) => o,
16354            None => {
16355                return parser_err!(
16356                    "DENY statements must specify an object",
16357                    self.peek_token().span.start
16358                )
16359            }
16360        };
16361
16362        self.expect_keyword_is(Keyword::TO)?;
16363        let grantees = self.parse_grantees()?;
16364        let cascade = self.parse_cascade_option();
16365        let granted_by = if self.parse_keywords(&[Keyword::AS]) {
16366            Some(self.parse_identifier()?)
16367        } else {
16368            None
16369        };
16370
16371        Ok(Statement::Deny(DenyStatement {
16372            privileges,
16373            objects,
16374            grantees,
16375            cascade,
16376            granted_by,
16377        }))
16378    }
16379
16380    /// Parse a REVOKE statement
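    ///
    /// For example (illustrative):
    ///
    /// ```sql
    /// REVOKE SELECT, UPDATE ON my_table FROM role1, role2 CASCADE
    /// ```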
16381    pub fn parse_revoke(&mut self) -> Result<Statement, ParserError> {
16382        let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
16383
16384        self.expect_keyword_is(Keyword::FROM)?;
16385        let grantees = self.parse_grantees()?;
16386
16387        let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
16388            Some(self.parse_identifier()?)
16389        } else {
16390            None
16391        };
16392
16393        let cascade = self.parse_cascade_option();
16394
16395        Ok(Statement::Revoke {
16396            privileges,
16397            objects,
16398            grantees,
16399            granted_by,
16400            cascade,
16401        })
16402    }
16403
16404    /// Parse a REPLACE statement
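    ///
    /// Only accepted for MySQL and the generic dialect, e.g. (illustrative):
    ///
    /// ```sql
    /// REPLACE INTO users (id, name) VALUES (1, 'a')
    /// ```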
16405    pub fn parse_replace(
16406        &mut self,
16407        replace_token: TokenWithSpan,
16408    ) -> Result<Statement, ParserError> {
16409        if !dialect_of!(self is MySqlDialect | GenericDialect) {
16410            return parser_err!(
16411                "Unsupported statement REPLACE",
16412                self.peek_token().span.start
16413            );
16414        }
16415
16416        let mut insert = self.parse_insert(replace_token)?;
16417        if let Statement::Insert(Insert { replace_into, .. }) = &mut insert {
16418            *replace_into = true;
16419        }
16420
16421        Ok(insert)
16422    }
16423
16424    /// Parse an INSERT statement, returning a `Box`ed SetExpr
16425    ///
16426    /// This is used to reduce the size of the stack frames in debug builds
16427    fn parse_insert_setexpr_boxed(
16428        &mut self,
16429        insert_token: TokenWithSpan,
16430    ) -> Result<Box<SetExpr>, ParserError> {
16431        Ok(Box::new(SetExpr::Insert(self.parse_insert(insert_token)?)))
16432    }
16433
16434    /// Parse an INSERT statement
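    ///
    /// Illustrative inputs (dialect-specific clauses are only accepted where
    /// the active dialect allows them):
    ///
    /// ```sql
    /// INSERT INTO t (a, b) VALUES (1, 2)
    /// ```
    /// ```sql
    /// INSERT INTO t (a, b) VALUES (1, 2) ON CONFLICT (a) DO UPDATE SET b = 2
    /// ```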
16435    pub fn parse_insert(&mut self, insert_token: TokenWithSpan) -> Result<Statement, ParserError> {
16436        let or = self.parse_conflict_clause();
16437        let priority = if !dialect_of!(self is MySqlDialect | GenericDialect) {
16438            None
16439        } else if self.parse_keyword(Keyword::LOW_PRIORITY) {
16440            Some(MysqlInsertPriority::LowPriority)
16441        } else if self.parse_keyword(Keyword::DELAYED) {
16442            Some(MysqlInsertPriority::Delayed)
16443        } else if self.parse_keyword(Keyword::HIGH_PRIORITY) {
16444            Some(MysqlInsertPriority::HighPriority)
16445        } else {
16446            None
16447        };
16448
16449        let ignore = dialect_of!(self is MySqlDialect | GenericDialect)
16450            && self.parse_keyword(Keyword::IGNORE);
16451
16452        let replace_into = false;
16453
16454        let overwrite = self.parse_keyword(Keyword::OVERWRITE);
16455        let into = self.parse_keyword(Keyword::INTO);
16456
16457        let local = self.parse_keyword(Keyword::LOCAL);
16458
16459        if self.parse_keyword(Keyword::DIRECTORY) {
16460            let path = self.parse_literal_string()?;
16461            let file_format = if self.parse_keywords(&[Keyword::STORED, Keyword::AS]) {
16462                Some(self.parse_file_format()?)
16463            } else {
16464                None
16465            };
16466            let source = self.parse_query()?;
16467            Ok(Statement::Directory {
16468                local,
16469                path,
16470                overwrite,
16471                file_format,
16472                source,
16473            })
16474        } else {
16475            // Hive lets you put table here regardless
16476            let table = self.parse_keyword(Keyword::TABLE);
16477            let table_object = self.parse_table_object()?;
16478
16479            let table_alias =
16480                if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::AS) {
16481                    Some(self.parse_identifier()?)
16482                } else {
16483                    None
16484                };
16485
16486            let is_mysql = dialect_of!(self is MySqlDialect);
16487
16488            let (columns, partitioned, after_columns, source, assignments) = if self
16489                .parse_keywords(&[Keyword::DEFAULT, Keyword::VALUES])
16490            {
16491                (vec![], None, vec![], None, vec![])
16492            } else {
16493                let (columns, partitioned, after_columns) = if !self.peek_subquery_start() {
16494                    let columns = self.parse_parenthesized_column_list(Optional, is_mysql)?;
16495
16496                    let partitioned = self.parse_insert_partition()?;
16497                    // Hive allows you to specify columns after partitions as well if you want.
16498                    let after_columns = if dialect_of!(self is HiveDialect) {
16499                        self.parse_parenthesized_column_list(Optional, false)?
16500                    } else {
16501                        vec![]
16502                    };
16503                    (columns, partitioned, after_columns)
16504                } else {
16505                    Default::default()
16506                };
16507
16508                let (source, assignments) = if self.peek_keyword(Keyword::FORMAT)
16509                    || self.peek_keyword(Keyword::SETTINGS)
16510                {
16511                    (None, vec![])
16512                } else if self.dialect.supports_insert_set() && self.parse_keyword(Keyword::SET) {
16513                    (None, self.parse_comma_separated(Parser::parse_assignment)?)
16514                } else {
16515                    (Some(self.parse_query()?), vec![])
16516                };
16517
16518                (columns, partitioned, after_columns, source, assignments)
16519            };
16520
16521            let (format_clause, settings) = if self.dialect.supports_insert_format() {
16522                // The `SETTINGS` clause always comes before `FORMAT` for ClickHouse:
16523                // <https://clickhouse.com/docs/en/sql-reference/statements/insert-into>
16524                let settings = self.parse_settings()?;
16525
16526                let format = if self.parse_keyword(Keyword::FORMAT) {
16527                    Some(self.parse_input_format_clause()?)
16528                } else {
16529                    None
16530                };
16531
16532                (format, settings)
16533            } else {
16534                Default::default()
16535            };
16536
16537            let insert_alias = if dialect_of!(self is MySqlDialect | GenericDialect)
16538                && self.parse_keyword(Keyword::AS)
16539            {
16540                let row_alias = self.parse_object_name(false)?;
16541                let col_aliases = Some(self.parse_parenthesized_column_list(Optional, false)?);
16542                Some(InsertAliases {
16543                    row_alias,
16544                    col_aliases,
16545                })
16546            } else {
16547                None
16548            };
16549
16550            let on = if self.parse_keyword(Keyword::ON) {
16551                if self.parse_keyword(Keyword::CONFLICT) {
16552                    let conflict_target =
16553                        if self.parse_keywords(&[Keyword::ON, Keyword::CONSTRAINT]) {
16554                            Some(ConflictTarget::OnConstraint(self.parse_object_name(false)?))
16555                        } else if self.peek_token() == Token::LParen {
16556                            Some(ConflictTarget::Columns(
16557                                self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
16558                            ))
16559                        } else {
16560                            None
16561                        };
16562
16563                    self.expect_keyword_is(Keyword::DO)?;
16564                    let action = if self.parse_keyword(Keyword::NOTHING) {
16565                        OnConflictAction::DoNothing
16566                    } else {
16567                        self.expect_keyword_is(Keyword::UPDATE)?;
16568                        self.expect_keyword_is(Keyword::SET)?;
16569                        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
16570                        let selection = if self.parse_keyword(Keyword::WHERE) {
16571                            Some(self.parse_expr()?)
16572                        } else {
16573                            None
16574                        };
16575                        OnConflictAction::DoUpdate(DoUpdate {
16576                            assignments,
16577                            selection,
16578                        })
16579                    };
16580
16581                    Some(OnInsert::OnConflict(OnConflict {
16582                        conflict_target,
16583                        action,
16584                    }))
16585                } else {
16586                    self.expect_keyword_is(Keyword::DUPLICATE)?;
16587                    self.expect_keyword_is(Keyword::KEY)?;
16588                    self.expect_keyword_is(Keyword::UPDATE)?;
16589                    let l = self.parse_comma_separated(Parser::parse_assignment)?;
16590
16591                    Some(OnInsert::DuplicateKeyUpdate(l))
16592                }
16593            } else {
16594                None
16595            };
16596
16597            let returning = if self.parse_keyword(Keyword::RETURNING) {
16598                Some(self.parse_comma_separated(Parser::parse_select_item)?)
16599            } else {
16600                None
16601            };
16602
16603            Ok(Statement::Insert(Insert {
16604                insert_token: insert_token.into(),
16605                or,
16606                table: table_object,
16607                table_alias,
16608                ignore,
16609                into,
16610                overwrite,
16611                partitioned,
16612                columns,
16613                after_columns,
16614                source,
16615                assignments,
16616                has_table_keyword: table,
16617                on,
16618                returning,
16619                replace_into,
16620                priority,
16621                insert_alias,
16622                settings,
16623                format_clause,
16624            }))
16625        }
16626    }
16627
16628    // Parses the input format clause used by [ClickHouse].
16629    //
16630    // <https://clickhouse.com/docs/en/interfaces/formats>
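    //
    // For example, the `JSONEachRow` in `INSERT INTO t FORMAT JSONEachRow`
    // (illustrative).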
16631    pub fn parse_input_format_clause(&mut self) -> Result<InputFormatClause, ParserError> {
16632        let ident = self.parse_identifier()?;
16633        let values = self
16634            .maybe_parse(|p| p.parse_comma_separated(|p| p.parse_expr()))?
16635            .unwrap_or_default();
16636
16637        Ok(InputFormatClause { ident, values })
16638    }
16639
16640    /// Returns true if the immediate tokens look like the
16641    /// beginning of a subquery. `(SELECT ...`
16642    fn peek_subquery_start(&mut self) -> bool {
16643        let [maybe_lparen, maybe_select] = self.peek_tokens();
16644        Token::LParen == maybe_lparen
16645            && matches!(maybe_select, Token::Word(w) if w.keyword == Keyword::SELECT)
16646    }
16647
16648    fn parse_conflict_clause(&mut self) -> Option<SqliteOnConflict> {
16649        if self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]) {
16650            Some(SqliteOnConflict::Replace)
16651        } else if self.parse_keywords(&[Keyword::OR, Keyword::ROLLBACK]) {
16652            Some(SqliteOnConflict::Rollback)
16653        } else if self.parse_keywords(&[Keyword::OR, Keyword::ABORT]) {
16654            Some(SqliteOnConflict::Abort)
16655        } else if self.parse_keywords(&[Keyword::OR, Keyword::FAIL]) {
16656            Some(SqliteOnConflict::Fail)
16657        } else if self.parse_keywords(&[Keyword::OR, Keyword::IGNORE]) {
16658            Some(SqliteOnConflict::Ignore)
16659        } else if self.parse_keyword(Keyword::REPLACE) {
16660            Some(SqliteOnConflict::Replace)
16661        } else {
16662            None
16663        }
16664    }
16665
16666    pub fn parse_insert_partition(&mut self) -> Result<Option<Vec<Expr>>, ParserError> {
16667        if self.parse_keyword(Keyword::PARTITION) {
16668            self.expect_token(&Token::LParen)?;
16669            let partition_cols = Some(self.parse_comma_separated(Parser::parse_expr)?);
16670            self.expect_token(&Token::RParen)?;
16671            Ok(partition_cols)
16672        } else {
16673            Ok(None)
16674        }
16675    }
16676
16677    pub fn parse_load_data_table_format(
16678        &mut self,
16679    ) -> Result<Option<HiveLoadDataFormat>, ParserError> {
16680        if self.parse_keyword(Keyword::INPUTFORMAT) {
16681            let input_format = self.parse_expr()?;
16682            self.expect_keyword_is(Keyword::SERDE)?;
16683            let serde = self.parse_expr()?;
16684            Ok(Some(HiveLoadDataFormat {
16685                input_format,
16686                serde,
16687            }))
16688        } else {
16689            Ok(None)
16690        }
16691    }
16692
16693    /// Parse an UPDATE statement, returning a `Box`ed SetExpr
16694    ///
16695    /// This is used to reduce the size of the stack frames in debug builds
16696    fn parse_update_setexpr_boxed(
16697        &mut self,
16698        update_token: TokenWithSpan,
16699    ) -> Result<Box<SetExpr>, ParserError> {
16700        Ok(Box::new(SetExpr::Update(self.parse_update(update_token)?)))
16701    }
16702
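    /// Parse an UPDATE statement (the `UPDATE` keyword has already been
    /// consumed by the caller), e.g. (illustrative):
    ///
    /// ```sql
    /// UPDATE users SET active = false, updated_at = now() WHERE id = 1 RETURNING id
    /// ```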
16703    pub fn parse_update(&mut self, update_token: TokenWithSpan) -> Result<Statement, ParserError> {
16704        let or = self.parse_conflict_clause();
16705        let table = self.parse_table_and_joins()?;
16706        let from_before_set = if self.parse_keyword(Keyword::FROM) {
16707            Some(UpdateTableFromKind::BeforeSet(
16708                self.parse_table_with_joins()?,
16709            ))
16710        } else {
16711            None
16712        };
16713        self.expect_keyword(Keyword::SET)?;
16714        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
16715        let from = if from_before_set.is_none() && self.parse_keyword(Keyword::FROM) {
16716            Some(UpdateTableFromKind::AfterSet(
16717                self.parse_table_with_joins()?,
16718            ))
16719        } else {
16720            from_before_set
16721        };
16722        let selection = if self.parse_keyword(Keyword::WHERE) {
16723            Some(self.parse_expr()?)
16724        } else {
16725            None
16726        };
16727        let returning = if self.parse_keyword(Keyword::RETURNING) {
16728            Some(self.parse_comma_separated(Parser::parse_select_item)?)
16729        } else {
16730            None
16731        };
16732        let limit = if self.parse_keyword(Keyword::LIMIT) {
16733            Some(self.parse_expr()?)
16734        } else {
16735            None
16736        };
16737        Ok(Update {
16738            update_token: update_token.into(),
16739            table,
16740            assignments,
16741            from,
16742            selection,
16743            returning,
16744            or,
16745            limit,
16746        }
16747        .into())
16748    }
16749
16750    /// Parse a `var = expr` assignment, used in an UPDATE statement
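    ///
    /// For example (illustrative): `price = price * 0.9`. A parenthesized
    /// tuple target such as `(a, b) = (1, 2)` is also accepted.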
16751    pub fn parse_assignment(&mut self) -> Result<Assignment, ParserError> {
16752        let target = self.parse_assignment_target()?;
16753        self.expect_token(&Token::Eq)?;
16754        let value = self.parse_expr()?;
16755        Ok(Assignment { target, value })
16756    }
16757
16758    /// Parse the left-hand side of an assignment, used in an UPDATE statement
16759    pub fn parse_assignment_target(&mut self) -> Result<AssignmentTarget, ParserError> {
16760        if self.consume_token(&Token::LParen) {
16761            let columns = self.parse_comma_separated(|p| p.parse_object_name(false))?;
16762            self.expect_token(&Token::RParen)?;
16763            Ok(AssignmentTarget::Tuple(columns))
16764        } else {
16765            let column = self.parse_object_name(false)?;
16766            Ok(AssignmentTarget::ColumnName(column))
16767        }
16768    }
16769
16770    pub fn parse_function_args(&mut self) -> Result<FunctionArg, ParserError> {
16771        let arg = if self.dialect.supports_named_fn_args_with_expr_name() {
16772            self.maybe_parse(|p| {
16773                let name = p.parse_expr()?;
16774                let operator = p.parse_function_named_arg_operator()?;
16775                let arg = p.parse_wildcard_expr()?.into();
16776                Ok(FunctionArg::ExprNamed {
16777                    name,
16778                    arg,
16779                    operator,
16780                })
16781            })?
16782        } else {
16783            self.maybe_parse(|p| {
16784                let name = p.parse_identifier()?;
16785                let operator = p.parse_function_named_arg_operator()?;
16786                let arg = p.parse_wildcard_expr()?.into();
16787                Ok(FunctionArg::Named {
16788                    name,
16789                    arg,
16790                    operator,
16791                })
16792            })?
16793        };
16794        if let Some(arg) = arg {
16795            return Ok(arg);
16796        }
16797        Ok(FunctionArg::Unnamed(self.parse_wildcard_expr()?.into()))
16798    }
16799
16800    fn parse_function_named_arg_operator(&mut self) -> Result<FunctionArgOperator, ParserError> {
16801        if self.parse_keyword(Keyword::VALUE) {
16802            return Ok(FunctionArgOperator::Value);
16803        }
16804        let tok = self.next_token();
16805        match tok.token {
16806            Token::RArrow if self.dialect.supports_named_fn_args_with_rarrow_operator() => {
16807                Ok(FunctionArgOperator::RightArrow)
16808            }
16809            Token::Eq if self.dialect.supports_named_fn_args_with_eq_operator() => {
16810                Ok(FunctionArgOperator::Equals)
16811            }
16812            Token::Assignment
16813                if self
16814                    .dialect
16815                    .supports_named_fn_args_with_assignment_operator() =>
16816            {
16817                Ok(FunctionArgOperator::Assignment)
16818            }
16819            Token::Colon if self.dialect.supports_named_fn_args_with_colon_operator() => {
16820                Ok(FunctionArgOperator::Colon)
16821            }
16822            _ => {
16823                self.prev_token();
16824                self.expected("argument operator", tok)
16825            }
16826        }
16827    }
16828
16829    pub fn parse_optional_args(&mut self) -> Result<Vec<FunctionArg>, ParserError> {
16830        if self.consume_token(&Token::RParen) {
16831            Ok(vec![])
16832        } else {
16833            let args = self.parse_comma_separated(Parser::parse_function_args)?;
16834            self.expect_token(&Token::RParen)?;
16835            Ok(args)
16836        }
16837    }
16838
16839    fn parse_table_function_args(&mut self) -> Result<TableFunctionArgs, ParserError> {
16840        if self.consume_token(&Token::RParen) {
16841            return Ok(TableFunctionArgs {
16842                args: vec![],
16843                settings: None,
16844            });
16845        }
16846        let mut args = vec![];
16847        let settings = loop {
16848            if let Some(settings) = self.parse_settings()? {
16849                break Some(settings);
16850            }
16851            args.push(self.parse_function_args()?);
16852            if self.is_parse_comma_separated_end() {
16853                break None;
16854            }
16855        };
16856        self.expect_token(&Token::RParen)?;
16857        Ok(TableFunctionArgs { args, settings })
16858    }
16859
16860    /// Parses a potentially empty list of arguments to a function
16861    /// (including the closing parenthesis).
16862    ///
16863    /// Examples:
16864    /// ```sql
16865    /// FIRST_VALUE(x ORDER BY 1,2,3);
16866    /// FIRST_VALUE(x IGNORE NULL);
16867    /// ```
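    ///
    /// A hedged end-to-end sketch (via [`Parser::parse_sql`]; the dialect and SQL are
    /// illustrative, assuming the dialect accepts `ORDER BY` inside the argument list):
    /// ```
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "SELECT ARRAY_AGG(x ORDER BY y DESC) FROM t";
    /// assert!(Parser::parse_sql(&GenericDialect {}, sql).is_ok());
    /// ```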
16868    fn parse_function_argument_list(&mut self) -> Result<FunctionArgumentList, ParserError> {
16869        let mut clauses = vec![];
16870
16871        // Handle clauses that may exist with an empty argument list
16872
16873        if let Some(null_clause) = self.parse_json_null_clause() {
16874            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
16875        }
16876
16877        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
16878            clauses.push(FunctionArgumentClause::JsonReturningClause(
16879                json_returning_clause,
16880            ));
16881        }
16882
16883        if self.consume_token(&Token::RParen) {
16884            return Ok(FunctionArgumentList {
16885                duplicate_treatment: None,
16886                args: vec![],
16887                clauses,
16888            });
16889        }
16890
16891        let duplicate_treatment = self.parse_duplicate_treatment()?;
16892        let args = self.parse_comma_separated(Parser::parse_function_args)?;
16893
16894        if self.dialect.supports_window_function_null_treatment_arg() {
16895            if let Some(null_treatment) = self.parse_null_treatment()? {
16896                clauses.push(FunctionArgumentClause::IgnoreOrRespectNulls(null_treatment));
16897            }
16898        }
16899
16900        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
16901            clauses.push(FunctionArgumentClause::OrderBy(
16902                self.parse_comma_separated(Parser::parse_order_by_expr)?,
16903            ));
16904        }
16905
16906        if self.parse_keyword(Keyword::LIMIT) {
16907            clauses.push(FunctionArgumentClause::Limit(self.parse_expr()?));
16908        }
16909
16910        if dialect_of!(self is GenericDialect | BigQueryDialect)
16911            && self.parse_keyword(Keyword::HAVING)
16912        {
16913            let kind = match self.expect_one_of_keywords(&[Keyword::MIN, Keyword::MAX])? {
16914                Keyword::MIN => HavingBoundKind::Min,
16915                Keyword::MAX => HavingBoundKind::Max,
16916                unexpected_keyword => return Err(ParserError::ParserError(
16917                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in having bound"),
16918                )),
16919            };
16920            clauses.push(FunctionArgumentClause::Having(HavingBound(
16921                kind,
16922                self.parse_expr()?,
16923            )))
16924        }
16925
16926        if dialect_of!(self is GenericDialect | MySqlDialect)
16927            && self.parse_keyword(Keyword::SEPARATOR)
16928        {
16929            clauses.push(FunctionArgumentClause::Separator(self.parse_value()?.value));
16930        }
16931
16932        if let Some(on_overflow) = self.parse_listagg_on_overflow()? {
16933            clauses.push(FunctionArgumentClause::OnOverflow(on_overflow));
16934        }
16935
16936        if let Some(null_clause) = self.parse_json_null_clause() {
16937            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
16938        }
16939
16940        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
16941            clauses.push(FunctionArgumentClause::JsonReturningClause(
16942                json_returning_clause,
16943            ));
16944        }
16945
16946        self.expect_token(&Token::RParen)?;
16947        Ok(FunctionArgumentList {
16948            duplicate_treatment,
16949            args,
16950            clauses,
16951        })
16952    }
16953
16954    fn parse_json_null_clause(&mut self) -> Option<JsonNullClause> {
16955        if self.parse_keywords(&[Keyword::ABSENT, Keyword::ON, Keyword::NULL]) {
16956            Some(JsonNullClause::AbsentOnNull)
16957        } else if self.parse_keywords(&[Keyword::NULL, Keyword::ON, Keyword::NULL]) {
16958            Some(JsonNullClause::NullOnNull)
16959        } else {
16960            None
16961        }
16962    }
16963
16964    fn maybe_parse_json_returning_clause(
16965        &mut self,
16966    ) -> Result<Option<JsonReturningClause>, ParserError> {
16967        if self.parse_keyword(Keyword::RETURNING) {
16968            let data_type = self.parse_data_type()?;
16969            Ok(Some(JsonReturningClause { data_type }))
16970        } else {
16971            Ok(None)
16972        }
16973    }
16974
16975    fn parse_duplicate_treatment(&mut self) -> Result<Option<DuplicateTreatment>, ParserError> {
16976        let loc = self.peek_token().span.start;
16977        match (
16978            self.parse_keyword(Keyword::ALL),
16979            self.parse_keyword(Keyword::DISTINCT),
16980        ) {
16981            (true, false) => Ok(Some(DuplicateTreatment::All)),
16982            (false, true) => Ok(Some(DuplicateTreatment::Distinct)),
16983            (false, false) => Ok(None),
16984            (true, true) => parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc),
16985        }
16986    }
16987
16988    /// Parse a single projection from the comma-delimited list that follows SELECT
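    ///
    /// A minimal end-to-end sketch (via [`Parser::parse_sql`]; `GenericDialect` is an
    /// illustrative choice):
    /// ```
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "SELECT a + 1 AS total, t.* FROM t";
    /// assert!(Parser::parse_sql(&GenericDialect {}, sql).is_ok());
    /// ```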
16989    pub fn parse_select_item(&mut self) -> Result<SelectItem, ParserError> {
16990        let prefix = self
16991            .parse_one_of_keywords(
16992                self.dialect
16993                    .get_reserved_keywords_for_select_item_operator(),
16994            )
16995            .map(|keyword| Ident::new(format!("{keyword:?}")));
16996
16997        match self.parse_wildcard_expr()? {
16998            Expr::QualifiedWildcard(prefix, token) => Ok(SelectItem::QualifiedWildcard(
16999                SelectItemQualifiedWildcardKind::ObjectName(prefix),
17000                self.parse_wildcard_additional_options(token.0)?,
17001            )),
17002            Expr::Wildcard(token) => Ok(SelectItem::Wildcard(
17003                self.parse_wildcard_additional_options(token.0)?,
17004            )),
17005            Expr::Identifier(v) if v.value.to_lowercase() == "from" && v.quote_style.is_none() => {
17006                parser_err!(
17007                    format!("Expected an expression, found: {}", v),
17008                    self.peek_token().span.start
17009                )
17010            }
17011            Expr::BinaryOp {
17012                left,
17013                op: BinaryOperator::Eq,
17014                right,
17015            } if self.dialect.supports_eq_alias_assignment()
17016                && matches!(left.as_ref(), Expr::Identifier(_)) =>
17017            {
17018                let Expr::Identifier(alias) = *left else {
17019                    return parser_err!(
17020                        "BUG: expected identifier expression as alias",
17021                        self.peek_token().span.start
17022                    );
17023                };
17024                Ok(SelectItem::ExprWithAlias {
17025                    expr: *right,
17026                    alias,
17027                })
17028            }
17029            expr if self.dialect.supports_select_expr_star()
17030                && self.consume_tokens(&[Token::Period, Token::Mul]) =>
17031            {
17032                let wildcard_token = self.get_previous_token().clone();
17033                Ok(SelectItem::QualifiedWildcard(
17034                    SelectItemQualifiedWildcardKind::Expr(expr),
17035                    self.parse_wildcard_additional_options(wildcard_token)?,
17036                ))
17037            }
17038            expr => self
17039                .maybe_parse_select_item_alias()
17040                .map(|alias| match alias {
17041                    Some(alias) => SelectItem::ExprWithAlias {
17042                        expr: maybe_prefixed_expr(expr, prefix),
17043                        alias,
17044                    },
17045                    None => SelectItem::UnnamedExpr(maybe_prefixed_expr(expr, prefix)),
17046                }),
17047        }
17048    }
17049
17050    /// Parse the [`WildcardAdditionalOptions`] that may follow a wildcard select item.
17051    ///
17052    /// Options that are absent, or not supported by the active dialect, are returned as `None`.
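    ///
    /// For example, dialects such as Snowflake accept an `EXCLUDE` list after the
    /// wildcard; a hedged end-to-end sketch via [`Parser::parse_sql`]:
    /// ```
    /// use sqlparser::dialect::SnowflakeDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "SELECT * EXCLUDE (secret) FROM t";
    /// assert!(Parser::parse_sql(&SnowflakeDialect {}, sql).is_ok());
    /// ```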
17053    pub fn parse_wildcard_additional_options(
17054        &mut self,
17055        wildcard_token: TokenWithSpan,
17056    ) -> Result<WildcardAdditionalOptions, ParserError> {
17057        let opt_ilike = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
17058            self.parse_optional_select_item_ilike()?
17059        } else {
17060            None
17061        };
17062        let opt_exclude = if opt_ilike.is_none() && self.dialect.supports_select_wildcard_exclude()
17063        {
17064            self.parse_optional_select_item_exclude()?
17065        } else {
17066            None
17067        };
17068        let opt_except = if self.dialect.supports_select_wildcard_except() {
17069            self.parse_optional_select_item_except()?
17070        } else {
17071            None
17072        };
17073        let opt_replace = if dialect_of!(self is GenericDialect | BigQueryDialect | ClickHouseDialect | DuckDbDialect | SnowflakeDialect)
17074        {
17075            self.parse_optional_select_item_replace()?
17076        } else {
17077            None
17078        };
17079        let opt_rename = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
17080            self.parse_optional_select_item_rename()?
17081        } else {
17082            None
17083        };
17084
17085        Ok(WildcardAdditionalOptions {
17086            wildcard_token: wildcard_token.into(),
17087            opt_ilike,
17088            opt_exclude,
17089            opt_except,
17090            opt_rename,
17091            opt_replace,
17092        })
17093    }
17094
17095    /// Parse an optional [`Ilike`](IlikeSelectItem) clause for wildcard select items.
17096    ///
17097    /// Returns `Ok(None)` if no `ILIKE` clause is present.
17098    pub fn parse_optional_select_item_ilike(
17099        &mut self,
17100    ) -> Result<Option<IlikeSelectItem>, ParserError> {
17101        let opt_ilike = if self.parse_keyword(Keyword::ILIKE) {
17102            let next_token = self.next_token();
17103            let pattern = match next_token.token {
17104                Token::SingleQuotedString(s) => s,
17105                _ => return self.expected("ilike pattern", next_token),
17106            };
17107            Some(IlikeSelectItem { pattern })
17108        } else {
17109            None
17110        };
17111        Ok(opt_ilike)
17112    }
17113
17114    /// Parse an optional [`Exclude`](ExcludeSelectItem) clause for wildcard select items.
17115    ///
17116    /// Returns `Ok(None)` if no `EXCLUDE` clause is present.
17117    pub fn parse_optional_select_item_exclude(
17118        &mut self,
17119    ) -> Result<Option<ExcludeSelectItem>, ParserError> {
17120        let opt_exclude = if self.parse_keyword(Keyword::EXCLUDE) {
17121            if self.consume_token(&Token::LParen) {
17122                let columns = self.parse_comma_separated(|parser| parser.parse_identifier())?;
17123                self.expect_token(&Token::RParen)?;
17124                Some(ExcludeSelectItem::Multiple(columns))
17125            } else {
17126                let column = self.parse_identifier()?;
17127                Some(ExcludeSelectItem::Single(column))
17128            }
17129        } else {
17130            None
17131        };
17132
17133        Ok(opt_exclude)
17134    }
17135
17136    /// Parse an optional [`Except`](ExceptSelectItem) clause for wildcard select items.
17137    ///
17138    /// Returns `Ok(None)` if no `EXCEPT` clause is present.
17139    pub fn parse_optional_select_item_except(
17140        &mut self,
17141    ) -> Result<Option<ExceptSelectItem>, ParserError> {
17142        let opt_except = if self.parse_keyword(Keyword::EXCEPT) {
17143            if self.peek_token().token == Token::LParen {
17144                let idents = self.parse_parenthesized_column_list(Mandatory, false)?;
17145                match &idents[..] {
17146                    [] => {
17147                        return self.expected(
17148                            "at least one column should be parsed by the except clause",
17149                            self.peek_token(),
17150                        )?;
17151                    }
17152                    [first, idents @ ..] => Some(ExceptSelectItem {
17153                        first_element: first.clone(),
17154                        additional_elements: idents.to_vec(),
17155                    }),
17156                }
17157            } else {
17158                // ClickHouse allows a bare EXCEPT column_name
17159                let ident = self.parse_identifier()?;
17160                Some(ExceptSelectItem {
17161                    first_element: ident,
17162                    additional_elements: vec![],
17163                })
17164            }
17165        } else {
17166            None
17167        };
17168
17169        Ok(opt_except)
17170    }
17171
17172    /// Parse an optional [`Rename`](RenameSelectItem) clause for wildcard select items.
17173    pub fn parse_optional_select_item_rename(
17174        &mut self,
17175    ) -> Result<Option<RenameSelectItem>, ParserError> {
17176        let opt_rename = if self.parse_keyword(Keyword::RENAME) {
17177            if self.consume_token(&Token::LParen) {
17178                let idents =
17179                    self.parse_comma_separated(|parser| parser.parse_identifier_with_alias())?;
17180                self.expect_token(&Token::RParen)?;
17181                Some(RenameSelectItem::Multiple(idents))
17182            } else {
17183                let ident = self.parse_identifier_with_alias()?;
17184                Some(RenameSelectItem::Single(ident))
17185            }
17186        } else {
17187            None
17188        };
17189
17190        Ok(opt_rename)
17191    }
17192
17193    /// Parse an optional [`Replace`](ReplaceSelectItem) clause for wildcard select items.
17194    pub fn parse_optional_select_item_replace(
17195        &mut self,
17196    ) -> Result<Option<ReplaceSelectItem>, ParserError> {
17197        let opt_replace = if self.parse_keyword(Keyword::REPLACE) {
17198            if self.consume_token(&Token::LParen) {
17199                let items = self.parse_comma_separated(|parser| {
17200                    Ok(Box::new(parser.parse_replace_elements()?))
17201                })?;
17202                self.expect_token(&Token::RParen)?;
17203                Some(ReplaceSelectItem { items })
17204            } else {
17205                let tok = self.next_token();
17206                return self.expected("( after REPLACE", tok);
17207            }
17208        } else {
17209            None
17210        };
17211
17212        Ok(opt_replace)
17213    }

17214    pub fn parse_replace_elements(&mut self) -> Result<ReplaceSelectElement, ParserError> {
17215        let expr = self.parse_expr()?;
17216        let as_keyword = self.parse_keyword(Keyword::AS);
17217        let ident = self.parse_identifier()?;
17218        Ok(ReplaceSelectElement {
17219            expr,
17220            column_name: ident,
17221            as_keyword,
17222        })
17223    }
17224
17225    /// Parse ASC or DESC, returning `Some(true)` for ASC, `Some(false)` for DESC, or `None`
17226    /// if neither keyword is present.
17227    pub fn parse_asc_desc(&mut self) -> Option<bool> {
17228        if self.parse_keyword(Keyword::ASC) {
17229            Some(true)
17230        } else if self.parse_keyword(Keyword::DESC) {
17231            Some(false)
17232        } else {
17233            None
17234        }
17235    }
17236
17237    /// Parse an [OrderByExpr] expression.
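    ///
    /// A minimal sketch of driving this method directly through the
    /// [`Parser::new`]/[`Parser::try_with_sql`] entry points (input is illustrative):
    /// ```
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("a DESC NULLS LAST").unwrap();
    /// let order_by = parser.parse_order_by_expr().unwrap();
    /// assert_eq!(order_by.to_string(), "a DESC NULLS LAST");
    /// ```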
17238    pub fn parse_order_by_expr(&mut self) -> Result<OrderByExpr, ParserError> {
17239        self.parse_order_by_expr_inner(false)
17240            .map(|(order_by, _)| order_by)
17241    }
17242
17243    /// Parse an [IndexColumn].
17244    pub fn parse_create_index_expr(&mut self) -> Result<IndexColumn, ParserError> {
17245        self.parse_order_by_expr_inner(true)
17246            .map(|(column, operator_class)| IndexColumn {
17247                column,
17248                operator_class,
17249            })
17250    }
17251
17252    fn parse_order_by_expr_inner(
17253        &mut self,
17254        with_operator_class: bool,
17255    ) -> Result<(OrderByExpr, Option<ObjectName>), ParserError> {
17256        let expr = self.parse_expr()?;
17257
17258        let operator_class: Option<ObjectName> = if with_operator_class {
17259            // If none of the following keywords are present, parse an identifier
17260            // as the operator class.
17261            if self
17262                .peek_one_of_keywords(&[Keyword::ASC, Keyword::DESC, Keyword::NULLS, Keyword::WITH])
17263                .is_some()
17264            {
17265                None
17266            } else {
17267                self.maybe_parse(|parser| parser.parse_object_name(false))?
17268            }
17269        } else {
17270            None
17271        };
17272
17273        let options = self.parse_order_by_options()?;
17274
17275        let with_fill = if dialect_of!(self is ClickHouseDialect | GenericDialect)
17276            && self.parse_keywords(&[Keyword::WITH, Keyword::FILL])
17277        {
17278            Some(self.parse_with_fill()?)
17279        } else {
17280            None
17281        };
17282
17283        Ok((
17284            OrderByExpr {
17285                expr,
17286                options,
17287                with_fill,
17288            },
17289            operator_class,
17290        ))
17291    }
17292
17293    fn parse_order_by_options(&mut self) -> Result<OrderByOptions, ParserError> {
17294        let asc = self.parse_asc_desc();
17295
17296        let nulls_first = if self.parse_keywords(&[Keyword::NULLS, Keyword::FIRST]) {
17297            Some(true)
17298        } else if self.parse_keywords(&[Keyword::NULLS, Keyword::LAST]) {
17299            Some(false)
17300        } else {
17301            None
17302        };
17303
17304        Ok(OrderByOptions { asc, nulls_first })
17305    }
17306
17307    // Parse a WITH FILL clause (ClickHouse dialect)
17308    // that follows the WITH FILL keywords in an ORDER BY clause
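    /// A hedged end-to-end sketch (ClickHouse syntax, parsed via [`Parser::parse_sql`]):
    /// ```
    /// use sqlparser::dialect::ClickHouseDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "SELECT d FROM t ORDER BY d WITH FILL FROM 1 TO 10 STEP 1";
    /// assert!(Parser::parse_sql(&ClickHouseDialect {}, sql).is_ok());
    /// ```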
17309    pub fn parse_with_fill(&mut self) -> Result<WithFill, ParserError> {
17310        let from = if self.parse_keyword(Keyword::FROM) {
17311            Some(self.parse_expr()?)
17312        } else {
17313            None
17314        };
17315
17316        let to = if self.parse_keyword(Keyword::TO) {
17317            Some(self.parse_expr()?)
17318        } else {
17319            None
17320        };
17321
17322        let step = if self.parse_keyword(Keyword::STEP) {
17323            Some(self.parse_expr()?)
17324        } else {
17325            None
17326        };
17327
17328        Ok(WithFill { from, to, step })
17329    }
17330
17331    // Parse a set of comma-separated INTERPOLATE expressions (ClickHouse dialect)
17332    // that follow the INTERPOLATE keyword in an ORDER BY clause with the WITH FILL modifier
17333    pub fn parse_interpolations(&mut self) -> Result<Option<Interpolate>, ParserError> {
17334        if !self.parse_keyword(Keyword::INTERPOLATE) {
17335            return Ok(None);
17336        }
17337
17338        if self.consume_token(&Token::LParen) {
17339            let interpolations =
17340                self.parse_comma_separated0(|p| p.parse_interpolation(), Token::RParen)?;
17341            self.expect_token(&Token::RParen)?;
17342            // INTERPOLATE () and INTERPOLATE ( ... ) variants
17343            return Ok(Some(Interpolate {
17344                exprs: Some(interpolations),
17345            }));
17346        }
17347
17348        // INTERPOLATE
17349        Ok(Some(Interpolate { exprs: None }))
17350    }
17351
17352    // Parse an INTERPOLATE expression (ClickHouse dialect)
17353    pub fn parse_interpolation(&mut self) -> Result<InterpolateExpr, ParserError> {
17354        let column = self.parse_identifier()?;
17355        let expr = if self.parse_keyword(Keyword::AS) {
17356            Some(self.parse_expr()?)
17357        } else {
17358            None
17359        };
17360        Ok(InterpolateExpr { column, expr })
17361    }
17362
17363    /// Parse a TOP clause, MSSQL equivalent of LIMIT,
17364    /// that follows `SELECT [DISTINCT]`.
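    ///
    /// A hedged end-to-end sketch (T-SQL syntax, via [`Parser::parse_sql`]):
    /// ```
    /// use sqlparser::dialect::MsSqlDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "SELECT TOP 5 * FROM t";
    /// assert!(Parser::parse_sql(&MsSqlDialect {}, sql).is_ok());
    /// ```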
17365    pub fn parse_top(&mut self) -> Result<Top, ParserError> {
17366        let quantity = if self.consume_token(&Token::LParen) {
17367            let quantity = self.parse_expr()?;
17368            self.expect_token(&Token::RParen)?;
17369            Some(TopQuantity::Expr(quantity))
17370        } else {
17371            let next_token = self.next_token();
17372            let quantity = match next_token.token {
17373                Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start)?,
17374                _ => self.expected("literal int", next_token)?,
17375            };
17376            Some(TopQuantity::Constant(quantity))
17377        };
17378
17379        let percent = self.parse_keyword(Keyword::PERCENT);
17380
17381        let with_ties = self.parse_keywords(&[Keyword::WITH, Keyword::TIES]);
17382
17383        Ok(Top {
17384            with_ties,
17385            percent,
17386            quantity,
17387        })
17388    }
17389
17390    /// Parse a LIMIT clause
17391    pub fn parse_limit(&mut self) -> Result<Option<Expr>, ParserError> {
17392        if self.parse_keyword(Keyword::ALL) {
17393            Ok(None)
17394        } else {
17395            Ok(Some(self.parse_expr()?))
17396        }
17397    }
17398
17399    /// Parse an OFFSET clause
17400    pub fn parse_offset(&mut self) -> Result<Offset, ParserError> {
17401        let value = self.parse_expr()?;
17402        let rows = if self.parse_keyword(Keyword::ROW) {
17403            OffsetRows::Row
17404        } else if self.parse_keyword(Keyword::ROWS) {
17405            OffsetRows::Rows
17406        } else {
17407            OffsetRows::None
17408        };
17409        Ok(Offset { value, rows })
17410    }
17411
17412    /// Parse a FETCH clause
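    ///
    /// A minimal end-to-end sketch (via [`Parser::parse_sql`]; `GenericDialect` is an
    /// illustrative choice):
    /// ```
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "SELECT * FROM t OFFSET 5 ROWS FETCH NEXT 10 ROWS ONLY";
    /// assert!(Parser::parse_sql(&GenericDialect {}, sql).is_ok());
    /// ```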
17413    pub fn parse_fetch(&mut self) -> Result<Fetch, ParserError> {
17414        let _ = self.parse_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT]);
17415
17416        let (quantity, percent) = if self
17417            .parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])
17418            .is_some()
17419        {
17420            (None, false)
17421        } else {
17422            let quantity = Expr::Value(self.parse_value()?);
17423            let percent = self.parse_keyword(Keyword::PERCENT);
17424            let _ = self.parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS]);
17425            (Some(quantity), percent)
17426        };
17427
17428        let with_ties = if self.parse_keyword(Keyword::ONLY) {
17429            false
17430        } else {
17431            self.parse_keywords(&[Keyword::WITH, Keyword::TIES])
17432        };
17433
17434        Ok(Fetch {
17435            with_ties,
17436            percent,
17437            quantity,
17438        })
17439    }
17440
17441    /// Parse a FOR UPDATE/FOR SHARE clause
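    ///
    /// A hedged end-to-end sketch (via [`Parser::parse_sql`]; the dialect is illustrative,
    /// assuming it accepts locking clauses):
    /// ```
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "SELECT * FROM t FOR UPDATE SKIP LOCKED";
    /// assert!(Parser::parse_sql(&GenericDialect {}, sql).is_ok());
    /// ```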
17442    pub fn parse_lock(&mut self) -> Result<LockClause, ParserError> {
17443        let lock_type = match self.expect_one_of_keywords(&[Keyword::UPDATE, Keyword::SHARE])? {
17444            Keyword::UPDATE => LockType::Update,
17445            Keyword::SHARE => LockType::Share,
17446            unexpected_keyword => return Err(ParserError::ParserError(
17447                format!("Internal parser error: expected any of {{UPDATE, SHARE}}, got {unexpected_keyword:?}"),
17448            )),
17449        };
17450        let of = if self.parse_keyword(Keyword::OF) {
17451            Some(self.parse_object_name(false)?)
17452        } else {
17453            None
17454        };
17455        let nonblock = if self.parse_keyword(Keyword::NOWAIT) {
17456            Some(NonBlock::Nowait)
17457        } else if self.parse_keywords(&[Keyword::SKIP, Keyword::LOCKED]) {
17458            Some(NonBlock::SkipLocked)
17459        } else {
17460            None
17461        };
17462        Ok(LockClause {
17463            lock_type,
17464            of,
17465            nonblock,
17466        })
17467    }
17468
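    /// Parse a `VALUES` list, e.g. `(1, 'a'), (2, 'b')`. Both flags are supplied by the
    /// caller: `allow_empty` permits empty row constructors, and `value_keyword` is
    /// carried through to the resulting [`Values`] node.
    ///
    /// A minimal end-to-end sketch via [`Parser::parse_sql`]:
    /// ```
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "INSERT INTO t VALUES (1, 'a'), (2, 'b')";
    /// assert!(Parser::parse_sql(&GenericDialect {}, sql).is_ok());
    /// ```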
17469    pub fn parse_values(
17470        &mut self,
17471        allow_empty: bool,
17472        value_keyword: bool,
17473    ) -> Result<Values, ParserError> {
17474        let mut explicit_row = false;
17475
17476        let rows = self.parse_comma_separated(|parser| {
17477            if parser.parse_keyword(Keyword::ROW) {
17478                explicit_row = true;
17479            }
17480
17481            parser.expect_token(&Token::LParen)?;
17482            if allow_empty && parser.peek_token().token == Token::RParen {
17483                parser.next_token();
17484                Ok(vec![])
17485            } else {
17486                let exprs = parser.parse_comma_separated(Parser::parse_expr)?;
17487                parser.expect_token(&Token::RParen)?;
17488                Ok(exprs)
17489            }
17490        })?;
17491        Ok(Values {
17492            explicit_row,
17493            rows,
17494            value_keyword,
17495        })
17496    }
17497
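    /// Parse the remainder of a `START TRANSACTION` statement (the `START` keyword has
    /// already been consumed by the caller).
    ///
    /// A minimal end-to-end sketch via [`Parser::parse_sql`]:
    /// ```
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "START TRANSACTION READ ONLY, ISOLATION LEVEL SERIALIZABLE";
    /// assert!(Parser::parse_sql(&GenericDialect {}, sql).is_ok());
    /// ```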
17498    pub fn parse_start_transaction(&mut self) -> Result<Statement, ParserError> {
17499        self.expect_keyword_is(Keyword::TRANSACTION)?;
17500        Ok(Statement::StartTransaction {
17501            modes: self.parse_transaction_modes()?,
17502            begin: false,
17503            transaction: Some(BeginTransactionKind::Transaction),
17504            modifier: None,
17505            statements: vec![],
17506            exception: None,
17507            has_end_keyword: false,
17508        })
17509    }
17510
17511    pub fn parse_begin(&mut self) -> Result<Statement, ParserError> {
17512        let modifier = if !self.dialect.supports_start_transaction_modifier() {
17513            None
17514        } else if self.parse_keyword(Keyword::DEFERRED) {
17515            Some(TransactionModifier::Deferred)
17516        } else if self.parse_keyword(Keyword::IMMEDIATE) {
17517            Some(TransactionModifier::Immediate)
17518        } else if self.parse_keyword(Keyword::EXCLUSIVE) {
17519            Some(TransactionModifier::Exclusive)
17520        } else if self.parse_keyword(Keyword::TRY) {
17521            Some(TransactionModifier::Try)
17522        } else if self.parse_keyword(Keyword::CATCH) {
17523            Some(TransactionModifier::Catch)
17524        } else {
17525            None
17526        };
17527        let transaction = match self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]) {
17528            Some(Keyword::TRANSACTION) => Some(BeginTransactionKind::Transaction),
17529            Some(Keyword::WORK) => Some(BeginTransactionKind::Work),
17530            _ => None,
17531        };
17532        Ok(Statement::StartTransaction {
17533            modes: self.parse_transaction_modes()?,
17534            begin: true,
17535            transaction,
17536            modifier,
17537            statements: vec![],
17538            exception: None,
17539            has_end_keyword: false,
17540        })
17541    }
17542
17543    pub fn parse_begin_exception_end(&mut self) -> Result<Statement, ParserError> {
17544        let statements = self.parse_statement_list(&[Keyword::EXCEPTION, Keyword::END])?;
17545
17546        let exception = if self.parse_keyword(Keyword::EXCEPTION) {
17547            let mut when = Vec::new();
17548
17549            // We can have multiple `WHEN` arms so we consume all cases until `END`
17550            while !self.peek_keyword(Keyword::END) {
17551                self.expect_keyword(Keyword::WHEN)?;
17552
17553                // Each `WHEN` case can have one or more conditions, e.g.
17554                // WHEN EXCEPTION_1 [OR EXCEPTION_2] THEN
17555                // So we parse identifiers until the `THEN` keyword.
17556                let mut idents = Vec::new();
17557
17558                while !self.parse_keyword(Keyword::THEN) {
17559                    let ident = self.parse_identifier()?;
17560                    idents.push(ident);
17561
17562                    self.maybe_parse(|p| p.expect_keyword(Keyword::OR))?;
17563                }
17564
17565                let statements = self.parse_statement_list(&[Keyword::WHEN, Keyword::END])?;
17566
17567                when.push(ExceptionWhen { idents, statements });
17568            }
17569
17570            Some(when)
17571        } else {
17572            None
17573        };
17574
17575        self.expect_keyword(Keyword::END)?;
17576
17577        Ok(Statement::StartTransaction {
17578            begin: true,
17579            statements,
17580            exception,
17581            has_end_keyword: true,
17582            transaction: None,
17583            modifier: None,
17584            modes: Default::default(),
17585        })
17586    }
17587
17588    pub fn parse_end(&mut self) -> Result<Statement, ParserError> {
17589        let modifier = if !self.dialect.supports_end_transaction_modifier() {
17590            None
17591        } else if self.parse_keyword(Keyword::TRY) {
17592            Some(TransactionModifier::Try)
17593        } else if self.parse_keyword(Keyword::CATCH) {
17594            Some(TransactionModifier::Catch)
17595        } else {
17596            None
17597        };
17598        Ok(Statement::Commit {
17599            chain: self.parse_commit_rollback_chain()?,
17600            end: true,
17601            modifier,
17602        })
17603    }
17604
17605    pub fn parse_transaction_modes(&mut self) -> Result<Vec<TransactionMode>, ParserError> {
17606        let mut modes = vec![];
17607        let mut required = false;
17608        loop {
17609            let mode = if self.parse_keywords(&[Keyword::ISOLATION, Keyword::LEVEL]) {
17610                let iso_level = if self.parse_keywords(&[Keyword::READ, Keyword::UNCOMMITTED]) {
17611                    TransactionIsolationLevel::ReadUncommitted
17612                } else if self.parse_keywords(&[Keyword::READ, Keyword::COMMITTED]) {
17613                    TransactionIsolationLevel::ReadCommitted
17614                } else if self.parse_keywords(&[Keyword::REPEATABLE, Keyword::READ]) {
17615                    TransactionIsolationLevel::RepeatableRead
17616                } else if self.parse_keyword(Keyword::SERIALIZABLE) {
17617                    TransactionIsolationLevel::Serializable
17618                } else if self.parse_keyword(Keyword::SNAPSHOT) {
17619                    TransactionIsolationLevel::Snapshot
17620                } else {
17621                    self.expected("isolation level", self.peek_token())?
17622                };
17623                TransactionMode::IsolationLevel(iso_level)
17624            } else if self.parse_keywords(&[Keyword::READ, Keyword::ONLY]) {
17625                TransactionMode::AccessMode(TransactionAccessMode::ReadOnly)
17626            } else if self.parse_keywords(&[Keyword::READ, Keyword::WRITE]) {
17627                TransactionMode::AccessMode(TransactionAccessMode::ReadWrite)
17628            } else if required {
17629                self.expected("transaction mode", self.peek_token())?
17630            } else {
17631                break;
17632            };
17633            modes.push(mode);
17634            // ANSI requires a comma after each transaction mode, but
17635            // PostgreSQL, for historical reasons, does not. We follow
17636            // PostgreSQL in making the comma optional, since that is strictly
17637            // more general.
17638            required = self.consume_token(&Token::Comma);
17639        }
17640        Ok(modes)
17641    }
17642
17643    pub fn parse_commit(&mut self) -> Result<Statement, ParserError> {
17644        Ok(Statement::Commit {
17645            chain: self.parse_commit_rollback_chain()?,
17646            end: false,
17647            modifier: None,
17648        })
17649    }
17650
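    /// Parse the remainder of a `ROLLBACK` statement, including the optional
    /// `AND [NO] CHAIN` and `TO [SAVEPOINT] <name>` clauses.
    ///
    /// A minimal end-to-end sketch via [`Parser::parse_sql`]:
    /// ```
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "ROLLBACK TO SAVEPOINT sp1";
    /// assert!(Parser::parse_sql(&GenericDialect {}, sql).is_ok());
    /// ```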
17651    pub fn parse_rollback(&mut self) -> Result<Statement, ParserError> {
17652        let chain = self.parse_commit_rollback_chain()?;
17653        let savepoint = self.parse_rollback_savepoint()?;
17654
17655        Ok(Statement::Rollback { chain, savepoint })
17656    }
17657
17658    pub fn parse_commit_rollback_chain(&mut self) -> Result<bool, ParserError> {
17659        let _ = self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]);
17660        if self.parse_keyword(Keyword::AND) {
17661            let chain = !self.parse_keyword(Keyword::NO);
17662            self.expect_keyword_is(Keyword::CHAIN)?;
17663            Ok(chain)
17664        } else {
17665            Ok(false)
17666        }
17667    }
17668
17669    pub fn parse_rollback_savepoint(&mut self) -> Result<Option<Ident>, ParserError> {
17670        if self.parse_keyword(Keyword::TO) {
17671            let _ = self.parse_keyword(Keyword::SAVEPOINT);
17672            let savepoint = self.parse_identifier()?;
17673
17674            Ok(Some(savepoint))
17675        } else {
17676            Ok(None)
17677        }
17678    }
17679
17680    /// Parse a 'RAISERROR' statement
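    ///
    /// A hedged end-to-end sketch (T-SQL syntax, via [`Parser::parse_sql`]):
    /// ```
    /// use sqlparser::dialect::MsSqlDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "RAISERROR('Something went wrong', 16, 1)";
    /// assert!(Parser::parse_sql(&MsSqlDialect {}, sql).is_ok());
    /// ```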
17681    pub fn parse_raiserror(&mut self) -> Result<Statement, ParserError> {
17682        self.expect_token(&Token::LParen)?;
17683        let message = Box::new(self.parse_expr()?);
17684        self.expect_token(&Token::Comma)?;
17685        let severity = Box::new(self.parse_expr()?);
17686        self.expect_token(&Token::Comma)?;
17687        let state = Box::new(self.parse_expr()?);
17688        let arguments = if self.consume_token(&Token::Comma) {
17689            self.parse_comma_separated(Parser::parse_expr)?
17690        } else {
17691            vec![]
17692        };
17693        self.expect_token(&Token::RParen)?;
17694        let options = if self.parse_keyword(Keyword::WITH) {
17695            self.parse_comma_separated(Parser::parse_raiserror_option)?
17696        } else {
17697            vec![]
17698        };
17699        Ok(Statement::RaisError {
17700            message,
17701            severity,
17702            state,
17703            arguments,
17704            options,
17705        })
17706    }
17707
17708    pub fn parse_raiserror_option(&mut self) -> Result<RaisErrorOption, ParserError> {
17709        match self.expect_one_of_keywords(&[Keyword::LOG, Keyword::NOWAIT, Keyword::SETERROR])? {
17710            Keyword::LOG => Ok(RaisErrorOption::Log),
17711            Keyword::NOWAIT => Ok(RaisErrorOption::NoWait),
17712            Keyword::SETERROR => Ok(RaisErrorOption::SetError),
17713            _ => self.expected(
17714                "LOG, NOWAIT, or SETERROR raiserror option",
17715                self.peek_token(),
17716            ),
17717        }
17718    }
17719
17720    pub fn parse_deallocate(&mut self) -> Result<Statement, ParserError> {
17721        let prepare = self.parse_keyword(Keyword::PREPARE);
17722        let name = self.parse_identifier()?;
17723        Ok(Statement::Deallocate { name, prepare })
17724    }
17725
17726    pub fn parse_execute(&mut self) -> Result<Statement, ParserError> {
17727        let name = if self.dialect.supports_execute_immediate()
17728            && self.parse_keyword(Keyword::IMMEDIATE)
17729        {
17730            None
17731        } else {
17732            let has_parentheses = self.consume_token(&Token::LParen);
17733            let name = self.parse_object_name(false)?;
17734            if has_parentheses {
17735                self.expect_token(&Token::RParen)?;
17736            }
17737            Some(name)
17738        };
17739
17740        let has_parentheses = self.consume_token(&Token::LParen);
17741
17742        let end_kws = &[Keyword::USING, Keyword::OUTPUT, Keyword::DEFAULT];
17743        let end_token = match (has_parentheses, self.peek_token().token) {
17744            (true, _) => Token::RParen,
17745            (false, Token::EOF) => Token::EOF,
17746            (false, Token::Word(w)) if end_kws.contains(&w.keyword) => Token::Word(w),
17747            (false, _) => Token::SemiColon,
17748        };
17749
17750        let parameters = self.parse_comma_separated0(Parser::parse_expr, end_token)?;
17751
17752        if has_parentheses {
17753            self.expect_token(&Token::RParen)?;
17754        }
17755
17756        let into = if self.parse_keyword(Keyword::INTO) {
17757            self.parse_comma_separated(Self::parse_identifier)?
17758        } else {
17759            vec![]
17760        };
17761
17762        let using = if self.parse_keyword(Keyword::USING) {
17763            self.parse_comma_separated(Self::parse_expr_with_alias)?
17764        } else {
17765            vec![]
17766        };
17767
17768        let output = self.parse_keyword(Keyword::OUTPUT);
17769
17770        let default = self.parse_keyword(Keyword::DEFAULT);
17771
17772        Ok(Statement::Execute {
17773            immediate: name.is_none(),
17774            name,
17775            parameters,
17776            has_parentheses,
17777            into,
17778            using,
17779            output,
17780            default,
17781        })
17782    }
17783
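    /// Parse a `PREPARE` statement, e.g. `PREPARE q (INT) AS SELECT * FROM t WHERE a = $1`.
    ///
    /// A hedged end-to-end sketch via [`Parser::parse_sql`]:
    /// ```
    /// use sqlparser::dialect::PostgreSqlDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "PREPARE q AS SELECT 1";
    /// assert!(Parser::parse_sql(&PostgreSqlDialect {}, sql).is_ok());
    /// ```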
17784    pub fn parse_prepare(&mut self) -> Result<Statement, ParserError> {
17785        let name = self.parse_identifier()?;
17786
17787        let mut data_types = vec![];
17788        if self.consume_token(&Token::LParen) {
17789            data_types = self.parse_comma_separated(Parser::parse_data_type)?;
17790            self.expect_token(&Token::RParen)?;
17791        }
17792
17793        self.expect_keyword_is(Keyword::AS)?;
17794        let statement = Box::new(self.parse_statement()?);
17795        Ok(Statement::Prepare {
17796            name,
17797            data_types,
17798            statement,
17799        })
17800    }
17801
17802    pub fn parse_unload(&mut self) -> Result<Statement, ParserError> {
17803        self.expect_keyword(Keyword::UNLOAD)?;
17804        self.expect_token(&Token::LParen)?;
17805        let (query, query_text) = if matches!(self.peek_token().token, Token::SingleQuotedString(_))
17806        {
17807            (None, Some(self.parse_literal_string()?))
17808        } else {
17809            (Some(self.parse_query()?), None)
17810        };
17811        self.expect_token(&Token::RParen)?;
17812
17813        self.expect_keyword_is(Keyword::TO)?;
17814        let to = self.parse_identifier()?;
17815        let auth = if self.parse_keyword(Keyword::IAM_ROLE) {
17816            Some(self.parse_iam_role_kind()?)
17817        } else {
17818            None
17819        };
17820        let with = self.parse_options(Keyword::WITH)?;
17821        let mut options = vec![];
17822        while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
17823            options.push(opt);
17824        }
17825        Ok(Statement::Unload {
17826            query,
17827            query_text,
17828            to,
17829            auth,
17830            with,
17831            options,
17832        })
17833    }
17834
17835    fn parse_select_into(&mut self) -> Result<SelectInto, ParserError> {
17836        let temporary = self
17837            .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
17838            .is_some();
17839        let unlogged = self.parse_keyword(Keyword::UNLOGGED);
17840        let table = self.parse_keyword(Keyword::TABLE);
17841        let name = self.parse_object_name(false)?;
17842
17843        Ok(SelectInto {
17844            temporary,
17845            unlogged,
17846            table,
17847            name,
17848        })
17849    }
17850
17851    fn parse_pragma_value(&mut self) -> Result<Value, ParserError> {
17852        match self.parse_value()?.value {
17853            v @ Value::SingleQuotedString(_) => Ok(v),
17854            v @ Value::DoubleQuotedString(_) => Ok(v),
17855            v @ Value::Number(_, _) => Ok(v),
17856            v @ Value::Placeholder(_) => Ok(v),
17857            _ => {
17858                self.prev_token();
17859                self.expected("number or string or ? placeholder", self.peek_token())
17860            }
17861        }
17862    }
17863
17864    // PRAGMA [schema-name '.'] pragma-name [('=' pragma-value) | '(' pragma-value ')']
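    /// A hedged end-to-end sketch (SQLite syntax, via [`Parser::parse_sql`]):
    /// ```
    /// use sqlparser::dialect::SQLiteDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "PRAGMA cache_size = 2000";
    /// assert!(Parser::parse_sql(&SQLiteDialect {}, sql).is_ok());
    /// ```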
17865    pub fn parse_pragma(&mut self) -> Result<Statement, ParserError> {
17866        let name = self.parse_object_name(false)?;
17867        if self.consume_token(&Token::LParen) {
17868            let value = self.parse_pragma_value()?;
17869            self.expect_token(&Token::RParen)?;
17870            Ok(Statement::Pragma {
17871                name,
17872                value: Some(value),
17873                is_eq: false,
17874            })
17875        } else if self.consume_token(&Token::Eq) {
17876            Ok(Statement::Pragma {
17877                name,
17878                value: Some(self.parse_pragma_value()?),
17879                is_eq: true,
17880            })
17881        } else {
17882            Ok(Statement::Pragma {
17883                name,
17884                value: None,
17885                is_eq: false,
17886            })
17887        }
17888    }
17889
17890    /// `INSTALL [extension_name]`
17891    pub fn parse_install(&mut self) -> Result<Statement, ParserError> {
17892        let extension_name = self.parse_identifier()?;
17893
17894        Ok(Statement::Install { extension_name })
17895    }
17896
17897    /// Parse a SQL LOAD statement
17898    pub fn parse_load(&mut self) -> Result<Statement, ParserError> {
17899        if self.dialect.supports_load_extension() {
17900            let extension_name = self.parse_identifier()?;
17901            Ok(Statement::Load { extension_name })
17902        } else if self.parse_keyword(Keyword::DATA) && self.dialect.supports_load_data() {
17903            let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
17904            self.expect_keyword_is(Keyword::INPATH)?;
17905            let inpath = self.parse_literal_string()?;
17906            let overwrite = self.parse_one_of_keywords(&[Keyword::OVERWRITE]).is_some();
17907            self.expect_keyword_is(Keyword::INTO)?;
17908            self.expect_keyword_is(Keyword::TABLE)?;
17909            let table_name = self.parse_object_name(false)?;
17910            let partitioned = self.parse_insert_partition()?;
17911            let table_format = self.parse_load_data_table_format()?;
17912            Ok(Statement::LoadData {
17913                local,
17914                inpath,
17915                overwrite,
17916                table_name,
17917                partitioned,
17918                table_format,
17919            })
17920        } else {
17921            self.expected(
17922                "`DATA` or an extension name after `LOAD`",
17923                self.peek_token(),
17924            )
17925        }
17926    }
17927
17928    /// ```sql
17929    /// OPTIMIZE TABLE [db.]name [ON CLUSTER cluster] [PARTITION partition | PARTITION ID 'partition_id'] [FINAL] [DEDUPLICATE [BY expression]]
17930    /// ```
17931    /// [ClickHouse](https://clickhouse.com/docs/en/sql-reference/statements/optimize)
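    ///
    /// A hedged end-to-end sketch via [`Parser::parse_sql`]:
    /// ```
    /// use sqlparser::dialect::ClickHouseDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "OPTIMIZE TABLE t FINAL DEDUPLICATE";
    /// assert!(Parser::parse_sql(&ClickHouseDialect {}, sql).is_ok());
    /// ```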
17932    pub fn parse_optimize_table(&mut self) -> Result<Statement, ParserError> {
17933        self.expect_keyword_is(Keyword::TABLE)?;
17934        let name = self.parse_object_name(false)?;
17935        let on_cluster = self.parse_optional_on_cluster()?;
17936
17937        let partition = if self.parse_keyword(Keyword::PARTITION) {
17938            if self.parse_keyword(Keyword::ID) {
17939                Some(Partition::Identifier(self.parse_identifier()?))
17940            } else {
17941                Some(Partition::Expr(self.parse_expr()?))
17942            }
17943        } else {
17944            None
17945        };
17946
17947        let include_final = self.parse_keyword(Keyword::FINAL);
17948        let deduplicate = if self.parse_keyword(Keyword::DEDUPLICATE) {
17949            if self.parse_keyword(Keyword::BY) {
17950                Some(Deduplicate::ByExpression(self.parse_expr()?))
17951            } else {
17952                Some(Deduplicate::All)
17953            }
17954        } else {
17955            None
17956        };
17957
17958        Ok(Statement::OptimizeTable {
17959            name,
17960            on_cluster,
17961            partition,
17962            include_final,
17963            deduplicate,
17964        })
17965    }
17966
17967    /// ```sql
17968    /// CREATE [ { TEMPORARY | TEMP } ] SEQUENCE [ IF NOT EXISTS ] <sequence_name>
17969    /// ```
17970    ///
17971    /// See [Postgres docs](https://www.postgresql.org/docs/current/sql-createsequence.html) for more details.
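    ///
    /// Note that the sequence options are recognised in a single fixed pass
    /// (INCREMENT, MINVALUE, MAXVALUE, START, CACHE, CYCLE), so this hedged sketch keeps
    /// them in that order:
    /// ```
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "CREATE SEQUENCE seq INCREMENT BY 1 START WITH 100 CACHE 10";
    /// assert!(Parser::parse_sql(&GenericDialect {}, sql).is_ok());
    /// ```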
17972    pub fn parse_create_sequence(&mut self, temporary: bool) -> Result<Statement, ParserError> {
17973        //[ IF NOT EXISTS ]
17974        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
17975        //name
17976        let name = self.parse_object_name(false)?;
17977        //[ AS data_type ]
17978        let mut data_type: Option<DataType> = None;
17979        if self.parse_keywords(&[Keyword::AS]) {
17980            data_type = Some(self.parse_data_type()?)
17981        }
17982        let sequence_options = self.parse_create_sequence_options()?;
17983        // [ OWNED BY { table_name.column_name | NONE } ]
17984        let owned_by = if self.parse_keywords(&[Keyword::OWNED, Keyword::BY]) {
17985            if self.parse_keywords(&[Keyword::NONE]) {
17986                Some(ObjectName::from(vec![Ident::new("NONE")]))
17987            } else {
17988                Some(self.parse_object_name(false)?)
17989            }
17990        } else {
17991            None
17992        };
17993        Ok(Statement::CreateSequence {
17994            temporary,
17995            if_not_exists,
17996            name,
17997            data_type,
17998            sequence_options,
17999            owned_by,
18000        })
18001    }
18002
18003    fn parse_create_sequence_options(&mut self) -> Result<Vec<SequenceOptions>, ParserError> {
18004        let mut sequence_options = vec![];
18005        //[ INCREMENT [ BY ] increment ]
18006        if self.parse_keywords(&[Keyword::INCREMENT]) {
18007            if self.parse_keywords(&[Keyword::BY]) {
18008                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, true));
18009            } else {
18010                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, false));
18011            }
18012        }
18013        //[ MINVALUE minvalue | NO MINVALUE ]
18014        if self.parse_keyword(Keyword::MINVALUE) {
18015            sequence_options.push(SequenceOptions::MinValue(Some(self.parse_number()?)));
18016        } else if self.parse_keywords(&[Keyword::NO, Keyword::MINVALUE]) {
18017            sequence_options.push(SequenceOptions::MinValue(None));
18018        }
18019        //[ MAXVALUE maxvalue | NO MAXVALUE ]
18020        if self.parse_keywords(&[Keyword::MAXVALUE]) {
18021            sequence_options.push(SequenceOptions::MaxValue(Some(self.parse_number()?)));
18022        } else if self.parse_keywords(&[Keyword::NO, Keyword::MAXVALUE]) {
18023            sequence_options.push(SequenceOptions::MaxValue(None));
18024        }
18025
18026        //[ START [ WITH ] start ]
18027        if self.parse_keywords(&[Keyword::START]) {
18028            if self.parse_keywords(&[Keyword::WITH]) {
18029                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, true));
18030            } else {
18031                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, false));
18032            }
18033        }
18034        //[ CACHE cache ]
18035        if self.parse_keywords(&[Keyword::CACHE]) {
18036            sequence_options.push(SequenceOptions::Cache(self.parse_number()?));
18037        }
18038        // [ [ NO ] CYCLE ]
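        // (the boolean records whether the `NO` keyword was present)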
18039        if self.parse_keywords(&[Keyword::NO, Keyword::CYCLE]) {
18040            sequence_options.push(SequenceOptions::Cycle(true));
18041        } else if self.parse_keywords(&[Keyword::CYCLE]) {
18042            sequence_options.push(SequenceOptions::Cycle(false));
18043        }
18044
18045        Ok(sequence_options)
18046    }
18047
18048    /// Parse a `CREATE SERVER` statement.
18049    ///
18050    /// See [Statement::CreateServer]
18051    pub fn parse_pg_create_server(&mut self) -> Result<Statement, ParserError> {
18052        let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
18053        let name = self.parse_object_name(false)?;
18054
18055        let server_type = if self.parse_keyword(Keyword::TYPE) {
18056            Some(self.parse_identifier()?)
18057        } else {
18058            None
18059        };
18060
18061        let version = if self.parse_keyword(Keyword::VERSION) {
18062            Some(self.parse_identifier()?)
18063        } else {
18064            None
18065        };
18066
18067        self.expect_keywords(&[Keyword::FOREIGN, Keyword::DATA, Keyword::WRAPPER])?;
18068        let foreign_data_wrapper = self.parse_object_name(false)?;
18069
18070        let mut options = None;
18071        if self.parse_keyword(Keyword::OPTIONS) {
18072            self.expect_token(&Token::LParen)?;
18073            options = Some(self.parse_comma_separated(|p| {
18074                let key = p.parse_identifier()?;
18075                let value = p.parse_identifier()?;
18076                Ok(CreateServerOption { key, value })
18077            })?);
18078            self.expect_token(&Token::RParen)?;
18079        }
18080
18081        Ok(Statement::CreateServer(CreateServerStatement {
18082            name,
18083            if_not_exists: ine,
18084            server_type,
18085            version,
18086            foreign_data_wrapper,
18087            options,
18088        }))
18089    }
18090
18091    /// The index of the first unprocessed token.
18092    pub fn index(&self) -> usize {
18093        self.index
18094    }
18095
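    /// Parse a single named window definition in a `WINDOW` clause, e.g. the
    /// `w AS (PARTITION BY a ORDER BY b)` part of a `SELECT ... WINDOW ...` query.
    ///
    /// A minimal end-to-end sketch via [`Parser::parse_sql`]:
    /// ```
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "SELECT SUM(x) OVER w FROM t WINDOW w AS (PARTITION BY y ORDER BY z)";
    /// assert!(Parser::parse_sql(&GenericDialect {}, sql).is_ok());
    /// ```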
18096    pub fn parse_named_window(&mut self) -> Result<NamedWindowDefinition, ParserError> {
18097        let ident = self.parse_identifier()?;
18098        self.expect_keyword_is(Keyword::AS)?;
18099
18100        let window_expr = if self.consume_token(&Token::LParen) {
18101            NamedWindowExpr::WindowSpec(self.parse_window_spec()?)
18102        } else if self.dialect.supports_window_clause_named_window_reference() {
18103            NamedWindowExpr::NamedWindow(self.parse_identifier()?)
18104        } else {
18105            return self.expected("(", self.peek_token());
18106        };
18107
18108        Ok(NamedWindowDefinition(ident, window_expr))
18109    }
18110
18111    pub fn parse_create_procedure(&mut self, or_alter: bool) -> Result<Statement, ParserError> {
18112        let name = self.parse_object_name(false)?;
18113        let params = self.parse_optional_procedure_parameters()?;
18114
18115        let language = if self.parse_keyword(Keyword::LANGUAGE) {
18116            Some(self.parse_identifier()?)
18117        } else {
18118            None
18119        };
18120
18121        self.expect_keyword_is(Keyword::AS)?;
18122
18123        let body = self.parse_conditional_statements(&[Keyword::END])?;
18124
18125        Ok(Statement::CreateProcedure {
18126            name,
18127            or_alter,
18128            params,
18129            language,
18130            body,
18131        })
18132    }
18133
18134    pub fn parse_window_spec(&mut self) -> Result<WindowSpec, ParserError> {
18135        let window_name = match self.peek_token().token {
18136            Token::Word(word) if word.keyword == Keyword::NoKeyword => {
18137                self.parse_optional_ident()?
18138            }
18139            _ => None,
18140        };
18141
18142        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
18143            self.parse_comma_separated(Parser::parse_expr)?
18144        } else {
18145            vec![]
18146        };
18147        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
18148            self.parse_comma_separated(Parser::parse_order_by_expr)?
18149        } else {
18150            vec![]
18151        };
18152
18153        let window_frame = if !self.consume_token(&Token::RParen) {
18154            let window_frame = self.parse_window_frame()?;
18155            self.expect_token(&Token::RParen)?;
18156            Some(window_frame)
18157        } else {
18158            None
18159        };
18160        Ok(WindowSpec {
18161            window_name,
18162            partition_by,
18163            order_by,
18164            window_frame,
18165        })
18166    }
18167
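    /// Parses the remainder of a `CREATE TYPE` statement, dispatching on what follows
    /// the type name: `AS ENUM (...)`, `AS RANGE (...)`, `AS (...)` (composite),
    /// a parenthesized SQL definition without `AS`, or nothing at all.
    ///
    /// For example, an enum type (a sketch, assuming the PostgreSQL dialect):
    ///
    /// ```
    /// # use sqlparser::dialect::PostgreSqlDialect;
    /// # use sqlparser::parser::Parser;
    /// let sql = "CREATE TYPE mood AS ENUM ('sad', 'ok', 'happy')";
    /// assert!(Parser::parse_sql(&PostgreSqlDialect {}, sql).is_ok());
    /// ```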
18168    pub fn parse_create_type(&mut self) -> Result<Statement, ParserError> {
18169        let name = self.parse_object_name(false)?;
18170
18171        // Check if we have AS keyword
18172        let has_as = self.parse_keyword(Keyword::AS);
18173
18174        if !has_as {
18175            // Two cases: CREATE TYPE name; or CREATE TYPE name (options);
18176            if self.consume_token(&Token::LParen) {
18177                // CREATE TYPE name (options) - SQL definition without AS
18178                let options = self.parse_create_type_sql_definition_options()?;
18179                self.expect_token(&Token::RParen)?;
18180                return Ok(Statement::CreateType {
18181                    name,
18182                    representation: Some(UserDefinedTypeRepresentation::SqlDefinition { options }),
18183                });
18184            }
18185
18186            // CREATE TYPE name; - no representation
18187            return Ok(Statement::CreateType {
18188                name,
18189                representation: None,
18190            });
18191        }
18192
18193        // We have AS keyword
18194        if self.parse_keyword(Keyword::ENUM) {
18195            // CREATE TYPE name AS ENUM (labels)
18196            self.parse_create_type_enum(name)
18197        } else if self.parse_keyword(Keyword::RANGE) {
18198            // CREATE TYPE name AS RANGE (options)
18199            self.parse_create_type_range(name)
18200        } else if self.consume_token(&Token::LParen) {
18201            // CREATE TYPE name AS (attributes) - Composite
18202            self.parse_create_type_composite(name)
18203        } else {
18204            self.expected("ENUM, RANGE, or '(' after AS", self.peek_token())
18205        }
18206    }
18207
18208    /// Parse remainder of `CREATE TYPE AS (attributes)` statement (composite type)
18209    ///
18210    /// See [PostgreSQL](https://www.postgresql.org/docs/current/sql-createtype.html)
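    ///
    /// For example (a sketch, assuming the PostgreSQL dialect; names are placeholders):
    ///
    /// ```
    /// # use sqlparser::dialect::PostgreSqlDialect;
    /// # use sqlparser::parser::Parser;
    /// let sql = "CREATE TYPE complex AS (r DOUBLE PRECISION, i DOUBLE PRECISION)";
    /// assert!(Parser::parse_sql(&PostgreSqlDialect {}, sql).is_ok());
    /// ```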
18211    fn parse_create_type_composite(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
18212        if self.consume_token(&Token::RParen) {
18213            // Empty composite type
18214            return Ok(Statement::CreateType {
18215                name,
18216                representation: Some(UserDefinedTypeRepresentation::Composite {
18217                    attributes: vec![],
18218                }),
18219            });
18220        }
18221
18222        let mut attributes = vec![];
18223        loop {
18224            let attr_name = self.parse_identifier()?;
18225            let attr_data_type = self.parse_data_type()?;
18226            let attr_collation = if self.parse_keyword(Keyword::COLLATE) {
18227                Some(self.parse_object_name(false)?)
18228            } else {
18229                None
18230            };
18231            attributes.push(UserDefinedTypeCompositeAttributeDef {
18232                name: attr_name,
18233                data_type: attr_data_type,
18234                collation: attr_collation,
18235            });
18236
18237            if !self.consume_token(&Token::Comma) {
18238                break;
18239            }
18240        }
18241        self.expect_token(&Token::RParen)?;
18242
18243        Ok(Statement::CreateType {
18244            name,
18245            representation: Some(UserDefinedTypeRepresentation::Composite { attributes }),
18246        })
18247    }
18248
18249    /// Parse remainder of `CREATE TYPE AS ENUM` statement (see [Statement::CreateType] and [Self::parse_create_type])
18250    ///
18251    /// See [PostgreSQL](https://www.postgresql.org/docs/current/sql-createtype.html)
18252    pub fn parse_create_type_enum(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
18253        self.expect_token(&Token::LParen)?;
18254        let labels = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
18255        self.expect_token(&Token::RParen)?;
18256
18257        Ok(Statement::CreateType {
18258            name,
18259            representation: Some(UserDefinedTypeRepresentation::Enum { labels }),
18260        })
18261    }
18262
18263    /// Parse remainder of `CREATE TYPE AS RANGE` statement
18264    ///
18265    /// See [PostgreSQL](https://www.postgresql.org/docs/current/sql-createtype.html)
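    ///
    /// For example (a sketch, assuming the PostgreSQL dialect; the type name is a placeholder):
    ///
    /// ```
    /// # use sqlparser::dialect::PostgreSqlDialect;
    /// # use sqlparser::parser::Parser;
    /// let sql = "CREATE TYPE floatrange AS RANGE (SUBTYPE = FLOAT8)";
    /// assert!(Parser::parse_sql(&PostgreSqlDialect {}, sql).is_ok());
    /// ```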
18266    fn parse_create_type_range(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
18267        self.expect_token(&Token::LParen)?;
18268        let options = self.parse_comma_separated0(|p| p.parse_range_option(), Token::RParen)?;
18269        self.expect_token(&Token::RParen)?;
18270
18271        Ok(Statement::CreateType {
18272            name,
18273            representation: Some(UserDefinedTypeRepresentation::Range { options }),
18274        })
18275    }
18276
18277    /// Parse a single range option for a `CREATE TYPE AS RANGE` statement
18278    fn parse_range_option(&mut self) -> Result<UserDefinedTypeRangeOption, ParserError> {
18279        let keyword = self.parse_one_of_keywords(&[
18280            Keyword::SUBTYPE,
18281            Keyword::SUBTYPE_OPCLASS,
18282            Keyword::COLLATION,
18283            Keyword::CANONICAL,
18284            Keyword::SUBTYPE_DIFF,
18285            Keyword::MULTIRANGE_TYPE_NAME,
18286        ]);
18287
18288        match keyword {
18289            Some(Keyword::SUBTYPE) => {
18290                self.expect_token(&Token::Eq)?;
18291                let data_type = self.parse_data_type()?;
18292                Ok(UserDefinedTypeRangeOption::Subtype(data_type))
18293            }
18294            Some(Keyword::SUBTYPE_OPCLASS) => {
18295                self.expect_token(&Token::Eq)?;
18296                let name = self.parse_object_name(false)?;
18297                Ok(UserDefinedTypeRangeOption::SubtypeOpClass(name))
18298            }
18299            Some(Keyword::COLLATION) => {
18300                self.expect_token(&Token::Eq)?;
18301                let name = self.parse_object_name(false)?;
18302                Ok(UserDefinedTypeRangeOption::Collation(name))
18303            }
18304            Some(Keyword::CANONICAL) => {
18305                self.expect_token(&Token::Eq)?;
18306                let name = self.parse_object_name(false)?;
18307                Ok(UserDefinedTypeRangeOption::Canonical(name))
18308            }
18309            Some(Keyword::SUBTYPE_DIFF) => {
18310                self.expect_token(&Token::Eq)?;
18311                let name = self.parse_object_name(false)?;
18312                Ok(UserDefinedTypeRangeOption::SubtypeDiff(name))
18313            }
18314            Some(Keyword::MULTIRANGE_TYPE_NAME) => {
18315                self.expect_token(&Token::Eq)?;
18316                let name = self.parse_object_name(false)?;
18317                Ok(UserDefinedTypeRangeOption::MultirangeTypeName(name))
18318            }
18319            _ => self.expected("range option keyword", self.peek_token()),
18320        }
18321    }
18322
18323    /// Parse SQL definition options for CREATE TYPE (options)
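    ///
    /// For example, the base-type form without `AS` (a sketch, assuming the PostgreSQL
    /// dialect; the type and function names are placeholders):
    ///
    /// ```
    /// # use sqlparser::dialect::PostgreSqlDialect;
    /// # use sqlparser::parser::Parser;
    /// let sql = "CREATE TYPE mytype (INPUT = my_input_fn, OUTPUT = my_output_fn, INTERNALLENGTH = 16)";
    /// assert!(Parser::parse_sql(&PostgreSqlDialect {}, sql).is_ok());
    /// ```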
18324    fn parse_create_type_sql_definition_options(
18325        &mut self,
18326    ) -> Result<Vec<UserDefinedTypeSqlDefinitionOption>, ParserError> {
18327        self.parse_comma_separated0(|p| p.parse_sql_definition_option(), Token::RParen)
18328    }
18329
18330    /// Parse a single SQL definition option for CREATE TYPE (options)
18331    fn parse_sql_definition_option(
18332        &mut self,
18333    ) -> Result<UserDefinedTypeSqlDefinitionOption, ParserError> {
18334        let keyword = self.parse_one_of_keywords(&[
18335            Keyword::INPUT,
18336            Keyword::OUTPUT,
18337            Keyword::RECEIVE,
18338            Keyword::SEND,
18339            Keyword::TYPMOD_IN,
18340            Keyword::TYPMOD_OUT,
18341            Keyword::ANALYZE,
18342            Keyword::SUBSCRIPT,
18343            Keyword::INTERNALLENGTH,
18344            Keyword::PASSEDBYVALUE,
18345            Keyword::ALIGNMENT,
18346            Keyword::STORAGE,
18347            Keyword::LIKE,
18348            Keyword::CATEGORY,
18349            Keyword::PREFERRED,
18350            Keyword::DEFAULT,
18351            Keyword::ELEMENT,
18352            Keyword::DELIMITER,
18353            Keyword::COLLATABLE,
18354        ]);
18355
18356        match keyword {
18357            Some(Keyword::INPUT) => {
18358                self.expect_token(&Token::Eq)?;
18359                let name = self.parse_object_name(false)?;
18360                Ok(UserDefinedTypeSqlDefinitionOption::Input(name))
18361            }
18362            Some(Keyword::OUTPUT) => {
18363                self.expect_token(&Token::Eq)?;
18364                let name = self.parse_object_name(false)?;
18365                Ok(UserDefinedTypeSqlDefinitionOption::Output(name))
18366            }
18367            Some(Keyword::RECEIVE) => {
18368                self.expect_token(&Token::Eq)?;
18369                let name = self.parse_object_name(false)?;
18370                Ok(UserDefinedTypeSqlDefinitionOption::Receive(name))
18371            }
18372            Some(Keyword::SEND) => {
18373                self.expect_token(&Token::Eq)?;
18374                let name = self.parse_object_name(false)?;
18375                Ok(UserDefinedTypeSqlDefinitionOption::Send(name))
18376            }
18377            Some(Keyword::TYPMOD_IN) => {
18378                self.expect_token(&Token::Eq)?;
18379                let name = self.parse_object_name(false)?;
18380                Ok(UserDefinedTypeSqlDefinitionOption::TypmodIn(name))
18381            }
18382            Some(Keyword::TYPMOD_OUT) => {
18383                self.expect_token(&Token::Eq)?;
18384                let name = self.parse_object_name(false)?;
18385                Ok(UserDefinedTypeSqlDefinitionOption::TypmodOut(name))
18386            }
18387            Some(Keyword::ANALYZE) => {
18388                self.expect_token(&Token::Eq)?;
18389                let name = self.parse_object_name(false)?;
18390                Ok(UserDefinedTypeSqlDefinitionOption::Analyze(name))
18391            }
18392            Some(Keyword::SUBSCRIPT) => {
18393                self.expect_token(&Token::Eq)?;
18394                let name = self.parse_object_name(false)?;
18395                Ok(UserDefinedTypeSqlDefinitionOption::Subscript(name))
18396            }
18397            Some(Keyword::INTERNALLENGTH) => {
18398                self.expect_token(&Token::Eq)?;
18399                if self.parse_keyword(Keyword::VARIABLE) {
18400                    Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
18401                        UserDefinedTypeInternalLength::Variable,
18402                    ))
18403                } else {
18404                    let value = self.parse_literal_uint()?;
18405                    Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
18406                        UserDefinedTypeInternalLength::Fixed(value),
18407                    ))
18408                }
18409            }
18410            Some(Keyword::PASSEDBYVALUE) => Ok(UserDefinedTypeSqlDefinitionOption::PassedByValue),
18411            Some(Keyword::ALIGNMENT) => {
18412                self.expect_token(&Token::Eq)?;
18413                let align_keyword = self.parse_one_of_keywords(&[
18414                    Keyword::CHAR,
18415                    Keyword::INT2,
18416                    Keyword::INT4,
18417                    Keyword::DOUBLE,
18418                ]);
18419                match align_keyword {
18420                    Some(Keyword::CHAR) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
18421                        Alignment::Char,
18422                    )),
18423                    Some(Keyword::INT2) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
18424                        Alignment::Int2,
18425                    )),
18426                    Some(Keyword::INT4) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
18427                        Alignment::Int4,
18428                    )),
18429                    Some(Keyword::DOUBLE) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
18430                        Alignment::Double,
18431                    )),
18432                    _ => self.expected(
18433                        "alignment value (char, int2, int4, or double)",
18434                        self.peek_token(),
18435                    ),
18436                }
18437            }
18438            Some(Keyword::STORAGE) => {
18439                self.expect_token(&Token::Eq)?;
18440                let storage_keyword = self.parse_one_of_keywords(&[
18441                    Keyword::PLAIN,
18442                    Keyword::EXTERNAL,
18443                    Keyword::EXTENDED,
18444                    Keyword::MAIN,
18445                ]);
18446                match storage_keyword {
18447                    Some(Keyword::PLAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
18448                        UserDefinedTypeStorage::Plain,
18449                    )),
18450                    Some(Keyword::EXTERNAL) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
18451                        UserDefinedTypeStorage::External,
18452                    )),
18453                    Some(Keyword::EXTENDED) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
18454                        UserDefinedTypeStorage::Extended,
18455                    )),
18456                    Some(Keyword::MAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
18457                        UserDefinedTypeStorage::Main,
18458                    )),
18459                    _ => self.expected(
18460                        "storage value (plain, external, extended, or main)",
18461                        self.peek_token(),
18462                    ),
18463                }
18464            }
18465            Some(Keyword::LIKE) => {
18466                self.expect_token(&Token::Eq)?;
18467                let name = self.parse_object_name(false)?;
18468                Ok(UserDefinedTypeSqlDefinitionOption::Like(name))
18469            }
18470            Some(Keyword::CATEGORY) => {
18471                self.expect_token(&Token::Eq)?;
18472                let category_str = self.parse_literal_string()?;
18473                let category_char = category_str.chars().next().ok_or_else(|| {
18474                    ParserError::ParserError(
18475                        "CATEGORY value must be a single character".to_string(),
18476                    )
18477                })?;
18478                Ok(UserDefinedTypeSqlDefinitionOption::Category(category_char))
18479            }
18480            Some(Keyword::PREFERRED) => {
18481                self.expect_token(&Token::Eq)?;
18482                let value =
18483                    self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
18484                Ok(UserDefinedTypeSqlDefinitionOption::Preferred(value))
18485            }
18486            Some(Keyword::DEFAULT) => {
18487                self.expect_token(&Token::Eq)?;
18488                let expr = self.parse_expr()?;
18489                Ok(UserDefinedTypeSqlDefinitionOption::Default(expr))
18490            }
18491            Some(Keyword::ELEMENT) => {
18492                self.expect_token(&Token::Eq)?;
18493                let data_type = self.parse_data_type()?;
18494                Ok(UserDefinedTypeSqlDefinitionOption::Element(data_type))
18495            }
18496            Some(Keyword::DELIMITER) => {
18497                self.expect_token(&Token::Eq)?;
18498                let delimiter = self.parse_literal_string()?;
18499                Ok(UserDefinedTypeSqlDefinitionOption::Delimiter(delimiter))
18500            }
18501            Some(Keyword::COLLATABLE) => {
18502                self.expect_token(&Token::Eq)?;
18503                let value =
18504                    self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
18505                Ok(UserDefinedTypeSqlDefinitionOption::Collatable(value))
18506            }
18507            _ => self.expected("SQL definition option keyword", self.peek_token()),
18508        }
18509    }
18510
18511    fn parse_parenthesized_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
18512        self.expect_token(&Token::LParen)?;
18513        let idents = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
18514        self.expect_token(&Token::RParen)?;
18515        Ok(idents)
18516    }
18517
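    /// Parses the MySQL-style `FIRST` / `AFTER <column>` column position that may follow
    /// a column definition, returning `None` for other dialects or when neither keyword
    /// is present.
    ///
    /// For example (a sketch, assuming the MySQL dialect accepts column positions in
    /// `ALTER TABLE ... ADD COLUMN`; names are placeholders):
    ///
    /// ```
    /// # use sqlparser::dialect::MySqlDialect;
    /// # use sqlparser::parser::Parser;
    /// let sql = "ALTER TABLE people ADD COLUMN middle_name VARCHAR(50) AFTER first_name";
    /// assert!(Parser::parse_sql(&MySqlDialect {}, sql).is_ok());
    /// ```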
18518    fn parse_column_position(&mut self) -> Result<Option<MySQLColumnPosition>, ParserError> {
18519        if dialect_of!(self is MySqlDialect | GenericDialect) {
18520            if self.parse_keyword(Keyword::FIRST) {
18521                Ok(Some(MySQLColumnPosition::First))
18522            } else if self.parse_keyword(Keyword::AFTER) {
18523                let ident = self.parse_identifier()?;
18524                Ok(Some(MySQLColumnPosition::After(ident)))
18525            } else {
18526                Ok(None)
18527            }
18528        } else {
18529            Ok(None)
18530        }
18531    }
18532
18533    /// Parse [Statement::Print]
18534    fn parse_print(&mut self) -> Result<Statement, ParserError> {
18535        Ok(Statement::Print(PrintStatement {
18536            message: Box::new(self.parse_expr()?),
18537        }))
18538    }
18539
18540    /// Parse [Statement::Return]
18541    fn parse_return(&mut self) -> Result<Statement, ParserError> {
18542        match self.maybe_parse(|p| p.parse_expr())? {
18543            Some(expr) => Ok(Statement::Return(ReturnStatement {
18544                value: Some(ReturnStatementValue::Expr(expr)),
18545            })),
18546            None => Ok(Statement::Return(ReturnStatement { value: None })),
18547        }
18548    }
18549
18550    /// Parse an `EXPORT DATA` statement.
18551    ///
18552    /// See [Statement::ExportData]
18553    fn parse_export_data(&mut self) -> Result<Statement, ParserError> {
18554        self.expect_keywords(&[Keyword::EXPORT, Keyword::DATA])?;
18555
18556        let connection = if self.parse_keywords(&[Keyword::WITH, Keyword::CONNECTION]) {
18557            Some(self.parse_object_name(false)?)
18558        } else {
18559            None
18560        };
18561        self.expect_keyword(Keyword::OPTIONS)?;
18562        self.expect_token(&Token::LParen)?;
18563        let options = self.parse_comma_separated(|p| p.parse_sql_option())?;
18564        self.expect_token(&Token::RParen)?;
18565        self.expect_keyword(Keyword::AS)?;
18566        let query = self.parse_query()?;
18567        Ok(Statement::ExportData(ExportData {
18568            options,
18569            query,
18570            connection,
18571        }))
18572    }
18573
18574    fn parse_vacuum(&mut self) -> Result<Statement, ParserError> {
18575        self.expect_keyword(Keyword::VACUUM)?;
18576        let full = self.parse_keyword(Keyword::FULL);
18577        let sort_only = self.parse_keywords(&[Keyword::SORT, Keyword::ONLY]);
18578        let delete_only = self.parse_keywords(&[Keyword::DELETE, Keyword::ONLY]);
18579        let reindex = self.parse_keyword(Keyword::REINDEX);
18580        let recluster = self.parse_keyword(Keyword::RECLUSTER);
18581        let (table_name, threshold, boost) =
18582            match self.maybe_parse(|p| p.parse_object_name(false))? {
18583                Some(table_name) => {
18584                    let threshold = if self.parse_keyword(Keyword::TO) {
18585                        let value = self.parse_value()?;
18586                        self.expect_keyword(Keyword::PERCENT)?;
18587                        Some(value.value)
18588                    } else {
18589                        None
18590                    };
18591                    let boost = self.parse_keyword(Keyword::BOOST);
18592                    (Some(table_name), threshold, boost)
18593                }
18594                _ => (None, None, false),
18595            };
18596        Ok(Statement::Vacuum(VacuumStatement {
18597            full,
18598            sort_only,
18599            delete_only,
18600            reindex,
18601            recluster,
18602            table_name,
18603            threshold,
18604            boost,
18605        }))
18606    }
18607
18608    /// Consume the parser and return its underlying token buffer
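    ///
    /// A minimal sketch:
    ///
    /// ```
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let parser = Parser::new(&GenericDialect).try_with_sql("SELECT 1").unwrap();
    /// let tokens = parser.into_tokens();
    /// assert!(!tokens.is_empty());
    /// ```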
18609    pub fn into_tokens(self) -> Vec<TokenWithSpan> {
18610        self.tokens
18611    }
18612
18613    /// Returns true if the next keyword indicates a subquery, i.e. SELECT or WITH
18614    fn peek_sub_query(&mut self) -> bool {
18615        if self
18616            .parse_one_of_keywords(&[Keyword::SELECT, Keyword::WITH])
18617            .is_some()
18618        {
18619            self.prev_token();
18620            return true;
18621        }
18622        false
18623    }
18624
18625    pub(crate) fn parse_show_stmt_options(&mut self) -> Result<ShowStatementOptions, ParserError> {
18626        let show_in;
18627        let mut filter_position = None;
18628        if self.dialect.supports_show_like_before_in() {
18629            if let Some(filter) = self.parse_show_statement_filter()? {
18630                filter_position = Some(ShowStatementFilterPosition::Infix(filter));
18631            }
18632            show_in = self.maybe_parse_show_stmt_in()?;
18633        } else {
18634            show_in = self.maybe_parse_show_stmt_in()?;
18635            if let Some(filter) = self.parse_show_statement_filter()? {
18636                filter_position = Some(ShowStatementFilterPosition::Suffix(filter));
18637            }
18638        }
18639        let starts_with = self.maybe_parse_show_stmt_starts_with()?;
18640        let limit = self.maybe_parse_show_stmt_limit()?;
18641        let from = self.maybe_parse_show_stmt_from()?;
18642        Ok(ShowStatementOptions {
18643            filter_position,
18644            show_in,
18645            starts_with,
18646            limit,
18647            limit_from: from,
18648        })
18649    }
18650
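    /// Parses the optional `FROM`/`IN` clause of a `SHOW` statement, including the
    /// MySQL form with two `FROM`s (see the comment below on `FROM tbl_name FROM db_name`).
    ///
    /// For example (a sketch, assuming the MySQL dialect; names are placeholders):
    ///
    /// ```
    /// # use sqlparser::dialect::MySqlDialect;
    /// # use sqlparser::parser::Parser;
    /// let sql = "SHOW COLUMNS FROM mytable FROM mydb";
    /// assert!(Parser::parse_sql(&MySqlDialect {}, sql).is_ok());
    /// ```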
18651    fn maybe_parse_show_stmt_in(&mut self) -> Result<Option<ShowStatementIn>, ParserError> {
18652        let clause = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) {
18653            Some(Keyword::FROM) => ShowStatementInClause::FROM,
18654            Some(Keyword::IN) => ShowStatementInClause::IN,
18655            None => return Ok(None),
18656            _ => return self.expected("FROM or IN", self.peek_token()),
18657        };
18658
18659        let (parent_type, parent_name) = match self.parse_one_of_keywords(&[
18660            Keyword::ACCOUNT,
18661            Keyword::DATABASE,
18662            Keyword::SCHEMA,
18663            Keyword::TABLE,
18664            Keyword::VIEW,
18665        ]) {
18666            // If we see these next keywords it means we don't have a parent name
18667            Some(Keyword::DATABASE)
18668                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
18669                    | self.peek_keyword(Keyword::LIMIT) =>
18670            {
18671                (Some(ShowStatementInParentType::Database), None)
18672            }
18673            Some(Keyword::SCHEMA)
18674                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
18675                    | self.peek_keyword(Keyword::LIMIT) =>
18676            {
18677                (Some(ShowStatementInParentType::Schema), None)
18678            }
18679            Some(parent_kw) => {
18680                // The parent name here is still optional, for example:
18681                // SHOW TABLES IN ACCOUNT, so parsing the object name
18682                // may fail because the statement ends.
18683                let parent_name = self.maybe_parse(|p| p.parse_object_name(false))?;
18684                match parent_kw {
18685                    Keyword::ACCOUNT => (Some(ShowStatementInParentType::Account), parent_name),
18686                    Keyword::DATABASE => (Some(ShowStatementInParentType::Database), parent_name),
18687                    Keyword::SCHEMA => (Some(ShowStatementInParentType::Schema), parent_name),
18688                    Keyword::TABLE => (Some(ShowStatementInParentType::Table), parent_name),
18689                    Keyword::VIEW => (Some(ShowStatementInParentType::View), parent_name),
18690                    _ => {
18691                        return self.expected(
18692                            "one of ACCOUNT, DATABASE, SCHEMA, TABLE or VIEW",
18693                            self.peek_token(),
18694                        )
18695                    }
18696                }
18697            }
18698            None => {
18699                // Parsing MySQL style FROM tbl_name FROM db_name
18700                // which is equivalent to FROM db_name.tbl_name
18701                let mut parent_name = self.parse_object_name(false)?;
18702                if self
18703                    .parse_one_of_keywords(&[Keyword::FROM, Keyword::IN])
18704                    .is_some()
18705                {
18706                    parent_name
18707                        .0
18708                        .insert(0, ObjectNamePart::Identifier(self.parse_identifier()?));
18709                }
18710                (None, Some(parent_name))
18711            }
18712        };
18713
18714        Ok(Some(ShowStatementIn {
18715            clause,
18716            parent_type,
18717            parent_name,
18718        }))
18719    }
18720
18721    fn maybe_parse_show_stmt_starts_with(&mut self) -> Result<Option<Value>, ParserError> {
18722        if self.parse_keywords(&[Keyword::STARTS, Keyword::WITH]) {
18723            Ok(Some(self.parse_value()?.value))
18724        } else {
18725            Ok(None)
18726        }
18727    }
18728
18729    fn maybe_parse_show_stmt_limit(&mut self) -> Result<Option<Expr>, ParserError> {
18730        if self.parse_keyword(Keyword::LIMIT) {
18731            Ok(self.parse_limit()?)
18732        } else {
18733            Ok(None)
18734        }
18735    }
18736
18737    fn maybe_parse_show_stmt_from(&mut self) -> Result<Option<Value>, ParserError> {
18738        if self.parse_keyword(Keyword::FROM) {
18739            Ok(Some(self.parse_value()?.value))
18740        } else {
18741            Ok(None)
18742        }
18743    }
18744
18745    pub(crate) fn in_column_definition_state(&self) -> bool {
18746        matches!(self.state, ColumnDefinition)
18747    }
18748
18749    /// Parses options provided in key-value format.
18750    ///
18751    /// * `parenthesized` - true if the options are enclosed in parentheses
18752    /// * `end_words` - a list of keywords, any of which indicates the end of the options section
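    ///
    /// A crate-internal sketch (not compiled as a doc test), assuming the parser is
    /// positioned just before a parenthesized list such as `(TYPE = CSV, SIZE = 10)`:
    ///
    /// ```ignore
    /// // `parenthesized = true` also consumes the surrounding parentheses.
    /// let opts = parser.parse_key_value_options(true, &[])?;
    /// assert_eq!(opts.options.len(), 2);
    /// ```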
18753    pub(crate) fn parse_key_value_options(
18754        &mut self,
18755        parenthesized: bool,
18756        end_words: &[Keyword],
18757    ) -> Result<KeyValueOptions, ParserError> {
18758        let mut options: Vec<KeyValueOption> = Vec::new();
18759        let mut delimiter = KeyValueOptionsDelimiter::Space;
18760        if parenthesized {
18761            self.expect_token(&Token::LParen)?;
18762        }
18763        loop {
18764            match self.next_token().token {
18765                Token::RParen => {
18766                    if parenthesized {
18767                        break;
18768                    } else {
18769                        return self.expected("another option or EOF", self.peek_token());
18770                    }
18771                }
18772                Token::EOF => break,
18773                Token::Comma => {
18774                    delimiter = KeyValueOptionsDelimiter::Comma;
18775                    continue;
18776                }
18777                Token::Word(w) if !end_words.contains(&w.keyword) => {
18778                    options.push(self.parse_key_value_option(&w)?)
18779                }
18780                Token::Word(w) if end_words.contains(&w.keyword) => {
18781                    self.prev_token();
18782                    break;
18783                }
18784                _ => return self.expected("another option, EOF, Comma or ')'", self.peek_token()),
18785            };
18786        }
18787
18788        Ok(KeyValueOptions { delimiter, options })
18789    }
18790
18791    /// Parses a `KEY = VALUE` construct based on the specified key
18792    pub(crate) fn parse_key_value_option(
18793        &mut self,
18794        key: &Word,
18795    ) -> Result<KeyValueOption, ParserError> {
18796        self.expect_token(&Token::Eq)?;
18797        match self.peek_token().token {
18798            Token::SingleQuotedString(_) => Ok(KeyValueOption {
18799                option_name: key.value.clone(),
18800                option_value: KeyValueOptionKind::Single(self.parse_value()?.into()),
18801            }),
18802            Token::Word(word)
18803                if word.keyword == Keyword::TRUE || word.keyword == Keyword::FALSE =>
18804            {
18805                Ok(KeyValueOption {
18806                    option_name: key.value.clone(),
18807                    option_value: KeyValueOptionKind::Single(self.parse_value()?.into()),
18808                })
18809            }
18810            Token::Number(..) => Ok(KeyValueOption {
18811                option_name: key.value.clone(),
18812                option_value: KeyValueOptionKind::Single(self.parse_value()?.into()),
18813            }),
18814            Token::Word(word) => {
18815                self.next_token();
18816                Ok(KeyValueOption {
18817                    option_name: key.value.clone(),
18818                    option_value: KeyValueOptionKind::Single(Value::Placeholder(
18819                        word.value.clone(),
18820                    )),
18821                })
18822            }
18823            Token::LParen => {
18824                // Can be a list of values or a list of key value properties.
18825                // Try to parse a list of values and if that fails, try to parse
18826                // a list of key-value properties.
18827                match self.maybe_parse(|parser| {
18828                    parser.expect_token(&Token::LParen)?;
18829                    let values = parser.parse_comma_separated0(|p| p.parse_value(), Token::RParen);
18830                    parser.expect_token(&Token::RParen)?;
18831                    values
18832                })? {
18833                    Some(values) => {
18834                        let values = values.into_iter().map(|v| v.value).collect();
18835                        Ok(KeyValueOption {
18836                            option_name: key.value.clone(),
18837                            option_value: KeyValueOptionKind::Multi(values),
18838                        })
18839                    }
18840                    None => Ok(KeyValueOption {
18841                        option_name: key.value.clone(),
18842                        option_value: KeyValueOptionKind::KeyValueOptions(Box::new(
18843                            self.parse_key_value_options(true, &[])?,
18844                        )),
18845                    }),
18846                }
18847            }
18848            _ => self.expected("option value", self.peek_token()),
18849        }
18850    }
18851
18852    /// Parses a RESET statement
18853    fn parse_reset(&mut self) -> Result<Statement, ParserError> {
18854        if self.parse_keyword(Keyword::ALL) {
18855            return Ok(Statement::Reset(ResetStatement { reset: Reset::ALL }));
18856        }
18857
18858        let obj = self.parse_object_name(false)?;
18859        Ok(Statement::Reset(ResetStatement {
18860            reset: Reset::ConfigurationParameter(obj),
18861        }))
18862    }
18863}
18864
18865fn maybe_prefixed_expr(expr: Expr, prefix: Option<Ident>) -> Expr {
18866    if let Some(prefix) = prefix {
18867        Expr::Prefixed {
18868            prefix,
18869            value: Box::new(expr),
18870        }
18871    } else {
18872        expr
18873    }
18874}
18875
18876impl Word {
18877    #[deprecated(since = "0.54.0", note = "please use `into_ident` instead")]
18878    pub fn to_ident(&self, span: Span) -> Ident {
18879        Ident {
18880            value: self.value.clone(),
18881            quote_style: self.quote_style,
18882            span,
18883        }
18884    }
18885
18886    /// Convert this word into an [`Ident`] identifier
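    ///
    /// A minimal sketch, constructing a [`Word`] by hand:
    ///
    /// ```
    /// # use sqlparser::keywords::Keyword;
    /// # use sqlparser::tokenizer::{Span, Word};
    /// let word = Word {
    ///     value: "foo".to_string(),
    ///     quote_style: None,
    ///     keyword: Keyword::NoKeyword,
    /// };
    /// let ident = word.into_ident(Span::empty());
    /// assert_eq!(ident.value, "foo");
    /// ```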
18887    pub fn into_ident(self, span: Span) -> Ident {
18888        Ident {
18889            value: self.value,
18890            quote_style: self.quote_style,
18891            span,
18892        }
18893    }
18894}
18895
18896#[cfg(test)]
18897mod tests {
18898    use crate::test_utils::{all_dialects, TestedDialects};
18899
18900    use super::*;
18901
18902    #[test]
18903    fn test_prev_index() {
18904        let sql = "SELECT version";
18905        all_dialects().run_parser_method(sql, |parser| {
18906            assert_eq!(parser.peek_token(), Token::make_keyword("SELECT"));
18907            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
18908            parser.prev_token();
18909            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
18910            assert_eq!(parser.next_token(), Token::make_word("version", None));
18911            parser.prev_token();
18912            assert_eq!(parser.peek_token(), Token::make_word("version", None));
18913            assert_eq!(parser.next_token(), Token::make_word("version", None));
18914            assert_eq!(parser.peek_token(), Token::EOF);
18915            parser.prev_token();
18916            assert_eq!(parser.next_token(), Token::make_word("version", None));
18917            assert_eq!(parser.next_token(), Token::EOF);
18918            assert_eq!(parser.next_token(), Token::EOF);
18919            parser.prev_token();
18920        });
18921    }
18922
18923    #[test]
18924    fn test_peek_tokens() {
18925        all_dialects().run_parser_method("SELECT foo AS bar FROM baz", |parser| {
18926            assert!(matches!(
18927                parser.peek_tokens(),
18928                [Token::Word(Word {
18929                    keyword: Keyword::SELECT,
18930                    ..
18931                })]
18932            ));
18933
18934            assert!(matches!(
18935                parser.peek_tokens(),
18936                [
18937                    Token::Word(Word {
18938                        keyword: Keyword::SELECT,
18939                        ..
18940                    }),
18941                    Token::Word(_),
18942                    Token::Word(Word {
18943                        keyword: Keyword::AS,
18944                        ..
18945                    }),
18946                ]
18947            ));
18948
18949            for _ in 0..4 {
18950                parser.next_token();
18951            }
18952
18953            assert!(matches!(
18954                parser.peek_tokens(),
18955                [
18956                    Token::Word(Word {
18957                        keyword: Keyword::FROM,
18958                        ..
18959                    }),
18960                    Token::Word(_),
18961                    Token::EOF,
18962                    Token::EOF,
18963                ]
18964            ))
18965        })
18966    }
18967
18968    #[cfg(test)]
18969    mod test_parse_data_type {
18970        use crate::ast::{
18971            CharLengthUnits, CharacterLength, DataType, ExactNumberInfo, ObjectName, TimezoneInfo,
18972        };
18973        use crate::dialect::{AnsiDialect, GenericDialect, PostgreSqlDialect};
18974        use crate::test_utils::TestedDialects;
18975
18976        macro_rules! test_parse_data_type {
18977            ($dialect:expr, $input:expr, $expected_type:expr $(,)?) => {{
18978                $dialect.run_parser_method(&*$input, |parser| {
18979                    let data_type = parser.parse_data_type().unwrap();
18980                    assert_eq!($expected_type, data_type);
18981                    assert_eq!($input.to_string(), data_type.to_string());
18982                });
18983            }};
18984        }
18985
18986        #[test]
18987        fn test_ansii_character_string_types() {
18988            // Character string types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#character-string-type>
18989            let dialect =
18990                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
18991
18992            test_parse_data_type!(dialect, "CHARACTER", DataType::Character(None));
18993
18994            test_parse_data_type!(
18995                dialect,
18996                "CHARACTER(20)",
18997                DataType::Character(Some(CharacterLength::IntegerLength {
18998                    length: 20,
18999                    unit: None
19000                }))
19001            );
19002
19003            test_parse_data_type!(
19004                dialect,
19005                "CHARACTER(20 CHARACTERS)",
19006                DataType::Character(Some(CharacterLength::IntegerLength {
19007                    length: 20,
19008                    unit: Some(CharLengthUnits::Characters)
19009                }))
19010            );
19011
19012            test_parse_data_type!(
19013                dialect,
19014                "CHARACTER(20 OCTETS)",
19015                DataType::Character(Some(CharacterLength::IntegerLength {
19016                    length: 20,
19017                    unit: Some(CharLengthUnits::Octets)
19018                }))
19019            );
19020
19021            test_parse_data_type!(dialect, "CHAR", DataType::Char(None));
19022
19023            test_parse_data_type!(
19024                dialect,
19025                "CHAR(20)",
19026                DataType::Char(Some(CharacterLength::IntegerLength {
19027                    length: 20,
19028                    unit: None
19029                }))
19030            );
19031
19032            test_parse_data_type!(
19033                dialect,
19034                "CHAR(20 CHARACTERS)",
19035                DataType::Char(Some(CharacterLength::IntegerLength {
19036                    length: 20,
19037                    unit: Some(CharLengthUnits::Characters)
19038                }))
19039            );
19040
19041            test_parse_data_type!(
19042                dialect,
19043                "CHAR(20 OCTETS)",
19044                DataType::Char(Some(CharacterLength::IntegerLength {
19045                    length: 20,
19046                    unit: Some(CharLengthUnits::Octets)
19047                }))
19048            );
19049
19050            test_parse_data_type!(
19051                dialect,
19052                "CHARACTER VARYING(20)",
19053                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
19054                    length: 20,
19055                    unit: None
19056                }))
19057            );
19058
19059            test_parse_data_type!(
19060                dialect,
19061                "CHARACTER VARYING(20 CHARACTERS)",
19062                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
19063                    length: 20,
19064                    unit: Some(CharLengthUnits::Characters)
19065                }))
19066            );
19067
19068            test_parse_data_type!(
19069                dialect,
19070                "CHARACTER VARYING(20 OCTETS)",
19071                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
19072                    length: 20,
19073                    unit: Some(CharLengthUnits::Octets)
19074                }))
19075            );
19076
19077            test_parse_data_type!(
19078                dialect,
19079                "CHAR VARYING(20)",
19080                DataType::CharVarying(Some(CharacterLength::IntegerLength {
19081                    length: 20,
19082                    unit: None
19083                }))
19084            );
19085
19086            test_parse_data_type!(
19087                dialect,
19088                "CHAR VARYING(20 CHARACTERS)",
19089                DataType::CharVarying(Some(CharacterLength::IntegerLength {
19090                    length: 20,
19091                    unit: Some(CharLengthUnits::Characters)
19092                }))
19093            );
19094
19095            test_parse_data_type!(
19096                dialect,
19097                "CHAR VARYING(20 OCTETS)",
19098                DataType::CharVarying(Some(CharacterLength::IntegerLength {
19099                    length: 20,
19100                    unit: Some(CharLengthUnits::Octets)
19101                }))
19102            );
19103
19104            test_parse_data_type!(
19105                dialect,
19106                "VARCHAR(20)",
19107                DataType::Varchar(Some(CharacterLength::IntegerLength {
19108                    length: 20,
19109                    unit: None
19110                }))
19111            );
19112        }
19113
19114        #[test]
19115        fn test_ansii_character_large_object_types() {
19116            // Character large object types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#character-large-object-length>
19117            let dialect =
19118                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
19119
19120            test_parse_data_type!(
19121                dialect,
19122                "CHARACTER LARGE OBJECT",
19123                DataType::CharacterLargeObject(None)
19124            );
19125            test_parse_data_type!(
19126                dialect,
19127                "CHARACTER LARGE OBJECT(20)",
19128                DataType::CharacterLargeObject(Some(20))
19129            );
19130
19131            test_parse_data_type!(
19132                dialect,
19133                "CHAR LARGE OBJECT",
19134                DataType::CharLargeObject(None)
19135            );
19136            test_parse_data_type!(
19137                dialect,
19138                "CHAR LARGE OBJECT(20)",
19139                DataType::CharLargeObject(Some(20))
19140            );
19141
19142            test_parse_data_type!(dialect, "CLOB", DataType::Clob(None));
19143            test_parse_data_type!(dialect, "CLOB(20)", DataType::Clob(Some(20)));
19144        }
19145
19146        #[test]
19147        fn test_parse_custom_types() {
19148            let dialect =
19149                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
19150
19151            test_parse_data_type!(
19152                dialect,
19153                "GEOMETRY",
19154                DataType::Custom(ObjectName::from(vec!["GEOMETRY".into()]), vec![])
19155            );
19156
19157            test_parse_data_type!(
19158                dialect,
19159                "GEOMETRY(POINT)",
19160                DataType::Custom(
19161                    ObjectName::from(vec!["GEOMETRY".into()]),
19162                    vec!["POINT".to_string()]
19163                )
19164            );
19165
19166            test_parse_data_type!(
19167                dialect,
19168                "GEOMETRY(POINT, 4326)",
19169                DataType::Custom(
19170                    ObjectName::from(vec!["GEOMETRY".into()]),
19171                    vec!["POINT".to_string(), "4326".to_string()]
19172                )
19173            );
19174        }
19175
19176        #[test]
19177        fn test_ansii_exact_numeric_types() {
19178            // Exact numeric types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#exact-numeric-type>
19179            let dialect = TestedDialects::new(vec![
19180                Box::new(GenericDialect {}),
19181                Box::new(AnsiDialect {}),
19182                Box::new(PostgreSqlDialect {}),
19183            ]);
19184
19185            test_parse_data_type!(dialect, "NUMERIC", DataType::Numeric(ExactNumberInfo::None));
19186
19187            test_parse_data_type!(
19188                dialect,
19189                "NUMERIC(2)",
19190                DataType::Numeric(ExactNumberInfo::Precision(2))
19191            );
19192
19193            test_parse_data_type!(
19194                dialect,
19195                "NUMERIC(2,10)",
19196                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(2, 10))
19197            );
19198
19199            test_parse_data_type!(dialect, "DECIMAL", DataType::Decimal(ExactNumberInfo::None));
19200
19201            test_parse_data_type!(
19202                dialect,
19203                "DECIMAL(2)",
19204                DataType::Decimal(ExactNumberInfo::Precision(2))
19205            );
19206
19207            test_parse_data_type!(
19208                dialect,
19209                "DECIMAL(2,10)",
19210                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(2, 10))
19211            );
19212
19213            test_parse_data_type!(dialect, "DEC", DataType::Dec(ExactNumberInfo::None));
19214
19215            test_parse_data_type!(
19216                dialect,
19217                "DEC(2)",
19218                DataType::Dec(ExactNumberInfo::Precision(2))
19219            );
19220
19221            test_parse_data_type!(
19222                dialect,
19223                "DEC(2,10)",
19224                DataType::Dec(ExactNumberInfo::PrecisionAndScale(2, 10))
19225            );
19226
19227            // Test negative scale values.
19228            test_parse_data_type!(
19229                dialect,
19230                "NUMERIC(10,-2)",
19231                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -2))
19232            );
19233
19234            test_parse_data_type!(
19235                dialect,
19236                "DECIMAL(1000,-10)",
19237                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(1000, -10))
19238            );
19239
19240            test_parse_data_type!(
19241                dialect,
19242                "DEC(5,-1000)",
19243                DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -1000))
19244            );
19245
19246            test_parse_data_type!(
19247                dialect,
19248                "NUMERIC(10,-5)",
19249                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -5))
19250            );
19251
19252            test_parse_data_type!(
19253                dialect,
19254                "DECIMAL(20,-10)",
19255                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(20, -10))
19256            );
19257
19258            test_parse_data_type!(
19259                dialect,
19260                "DEC(5,-2)",
19261                DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -2))
19262            );
19263
19264            dialect.run_parser_method("NUMERIC(10,+5)", |parser| {
19265                let data_type = parser.parse_data_type().unwrap();
19266                assert_eq!(
19267                    DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, 5)),
19268                    data_type
19269                );
19270                // Note: Explicit '+' sign is not preserved in output, which is correct
19271                assert_eq!("NUMERIC(10,5)", data_type.to_string());
19272            });
19273        }
19274
19275        #[test]
19276        fn test_ansii_date_type() {
19277            // Datetime types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#datetime-type>
19278            let dialect =
19279                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
19280
19281            test_parse_data_type!(dialect, "DATE", DataType::Date);
19282
19283            test_parse_data_type!(dialect, "TIME", DataType::Time(None, TimezoneInfo::None));
19284
19285            test_parse_data_type!(
19286                dialect,
19287                "TIME(6)",
19288                DataType::Time(Some(6), TimezoneInfo::None)
19289            );
19290
19291            test_parse_data_type!(
19292                dialect,
19293                "TIME WITH TIME ZONE",
19294                DataType::Time(None, TimezoneInfo::WithTimeZone)
19295            );
19296
19297            test_parse_data_type!(
19298                dialect,
19299                "TIME(6) WITH TIME ZONE",
19300                DataType::Time(Some(6), TimezoneInfo::WithTimeZone)
19301            );
19302
19303            test_parse_data_type!(
19304                dialect,
19305                "TIME WITHOUT TIME ZONE",
19306                DataType::Time(None, TimezoneInfo::WithoutTimeZone)
19307            );
19308
19309            test_parse_data_type!(
19310                dialect,
19311                "TIME(6) WITHOUT TIME ZONE",
19312                DataType::Time(Some(6), TimezoneInfo::WithoutTimeZone)
19313            );
19314
19315            test_parse_data_type!(
19316                dialect,
19317                "TIMESTAMP",
19318                DataType::Timestamp(None, TimezoneInfo::None)
19319            );
19320
19321            test_parse_data_type!(
19322                dialect,
19323                "TIMESTAMP(22)",
19324                DataType::Timestamp(Some(22), TimezoneInfo::None)
19325            );
19326
19327            test_parse_data_type!(
19328                dialect,
19329                "TIMESTAMP(22) WITH TIME ZONE",
19330                DataType::Timestamp(Some(22), TimezoneInfo::WithTimeZone)
19331            );
19332
19333            test_parse_data_type!(
19334                dialect,
19335                "TIMESTAMP(33) WITHOUT TIME ZONE",
19336                DataType::Timestamp(Some(33), TimezoneInfo::WithoutTimeZone)
19337            );
19338        }
19339    }
19340
19341    #[test]
19342    fn test_parse_schema_name() {
19343        // The expected serialization should be identical to the input, so the macro does not take a separate expected string
19344        macro_rules! test_parse_schema_name {
19345            ($input:expr, $expected_name:expr $(,)?) => {{
19346                all_dialects().run_parser_method(&*$input, |parser| {
19347                    let schema_name = parser.parse_schema_name().unwrap();
19348                    // Validate that the structure is the same as expected
19349                    assert_eq!(schema_name, $expected_name);
19350                    // Validate that the input and the expected structure serialization are the same
19351                    assert_eq!(schema_name.to_string(), $input.to_string());
19352                });
19353            }};
19354        }
19355
19356        let dummy_name = ObjectName::from(vec![Ident::new("dummy_name")]);
19357        let dummy_authorization = Ident::new("dummy_authorization");
19358
19359        test_parse_schema_name!(
19360            format!("{dummy_name}"),
19361            SchemaName::Simple(dummy_name.clone())
19362        );
19363
19364        test_parse_schema_name!(
19365            format!("AUTHORIZATION {dummy_authorization}"),
19366            SchemaName::UnnamedAuthorization(dummy_authorization.clone()),
19367        );
19368        test_parse_schema_name!(
19369            format!("{dummy_name} AUTHORIZATION {dummy_authorization}"),
19370            SchemaName::NamedAuthorization(dummy_name.clone(), dummy_authorization.clone()),
19371        );
19372    }
19373
19374    #[test]
19375    fn mysql_parse_index_table_constraint() {
19376        macro_rules! test_parse_table_constraint {
19377            ($dialect:expr, $input:expr, $expected:expr $(,)?) => {{
19378                $dialect.run_parser_method(&*$input, |parser| {
19379                    let constraint = parser.parse_optional_table_constraint().unwrap().unwrap();
19380                    // Validate that the structure is the same as expected
19381                    assert_eq!(constraint, $expected);
19382                    // Validate that the input and the expected structure serialization are the same
19383                    assert_eq!(constraint.to_string(), $input.to_string());
19384                });
19385            }};
19386        }
19387
19388        fn mk_expected_col(name: &str) -> IndexColumn {
19389            IndexColumn {
19390                column: OrderByExpr {
19391                    expr: Expr::Identifier(name.into()),
19392                    options: OrderByOptions {
19393                        asc: None,
19394                        nulls_first: None,
19395                    },
19396                    with_fill: None,
19397                },
19398                operator_class: None,
19399            }
19400        }
19401
19402        let dialect =
19403            TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(MySqlDialect {})]);
19404
19405        test_parse_table_constraint!(
19406            dialect,
19407            "INDEX (c1)",
19408            IndexConstraint {
19409                display_as_key: false,
19410                name: None,
19411                index_type: None,
19412                columns: vec![mk_expected_col("c1")],
19413                index_options: vec![],
19414            }
19415            .into()
19416        );
19417
19418        test_parse_table_constraint!(
19419            dialect,
19420            "KEY (c1)",
19421            IndexConstraint {
19422                display_as_key: true,
19423                name: None,
19424                index_type: None,
19425                columns: vec![mk_expected_col("c1")],
19426                index_options: vec![],
19427            }
19428            .into()
19429        );
19430
19431        test_parse_table_constraint!(
19432            dialect,
19433            "INDEX 'index' (c1, c2)",
19434            TableConstraint::Index(IndexConstraint {
19435                display_as_key: false,
19436                name: Some(Ident::with_quote('\'', "index")),
19437                index_type: None,
19438                columns: vec![mk_expected_col("c1"), mk_expected_col("c2")],
19439                index_options: vec![],
19440            })
19441        );
19442
19443        test_parse_table_constraint!(
19444            dialect,
19445            "INDEX USING BTREE (c1)",
19446            IndexConstraint {
19447                display_as_key: false,
19448                name: None,
19449                index_type: Some(IndexType::BTree),
19450                columns: vec![mk_expected_col("c1")],
19451                index_options: vec![],
19452            }
19453            .into()
19454        );
19455
19456        test_parse_table_constraint!(
19457            dialect,
19458            "INDEX USING HASH (c1)",
19459            IndexConstraint {
19460                display_as_key: false,
19461                name: None,
19462                index_type: Some(IndexType::Hash),
19463                columns: vec![mk_expected_col("c1")],
19464                index_options: vec![],
19465            }
19466            .into()
19467        );
19468
19469        test_parse_table_constraint!(
19470            dialect,
19471            "INDEX idx_name USING BTREE (c1)",
19472            IndexConstraint {
19473                display_as_key: false,
19474                name: Some(Ident::new("idx_name")),
19475                index_type: Some(IndexType::BTree),
19476                columns: vec![mk_expected_col("c1")],
19477                index_options: vec![],
19478            }
19479            .into()
19480        );
19481
19482        test_parse_table_constraint!(
19483            dialect,
19484            "INDEX idx_name USING HASH (c1)",
19485            IndexConstraint {
19486                display_as_key: false,
19487                name: Some(Ident::new("idx_name")),
19488                index_type: Some(IndexType::Hash),
19489                columns: vec![mk_expected_col("c1")],
19490                index_options: vec![],
19491            }
19492            .into()
19493        );
19494    }
19495
    #[test]
    fn test_tokenizer_error_loc() {
        let sql = "foo '";
        let ast = Parser::parse_sql(&GenericDialect, sql);
        assert_eq!(
            ast,
            Err(ParserError::TokenizerError(
                "Unterminated string literal at Line: 1, Column: 5".to_string()
            ))
        );
    }

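    // Parser errors should likewise report the location of the unexpected token.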
    #[test]
    fn test_parser_error_loc() {
        let sql = "SELECT this is a syntax error";
        let ast = Parser::parse_sql(&GenericDialect, sql);
        assert_eq!(
            ast,
            Err(ParserError::ParserError(
                "Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS, found: a at Line: 1, Column: 16"
                    .to_string()
            ))
        );
    }

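    // EXPLAIN cannot be nested; a second EXPLAIN must be rejected.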
    #[test]
    fn test_nested_explain_error() {
        let sql = "EXPLAIN EXPLAIN SELECT 1";
        let ast = Parser::parse_sql(&GenericDialect, sql);
        assert_eq!(
            ast,
            Err(ParserError::ParserError(
                "Explain must be root of the plan".to_string()
            ))
        );
    }

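    // `parse_multipart_identifier` should handle quoted parts (including
    // embedded quotes and dots) as well as whitespace around the periods.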
    #[test]
    fn test_parse_multipart_identifier_positive() {
        let dialect = TestedDialects::new(vec![Box::new(GenericDialect {})]);

        // parse multipart with quotes
        let expected = vec![
            Ident {
                value: "CATALOG".to_string(),
                quote_style: None,
                span: Span::empty(),
            },
            Ident {
                value: "F(o)o. \"bar".to_string(),
                quote_style: Some('"'),
                span: Span::empty(),
            },
            Ident {
                value: "table".to_string(),
                quote_style: None,
                span: Span::empty(),
            },
        ];
        dialect.run_parser_method(r#"CATALOG."F(o)o. ""bar".table"#, |parser| {
            let actual = parser.parse_multipart_identifier().unwrap();
            assert_eq!(expected, actual);
        });

        // allow whitespace between ident parts
        let expected = vec![
            Ident {
                value: "CATALOG".to_string(),
                quote_style: None,
                span: Span::empty(),
            },
            Ident {
                value: "table".to_string(),
                quote_style: None,
                span: Span::empty(),
            },
        ];
        dialect.run_parser_method("CATALOG . table", |parser| {
            let actual = parser.parse_multipart_identifier().unwrap();
            assert_eq!(expected, actual);
        });
    }

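    // Malformed multipart identifiers should fail with a descriptive error message.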
    #[test]
    fn test_parse_multipart_identifier_negative() {
        macro_rules! test_parse_multipart_identifier_error {
            ($input:expr, $expected_err:expr $(,)?) => {{
                all_dialects().run_parser_method(&*$input, |parser| {
                    let actual_err = parser.parse_multipart_identifier().unwrap_err();
                    assert_eq!(actual_err.to_string(), $expected_err);
                });
            }};
        }

        test_parse_multipart_identifier_error!(
            "",
            "sql parser error: Empty input when parsing identifier",
        );

        test_parse_multipart_identifier_error!(
            "*schema.table",
            "sql parser error: Unexpected token in identifier: *",
        );

        test_parse_multipart_identifier_error!(
            "schema.table*",
            "sql parser error: Unexpected token in identifier: *",
        );

        test_parse_multipart_identifier_error!(
            "schema.table.",
            "sql parser error: Trailing period in identifier",
        );

        test_parse_multipart_identifier_error!(
            "schema.*",
            "sql parser error: Unexpected token following period in identifier: *",
        );
    }

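    // MySQL `PARTITION (...)` selection in FROM should surface the partition
    // names on `TableFactor::Table`.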
    #[test]
    fn test_mysql_partition_selection() {
        let sql = "SELECT * FROM employees PARTITION (p0, p2)";
        let expected = vec!["p0", "p2"];

        let ast: Vec<Statement> = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
        assert_eq!(ast.len(), 1);
        if let Statement::Query(v) = &ast[0] {
            if let SetExpr::Select(select) = &*v.body {
                assert_eq!(select.from.len(), 1);
                let from: &TableWithJoins = &select.from[0];
                let table_factor = &from.relation;
                if let TableFactor::Table { partitions, .. } = table_factor {
                    let actual: Vec<&str> = partitions
                        .iter()
                        .map(|ident| ident.value.as_str())
                        .collect();
                    assert_eq!(expected, actual);
                }
            }
        } else {
            panic!("failed to parse MySQL partition selection");
        }
    }

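    // REPLACE INTO with an unsupported placeholder token must be rejected.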
    #[test]
    fn test_replace_into_placeholders() {
        let sql = "REPLACE INTO t (a) VALUES (&a)";

        assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
    }

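    // REPLACE INTO ... SET requires column assignments; a lone placeholder is rejected.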
    #[test]
    fn test_replace_into_set_placeholder() {
        let sql = "REPLACE INTO t SET ?";

        assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
    }

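    // A bare REPLACE with nothing following it is incomplete and must not parse.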
    #[test]
    fn test_replace_incomplete() {
        let sql = r#"REPLACE"#;

        assert!(Parser::parse_sql(&MySqlDialect {}, sql).is_err());
    }

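    // A `:` separated from its identifier by whitespace or a comment is not a
    // valid placeholder, so these inputs must fail to parse.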
    #[test]
    fn test_placeholder_invalid_whitespace() {
        for w in ["  ", "/*invalid*/"] {
            let sql = format!("\nSELECT\n  :{w}fooBar");
            assert!(Parser::parse_sql(&GenericDialect, &sql).is_err());
        }
    }
}