sqlparser/parser/mod.rs

1// Licensed under the Apache License, Version 2.0 (the "License");
2// you may not use this file except in compliance with the License.
3// You may obtain a copy of the License at
4//
5// http://www.apache.org/licenses/LICENSE-2.0
6//
7// Unless required by applicable law or agreed to in writing, software
8// distributed under the License is distributed on an "AS IS" BASIS,
9// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
10// See the License for the specific language governing permissions and
11// limitations under the License.
12
13//! SQL Parser
14
15#[cfg(not(feature = "std"))]
16use alloc::{
17    boxed::Box,
18    format,
19    string::{String, ToString},
20    vec,
21    vec::Vec,
22};
23use core::{
24    fmt::{self, Display},
25    str::FromStr,
26};
27use helpers::attached_token::AttachedToken;
28
29use log::debug;
30
31use recursion::RecursionCounter;
32use IsLateral::*;
33use IsOptional::*;
34
35use crate::ast::helpers::{
36    key_value_options::{
37        KeyValueOption, KeyValueOptionKind, KeyValueOptions, KeyValueOptionsDelimiter,
38    },
39    stmt_create_table::{CreateTableBuilder, CreateTableConfiguration},
40};
41use crate::ast::Statement::CreatePolicy;
42use crate::ast::*;
43use crate::dialect::*;
44use crate::keywords::{Keyword, ALL_KEYWORDS};
45use crate::tokenizer::*;
use crate::parser::ParserState::ColumnDefinition;
47
48mod alter;
49
50#[derive(Debug, Clone, PartialEq, Eq)]
51pub enum ParserError {
52    TokenizerError(String),
53    ParserError(String),
54    RecursionLimitExceeded,
55}
56
57// Use `Parser::expected` instead, if possible
58macro_rules! parser_err {
59    ($MSG:expr, $loc:expr) => {
60        Err(ParserError::ParserError(format!("{}{}", $MSG, $loc)))
61    };
62}
63
64#[cfg(feature = "std")]
/// Implementation of [`RecursionCounter`] used when std is available
66mod recursion {
67    use std::cell::Cell;
68    use std::rc::Rc;
69
70    use super::ParserError;
71
    /// Tracks the remaining recursion depth. The value is decremented on
    /// each call to [`RecursionCounter::try_decrease()`]; when it reaches 0,
    /// an error is returned.
    ///
    /// Note: Uses an [`std::rc::Rc`] and [`std::cell::Cell`] in order to satisfy the Rust
    /// borrow checker, so that the automatic [`DepthGuard`] can hold a
    /// reference to the counter and restore the depth when dropped.
79    ///
80    /// Note: when "recursive-protection" feature is enabled, this crate uses additional stack overflow protection
81    /// for some of its recursive methods. See [`recursive::recursive`] for more information.
82    pub(crate) struct RecursionCounter {
83        remaining_depth: Rc<Cell<usize>>,
84    }
85
86    impl RecursionCounter {
87        /// Creates a [`RecursionCounter`] with the specified maximum
88        /// depth
89        pub fn new(remaining_depth: usize) -> Self {
90            Self {
91                remaining_depth: Rc::new(remaining_depth.into()),
92            }
93        }
94
95        /// Decreases the remaining depth by 1.
96        ///
        /// Returns [`Err`] if the remaining depth is already 0.
        ///
        /// On success, returns a [`DepthGuard`] that adds 1 back to the
        /// remaining depth when it is dropped.
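        ///
        /// A minimal sketch of the internal guard pattern (illustrative only;
        /// not a doctest, since this type is crate-private):
        ///
        /// ```ignore
        /// let counter = RecursionCounter::new(2);
        /// let _g1 = counter.try_decrease()?; // remaining depth: 1
        /// let _g2 = counter.try_decrease()?; // remaining depth: 0
        /// assert!(counter.try_decrease().is_err()); // RecursionLimitExceeded
        /// // dropping `_g1` / `_g2` restores the remaining depth
        /// ```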
101        pub fn try_decrease(&self) -> Result<DepthGuard, ParserError> {
102            let old_value = self.remaining_depth.get();
103            // ran out of space
104            if old_value == 0 {
105                Err(ParserError::RecursionLimitExceeded)
106            } else {
107                self.remaining_depth.set(old_value - 1);
108                Ok(DepthGuard::new(Rc::clone(&self.remaining_depth)))
109            }
110        }
111    }
112
113    /// Guard that increases the remaining depth by 1 on drop
114    pub struct DepthGuard {
115        remaining_depth: Rc<Cell<usize>>,
116    }
117
118    impl DepthGuard {
119        fn new(remaining_depth: Rc<Cell<usize>>) -> Self {
120            Self { remaining_depth }
121        }
122    }
123    impl Drop for DepthGuard {
124        fn drop(&mut self) {
125            let old_value = self.remaining_depth.get();
126            self.remaining_depth.set(old_value + 1);
127        }
128    }
129}
130
131#[cfg(not(feature = "std"))]
132mod recursion {
    /// Implementation of [`RecursionCounter`] used when std is NOT available
    /// (and which therefore does not guard against stack overflow).
135    ///
136    /// Has the same API as the std [`RecursionCounter`] implementation
137    /// but does not actually limit stack depth.
138    pub(crate) struct RecursionCounter {}
139
140    impl RecursionCounter {
141        pub fn new(_remaining_depth: usize) -> Self {
142            Self {}
143        }
144        pub fn try_decrease(&self) -> Result<DepthGuard, super::ParserError> {
145            Ok(DepthGuard {})
146        }
147    }
148
149    pub struct DepthGuard {}
150}
151
152#[derive(PartialEq, Eq)]
153pub enum IsOptional {
154    Optional,
155    Mandatory,
156}
157
158pub enum IsLateral {
159    Lateral,
160    NotLateral,
161}
162
163pub enum WildcardExpr {
164    Expr(Expr),
165    QualifiedWildcard(ObjectName),
166    Wildcard,
167}
168
169impl From<TokenizerError> for ParserError {
170    fn from(e: TokenizerError) -> Self {
171        ParserError::TokenizerError(e.to_string())
172    }
173}
174
175impl fmt::Display for ParserError {
176    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
177        write!(
178            f,
179            "sql parser error: {}",
180            match self {
181                ParserError::TokenizerError(s) => s,
182                ParserError::ParserError(s) => s,
183                ParserError::RecursionLimitExceeded => "recursion limit exceeded",
184            }
185        )
186    }
187}
188
189#[cfg(feature = "std")]
190impl std::error::Error for ParserError {}
191
192// By default, allow expressions up to this deep before erroring
193const DEFAULT_REMAINING_DEPTH: usize = 50;
194
195// A constant EOF token that can be referenced.
196const EOF_TOKEN: TokenWithSpan = TokenWithSpan {
197    token: Token::EOF,
198    span: Span {
199        start: Location { line: 0, column: 0 },
200        end: Location { line: 0, column: 0 },
201    },
202};
203
/// Composite type declarations using angle-bracket syntax can be arbitrarily
/// nested, so the following declaration is possible:
///      `ARRAY<ARRAY<INT>>`
/// But the tokenizer recognizes the `>>` as a ShiftRight token.
/// We work around that limitation when parsing a data type by accepting
/// either a `>` or a `>>` token in such cases, remembering which variant we
/// matched.
/// In the latter case, having matched a `>>`, the parent type does not look
/// for its own closing `>`, since that token has already been consumed while
/// parsing the child type.
214///
215/// See [Parser::parse_data_type] for details
216struct MatchedTrailingBracket(bool);
217
218impl From<bool> for MatchedTrailingBracket {
219    fn from(value: bool) -> Self {
220        Self(value)
221    }
222}
223
224/// Options that control how the [`Parser`] parses SQL text
225#[derive(Debug, Clone, PartialEq, Eq)]
226pub struct ParserOptions {
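    /// Controls whether trailing commas are allowed, e.g. in a `SELECT`
    /// projection list. See [`ParserOptions::with_trailing_commas`].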
227    pub trailing_commas: bool,
228    /// Controls how literal values are unescaped. See
229    /// [`Tokenizer::with_unescape`] for more details.
230    pub unescape: bool,
    /// Controls whether the parser requires a semicolon token
    /// between statements. Defaults to `true`.
233    pub require_semicolon_stmt_delimiter: bool,
234}
235
236impl Default for ParserOptions {
237    fn default() -> Self {
238        Self {
239            trailing_commas: false,
240            unescape: true,
241            require_semicolon_stmt_delimiter: true,
242        }
243    }
244}
245
246impl ParserOptions {
247    /// Create a new [`ParserOptions`]
248    pub fn new() -> Self {
249        Default::default()
250    }
251
252    /// Set if trailing commas are allowed.
253    ///
254    /// If this option is `false` (the default), the following SQL will
255    /// not parse. If the option is `true`, the SQL will parse.
256    ///
257    /// ```sql
258    ///  SELECT
259    ///   foo,
260    ///   bar,
261    ///  FROM baz
262    /// ```
263    pub fn with_trailing_commas(mut self, trailing_commas: bool) -> Self {
264        self.trailing_commas = trailing_commas;
265        self
266    }
267
268    /// Set if literal values are unescaped. Defaults to true. See
269    /// [`Tokenizer::with_unescape`] for more details.
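    ///
    /// A minimal sketch of disabling unescaping (illustrative, using
    /// [`MySqlDialect`], where `\n` would otherwise be decoded to a newline):
    ///
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserOptions}, dialect::MySqlDialect};
    /// let options = ParserOptions::new().with_unescape(false);
    /// // With unescaping disabled, the `\n` escape sequence is kept verbatim
    /// // in the parsed string literal instead of being decoded.
    /// let _ast = Parser::new(&MySqlDialect {})
    ///     .with_options(options)
    ///     .try_with_sql(r"SELECT '\n'")
    ///     .unwrap()
    ///     .parse_statements()
    ///     .unwrap();
    /// ```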
270    pub fn with_unescape(mut self, unescape: bool) -> Self {
271        self.unescape = unescape;
272        self
273    }
274}
275
276#[derive(Copy, Clone)]
277enum ParserState {
278    /// The default state of the parser.
279    Normal,
    /// The state when parsing a `CONNECT BY` expression. This allows parsing
    /// `PRIOR` expressions while still allowing `PRIOR` to be used as an
    /// identifier in other contexts.
283    ConnectBy,
    /// The state when parsing column definitions. This state prohibits
    /// parsing `NOT NULL` as an alias for `IS NOT NULL`. For example:
286    /// ```sql
287    /// CREATE TABLE foo (abc BIGINT NOT NULL);
288    /// ```
289    ColumnDefinition,
290}
291
292/// A SQL Parser
293///
294/// This struct is the main entry point for parsing SQL queries.
295///
296/// # Functionality:
297/// * Parsing SQL: see examples on [`Parser::new`] and [`Parser::parse_sql`]
298/// * Controlling recursion: See [`Parser::with_recursion_limit`]
299/// * Controlling parser options: See [`Parser::with_options`]
300/// * Providing your own tokens: See [`Parser::with_tokens`]
301///
302/// # Internals
303///
304/// The parser uses a [`Tokenizer`] to tokenize the input SQL string into a
305/// `Vec` of [`TokenWithSpan`]s and maintains an `index` to the current token
306/// being processed. The token vec may contain multiple SQL statements.
307///
308/// * The "current" token is the token at `index - 1`
309/// * The "next" token is the token at `index`
310/// * The "previous" token is the token at `index - 2`
311///
312/// If `index` is equal to the length of the token stream, the 'next' token is
313/// [`Token::EOF`].
314///
/// For example, the SQL string "SELECT * FROM foo" will be tokenized into the
/// following tokens:
317/// ```text
318///  [
319///    "SELECT", // token index 0
320///    " ",      // whitespace
321///    "*",
322///    " ",
323///    "FROM",
324///    " ",
325///    "foo"
326///   ]
327/// ```
328///
329///
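/// A short sketch of how the token index advances (illustrative):
///
/// ```
/// # use sqlparser::{parser::Parser, dialect::GenericDialect, tokenizer::Token};
/// let dialect = GenericDialect {};
/// let mut parser = Parser::new(&dialect).try_with_sql("SELECT * FROM foo").unwrap();
/// // `peek_token` returns the next token without advancing the index...
/// assert!(matches!(parser.peek_token().token, Token::Word(_)));
/// // ...while `next_token` returns that token and advances past it.
/// assert!(matches!(parser.next_token().token, Token::Word(_)));
/// assert_eq!(parser.peek_token().token, Token::Mul);
/// ```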
330pub struct Parser<'a> {
331    /// The tokens
332    tokens: Vec<TokenWithSpan>,
333    /// The index of the first unprocessed token in [`Parser::tokens`].
334    index: usize,
335    /// The current state of the parser.
336    state: ParserState,
337    /// The SQL dialect to use.
338    dialect: &'a dyn Dialect,
    /// Additional options that allow you to mix & match behavior
    /// otherwise constrained to certain dialects (e.g. trailing
    /// commas) and/or how the input is processed (e.g. unescaping).
342    options: ParserOptions,
343    /// Ensures the stack does not overflow by limiting recursion depth.
344    recursion_counter: RecursionCounter,
345}
346
347impl<'a> Parser<'a> {
348    /// Create a parser for a [`Dialect`]
349    ///
350    /// See also [`Parser::parse_sql`]
351    ///
352    /// Example:
353    /// ```
354    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
355    /// # fn main() -> Result<(), ParserError> {
356    /// let dialect = GenericDialect{};
357    /// let statements = Parser::new(&dialect)
358    ///   .try_with_sql("SELECT * FROM foo")?
359    ///   .parse_statements()?;
360    /// # Ok(())
361    /// # }
362    /// ```
363    pub fn new(dialect: &'a dyn Dialect) -> Self {
364        Self {
365            tokens: vec![],
366            index: 0,
367            state: ParserState::Normal,
368            dialect,
369            recursion_counter: RecursionCounter::new(DEFAULT_REMAINING_DEPTH),
370            options: ParserOptions::new().with_trailing_commas(dialect.supports_trailing_commas()),
371        }
372    }
373
374    /// Specify the maximum recursion limit while parsing.
375    ///
376    /// [`Parser`] prevents stack overflows by returning
377    /// [`ParserError::RecursionLimitExceeded`] if the parser exceeds
378    /// this depth while processing the query.
379    ///
380    /// Example:
381    /// ```
382    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
383    /// # fn main() -> Result<(), ParserError> {
384    /// let dialect = GenericDialect{};
385    /// let result = Parser::new(&dialect)
386    ///   .with_recursion_limit(1)
387    ///   .try_with_sql("SELECT * FROM foo WHERE (a OR (b OR (c OR d)))")?
388    ///   .parse_statements();
389    ///   assert_eq!(result, Err(ParserError::RecursionLimitExceeded));
390    /// # Ok(())
391    /// # }
392    /// ```
393    ///
    /// Note: when the "recursive-protection" feature is enabled, this crate uses additional stack overflow protection
    /// for some of its recursive methods. See [`recursive::recursive`] for more information.
396    pub fn with_recursion_limit(mut self, recursion_limit: usize) -> Self {
397        self.recursion_counter = RecursionCounter::new(recursion_limit);
398        self
399    }
400
401    /// Specify additional parser options
402    ///
403    /// [`Parser`] supports additional options ([`ParserOptions`])
404    /// that allow you to mix & match behavior otherwise constrained
405    /// to certain dialects (e.g. trailing commas).
406    ///
407    /// Example:
408    /// ```
409    /// # use sqlparser::{parser::{Parser, ParserError, ParserOptions}, dialect::GenericDialect};
410    /// # fn main() -> Result<(), ParserError> {
411    /// let dialect = GenericDialect{};
412    /// let options = ParserOptions::new()
413    ///    .with_trailing_commas(true)
414    ///    .with_unescape(false);
415    /// let result = Parser::new(&dialect)
416    ///   .with_options(options)
417    ///   .try_with_sql("SELECT a, b, COUNT(*), FROM foo GROUP BY a, b,")?
418    ///   .parse_statements();
419    ///   assert!(matches!(result, Ok(_)));
420    /// # Ok(())
421    /// # }
422    /// ```
423    pub fn with_options(mut self, options: ParserOptions) -> Self {
424        self.options = options;
425        self
426    }
427
428    /// Reset this parser to parse the specified token stream
429    pub fn with_tokens_with_locations(mut self, tokens: Vec<TokenWithSpan>) -> Self {
430        self.tokens = tokens;
431        self.index = 0;
432        self
433    }
434
435    /// Reset this parser state to parse the specified tokens
436    pub fn with_tokens(self, tokens: Vec<Token>) -> Self {
437        // Put in dummy locations
438        let tokens_with_locations: Vec<TokenWithSpan> = tokens
439            .into_iter()
440            .map(|token| TokenWithSpan {
441                token,
442                span: Span::empty(),
443            })
444            .collect();
445        self.with_tokens_with_locations(tokens_with_locations)
446    }
447
    /// Tokenizes the SQL string and sets this [`Parser`]'s state to
    /// parse the resulting tokens
450    ///
451    /// Returns an error if there was an error tokenizing the SQL string.
452    ///
453    /// See example on [`Parser::new()`] for an example
454    pub fn try_with_sql(self, sql: &str) -> Result<Self, ParserError> {
455        debug!("Parsing sql '{sql}'...");
456        let tokens = Tokenizer::new(self.dialect, sql)
457            .with_unescape(self.options.unescape)
458            .tokenize_with_location()?;
459        Ok(self.with_tokens_with_locations(tokens))
460    }
461
462    /// Parse potentially multiple statements
463    ///
464    /// Example
465    /// ```
466    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
467    /// # fn main() -> Result<(), ParserError> {
468    /// let dialect = GenericDialect{};
469    /// let statements = Parser::new(&dialect)
470    ///   // Parse a SQL string with 2 separate statements
471    ///   .try_with_sql("SELECT * FROM foo; SELECT * FROM bar;")?
472    ///   .parse_statements()?;
473    /// assert_eq!(statements.len(), 2);
474    /// # Ok(())
475    /// # }
476    /// ```
477    pub fn parse_statements(&mut self) -> Result<Vec<Statement>, ParserError> {
478        let mut stmts = Vec::new();
479        let mut expecting_statement_delimiter = false;
480        loop {
481            // ignore empty statements (between successive statement delimiters)
482            while self.consume_token(&Token::SemiColon) {
483                expecting_statement_delimiter = false;
484            }
485
486            if !self.options.require_semicolon_stmt_delimiter {
487                expecting_statement_delimiter = false;
488            }
489
490            match self.peek_token().token {
491                Token::EOF => break,
492
                // a trailing `END` keyword at a statement boundary also ends the list
494                Token::Word(word) => {
495                    if expecting_statement_delimiter && word.keyword == Keyword::END {
496                        break;
497                    }
498                }
499                _ => {}
500            }
501
502            if expecting_statement_delimiter {
503                return self.expected("end of statement", self.peek_token());
504            }
505
506            let statement = self.parse_statement()?;
507            stmts.push(statement);
508            expecting_statement_delimiter = true;
509        }
510        Ok(stmts)
511    }
512
    /// Convenience method to parse a string with one or more SQL
    /// statements and produce an Abstract Syntax Tree (AST).
515    ///
516    /// Example
517    /// ```
518    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
519    /// # fn main() -> Result<(), ParserError> {
520    /// let dialect = GenericDialect{};
521    /// let statements = Parser::parse_sql(
522    ///   &dialect, "SELECT * FROM foo"
523    /// )?;
524    /// assert_eq!(statements.len(), 1);
525    /// # Ok(())
526    /// # }
527    /// ```
528    pub fn parse_sql(dialect: &dyn Dialect, sql: &str) -> Result<Vec<Statement>, ParserError> {
529        Parser::new(dialect).try_with_sql(sql)?.parse_statements()
530    }
531
532    /// Parse a single top-level statement (such as SELECT, INSERT, CREATE, etc.),
533    /// stopping before the statement separator, if any.
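    ///
    /// Example (a minimal sketch using [`GenericDialect`]):
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
    /// # use sqlparser::ast::Statement;
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// // Only the first statement is parsed; the trailing `; SELECT 2` is left unconsumed.
    /// let statement = Parser::new(&dialect)
    ///     .try_with_sql("SELECT 1; SELECT 2")?
    ///     .parse_statement()?;
    /// assert!(matches!(statement, Statement::Query(_)));
    /// # Ok(())
    /// # }
    /// ```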
534    pub fn parse_statement(&mut self) -> Result<Statement, ParserError> {
535        let _guard = self.recursion_counter.try_decrease()?;
536
537        // allow the dialect to override statement parsing
538        if let Some(statement) = self.dialect.parse_statement(self) {
539            return statement;
540        }
541
542        let next_token = self.next_token();
543        match &next_token.token {
544            Token::Word(w) => match w.keyword {
545                Keyword::KILL => self.parse_kill(),
546                Keyword::FLUSH => self.parse_flush(),
547                Keyword::DESC => self.parse_explain(DescribeAlias::Desc),
548                Keyword::DESCRIBE => self.parse_explain(DescribeAlias::Describe),
549                Keyword::EXPLAIN => self.parse_explain(DescribeAlias::Explain),
550                Keyword::ANALYZE => self.parse_analyze(),
551                Keyword::CASE => {
552                    self.prev_token();
553                    self.parse_case_stmt()
554                }
555                Keyword::IF => {
556                    self.prev_token();
557                    self.parse_if_stmt()
558                }
559                Keyword::WHILE => {
560                    self.prev_token();
561                    self.parse_while()
562                }
563                Keyword::RAISE => {
564                    self.prev_token();
565                    self.parse_raise_stmt()
566                }
567                Keyword::SELECT | Keyword::WITH | Keyword::VALUES | Keyword::FROM => {
568                    self.prev_token();
569                    self.parse_query().map(Statement::Query)
570                }
571                Keyword::TRUNCATE => self.parse_truncate(),
572                Keyword::ATTACH => {
573                    if dialect_of!(self is DuckDbDialect) {
574                        self.parse_attach_duckdb_database()
575                    } else {
576                        self.parse_attach_database()
577                    }
578                }
579                Keyword::DETACH if dialect_of!(self is DuckDbDialect | GenericDialect) => {
580                    self.parse_detach_duckdb_database()
581                }
582                Keyword::MSCK => self.parse_msck(),
583                Keyword::CREATE => self.parse_create(),
584                Keyword::CACHE => self.parse_cache_table(),
585                Keyword::DROP => self.parse_drop(),
586                Keyword::DISCARD => self.parse_discard(),
587                Keyword::DECLARE => self.parse_declare(),
588                Keyword::FETCH => self.parse_fetch_statement(),
589                Keyword::DELETE => self.parse_delete(next_token),
590                Keyword::INSERT => self.parse_insert(next_token),
591                Keyword::REPLACE => self.parse_replace(next_token),
592                Keyword::UNCACHE => self.parse_uncache_table(),
593                Keyword::UPDATE => self.parse_update(next_token),
594                Keyword::ALTER => self.parse_alter(),
595                Keyword::CALL => self.parse_call(),
596                Keyword::COPY => self.parse_copy(),
597                Keyword::OPEN => {
598                    self.prev_token();
599                    self.parse_open()
600                }
601                Keyword::CLOSE => self.parse_close(),
602                Keyword::SET => self.parse_set(),
603                Keyword::SHOW => self.parse_show(),
604                Keyword::USE => self.parse_use(),
605                Keyword::GRANT => self.parse_grant(),
606                Keyword::DENY => {
607                    self.prev_token();
608                    self.parse_deny()
609                }
610                Keyword::REVOKE => self.parse_revoke(),
611                Keyword::START => self.parse_start_transaction(),
612                Keyword::BEGIN => self.parse_begin(),
613                Keyword::END => self.parse_end(),
614                Keyword::SAVEPOINT => self.parse_savepoint(),
615                Keyword::RELEASE => self.parse_release(),
616                Keyword::COMMIT => self.parse_commit(),
617                Keyword::RAISERROR => Ok(self.parse_raiserror()?),
618                Keyword::ROLLBACK => self.parse_rollback(),
619                Keyword::ASSERT => self.parse_assert(),
                // `PREPARE`, `EXECUTE` and `DEALLOCATE` are Postgres-specific
                // syntaxes. They are used for Postgres prepared statements.
622                Keyword::DEALLOCATE => self.parse_deallocate(),
623                Keyword::EXECUTE | Keyword::EXEC => self.parse_execute(),
624                Keyword::PREPARE => self.parse_prepare(),
625                Keyword::MERGE => self.parse_merge(next_token),
                // `LISTEN`, `UNLISTEN` and `NOTIFY` are Postgres-specific
                // syntaxes. They are used for Postgres asynchronous notification.
628                Keyword::LISTEN if self.dialect.supports_listen_notify() => self.parse_listen(),
629                Keyword::UNLISTEN if self.dialect.supports_listen_notify() => self.parse_unlisten(),
630                Keyword::NOTIFY if self.dialect.supports_listen_notify() => self.parse_notify(),
                // `PRAGMA` is SQLite-specific: https://www.sqlite.org/pragma.html
632                Keyword::PRAGMA => self.parse_pragma(),
633                Keyword::UNLOAD => {
634                    self.prev_token();
635                    self.parse_unload()
636                }
637                Keyword::RENAME => self.parse_rename(),
                // `INSTALL` is DuckDB-specific: https://duckdb.org/docs/extensions/overview
639                Keyword::INSTALL if dialect_of!(self is DuckDbDialect | GenericDialect) => {
640                    self.parse_install()
641                }
642                Keyword::LOAD => self.parse_load(),
                // `OPTIMIZE` is ClickHouse-specific: https://clickhouse.tech/docs/en/sql-reference/statements/optimize/
644                Keyword::OPTIMIZE if dialect_of!(self is ClickHouseDialect | GenericDialect) => {
645                    self.parse_optimize_table()
646                }
                // `COMMENT` is Snowflake-specific: https://docs.snowflake.com/en/sql-reference/sql/comment
648                Keyword::COMMENT if self.dialect.supports_comment_on() => self.parse_comment(),
649                Keyword::PRINT => self.parse_print(),
650                Keyword::RETURN => self.parse_return(),
651                Keyword::EXPORT => {
652                    self.prev_token();
653                    self.parse_export_data()
654                }
655                Keyword::VACUUM => {
656                    self.prev_token();
657                    self.parse_vacuum()
658                }
659                Keyword::RESET => self.parse_reset(),
660                _ => self.expected("an SQL statement", next_token),
661            },
662            Token::LParen => {
663                self.prev_token();
664                self.parse_query().map(Statement::Query)
665            }
666            _ => self.expected("an SQL statement", next_token),
667        }
668    }
669
670    /// Parse a `CASE` statement.
671    ///
672    /// See [Statement::Case]
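    ///
    /// An illustrative sketch of the statement form accepted here
    /// (BigQuery-style; identifiers are placeholders):
    ///
    /// ```sql
    /// CASE
    ///   WHEN a = 1 THEN SELECT 1;
    ///   WHEN a = 2 THEN SELECT 2;
    ///   ELSE SELECT 3;
    /// END CASE;
    /// ```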
673    pub fn parse_case_stmt(&mut self) -> Result<Statement, ParserError> {
674        let case_token = self.expect_keyword(Keyword::CASE)?;
675
676        let match_expr = if self.peek_keyword(Keyword::WHEN) {
677            None
678        } else {
679            Some(self.parse_expr()?)
680        };
681
682        self.expect_keyword_is(Keyword::WHEN)?;
683        let when_blocks = self.parse_keyword_separated(Keyword::WHEN, |parser| {
684            parser.parse_conditional_statement_block(&[Keyword::WHEN, Keyword::ELSE, Keyword::END])
685        })?;
686
687        let else_block = if self.parse_keyword(Keyword::ELSE) {
688            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
689        } else {
690            None
691        };
692
693        let mut end_case_token = self.expect_keyword(Keyword::END)?;
694        if self.peek_keyword(Keyword::CASE) {
695            end_case_token = self.expect_keyword(Keyword::CASE)?;
696        }
697
698        Ok(Statement::Case(CaseStatement {
699            case_token: AttachedToken(case_token),
700            match_expr,
701            when_blocks,
702            else_block,
703            end_case_token: AttachedToken(end_case_token),
704        }))
705    }
706
707    /// Parse an `IF` statement.
708    ///
709    /// See [Statement::If]
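    ///
    /// An illustrative sketch of the statement form accepted here
    /// (identifiers are placeholders):
    ///
    /// ```sql
    /// IF a = 1 THEN
    ///   SELECT 1;
    /// ELSEIF a = 2 THEN
    ///   SELECT 2;
    /// ELSE
    ///   SELECT 3;
    /// END IF;
    /// ```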
710    pub fn parse_if_stmt(&mut self) -> Result<Statement, ParserError> {
711        self.expect_keyword_is(Keyword::IF)?;
712        let if_block = self.parse_conditional_statement_block(&[
713            Keyword::ELSE,
714            Keyword::ELSEIF,
715            Keyword::END,
716        ])?;
717
718        let elseif_blocks = if self.parse_keyword(Keyword::ELSEIF) {
719            self.parse_keyword_separated(Keyword::ELSEIF, |parser| {
720                parser.parse_conditional_statement_block(&[
721                    Keyword::ELSEIF,
722                    Keyword::ELSE,
723                    Keyword::END,
724                ])
725            })?
726        } else {
727            vec![]
728        };
729
730        let else_block = if self.parse_keyword(Keyword::ELSE) {
731            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
732        } else {
733            None
734        };
735
736        self.expect_keyword_is(Keyword::END)?;
737        let end_token = self.expect_keyword(Keyword::IF)?;
738
739        Ok(Statement::If(IfStatement {
740            if_block,
741            elseif_blocks,
742            else_block,
743            end_token: Some(AttachedToken(end_token)),
744        }))
745    }
746
747    /// Parse a `WHILE` statement.
748    ///
749    /// See [Statement::While]
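    ///
    /// An illustrative sketch (T-SQL style; identifiers are placeholders):
    ///
    /// ```sql
    /// WHILE a < 10
    /// BEGIN
    ///   SELECT 1;
    /// END
    /// ```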
750    fn parse_while(&mut self) -> Result<Statement, ParserError> {
751        self.expect_keyword_is(Keyword::WHILE)?;
752        let while_block = self.parse_conditional_statement_block(&[Keyword::END])?;
753
754        Ok(Statement::While(WhileStatement { while_block }))
755    }
756
    /// Parses an expression and an associated list of statements
    /// belonging to a conditional statement such as `IF`, `WHEN`, or `WHILE`.
759    ///
760    /// Example:
761    /// ```sql
762    /// IF condition THEN statement1; statement2;
763    /// ```
764    fn parse_conditional_statement_block(
765        &mut self,
766        terminal_keywords: &[Keyword],
767    ) -> Result<ConditionalStatementBlock, ParserError> {
        let start_token = self.get_current_token().clone();
769        let mut then_token = None;
770
771        let condition = match &start_token.token {
772            Token::Word(w) if w.keyword == Keyword::ELSE => None,
773            Token::Word(w) if w.keyword == Keyword::WHILE => {
774                let expr = self.parse_expr()?;
775                Some(expr)
776            }
777            _ => {
778                let expr = self.parse_expr()?;
779                then_token = Some(AttachedToken(self.expect_keyword(Keyword::THEN)?));
780                Some(expr)
781            }
782        };
783
784        let conditional_statements = self.parse_conditional_statements(terminal_keywords)?;
785
786        Ok(ConditionalStatementBlock {
787            start_token: AttachedToken(start_token),
788            condition,
789            then_token,
790            conditional_statements,
791        })
792    }
793
    /// Parse a `BEGIN`/`END` block or a plain sequence of statements.
    /// This could be inside a conditional (`IF`, `CASE`, `WHILE`, etc.) or an
    /// object body, which is optionally wrapped in `BEGIN`/`END` and contains
    /// one or more statements.
796    pub(crate) fn parse_conditional_statements(
797        &mut self,
798        terminal_keywords: &[Keyword],
799    ) -> Result<ConditionalStatements, ParserError> {
800        let conditional_statements = if self.peek_keyword(Keyword::BEGIN) {
801            let begin_token = self.expect_keyword(Keyword::BEGIN)?;
802            let statements = self.parse_statement_list(terminal_keywords)?;
803            let end_token = self.expect_keyword(Keyword::END)?;
804
805            ConditionalStatements::BeginEnd(BeginEndStatements {
806                begin_token: AttachedToken(begin_token),
807                statements,
808                end_token: AttachedToken(end_token),
809            })
810        } else {
811            ConditionalStatements::Sequence {
812                statements: self.parse_statement_list(terminal_keywords)?,
813            }
814        };
815        Ok(conditional_statements)
816    }
817
818    /// Parse a `RAISE` statement.
819    ///
820    /// See [Statement::Raise]
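    ///
    /// An illustrative sketch of the form accepted here:
    ///
    /// ```sql
    /// RAISE USING MESSAGE = 'error message';
    /// ```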
821    pub fn parse_raise_stmt(&mut self) -> Result<Statement, ParserError> {
822        self.expect_keyword_is(Keyword::RAISE)?;
823
824        let value = if self.parse_keywords(&[Keyword::USING, Keyword::MESSAGE]) {
825            self.expect_token(&Token::Eq)?;
826            Some(RaiseStatementValue::UsingMessage(self.parse_expr()?))
827        } else {
828            self.maybe_parse(|parser| parser.parse_expr().map(RaiseStatementValue::Expr))?
829        };
830
831        Ok(Statement::Raise(RaiseStatement { value }))
832    }
833
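    /// Parse a `COMMENT ON ... IS ...` statement (Postgres-style).
    ///
    /// An illustrative sketch (identifiers are placeholders):
    ///
    /// ```sql
    /// COMMENT ON TABLE foo IS 'a table comment';
    /// ```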
834    pub fn parse_comment(&mut self) -> Result<Statement, ParserError> {
835        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
836
837        self.expect_keyword_is(Keyword::ON)?;
838        let token = self.next_token();
839
840        let (object_type, object_name) = match token.token {
841            Token::Word(w) if w.keyword == Keyword::COLUMN => {
842                (CommentObject::Column, self.parse_object_name(false)?)
843            }
844            Token::Word(w) if w.keyword == Keyword::TABLE => {
845                (CommentObject::Table, self.parse_object_name(false)?)
846            }
847            Token::Word(w) if w.keyword == Keyword::EXTENSION => {
848                (CommentObject::Extension, self.parse_object_name(false)?)
849            }
850            Token::Word(w) if w.keyword == Keyword::SCHEMA => {
851                (CommentObject::Schema, self.parse_object_name(false)?)
852            }
853            Token::Word(w) if w.keyword == Keyword::DATABASE => {
854                (CommentObject::Database, self.parse_object_name(false)?)
855            }
856            Token::Word(w) if w.keyword == Keyword::USER => {
857                (CommentObject::User, self.parse_object_name(false)?)
858            }
859            Token::Word(w) if w.keyword == Keyword::ROLE => {
860                (CommentObject::Role, self.parse_object_name(false)?)
861            }
862            _ => self.expected("comment object_type", token)?,
863        };
864
865        self.expect_keyword_is(Keyword::IS)?;
866        let comment = if self.parse_keyword(Keyword::NULL) {
867            None
868        } else {
869            Some(self.parse_literal_string()?)
870        };
871        Ok(Statement::Comment {
872            object_type,
873            object_name,
874            comment,
875            if_exists,
876        })
877    }
878
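    /// Parse a MySQL `FLUSH` statement.
    ///
    /// An illustrative sketch of one accepted form:
    ///
    /// ```sql
    /// FLUSH TABLES WITH READ LOCK;
    /// ```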
879    pub fn parse_flush(&mut self) -> Result<Statement, ParserError> {
880        let mut channel = None;
881        let mut tables: Vec<ObjectName> = vec![];
882        let mut read_lock = false;
883        let mut export = false;
884
885        if !dialect_of!(self is MySqlDialect | GenericDialect) {
886            return parser_err!("Unsupported statement FLUSH", self.peek_token().span.start);
887        }
888
889        let location = if self.parse_keyword(Keyword::NO_WRITE_TO_BINLOG) {
890            Some(FlushLocation::NoWriteToBinlog)
891        } else if self.parse_keyword(Keyword::LOCAL) {
892            Some(FlushLocation::Local)
893        } else {
894            None
895        };
896
897        let object_type = if self.parse_keywords(&[Keyword::BINARY, Keyword::LOGS]) {
898            FlushType::BinaryLogs
899        } else if self.parse_keywords(&[Keyword::ENGINE, Keyword::LOGS]) {
900            FlushType::EngineLogs
901        } else if self.parse_keywords(&[Keyword::ERROR, Keyword::LOGS]) {
902            FlushType::ErrorLogs
903        } else if self.parse_keywords(&[Keyword::GENERAL, Keyword::LOGS]) {
904            FlushType::GeneralLogs
905        } else if self.parse_keywords(&[Keyword::HOSTS]) {
906            FlushType::Hosts
907        } else if self.parse_keyword(Keyword::PRIVILEGES) {
908            FlushType::Privileges
909        } else if self.parse_keyword(Keyword::OPTIMIZER_COSTS) {
910            FlushType::OptimizerCosts
911        } else if self.parse_keywords(&[Keyword::RELAY, Keyword::LOGS]) {
912            if self.parse_keywords(&[Keyword::FOR, Keyword::CHANNEL]) {
                channel = Some(self.parse_object_name(false)?.to_string());
914            }
915            FlushType::RelayLogs
916        } else if self.parse_keywords(&[Keyword::SLOW, Keyword::LOGS]) {
917            FlushType::SlowLogs
918        } else if self.parse_keyword(Keyword::STATUS) {
919            FlushType::Status
920        } else if self.parse_keyword(Keyword::USER_RESOURCES) {
921            FlushType::UserResources
922        } else if self.parse_keywords(&[Keyword::LOGS]) {
923            FlushType::Logs
924        } else if self.parse_keywords(&[Keyword::TABLES]) {
925            loop {
926                let next_token = self.next_token();
927                match &next_token.token {
928                    Token::Word(w) => match w.keyword {
929                        Keyword::WITH => {
930                            read_lock = self.parse_keywords(&[Keyword::READ, Keyword::LOCK]);
931                        }
932                        Keyword::FOR => {
933                            export = self.parse_keyword(Keyword::EXPORT);
934                        }
935                        Keyword::NoKeyword => {
936                            self.prev_token();
937                            tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
938                        }
939                        _ => {}
940                    },
941                    _ => {
942                        break;
943                    }
944                }
945            }
946
947            FlushType::Tables
948        } else {
949            return self.expected(
950                "BINARY LOGS, ENGINE LOGS, ERROR LOGS, GENERAL LOGS, HOSTS, LOGS, PRIVILEGES, OPTIMIZER_COSTS,\
951                 RELAY LOGS [FOR CHANNEL channel], SLOW LOGS, STATUS, USER_RESOURCES",
952                self.peek_token(),
953            );
954        };
955
956        Ok(Statement::Flush {
957            object_type,
958            location,
959            channel,
960            read_lock,
961            export,
962            tables,
963        })
964    }
965
966    pub fn parse_msck(&mut self) -> Result<Statement, ParserError> {
967        let repair = self.parse_keyword(Keyword::REPAIR);
968        self.expect_keyword_is(Keyword::TABLE)?;
969        let table_name = self.parse_object_name(false)?;
970        let partition_action = self
971            .maybe_parse(|parser| {
972                let pa = match parser.parse_one_of_keywords(&[
973                    Keyword::ADD,
974                    Keyword::DROP,
975                    Keyword::SYNC,
976                ]) {
977                    Some(Keyword::ADD) => Some(AddDropSync::ADD),
978                    Some(Keyword::DROP) => Some(AddDropSync::DROP),
979                    Some(Keyword::SYNC) => Some(AddDropSync::SYNC),
980                    _ => None,
981                };
982                parser.expect_keyword_is(Keyword::PARTITIONS)?;
983                Ok(pa)
984            })?
985            .unwrap_or_default();
986        Ok(Msck {
987            repair,
988            table_name,
989            partition_action,
990        }
991        .into())
992    }
993
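    /// Parse a `TRUNCATE` statement.
    ///
    /// An illustrative sketch (Postgres-flavored; identifiers are placeholders):
    ///
    /// ```sql
    /// TRUNCATE TABLE foo, bar RESTART IDENTITY CASCADE;
    /// ```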
994    pub fn parse_truncate(&mut self) -> Result<Statement, ParserError> {
995        let table = self.parse_keyword(Keyword::TABLE);
996
997        let table_names = self
998            .parse_comma_separated(|p| {
999                Ok((p.parse_keyword(Keyword::ONLY), p.parse_object_name(false)?))
1000            })?
1001            .into_iter()
1002            .map(|(only, name)| TruncateTableTarget { name, only })
1003            .collect();
1004
1005        let mut partitions = None;
1006        if self.parse_keyword(Keyword::PARTITION) {
1007            self.expect_token(&Token::LParen)?;
1008            partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
1009            self.expect_token(&Token::RParen)?;
1010        }
1011
1012        let mut identity = None;
1013        let mut cascade = None;
1014
1015        if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
1016            identity = if self.parse_keywords(&[Keyword::RESTART, Keyword::IDENTITY]) {
1017                Some(TruncateIdentityOption::Restart)
1018            } else if self.parse_keywords(&[Keyword::CONTINUE, Keyword::IDENTITY]) {
1019                Some(TruncateIdentityOption::Continue)
1020            } else {
1021                None
1022            };
1023
1024            cascade = self.parse_cascade_option();
1025        };
1026
1027        let on_cluster = self.parse_optional_on_cluster()?;
1028
1029        Ok(Truncate {
1030            table_names,
1031            partitions,
1032            table,
1033            identity,
1034            cascade,
1035            on_cluster,
1036        }
1037        .into())
1038    }
1039
1040    fn parse_cascade_option(&mut self) -> Option<CascadeOption> {
1041        if self.parse_keyword(Keyword::CASCADE) {
1042            Some(CascadeOption::Cascade)
1043        } else if self.parse_keyword(Keyword::RESTRICT) {
1044            Some(CascadeOption::Restrict)
1045        } else {
1046            None
1047        }
1048    }
1049
1050    pub fn parse_attach_duckdb_database_options(
1051        &mut self,
1052    ) -> Result<Vec<AttachDuckDBDatabaseOption>, ParserError> {
1053        if !self.consume_token(&Token::LParen) {
1054            return Ok(vec![]);
1055        }
1056
1057        let mut options = vec![];
1058        loop {
1059            if self.parse_keyword(Keyword::READ_ONLY) {
1060                let boolean = if self.parse_keyword(Keyword::TRUE) {
1061                    Some(true)
1062                } else if self.parse_keyword(Keyword::FALSE) {
1063                    Some(false)
1064                } else {
1065                    None
1066                };
1067                options.push(AttachDuckDBDatabaseOption::ReadOnly(boolean));
1068            } else if self.parse_keyword(Keyword::TYPE) {
1069                let ident = self.parse_identifier()?;
1070                options.push(AttachDuckDBDatabaseOption::Type(ident));
1071            } else {
                return self.expected("one of: ')', READ_ONLY, TYPE", self.peek_token());
1073            };
1074
1075            if self.consume_token(&Token::RParen) {
1076                return Ok(options);
1077            } else if self.consume_token(&Token::Comma) {
1078                continue;
1079            } else {
                return self.expected("one of: ')' or ','", self.peek_token());
1081            }
1082        }
1083    }
1084
1085    pub fn parse_attach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
1086        let database = self.parse_keyword(Keyword::DATABASE);
1087        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
1088        let database_path = self.parse_identifier()?;
1089        let database_alias = if self.parse_keyword(Keyword::AS) {
1090            Some(self.parse_identifier()?)
1091        } else {
1092            None
1093        };
1094
1095        let attach_options = self.parse_attach_duckdb_database_options()?;
1096        Ok(Statement::AttachDuckDBDatabase {
1097            if_not_exists,
1098            database,
1099            database_path,
1100            database_alias,
1101            attach_options,
1102        })
1103    }
1104
1105    pub fn parse_detach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
1106        let database = self.parse_keyword(Keyword::DATABASE);
1107        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
1108        let database_alias = self.parse_identifier()?;
1109        Ok(Statement::DetachDuckDBDatabase {
1110            if_exists,
1111            database,
1112            database_alias,
1113        })
1114    }
1115
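    /// Parse a SQLite-style `ATTACH DATABASE` statement.
    ///
    /// An illustrative sketch (the file name and alias are placeholders):
    ///
    /// ```sql
    /// ATTACH DATABASE 'other.db' AS other_db;
    /// ```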
1116    pub fn parse_attach_database(&mut self) -> Result<Statement, ParserError> {
1117        let database = self.parse_keyword(Keyword::DATABASE);
1118        let database_file_name = self.parse_expr()?;
1119        self.expect_keyword_is(Keyword::AS)?;
1120        let schema_name = self.parse_identifier()?;
1121        Ok(Statement::AttachDatabase {
1122            database,
1123            schema_name,
1124            database_file_name,
1125        })
1126    }
1127
1128    pub fn parse_analyze(&mut self) -> Result<Statement, ParserError> {
1129        let has_table_keyword = self.parse_keyword(Keyword::TABLE);
1130        let table_name = self.parse_object_name(false)?;
1131        let mut for_columns = false;
1132        let mut cache_metadata = false;
1133        let mut noscan = false;
1134        let mut partitions = None;
1135        let mut compute_statistics = false;
1136        let mut columns = vec![];
1137        loop {
1138            match self.parse_one_of_keywords(&[
1139                Keyword::PARTITION,
1140                Keyword::FOR,
1141                Keyword::CACHE,
1142                Keyword::NOSCAN,
1143                Keyword::COMPUTE,
1144            ]) {
1145                Some(Keyword::PARTITION) => {
1146                    self.expect_token(&Token::LParen)?;
1147                    partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
1148                    self.expect_token(&Token::RParen)?;
1149                }
1150                Some(Keyword::NOSCAN) => noscan = true,
1151                Some(Keyword::FOR) => {
1152                    self.expect_keyword_is(Keyword::COLUMNS)?;
1153
1154                    columns = self
1155                        .maybe_parse(|parser| {
1156                            parser.parse_comma_separated(|p| p.parse_identifier())
1157                        })?
1158                        .unwrap_or_default();
1159                    for_columns = true
1160                }
1161                Some(Keyword::CACHE) => {
1162                    self.expect_keyword_is(Keyword::METADATA)?;
1163                    cache_metadata = true
1164                }
1165                Some(Keyword::COMPUTE) => {
1166                    self.expect_keyword_is(Keyword::STATISTICS)?;
1167                    compute_statistics = true
1168                }
1169                _ => break,
1170            }
1171        }
1172
1173        Ok(Analyze {
1174            has_table_keyword,
1175            table_name,
1176            for_columns,
1177            columns,
1178            partitions,
1179            cache_metadata,
1180            noscan,
1181            compute_statistics,
1182        }
1183        .into())
1184    }
1185
    /// Parse a new expression, including wildcard (`*`) and qualified
    /// wildcard (`table.*`) expressions.
1187    pub fn parse_wildcard_expr(&mut self) -> Result<Expr, ParserError> {
1188        let index = self.index;
1189
1190        let next_token = self.next_token();
1191        match next_token.token {
1192            t @ (Token::Word(_) | Token::SingleQuotedString(_)) => {
1193                if self.peek_token().token == Token::Period {
1194                    let mut id_parts: Vec<Ident> = vec![match t {
1195                        Token::Word(w) => w.into_ident(next_token.span),
1196                        Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
1197                        _ => unreachable!(), // We matched above
1198                    }];
1199
1200                    while self.consume_token(&Token::Period) {
1201                        let next_token = self.next_token();
1202                        match next_token.token {
1203                            Token::Word(w) => id_parts.push(w.into_ident(next_token.span)),
1204                            Token::SingleQuotedString(s) => {
1205                                // SQLite has single-quoted identifiers
1206                                id_parts.push(Ident::with_quote('\'', s))
1207                            }
1208                            Token::Mul => {
1209                                return Ok(Expr::QualifiedWildcard(
1210                                    ObjectName::from(id_parts),
1211                                    AttachedToken(next_token),
1212                                ));
1213                            }
1214                            _ => {
1215                                return self
1216                                    .expected("an identifier or a '*' after '.'", next_token);
1217                            }
1218                        }
1219                    }
1220                }
1221            }
1222            Token::Mul => {
1223                return Ok(Expr::Wildcard(AttachedToken(next_token)));
1224            }
1225            _ => (),
1226        };
1227
1228        self.index = index;
1229        self.parse_expr()
1230    }
1231
1232    /// Parse a new expression.
1233    pub fn parse_expr(&mut self) -> Result<Expr, ParserError> {
1234        self.parse_subexpr(self.dialect.prec_unknown())
1235    }
1236
1237    pub fn parse_expr_with_alias_and_order_by(
1238        &mut self,
1239    ) -> Result<ExprWithAliasAndOrderBy, ParserError> {
1240        let expr = self.parse_expr()?;
1241
1242        fn validator(explicit: bool, kw: &Keyword, _parser: &mut Parser) -> bool {
1243            explicit || !&[Keyword::ASC, Keyword::DESC, Keyword::GROUP].contains(kw)
1244        }
1245        let alias = self.parse_optional_alias_inner(None, validator)?;
1246        let order_by = OrderByOptions {
1247            asc: self.parse_asc_desc(),
1248            nulls_first: None,
1249        };
1250        Ok(ExprWithAliasAndOrderBy {
1251            expr: ExprWithAlias { expr, alias },
1252            order_by,
1253        })
1254    }
1255
1256    /// Parse tokens until the precedence changes.
1257    pub fn parse_subexpr(&mut self, precedence: u8) -> Result<Expr, ParserError> {
1258        let _guard = self.recursion_counter.try_decrease()?;
1259        debug!("parsing expr");
1260        let mut expr = self.parse_prefix()?;
1261
1262        expr = self.parse_compound_expr(expr, vec![])?;
1263
1264        debug!("prefix: {expr:?}");
1265        loop {
1266            let next_precedence = self.get_next_precedence()?;
1267            debug!("next precedence: {next_precedence:?}");
1268
1269            if precedence >= next_precedence {
1270                break;
1271            }
1272
1273            // The period operator is handled exclusively by the
1274            // compound field access parsing.
1275            if Token::Period == self.peek_token_ref().token {
1276                break;
1277            }
1278
1279            expr = self.parse_infix(expr, next_precedence)?;
1280        }
1281        Ok(expr)
1282    }
1283
1284    pub fn parse_assert(&mut self) -> Result<Statement, ParserError> {
1285        let condition = self.parse_expr()?;
1286        let message = if self.parse_keyword(Keyword::AS) {
1287            Some(self.parse_expr()?)
1288        } else {
1289            None
1290        };
1291
1292        Ok(Statement::Assert { condition, message })
1293    }
1294
1295    pub fn parse_savepoint(&mut self) -> Result<Statement, ParserError> {
1296        let name = self.parse_identifier()?;
1297        Ok(Statement::Savepoint { name })
1298    }
1299
1300    pub fn parse_release(&mut self) -> Result<Statement, ParserError> {
1301        let _ = self.parse_keyword(Keyword::SAVEPOINT);
1302        let name = self.parse_identifier()?;
1303
1304        Ok(Statement::ReleaseSavepoint { name })
1305    }
1306
1307    pub fn parse_listen(&mut self) -> Result<Statement, ParserError> {
1308        let channel = self.parse_identifier()?;
1309        Ok(Statement::LISTEN { channel })
1310    }
1311
1312    pub fn parse_unlisten(&mut self) -> Result<Statement, ParserError> {
1313        let channel = if self.consume_token(&Token::Mul) {
1314            Ident::new(Expr::Wildcard(AttachedToken::empty()).to_string())
1315        } else {
1316            match self.parse_identifier() {
1317                Ok(expr) => expr,
1318                _ => {
1319                    self.prev_token();
1320                    return self.expected("wildcard or identifier", self.peek_token());
1321                }
1322            }
1323        };
1324        Ok(Statement::UNLISTEN { channel })
1325    }
1326
1327    pub fn parse_notify(&mut self) -> Result<Statement, ParserError> {
1328        let channel = self.parse_identifier()?;
1329        let payload = if self.consume_token(&Token::Comma) {
1330            Some(self.parse_literal_string()?)
1331        } else {
1332            None
1333        };
1334        Ok(Statement::NOTIFY { channel, payload })
1335    }
1336
1337    /// Parses a `RENAME TABLE` statement. See [Statement::RenameTable]
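    ///
    /// An illustrative sketch (MySQL-style; identifiers are placeholders):
    ///
    /// ```sql
    /// RENAME TABLE old_table TO new_table;
    /// ```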
1338    pub fn parse_rename(&mut self) -> Result<Statement, ParserError> {
1339        if self.peek_keyword(Keyword::TABLE) {
1340            self.expect_keyword(Keyword::TABLE)?;
1341            let rename_tables = self.parse_comma_separated(|parser| {
1342                let old_name = parser.parse_object_name(false)?;
1343                parser.expect_keyword(Keyword::TO)?;
1344                let new_name = parser.parse_object_name(false)?;
1345
1346                Ok(RenameTable { old_name, new_name })
1347            })?;
1348            Ok(Statement::RenameTable(rename_tables))
1349        } else {
1350            self.expected("KEYWORD `TABLE` after RENAME", self.peek_token())
1351        }
1352    }
1353
1354    /// Tries to parse an expression by matching the specified word to known keywords that have a special meaning in the dialect.
    /// Returns `None` if no match is found.
1356    fn parse_expr_prefix_by_reserved_word(
1357        &mut self,
1358        w: &Word,
1359        w_span: Span,
1360    ) -> Result<Option<Expr>, ParserError> {
1361        match w.keyword {
1362            Keyword::TRUE | Keyword::FALSE if self.dialect.supports_boolean_literals() => {
1363                self.prev_token();
1364                Ok(Some(Expr::Value(self.parse_value()?)))
1365            }
1366            Keyword::NULL => {
1367                self.prev_token();
1368                Ok(Some(Expr::Value(self.parse_value()?)))
1369            }
1370            Keyword::CURRENT_CATALOG
1371            | Keyword::CURRENT_USER
1372            | Keyword::SESSION_USER
1373            | Keyword::USER
1374            if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
1375                {
1376                    Ok(Some(Expr::Function(Function {
1377                        name: ObjectName::from(vec![w.clone().into_ident(w_span)]),
1378                        uses_odbc_syntax: false,
1379                        parameters: FunctionArguments::None,
1380                        args: FunctionArguments::None,
1381                        null_treatment: None,
1382                        filter: None,
1383                        over: None,
1384                        within_group: vec![],
1385                    })))
1386                }
1387            Keyword::CURRENT_TIMESTAMP
1388            | Keyword::CURRENT_TIME
1389            | Keyword::CURRENT_DATE
1390            | Keyword::LOCALTIME
1391            | Keyword::LOCALTIMESTAMP => {
1392                Ok(Some(self.parse_time_functions(ObjectName::from(vec![w.clone().into_ident(w_span)]))?))
1393            }
1394            Keyword::CASE => Ok(Some(self.parse_case_expr()?)),
1395            Keyword::CONVERT => Ok(Some(self.parse_convert_expr(false)?)),
1396            Keyword::TRY_CONVERT if self.dialect.supports_try_convert() => Ok(Some(self.parse_convert_expr(true)?)),
1397            Keyword::CAST => Ok(Some(self.parse_cast_expr(CastKind::Cast)?)),
1398            Keyword::TRY_CAST => Ok(Some(self.parse_cast_expr(CastKind::TryCast)?)),
1399            Keyword::SAFE_CAST => Ok(Some(self.parse_cast_expr(CastKind::SafeCast)?)),
1400            Keyword::EXISTS
            // Databricks has a function named `exists`, so in that dialect only treat
            // `EXISTS` as an EXISTS subquery expression when followed by `SELECT` or `WITH`.
1402            if !dialect_of!(self is DatabricksDialect)
1403                || matches!(
1404                        self.peek_nth_token_ref(1).token,
1405                        Token::Word(Word {
1406                            keyword: Keyword::SELECT | Keyword::WITH,
1407                            ..
1408                        })
1409                    ) =>
1410                {
1411                    Ok(Some(self.parse_exists_expr(false)?))
1412                }
1413            Keyword::EXTRACT => Ok(Some(self.parse_extract_expr()?)),
1414            Keyword::CEIL => Ok(Some(self.parse_ceil_floor_expr(true)?)),
1415            Keyword::FLOOR => Ok(Some(self.parse_ceil_floor_expr(false)?)),
1416            Keyword::POSITION if self.peek_token_ref().token == Token::LParen => {
1417                Ok(Some(self.parse_position_expr(w.clone().into_ident(w_span))?))
1418            }
1419            Keyword::SUBSTR | Keyword::SUBSTRING => {
1420                self.prev_token();
1421                Ok(Some(self.parse_substring()?))
1422            }
1423            Keyword::OVERLAY => Ok(Some(self.parse_overlay_expr()?)),
1424            Keyword::TRIM => Ok(Some(self.parse_trim_expr()?)),
1425            Keyword::INTERVAL => Ok(Some(self.parse_interval()?)),
1426            // Treat ARRAY[1,2,3] as an array [1,2,3], otherwise try as subquery or a function call
1427            Keyword::ARRAY if *self.peek_token_ref() == Token::LBracket => {
1428                self.expect_token(&Token::LBracket)?;
1429                Ok(Some(self.parse_array_expr(true)?))
1430            }
1431            Keyword::ARRAY
1432            if self.peek_token() == Token::LParen
1433                && !dialect_of!(self is ClickHouseDialect | DatabricksDialect) =>
1434                {
1435                    self.expect_token(&Token::LParen)?;
1436                    let query = self.parse_query()?;
1437                    self.expect_token(&Token::RParen)?;
1438                    Ok(Some(Expr::Function(Function {
1439                        name: ObjectName::from(vec![w.clone().into_ident(w_span)]),
1440                        uses_odbc_syntax: false,
1441                        parameters: FunctionArguments::None,
1442                        args: FunctionArguments::Subquery(query),
1443                        filter: None,
1444                        null_treatment: None,
1445                        over: None,
1446                        within_group: vec![],
1447                    })))
1448                }
1449            Keyword::NOT => Ok(Some(self.parse_not()?)),
1450            Keyword::MATCH if self.dialect.supports_match_against() => {
1451                Ok(Some(self.parse_match_against()?))
1452            }
1453            Keyword::STRUCT if self.dialect.supports_struct_literal() => {
1454                let struct_expr = self.parse_struct_literal()?;
1455                Ok(Some(struct_expr))
1456            }
1457            Keyword::PRIOR if matches!(self.state, ParserState::ConnectBy) => {
1458                let expr = self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?;
1459                Ok(Some(Expr::Prior(Box::new(expr))))
1460            }
1461            Keyword::MAP if *self.peek_token_ref() == Token::LBrace && self.dialect.support_map_literal_syntax() => {
1462                Ok(Some(self.parse_duckdb_map_literal()?))
1463            }
1464            _ if self.dialect.supports_geometric_types() => match w.keyword {
1465                Keyword::CIRCLE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Circle)?)),
1466                Keyword::BOX => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricBox)?)),
1467                Keyword::PATH => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricPath)?)),
1468                Keyword::LINE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Line)?)),
1469                Keyword::LSEG => Ok(Some(self.parse_geometric_type(GeometricTypeKind::LineSegment)?)),
1470                Keyword::POINT => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Point)?)),
1471                Keyword::POLYGON => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Polygon)?)),
1472                _ => Ok(None),
1473            },
1474            _ => Ok(None),
1475        }
1476    }
1477
1478    /// Tries to parse an expression by a word that is not known to have a special meaning in the dialect.
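    /// A rough sketch of inputs that reach this path (illustrative, not exhaustive):
    ///
    /// ```sql
    /// foo(1, 2)        -- parsed as a function call
    /// _utf8mb4'abc'    -- parsed as an introduced string (charset introducer)
    /// x -> x + 1       -- parsed as a lambda, in dialects that support lambdas
    /// bar              -- otherwise, a plain identifier
    /// ```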
1479    fn parse_expr_prefix_by_unreserved_word(
1480        &mut self,
1481        w: &Word,
1482        w_span: Span,
1483    ) -> Result<Expr, ParserError> {
1484        match self.peek_token().token {
1485            Token::LParen if !self.peek_outer_join_operator() => {
1486                let id_parts = vec![w.clone().into_ident(w_span)];
1487                self.parse_function(ObjectName::from(id_parts))
1488            }
1489            // string introducer https://dev.mysql.com/doc/refman/8.0/en/charset-introducer.html
1490            Token::SingleQuotedString(_)
1491            | Token::DoubleQuotedString(_)
1492            | Token::HexStringLiteral(_)
1493                if w.value.starts_with('_') =>
1494            {
1495                Ok(Expr::Prefixed {
1496                    prefix: w.clone().into_ident(w_span),
1497                    value: self.parse_introduced_string_expr()?.into(),
1498                })
1499            }
1511            Token::Arrow if self.dialect.supports_lambda_functions() => {
1512                self.expect_token(&Token::Arrow)?;
1513                Ok(Expr::Lambda(LambdaFunction {
1514                    params: OneOrManyWithParens::One(w.clone().into_ident(w_span)),
1515                    body: Box::new(self.parse_expr()?),
1516                }))
1517            }
1518            _ => Ok(Expr::Identifier(w.clone().into_ident(w_span))),
1519        }
1520    }
1521
1522    /// Parse an expression prefix.
1523    pub fn parse_prefix(&mut self) -> Result<Expr, ParserError> {
1524        // allow the dialect to override prefix parsing
1525        if let Some(prefix) = self.dialect.parse_prefix(self) {
1526            return prefix;
1527        }
1528
1529        // PostgreSQL allows any string literal to be preceded by a type name, indicating that the
1530        // string literal represents a literal of that type. Some examples:
1531        //
1532        //      DATE '2020-05-20'
1533        //      TIMESTAMP WITH TIME ZONE '2020-05-20 7:43:54'
1534        //      BOOL 'true'
1535        //
1536        // The first two are standard SQL, while the latter is a PostgreSQL extension. Complicating
1537        // matters is the fact that INTERVAL string literals may optionally be followed by special
1538        // keywords, e.g.:
1539        //
1540        //      INTERVAL '7' DAY
1541        //
1542        // Note also that naively `SELECT date` looks like a syntax error because the `date` type
1543        // name is not followed by a string literal, but in fact in PostgreSQL it is a valid
1544        // expression that should parse as the column name "date".
1545        let loc = self.peek_token_ref().span.start;
1546        let opt_expr = self.maybe_parse(|parser| {
1547            match parser.parse_data_type()? {
1548                DataType::Interval { .. } => parser.parse_interval(),
1549                // PostgreSQL allows almost any identifier to be used as custom data type name,
1550                // and we support that in `parse_data_type()`. But unlike Postgres we don't
1551                // have a list of globally reserved keywords (since they vary across dialects),
1552                // so given `NOT 'a' LIKE 'b'`, we'd accept `NOT` as a possible custom data type
1553                // name, resulting in `NOT 'a'` being recognized as a `TypedString` instead of
1554                // an unary negation `NOT ('a' LIKE 'b')`. To solve this, we don't accept the
1555                // a unary negation `NOT ('a' LIKE 'b')`. To solve this, we don't accept the
1556                DataType::Custom(..) => parser_err!("dummy", loc),
1557                data_type => Ok(Expr::TypedString(TypedString {
1558                    data_type,
1559                    value: parser.parse_value()?,
1560                    uses_odbc_syntax: false,
1561                })),
1562            }
1563        })?;
1564
1565        if let Some(expr) = opt_expr {
1566            return Ok(expr);
1567        }
1568
1569        // Cache the dialect to avoid lifetime issues with the
1570        // next_token reference.
1571
1572        let dialect = self.dialect;
1573
1574        self.advance_token();
1575        let next_token_index = self.get_current_index();
1576        let next_token = self.get_current_token();
1577        let span = next_token.span;
1578        let expr = match &next_token.token {
1579            Token::Word(w) => {
1580                // The word we consumed may fall into one of two cases: it has a special meaning, or not.
1581                // For example, in Snowflake, the word `interval` may have two meanings depending on the context:
1582                // `SELECT CURRENT_DATE() + INTERVAL '1 DAY', MAX(interval) FROM tbl;`
1583                //                          ^^^^^^^^^^^^^^^^      ^^^^^^^^
1584                //                         interval expression   identifier
1585                //
1586                // We first try to parse the word and following tokens as a special expression, and if that fails,
1587                // we rollback and try to parse it as an identifier.
1588                let w = w.clone();
1589                match self.try_parse(|parser| parser.parse_expr_prefix_by_reserved_word(&w, span)) {
1590                    // This word indicated an expression prefix and parsing was successful
1591                    Ok(Some(expr)) => Ok(expr),
1592
1593                    // No expression prefix associated with this word
1594                    Ok(None) => Ok(self.parse_expr_prefix_by_unreserved_word(&w, span)?),
1595
1596                    // If parsing of the word as a special expression failed, we are facing two options:
1597                    // 1. The statement is malformed, e.g. `SELECT INTERVAL '1 DAI` (`DAI` instead of `DAY`)
1598                    // 2. The word is used as an identifier, e.g. `SELECT MAX(interval) FROM tbl`
1599                    // We first try to parse the word as an identifier and if that fails
1600                    // we rollback and return the parsing error we got from trying to parse a
1601                    // special expression (to maintain backwards compatibility of parsing errors).
1602                    Err(e) => {
1603                        if !self.dialect.is_reserved_for_identifier(w.keyword) {
1604                            if let Ok(Some(expr)) = self.maybe_parse(|parser| {
1605                                parser.parse_expr_prefix_by_unreserved_word(&w, span)
1606                            }) {
1607                                return Ok(expr);
1608                            }
1609                        }
1610                        return Err(e);
1611                    }
1612                }
1613            } // End of Token::Word
1614            // array `[1, 2, 3]`
1615            Token::LBracket => self.parse_array_expr(false),
1616            tok @ Token::Minus | tok @ Token::Plus => {
1617                let op = if *tok == Token::Plus {
1618                    UnaryOperator::Plus
1619                } else {
1620                    UnaryOperator::Minus
1621                };
1622                Ok(Expr::UnaryOp {
1623                    op,
1624                    expr: Box::new(
1625                        self.parse_subexpr(self.dialect.prec_value(Precedence::MulDivModOp))?,
1626                    ),
1627                })
1628            }
1629            Token::ExclamationMark if dialect.supports_bang_not_operator() => Ok(Expr::UnaryOp {
1630                op: UnaryOperator::BangNot,
1631                expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
1632            }),
1633            tok @ Token::DoubleExclamationMark
1634            | tok @ Token::PGSquareRoot
1635            | tok @ Token::PGCubeRoot
1636            | tok @ Token::AtSign
1637                if dialect_is!(dialect is PostgreSqlDialect) =>
1638            {
1639                let op = match tok {
1640                    Token::DoubleExclamationMark => UnaryOperator::PGPrefixFactorial,
1641                    Token::PGSquareRoot => UnaryOperator::PGSquareRoot,
1642                    Token::PGCubeRoot => UnaryOperator::PGCubeRoot,
1643                    Token::AtSign => UnaryOperator::PGAbs,
1644                    _ => unreachable!(),
1645                };
1646                Ok(Expr::UnaryOp {
1647                    op,
1648                    expr: Box::new(
1649                        self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1650                    ),
1651                })
1652            }
1653            Token::Tilde => Ok(Expr::UnaryOp {
1654                op: UnaryOperator::BitwiseNot,
1655                expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?),
1656            }),
1657            tok @ Token::Sharp
1658            | tok @ Token::AtDashAt
1659            | tok @ Token::AtAt
1660            | tok @ Token::QuestionMarkDash
1661            | tok @ Token::QuestionPipe
1662                if self.dialect.supports_geometric_types() =>
1663            {
1664                let op = match tok {
1665                    Token::Sharp => UnaryOperator::Hash,
1666                    Token::AtDashAt => UnaryOperator::AtDashAt,
1667                    Token::AtAt => UnaryOperator::DoubleAt,
1668                    Token::QuestionMarkDash => UnaryOperator::QuestionDash,
1669                    Token::QuestionPipe => UnaryOperator::QuestionPipe,
1670                    _ => {
1671                        return Err(ParserError::ParserError(format!(
1672                            "Unexpected token in unary operator parsing: {tok:?}"
1673                        )))
1674                    }
1675                };
1676                Ok(Expr::UnaryOp {
1677                    op,
1678                    expr: Box::new(
1679                        self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1680                    ),
1681                })
1682            }
1683            Token::EscapedStringLiteral(_) if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) =>
1684            {
1685                self.prev_token();
1686                Ok(Expr::Value(self.parse_value()?))
1687            }
1688            Token::UnicodeStringLiteral(_) => {
1689                self.prev_token();
1690                Ok(Expr::Value(self.parse_value()?))
1691            }
1692            Token::Number(_, _)
1693            | Token::SingleQuotedString(_)
1694            | Token::DoubleQuotedString(_)
1695            | Token::TripleSingleQuotedString(_)
1696            | Token::TripleDoubleQuotedString(_)
1697            | Token::DollarQuotedString(_)
1698            | Token::SingleQuotedByteStringLiteral(_)
1699            | Token::DoubleQuotedByteStringLiteral(_)
1700            | Token::TripleSingleQuotedByteStringLiteral(_)
1701            | Token::TripleDoubleQuotedByteStringLiteral(_)
1702            | Token::SingleQuotedRawStringLiteral(_)
1703            | Token::DoubleQuotedRawStringLiteral(_)
1704            | Token::TripleSingleQuotedRawStringLiteral(_)
1705            | Token::TripleDoubleQuotedRawStringLiteral(_)
1706            | Token::NationalStringLiteral(_)
1707            | Token::HexStringLiteral(_) => {
1708                self.prev_token();
1709                Ok(Expr::Value(self.parse_value()?))
1710            }
1711            Token::LParen => {
1712                let expr = if let Some(expr) = self.try_parse_expr_sub_query()? {
1713                    expr
1714                } else if let Some(lambda) = self.try_parse_lambda()? {
1715                    return Ok(lambda);
1716                } else {
1717                    let exprs = self.parse_comma_separated(Parser::parse_expr)?;
1718                    match exprs.len() {
1719                        0 => unreachable!(), // parse_comma_separated ensures 1 or more
1720                        1 => Expr::Nested(Box::new(exprs.into_iter().next().unwrap())),
1721                        _ => Expr::Tuple(exprs),
1722                    }
1723                };
1724                self.expect_token(&Token::RParen)?;
1725                Ok(expr)
1726            }
1727            Token::Placeholder(_) | Token::Colon | Token::AtSign => {
1728                self.prev_token();
1729                Ok(Expr::Value(self.parse_value()?))
1730            }
1731            Token::LBrace => {
1732                self.prev_token();
1733                self.parse_lbrace_expr()
1734            }
1735            _ => self.expected_at("an expression", next_token_index),
1736        }?;
1737
1738        if !self.in_column_definition_state() && self.parse_keyword(Keyword::COLLATE) {
1739            Ok(Expr::Collate {
1740                expr: Box::new(expr),
1741                collation: self.parse_object_name(false)?,
1742            })
1743        } else {
1744            Ok(expr)
1745        }
1746    }
1747
1748    fn parse_geometric_type(&mut self, kind: GeometricTypeKind) -> Result<Expr, ParserError> {
1749        Ok(Expr::TypedString(TypedString {
1750            data_type: DataType::GeometricType(kind),
1751            value: self.parse_value()?,
1752            uses_odbc_syntax: false,
1753        }))
1754    }
1755
1756    /// Try to parse an [Expr::CompoundFieldAccess] like `a.b.c` or `a.b[1].c`,
1757    /// i.e. a compound expression whose parts are delimited by period or
1758    /// bracket notation.
1759    /// If all the fields are `Expr::Identifier`s, return an [Expr::CompoundIdentifier] instead.
1760    /// If only the root exists, return the root.
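    /// A sketch of the shapes this handles (illustrative only):
    ///
    /// ```sql
    /// a.b.c        -- all identifiers: parsed as a CompoundIdentifier
    /// a.b[1].c     -- mixed access: parsed as a CompoundFieldAccess
    /// tbl.*        -- PostgreSQL: parsed as a QualifiedWildcard (e.g. in array_agg(tbl.*))
    /// col(+)       -- dialects with the outer join operator: parsed as an OuterJoin
    /// ```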
1762    pub fn parse_compound_expr(
1763        &mut self,
1764        root: Expr,
1765        mut chain: Vec<AccessExpr>,
1766    ) -> Result<Expr, ParserError> {
1767        let mut ending_wildcard: Option<TokenWithSpan> = None;
1768        loop {
1769            if self.consume_token(&Token::Period) {
1770                let next_token = self.peek_token_ref();
1771                match &next_token.token {
1772                    Token::Mul => {
1773                        // Postgres explicitly allows `funcnm(tablenm.*)`; for example,
1774                        // the function `array_agg(tbl.*)` takes this code path.
1775                        if dialect_of!(self is PostgreSqlDialect) {
1776                            ending_wildcard = Some(self.next_token());
1777                        } else {
1778                            // Put back the consumed `.` token before exiting.
1779                            // If this expression is being parsed in the
1780                            // context of a projection, then the `.*` could imply
1781                            // a wildcard expansion. For example:
1782                            // `SELECT STRUCT('foo').* FROM T`
1783                            self.prev_token(); // .
1784                        }
1785
1786                        break;
1787                    }
1788                    Token::SingleQuotedString(s) => {
1789                        let expr =
1790                            Expr::Identifier(Ident::with_quote_and_span('\'', next_token.span, s));
1791                        chain.push(AccessExpr::Dot(expr));
1792                        self.advance_token(); // The consumed string
1793                    }
1794                    // Fallback to parsing an arbitrary expression.
1795                    _ => match self.parse_subexpr(self.dialect.prec_value(Precedence::Period))? {
1796                        // If we get back a compound field access or identifier,
1797                        // we flatten the nested expression.
1798                        // For example, if the current root is `foo`
1799                        // and we get back a compound identifier expression `bar.baz`,
1800                        // the full expression should be `foo.bar.baz` (i.e.
1801                        // a root with an access chain with 2 entries) and not
1802                        // `foo.(bar.baz)` (i.e. a root with an access chain with
1803                        // 1 entry).
1804                        Expr::CompoundFieldAccess { root, access_chain } => {
1805                            chain.push(AccessExpr::Dot(*root));
1806                            chain.extend(access_chain);
1807                        }
1808                        Expr::CompoundIdentifier(parts) => chain
1809                            .extend(parts.into_iter().map(Expr::Identifier).map(AccessExpr::Dot)),
1810                        expr => {
1811                            chain.push(AccessExpr::Dot(expr));
1812                        }
1813                    },
1814                }
1815            } else if !self.dialect.supports_partiql()
1816                && self.peek_token_ref().token == Token::LBracket
1817            {
1818                self.parse_multi_dim_subscript(&mut chain)?;
1819            } else {
1820                break;
1821            }
1822        }
1823
1824        let tok_index = self.get_current_index();
1825        if let Some(wildcard_token) = ending_wildcard {
1826            if !Self::is_all_ident(&root, &chain) {
1827                return self.expected("an identifier or a '*' after '.'", self.peek_token());
1828            };
1829            Ok(Expr::QualifiedWildcard(
1830                ObjectName::from(Self::exprs_to_idents(root, chain)?),
1831                AttachedToken(wildcard_token),
1832            ))
1833        } else if self.maybe_parse_outer_join_operator() {
1834            if !Self::is_all_ident(&root, &chain) {
1835                return self.expected_at("column identifier before (+)", tok_index);
1836            };
1837            let expr = if chain.is_empty() {
1838                root
1839            } else {
1840                Expr::CompoundIdentifier(Self::exprs_to_idents(root, chain)?)
1841            };
1842            Ok(Expr::OuterJoin(expr.into()))
1843        } else {
1844            Self::build_compound_expr(root, chain)
1845        }
1846    }
1847
1848    /// Combines a root expression and access chain to form
1849    /// a compound expression, which may be an [Expr::CompoundFieldAccess]
1850    /// or another special-cased expression such as [Expr::CompoundIdentifier]
1851    /// or [Expr::OuterJoin].
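    /// For instance (illustrative):
    ///
    /// ```sql
    /// a.b.c.foo(1, 2, 3)   -- flattened into a single function named a.b.c.foo
    /// T.foo(+)             -- flattened into an outer join on the column T.foo
    /// ```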
1852    fn build_compound_expr(
1853        root: Expr,
1854        mut access_chain: Vec<AccessExpr>,
1855    ) -> Result<Expr, ParserError> {
1856        if access_chain.is_empty() {
1857            return Ok(root);
1858        }
1859
1860        if Self::is_all_ident(&root, &access_chain) {
1861            return Ok(Expr::CompoundIdentifier(Self::exprs_to_idents(
1862                root,
1863                access_chain,
1864            )?));
1865        }
1866
1867        // Flatten qualified function calls.
1868        // For example, the expression `a.b.c.foo(1,2,3)` should
1869        // represent a function called `a.b.c.foo`, rather than
1870        // a composite expression.
1871        if matches!(root, Expr::Identifier(_))
1872            && matches!(
1873                access_chain.last(),
1874                Some(AccessExpr::Dot(Expr::Function(_)))
1875            )
1876            && access_chain
1877                .iter()
1878                .rev()
1879                .skip(1) // All except the Function
1880                .all(|access| matches!(access, AccessExpr::Dot(Expr::Identifier(_))))
1881        {
1882            let Some(AccessExpr::Dot(Expr::Function(mut func))) = access_chain.pop() else {
1883                return parser_err!("expected function expression", root.span().start);
1884            };
1885
1886            let compound_func_name = [root]
1887                .into_iter()
1888                .chain(access_chain.into_iter().flat_map(|access| match access {
1889                    AccessExpr::Dot(expr) => Some(expr),
1890                    _ => None,
1891                }))
1892                .flat_map(|expr| match expr {
1893                    Expr::Identifier(ident) => Some(ident),
1894                    _ => None,
1895                })
1896                .map(ObjectNamePart::Identifier)
1897                .chain(func.name.0)
1898                .collect::<Vec<_>>();
1899            func.name = ObjectName(compound_func_name);
1900
1901            return Ok(Expr::Function(func));
1902        }
1903
1904        // Flatten qualified outer join expressions.
1905        // For example, the expression `T.foo(+)` should
1906        // represent an outer join on the column name `T.foo`
1907        // rather than a composite expression.
1908        if access_chain.len() == 1
1909            && matches!(
1910                access_chain.last(),
1911                Some(AccessExpr::Dot(Expr::OuterJoin(_)))
1912            )
1913        {
1914            let Some(AccessExpr::Dot(Expr::OuterJoin(inner_expr))) = access_chain.pop() else {
1915                return parser_err!("expected (+) expression", root.span().start);
1916            };
1917
1918            if !Self::is_all_ident(&root, &[]) {
1919                return parser_err!("column identifier before (+)", root.span().start);
1920            };
1921
1922            let token_start = root.span().start;
1923            let mut idents = Self::exprs_to_idents(root, vec![])?;
1924            match *inner_expr {
1925                Expr::CompoundIdentifier(suffix) => idents.extend(suffix),
1926                Expr::Identifier(suffix) => idents.push(suffix),
1927                _ => {
1928                    return parser_err!("column identifier before (+)", token_start);
1929                }
1930            }
1931
1932            return Ok(Expr::OuterJoin(Expr::CompoundIdentifier(idents).into()));
1933        }
1934
1935        Ok(Expr::CompoundFieldAccess {
1936            root: Box::new(root),
1937            access_chain,
1938        })
1939    }
1940
1941    fn keyword_to_modifier(k: Keyword) -> Option<ContextModifier> {
1942        match k {
1943            Keyword::LOCAL => Some(ContextModifier::Local),
1944            Keyword::GLOBAL => Some(ContextModifier::Global),
1945            Keyword::SESSION => Some(ContextModifier::Session),
1946            _ => None,
1947        }
1948    }
1949
1950    /// Check if the root is an identifier and all fields are identifiers.
1951    fn is_all_ident(root: &Expr, fields: &[AccessExpr]) -> bool {
1952        if !matches!(root, Expr::Identifier(_)) {
1953            return false;
1954        }
1955        fields
1956            .iter()
1957            .all(|x| matches!(x, AccessExpr::Dot(Expr::Identifier(_))))
1958    }
1959
1960    /// Convert a root and a list of fields to a list of identifiers.
1961    fn exprs_to_idents(root: Expr, fields: Vec<AccessExpr>) -> Result<Vec<Ident>, ParserError> {
1962        let mut idents = vec![];
1963        if let Expr::Identifier(root) = root {
1964            idents.push(root);
1965            for x in fields {
1966                if let AccessExpr::Dot(Expr::Identifier(ident)) = x {
1967                    idents.push(ident);
1968                } else {
1969                    return parser_err!(
1970                        format!("Expected identifier, found: {}", x),
1971                        x.span().start
1972                    );
1973                }
1974            }
1975            Ok(idents)
1976        } else {
1977            parser_err!(
1978                format!("Expected identifier, found: {}", root),
1979                root.span().start
1980            )
1981        }
1982    }
1983
1984    /// Returns true if the next tokens indicate the outer join operator `(+)`.
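    /// For example (illustrative), the `(+)` in:
    ///
    /// ```sql
    /// SELECT * FROM t1, t2 WHERE t1.id = t2.id (+)
    /// ```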
1985    fn peek_outer_join_operator(&mut self) -> bool {
1986        if !self.dialect.supports_outer_join_operator() {
1987            return false;
1988        }
1989
1990        let [maybe_lparen, maybe_plus, maybe_rparen] = self.peek_tokens_ref();
1991        Token::LParen == maybe_lparen.token
1992            && Token::Plus == maybe_plus.token
1993            && Token::RParen == maybe_rparen.token
1994    }
1995
1996    /// If the next tokens indicate the outer join operator `(+)`, consume
1997    /// them and return true.
1998    fn maybe_parse_outer_join_operator(&mut self) -> bool {
1999        self.dialect.supports_outer_join_operator()
2000            && self.consume_tokens(&[Token::LParen, Token::Plus, Token::RParen])
2001    }
2002
2003    pub fn parse_utility_options(&mut self) -> Result<Vec<UtilityOption>, ParserError> {
2004        self.expect_token(&Token::LParen)?;
2005        let options = self.parse_comma_separated(Self::parse_utility_option)?;
2006        self.expect_token(&Token::RParen)?;
2007
2008        Ok(options)
2009    }
2010
2011    fn parse_utility_option(&mut self) -> Result<UtilityOption, ParserError> {
2012        let name = self.parse_identifier()?;
2013
2014        let next_token = self.peek_token();
2015        if next_token == Token::Comma || next_token == Token::RParen {
2016            return Ok(UtilityOption { name, arg: None });
2017        }
2018        let arg = self.parse_expr()?;
2019
2020        Ok(UtilityOption {
2021            name,
2022            arg: Some(arg),
2023        })
2024    }
2025
2026    fn try_parse_expr_sub_query(&mut self) -> Result<Option<Expr>, ParserError> {
2027        if !self.peek_sub_query() {
2028            return Ok(None);
2029        }
2030
2031        Ok(Some(Expr::Subquery(self.parse_query()?)))
2032    }
2033
2034    fn try_parse_lambda(&mut self) -> Result<Option<Expr>, ParserError> {
2035        if !self.dialect.supports_lambda_functions() {
2036            return Ok(None);
2037        }
2038        self.maybe_parse(|p| {
2039            let params = p.parse_comma_separated(|p| p.parse_identifier())?;
2040            p.expect_token(&Token::RParen)?;
2041            p.expect_token(&Token::Arrow)?;
2042            let expr = p.parse_expr()?;
2043            Ok(Expr::Lambda(LambdaFunction {
2044                params: OneOrManyWithParens::Many(params),
2045                body: Box::new(expr),
2046            }))
2047        })
2048    }
2049
2050    /// Tries to parse the body of an [ODBC escaping sequence],
2051    /// i.e. without the enclosing braces. Currently implemented:
2052    /// - Scalar function calls
2053    /// - Date, time, and timestamp literals
2054    ///
2055    /// See <https://learn.microsoft.com/en-us/sql/odbc/reference/develop-app/escape-sequences-in-odbc?view=sql-server-2017>
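    /// Illustrative examples of escape sequences whose bodies this accepts
    /// (the enclosing braces themselves are handled elsewhere):
    ///
    /// ```sql
    /// {fn CONCAT('foo', 'bar')}
    /// {ts '2025-07-17 14:12:01'}
    /// ```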
2056    fn maybe_parse_odbc_body(&mut self) -> Result<Option<Expr>, ParserError> {
2057        // Attempt 1: Try to parse it as a function.
2058        if let Some(expr) = self.maybe_parse_odbc_fn_body()? {
2059            return Ok(Some(expr));
2060        }
2061        // Attempt 2: Try to parse it as a Date, Time or Timestamp Literal
2062        self.maybe_parse_odbc_body_datetime()
2063    }
2064
2065    /// Tries to parse the body of an [ODBC Date, Time, and Timestamp Literals] call.
2066    ///
2067    /// ```sql
2068    /// {d '2025-07-17'}
2069    /// {t '14:12:01'}
2070    /// {ts '2025-07-17 14:12:01'}
2071    /// ```
2072    ///
2073    /// [ODBC Date, Time, and Timestamp Literals]:
2074    /// https://learn.microsoft.com/en-us/sql/odbc/reference/develop-app/date-time-and-timestamp-literals?view=sql-server-2017
2075    fn maybe_parse_odbc_body_datetime(&mut self) -> Result<Option<Expr>, ParserError> {
2076        self.maybe_parse(|p| {
2077            let token = p.next_token().clone();
2078            let word_string = token.token.to_string();
2079            let data_type = match word_string.as_str() {
2080                "t" => DataType::Time(None, TimezoneInfo::None),
2081                "d" => DataType::Date,
2082                "ts" => DataType::Timestamp(None, TimezoneInfo::None),
2083                _ => return p.expected("ODBC datetime keyword (t, d, or ts)", token),
2084            };
2085            let value = p.parse_value()?;
2086            Ok(Expr::TypedString(TypedString {
2087                data_type,
2088                value,
2089                uses_odbc_syntax: true,
2090            }))
2091        })
2092    }
2093
2094    /// Tries to parse the body of an [ODBC function] call,
2095    /// i.e. without the enclosing braces.
2096    ///
2097    /// ```sql
2098    /// fn myfunc(1,2,3)
2099    /// ```
2100    ///
2101    /// [ODBC function]: https://learn.microsoft.com/en-us/sql/odbc/reference/develop-app/scalar-function-calls?view=sql-server-2017
2102    fn maybe_parse_odbc_fn_body(&mut self) -> Result<Option<Expr>, ParserError> {
2103        self.maybe_parse(|p| {
2104            p.expect_keyword(Keyword::FN)?;
2105            let fn_name = p.parse_object_name(false)?;
2106            let mut fn_call = p.parse_function_call(fn_name)?;
2107            fn_call.uses_odbc_syntax = true;
2108            Ok(Expr::Function(fn_call))
2109        })
2110    }
2111
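    /// Parses a function call expression for the already-consumed function `name`,
    /// starting at the opening parenthesis. A sketch of accepted shapes
    /// (illustrative; some require dialect support):
    ///
    /// ```sql
    /// my_func(1, 2, 3)
    /// COUNT(x) FILTER (WHERE x > 0)
    /// PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY x)
    /// ROW_NUMBER() OVER (PARTITION BY a ORDER BY b)
    /// ```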
2112    pub fn parse_function(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2113        self.parse_function_call(name).map(Expr::Function)
2114    }
2115
2116    fn parse_function_call(&mut self, name: ObjectName) -> Result<Function, ParserError> {
2117        self.expect_token(&Token::LParen)?;
2118
2119        // Snowflake permits a subquery to be passed as an argument without
2120        // an enclosing set of parens if it's the only argument.
2121        if dialect_of!(self is SnowflakeDialect) && self.peek_sub_query() {
2122            let subquery = self.parse_query()?;
2123            self.expect_token(&Token::RParen)?;
2124            return Ok(Function {
2125                name,
2126                uses_odbc_syntax: false,
2127                parameters: FunctionArguments::None,
2128                args: FunctionArguments::Subquery(subquery),
2129                filter: None,
2130                null_treatment: None,
2131                over: None,
2132                within_group: vec![],
2133            });
2134        }
2135
2136        let mut args = self.parse_function_argument_list()?;
2137        let mut parameters = FunctionArguments::None;
2138        // ClickHouse aggregations support parametric functions like `HISTOGRAM(0.5, 0.6)(x, y)`
2139        // in which (0.5, 0.6) are the parameters to the function.
2140        if dialect_of!(self is ClickHouseDialect | GenericDialect)
2141            && self.consume_token(&Token::LParen)
2142        {
2143            parameters = FunctionArguments::List(args);
2144            args = self.parse_function_argument_list()?;
2145        }
2146
2147        let within_group = if self.parse_keywords(&[Keyword::WITHIN, Keyword::GROUP]) {
2148            self.expect_token(&Token::LParen)?;
2149            self.expect_keywords(&[Keyword::ORDER, Keyword::BY])?;
2150            let order_by = self.parse_comma_separated(Parser::parse_order_by_expr)?;
2151            self.expect_token(&Token::RParen)?;
2152            order_by
2153        } else {
2154            vec![]
2155        };
2156
2157        let filter = if self.dialect.supports_filter_during_aggregation()
2158            && self.parse_keyword(Keyword::FILTER)
2159            && self.consume_token(&Token::LParen)
2160            && self.parse_keyword(Keyword::WHERE)
2161        {
2162            let filter = Some(Box::new(self.parse_expr()?));
2163            self.expect_token(&Token::RParen)?;
2164            filter
2165        } else {
2166            None
2167        };
2168
2169        // Syntax for null treatment shows up either in the args list
2170        // or after the function call, but not both.
2171        let null_treatment = if args
2172            .clauses
2173            .iter()
2174            .all(|clause| !matches!(clause, FunctionArgumentClause::IgnoreOrRespectNulls(_)))
2175        {
2176            self.parse_null_treatment()?
2177        } else {
2178            None
2179        };
2180
2181        let over = if self.parse_keyword(Keyword::OVER) {
2182            if self.consume_token(&Token::LParen) {
2183                let window_spec = self.parse_window_spec()?;
2184                Some(WindowType::WindowSpec(window_spec))
2185            } else {
2186                Some(WindowType::NamedWindow(self.parse_identifier()?))
2187            }
2188        } else {
2189            None
2190        };
2191
2192        Ok(Function {
2193            name,
2194            uses_odbc_syntax: false,
2195            parameters,
2196            args: FunctionArguments::List(args),
2197            null_treatment,
2198            filter,
2199            over,
2200            within_group,
2201        })
2202    }
2203
2204    /// Optionally parses a null treatment clause.
2205    fn parse_null_treatment(&mut self) -> Result<Option<NullTreatment>, ParserError> {
2206        match self.parse_one_of_keywords(&[Keyword::RESPECT, Keyword::IGNORE]) {
2207            Some(keyword) => {
2208                self.expect_keyword_is(Keyword::NULLS)?;
2209
2210                Ok(match keyword {
2211                    Keyword::RESPECT => Some(NullTreatment::RespectNulls),
2212                    Keyword::IGNORE => Some(NullTreatment::IgnoreNulls),
2213                    _ => None,
2214                })
2215            }
2216            None => Ok(None),
2217        }
2218    }
2219
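    /// Parses the remainder of a datetime value function such as `CURRENT_TIMESTAMP`,
    /// whose name has already been consumed and which may appear with or without
    /// parentheses (illustrative):
    ///
    /// ```sql
    /// CURRENT_TIMESTAMP
    /// CURRENT_TIMESTAMP(3)
    /// ```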
2220    pub fn parse_time_functions(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2221        let args = if self.consume_token(&Token::LParen) {
2222            FunctionArguments::List(self.parse_function_argument_list()?)
2223        } else {
2224            FunctionArguments::None
2225        };
2226        Ok(Expr::Function(Function {
2227            name,
2228            uses_odbc_syntax: false,
2229            parameters: FunctionArguments::None,
2230            args,
2231            filter: None,
2232            over: None,
2233            null_treatment: None,
2234            within_group: vec![],
2235        }))
2236    }
2237
2238    pub fn parse_window_frame_units(&mut self) -> Result<WindowFrameUnits, ParserError> {
2239        let next_token = self.next_token();
2240        match &next_token.token {
2241            Token::Word(w) => match w.keyword {
2242                Keyword::ROWS => Ok(WindowFrameUnits::Rows),
2243                Keyword::RANGE => Ok(WindowFrameUnits::Range),
2244                Keyword::GROUPS => Ok(WindowFrameUnits::Groups),
2245                _ => self.expected("ROWS, RANGE, GROUPS", next_token)?,
2246            },
2247            _ => self.expected("ROWS, RANGE, GROUPS", next_token),
2248        }
2249    }
2250
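    /// Parses a window frame clause as found inside an `OVER (...)` specification
    /// (illustrative examples):
    ///
    /// ```sql
    /// ROWS BETWEEN 1 PRECEDING AND CURRENT ROW
    /// RANGE UNBOUNDED PRECEDING
    /// ```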
2251    pub fn parse_window_frame(&mut self) -> Result<WindowFrame, ParserError> {
2252        let units = self.parse_window_frame_units()?;
2253        let (start_bound, end_bound) = if self.parse_keyword(Keyword::BETWEEN) {
2254            let start_bound = self.parse_window_frame_bound()?;
2255            self.expect_keyword_is(Keyword::AND)?;
2256            let end_bound = Some(self.parse_window_frame_bound()?);
2257            (start_bound, end_bound)
2258        } else {
2259            (self.parse_window_frame_bound()?, None)
2260        };
2261        Ok(WindowFrame {
2262            units,
2263            start_bound,
2264            end_bound,
2265        })
2266    }
2267
2268    /// Parse `CURRENT ROW` or `{ <positive number> | UNBOUNDED } { PRECEDING | FOLLOWING }`
2269    pub fn parse_window_frame_bound(&mut self) -> Result<WindowFrameBound, ParserError> {
2270        if self.parse_keywords(&[Keyword::CURRENT, Keyword::ROW]) {
2271            Ok(WindowFrameBound::CurrentRow)
2272        } else {
2273            let rows = if self.parse_keyword(Keyword::UNBOUNDED) {
2274                None
2275            } else {
2276                Some(Box::new(match self.peek_token().token {
2277                    Token::SingleQuotedString(_) => self.parse_interval()?,
2278                    _ => self.parse_expr()?,
2279                }))
2280            };
2281            if self.parse_keyword(Keyword::PRECEDING) {
2282                Ok(WindowFrameBound::Preceding(rows))
2283            } else if self.parse_keyword(Keyword::FOLLOWING) {
2284                Ok(WindowFrameBound::Following(rows))
2285            } else {
2286                self.expected("PRECEDING or FOLLOWING", self.peek_token())
2287            }
2288        }
2289    }
2290
2291    /// Parse a GROUP BY expression, which can be a grouping set, ROLLUP, CUBE, or a simple expression.
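    /// Illustrative examples of accepted forms:
    ///
    /// ```sql
    /// GROUPING SETS ((a), (a, b), ())
    /// ROLLUP (a, b)
    /// CUBE (a, b)
    /// ()            -- empty grouping, PostgreSQL
    /// a + b         -- any ordinary expression
    /// ```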
2292    fn parse_group_by_expr(&mut self) -> Result<Expr, ParserError> {
2293        if self.dialect.supports_group_by_expr() {
2294            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
2295                self.expect_token(&Token::LParen)?;
2296                let result = self.parse_comma_separated(|p| p.parse_tuple(false, true))?;
2297                self.expect_token(&Token::RParen)?;
2298                Ok(Expr::GroupingSets(result))
2299            } else if self.parse_keyword(Keyword::CUBE) {
2300                self.expect_token(&Token::LParen)?;
2301                let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2302                self.expect_token(&Token::RParen)?;
2303                Ok(Expr::Cube(result))
2304            } else if self.parse_keyword(Keyword::ROLLUP) {
2305                self.expect_token(&Token::LParen)?;
2306                let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2307                self.expect_token(&Token::RParen)?;
2308                Ok(Expr::Rollup(result))
2309            } else if self.consume_tokens(&[Token::LParen, Token::RParen]) {
2310                // PostgreSQL allows using an empty tuple as a group by expression,
2311                // e.g. `GROUP BY (), name`. See the GROUP BY clause section in
2312                // [PostgreSQL](https://www.postgresql.org/docs/16/sql-select.html)
2313                Ok(Expr::Tuple(vec![]))
2314            } else {
2315                self.parse_expr()
2316            }
2317        } else {
2318            // TODO parse rollup for other dialects
2319            self.parse_expr()
2320        }
2321    }
2322
2323    /// Parse a tuple with `(` and `)`.
2324    /// If `lift_singleton` is true, a bare expression without parentheses is accepted and lifted into a tuple of length 1; otherwise the parentheses are required.
2325    /// If `allow_empty` is true, then an empty tuple is allowed.
2326    fn parse_tuple(
2327        &mut self,
2328        lift_singleton: bool,
2329        allow_empty: bool,
2330    ) -> Result<Vec<Expr>, ParserError> {
2331        if lift_singleton {
2332            if self.consume_token(&Token::LParen) {
2333                let result = if allow_empty && self.consume_token(&Token::RParen) {
2334                    vec![]
2335                } else {
2336                    let result = self.parse_comma_separated(Parser::parse_expr)?;
2337                    self.expect_token(&Token::RParen)?;
2338                    result
2339                };
2340                Ok(result)
2341            } else {
2342                Ok(vec![self.parse_expr()?])
2343            }
2344        } else {
2345            self.expect_token(&Token::LParen)?;
2346            let result = if allow_empty && self.consume_token(&Token::RParen) {
2347                vec![]
2348            } else {
2349                let result = self.parse_comma_separated(Parser::parse_expr)?;
2350                self.expect_token(&Token::RParen)?;
2351                result
2352            };
2353            Ok(result)
2354        }
2355    }
2356
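    /// Parses a CASE expression; the CASE keyword has already been consumed.
    /// Illustrative examples:
    ///
    /// ```sql
    /// CASE WHEN a > 0 THEN 'pos' WHEN a < 0 THEN 'neg' ELSE 'zero' END
    /// CASE x WHEN 1 THEN 'one' WHEN 2 THEN 'two' END
    /// ```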
2357    pub fn parse_case_expr(&mut self) -> Result<Expr, ParserError> {
2358        let case_token = AttachedToken(self.get_current_token().clone());
2359        let mut operand = None;
2360        if !self.parse_keyword(Keyword::WHEN) {
2361            operand = Some(Box::new(self.parse_expr()?));
2362            self.expect_keyword_is(Keyword::WHEN)?;
2363        }
2364        let mut conditions = vec![];
2365        loop {
2366            let condition = self.parse_expr()?;
2367            self.expect_keyword_is(Keyword::THEN)?;
2368            let result = self.parse_expr()?;
2369            conditions.push(CaseWhen { condition, result });
2370            if !self.parse_keyword(Keyword::WHEN) {
2371                break;
2372            }
2373        }
2374        let else_result = if self.parse_keyword(Keyword::ELSE) {
2375            Some(Box::new(self.parse_expr()?))
2376        } else {
2377            None
2378        };
2379        let end_token = AttachedToken(self.expect_keyword(Keyword::END)?);
2380        Ok(Expr::Case {
2381            case_token,
2382            end_token,
2383            operand,
2384            conditions,
2385            else_result,
2386        })
2387    }
2388
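    /// Optionally parses a `FORMAT` clause inside a CAST (e.g. BigQuery-style),
    /// returning `None` if the next keyword is not FORMAT. Illustrative example:
    ///
    /// ```sql
    /// CAST(d AS STRING FORMAT 'YYYY-MM-DD' AT TIME ZONE 'UTC')
    /// ```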
2389    pub fn parse_optional_cast_format(&mut self) -> Result<Option<CastFormat>, ParserError> {
2390        if self.parse_keyword(Keyword::FORMAT) {
2391            let value = self.parse_value()?.value;
2392            match self.parse_optional_time_zone()? {
2393                Some(tz) => Ok(Some(CastFormat::ValueAtTimeZone(value, tz))),
2394                None => Ok(Some(CastFormat::Value(value))),
2395            }
2396        } else {
2397            Ok(None)
2398        }
2399    }
2400
2401    pub fn parse_optional_time_zone(&mut self) -> Result<Option<Value>, ParserError> {
2402        if self.parse_keywords(&[Keyword::AT, Keyword::TIME, Keyword::ZONE]) {
2403            self.parse_value().map(|v| Some(v.value))
2404        } else {
2405            Ok(None)
2406        }
2407    }
2408
2409    /// Parses an MSSQL-style CONVERT function, where the target type precedes the value.
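    /// Illustrative example:
    ///
    /// ```sql
    /// CONVERT(VARCHAR(30), GETDATE(), 120)
    /// ```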
2410    fn parse_mssql_convert(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2411        self.expect_token(&Token::LParen)?;
2412        let data_type = self.parse_data_type()?;
2413        self.expect_token(&Token::Comma)?;
2414        let expr = self.parse_expr()?;
2415        let styles = if self.consume_token(&Token::Comma) {
2416            self.parse_comma_separated(Parser::parse_expr)?
2417        } else {
2418            Default::default()
2419        };
2420        self.expect_token(&Token::RParen)?;
2421        Ok(Expr::Convert {
2422            is_try,
2423            expr: Box::new(expr),
2424            data_type: Some(data_type),
2425            charset: None,
2426            target_before_value: true,
2427            styles,
2428        })
2429    }
2430
2431    /// Parse a SQL CONVERT function:
2432    ///  - `CONVERT('héhé' USING utf8mb4)` (MySQL)
2433    ///  - `CONVERT('héhé', CHAR CHARACTER SET utf8mb4)` (MySQL)
2434    ///  - `CONVERT(DECIMAL(10, 5), 42)` (MSSQL) - the type comes first
2435    pub fn parse_convert_expr(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2436        if self.dialect.convert_type_before_value() {
2437            return self.parse_mssql_convert(is_try);
2438        }
2439        self.expect_token(&Token::LParen)?;
2440        let expr = self.parse_expr()?;
2441        if self.parse_keyword(Keyword::USING) {
2442            let charset = self.parse_object_name(false)?;
2443            self.expect_token(&Token::RParen)?;
2444            return Ok(Expr::Convert {
2445                is_try,
2446                expr: Box::new(expr),
2447                data_type: None,
2448                charset: Some(charset),
2449                target_before_value: false,
2450                styles: vec![],
2451            });
2452        }
2453        self.expect_token(&Token::Comma)?;
2454        let data_type = self.parse_data_type()?;
2455        let charset = if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
2456            Some(self.parse_object_name(false)?)
2457        } else {
2458            None
2459        };
2460        self.expect_token(&Token::RParen)?;
2461        Ok(Expr::Convert {
2462            is_try,
2463            expr: Box::new(expr),
2464            data_type: Some(data_type),
2465            charset,
2466            target_before_value: false,
2467            styles: vec![],
2468        })
2469    }
2470
2471    /// Parse a SQL CAST function e.g. `CAST(expr AS FLOAT)`
2472    pub fn parse_cast_expr(&mut self, kind: CastKind) -> Result<Expr, ParserError> {
2473        self.expect_token(&Token::LParen)?;
2474        let expr = self.parse_expr()?;
2475        self.expect_keyword_is(Keyword::AS)?;
2476        let data_type = self.parse_data_type()?;
2477        let format = self.parse_optional_cast_format()?;
2478        self.expect_token(&Token::RParen)?;
2479        Ok(Expr::Cast {
2480            kind,
2481            expr: Box::new(expr),
2482            data_type,
2483            format,
2484        })
2485    }
2486
2487    /// Parse a SQL EXISTS expression e.g. `WHERE EXISTS(SELECT ...)`.
2488    pub fn parse_exists_expr(&mut self, negated: bool) -> Result<Expr, ParserError> {
2489        self.expect_token(&Token::LParen)?;
2490        let exists_node = Expr::Exists {
2491            negated,
2492            subquery: self.parse_query()?,
2493        };
2494        self.expect_token(&Token::RParen)?;
2495        Ok(exists_node)
2496    }
2497
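    /// Parses the parenthesized part of an EXTRACT expression; the EXTRACT keyword
    /// has already been consumed. Illustrative examples of the construct:
    ///
    /// ```sql
    /// EXTRACT(YEAR FROM order_date)
    /// EXTRACT(YEAR, order_date)     -- comma syntax, e.g. Snowflake
    /// ```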
2498    pub fn parse_extract_expr(&mut self) -> Result<Expr, ParserError> {
2499        self.expect_token(&Token::LParen)?;
2500        let field = self.parse_date_time_field()?;
2501
2502        let syntax = if self.parse_keyword(Keyword::FROM) {
2503            ExtractSyntax::From
2504        } else if self.consume_token(&Token::Comma)
2505            && dialect_of!(self is SnowflakeDialect | GenericDialect)
2506        {
2507            ExtractSyntax::Comma
2508        } else {
2509            return Err(ParserError::ParserError(
2510                "Expected 'FROM' or ','".to_string(),
2511            ));
2512        };
2513
2514        let expr = self.parse_expr()?;
2515        self.expect_token(&Token::RParen)?;
2516        Ok(Expr::Extract {
2517            field,
2518            expr: Box::new(expr),
2519            syntax,
2520        })
2521    }
2522
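    /// Parses the parenthesized part of a CEIL or FLOOR expression; the keyword has
    /// already been consumed. Illustrative examples of the construct:
    ///
    /// ```sql
    /// CEIL(ts TO DAY)
    /// FLOOR(x, 2)
    /// FLOOR(x)
    /// ```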
2523    pub fn parse_ceil_floor_expr(&mut self, is_ceil: bool) -> Result<Expr, ParserError> {
2524        self.expect_token(&Token::LParen)?;
2525        let expr = self.parse_expr()?;
2526        // Parse `CEIL/FLOOR(expr)`
2527        let field = if self.parse_keyword(Keyword::TO) {
2528            // Parse `CEIL/FLOOR(expr TO DateTimeField)`
2529            CeilFloorKind::DateTimeField(self.parse_date_time_field()?)
2530        } else if self.consume_token(&Token::Comma) {
2531            // Parse `CEIL/FLOOR(expr, scale)`
2532            match self.parse_value()?.value {
2533                Value::Number(n, s) => CeilFloorKind::Scale(Value::Number(n, s)),
2534                _ => {
2535                    return Err(ParserError::ParserError(
2536                        "Scale field can only be of number type".to_string(),
2537                    ))
2538                }
2539            }
2540        } else {
2541            CeilFloorKind::DateTimeField(DateTimeField::NoDateTime)
2542        };
2543        self.expect_token(&Token::RParen)?;
2544        if is_ceil {
2545            Ok(Expr::Ceil {
2546                expr: Box::new(expr),
2547                field,
2548            })
2549        } else {
2550            Ok(Expr::Floor {
2551                expr: Box::new(expr),
2552                field,
2553            })
2554        }
2555    }
2556
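    /// Parses a POSITION expression, falling back to an ordinary function call when
    /// the special `IN` syntax is not used. Illustrative examples:
    ///
    /// ```sql
    /// POSITION('@' IN email)
    /// POSITION(needle, haystack)    -- parsed as a regular function call
    /// ```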
2557    pub fn parse_position_expr(&mut self, ident: Ident) -> Result<Expr, ParserError> {
2558        let between_prec = self.dialect.prec_value(Precedence::Between);
2559        let position_expr = self.maybe_parse(|p| {
2560            // Parse the special form `POSITION('@' IN field)`
2561            p.expect_token(&Token::LParen)?;
2562
2563            // Parse the subexpr till the IN keyword
2564            let expr = p.parse_subexpr(between_prec)?;
2565            p.expect_keyword_is(Keyword::IN)?;
2566            let from = p.parse_expr()?;
2567            p.expect_token(&Token::RParen)?;
2568            Ok(Expr::Position {
2569                expr: Box::new(expr),
2570                r#in: Box::new(from),
2571            })
2572        })?;
2573        match position_expr {
2574            Some(expr) => Ok(expr),
2575            // Snowflake supports `position` as an ordinary function call
2576            // without the special `IN` syntax.
2577            None => self.parse_function(ObjectName::from(vec![ident])),
2578        }
2579    }
2580
2581    // { SUBSTRING | SUBSTR } (<EXPR> [FROM 1] [FOR 3])
2582    pub fn parse_substring(&mut self) -> Result<Expr, ParserError> {
2583        let shorthand = match self.expect_one_of_keywords(&[Keyword::SUBSTR, Keyword::SUBSTRING])? {
2584            Keyword::SUBSTR => true,
2585            Keyword::SUBSTRING => false,
2586            _ => {
2587                self.prev_token();
2588                return self.expected("SUBSTR or SUBSTRING", self.peek_token());
2589            }
2590        };
2591        self.expect_token(&Token::LParen)?;
2592        let expr = self.parse_expr()?;
2593        let mut from_expr = None;
2594        let special = self.consume_token(&Token::Comma);
2595        if special || self.parse_keyword(Keyword::FROM) {
2596            from_expr = Some(self.parse_expr()?);
2597        }
2598
2599        let mut to_expr = None;
2600        if self.parse_keyword(Keyword::FOR) || self.consume_token(&Token::Comma) {
2601            to_expr = Some(self.parse_expr()?);
2602        }
2603        self.expect_token(&Token::RParen)?;
2604
2605        Ok(Expr::Substring {
2606            expr: Box::new(expr),
2607            substring_from: from_expr.map(Box::new),
2608            substring_for: to_expr.map(Box::new),
2609            special,
2610            shorthand,
2611        })
2612    }
2613
2614    pub fn parse_overlay_expr(&mut self) -> Result<Expr, ParserError> {
2615        // Parse `OVERLAY(<expr> PLACING <expr> FROM 1 [FOR 3])`
2616        self.expect_token(&Token::LParen)?;
2617        let expr = self.parse_expr()?;
2618        self.expect_keyword_is(Keyword::PLACING)?;
2619        let what_expr = self.parse_expr()?;
2620        self.expect_keyword_is(Keyword::FROM)?;
2621        let from_expr = self.parse_expr()?;
2622        let mut for_expr = None;
2623        if self.parse_keyword(Keyword::FOR) {
2624            for_expr = Some(self.parse_expr()?);
2625        }
2626        self.expect_token(&Token::RParen)?;
2627
2628        Ok(Expr::Overlay {
2629            expr: Box::new(expr),
2630            overlay_what: Box::new(what_expr),
2631            overlay_from: Box::new(from_expr),
2632            overlay_for: for_expr.map(Box::new),
2633        })
2634    }
2635
2636    /// ```sql
2637    /// TRIM ([WHERE] ['text' FROM] 'text')
2638    /// TRIM ('text')
2639    /// TRIM(<expr> [, characters]) -- only DuckDB, Snowflake, or BigQuery
2640    /// ```
2641    pub fn parse_trim_expr(&mut self) -> Result<Expr, ParserError> {
2642        self.expect_token(&Token::LParen)?;
2643        let mut trim_where = None;
2644        if let Token::Word(word) = self.peek_token().token {
2645            if [Keyword::BOTH, Keyword::LEADING, Keyword::TRAILING].contains(&word.keyword) {
2646                trim_where = Some(self.parse_trim_where()?);
2647            }
2648        }
2649        let expr = self.parse_expr()?;
2650        if self.parse_keyword(Keyword::FROM) {
2651            let trim_what = Box::new(expr);
2652            let expr = self.parse_expr()?;
2653            self.expect_token(&Token::RParen)?;
2654            Ok(Expr::Trim {
2655                expr: Box::new(expr),
2656                trim_where,
2657                trim_what: Some(trim_what),
2658                trim_characters: None,
2659            })
2660        } else if self.consume_token(&Token::Comma)
2661            && dialect_of!(self is DuckDbDialect | SnowflakeDialect | BigQueryDialect | GenericDialect)
2662        {
2663            let characters = self.parse_comma_separated(Parser::parse_expr)?;
2664            self.expect_token(&Token::RParen)?;
2665            Ok(Expr::Trim {
2666                expr: Box::new(expr),
2667                trim_where: None,
2668                trim_what: None,
2669                trim_characters: Some(characters),
2670            })
2671        } else {
2672            self.expect_token(&Token::RParen)?;
2673            Ok(Expr::Trim {
2674                expr: Box::new(expr),
2675                trim_where,
2676                trim_what: None,
2677                trim_characters: None,
2678            })
2679        }
2680    }
2681
2682    pub fn parse_trim_where(&mut self) -> Result<TrimWhereField, ParserError> {
2683        let next_token = self.next_token();
2684        match &next_token.token {
2685            Token::Word(w) => match w.keyword {
2686                Keyword::BOTH => Ok(TrimWhereField::Both),
2687                Keyword::LEADING => Ok(TrimWhereField::Leading),
2688                Keyword::TRAILING => Ok(TrimWhereField::Trailing),
2689                _ => self.expected("trim_where field", next_token)?,
2690            },
2691            _ => self.expected("trim_where field", next_token),
2692        }
2693    }
2694
2695    /// Parses an array expression `[ex1, ex2, ..]`
2696    /// If `named` is `true`, the expression used the `ARRAY` keyword, e.g. `ARRAY[ex1, ex2]`
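    ///
    /// A minimal usage sketch through the public parser API; the SQL text and the
    /// choice of `PostgreSqlDialect` below are illustrative assumptions:
    /// ```rust
    /// # use sqlparser::dialect::PostgreSqlDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::ast::Expr;
    /// let dialect = PostgreSqlDialect {};
    /// let mut parser = Parser::new(&dialect)
    ///     .try_with_sql("ARRAY[1, 2, 3]").unwrap();
    /// let expr = parser.parse_expr().unwrap();
    /// assert!(matches!(expr, Expr::Array(_)));
    /// ```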
2697    pub fn parse_array_expr(&mut self, named: bool) -> Result<Expr, ParserError> {
2698        let exprs = self.parse_comma_separated0(Parser::parse_expr, Token::RBracket)?;
2699        self.expect_token(&Token::RBracket)?;
2700        Ok(Expr::Array(Array { elem: exprs, named }))
2701    }
2702
2703    pub fn parse_listagg_on_overflow(&mut self) -> Result<Option<ListAggOnOverflow>, ParserError> {
2704        if self.parse_keywords(&[Keyword::ON, Keyword::OVERFLOW]) {
2705            if self.parse_keyword(Keyword::ERROR) {
2706                Ok(Some(ListAggOnOverflow::Error))
2707            } else {
2708                self.expect_keyword_is(Keyword::TRUNCATE)?;
2709                let filler = match self.peek_token().token {
2710                    Token::Word(w)
2711                        if w.keyword == Keyword::WITH || w.keyword == Keyword::WITHOUT =>
2712                    {
2713                        None
2714                    }
2715                    Token::SingleQuotedString(_)
2716                    | Token::EscapedStringLiteral(_)
2717                    | Token::UnicodeStringLiteral(_)
2718                    | Token::NationalStringLiteral(_)
2719                    | Token::HexStringLiteral(_) => Some(Box::new(self.parse_expr()?)),
2720                    _ => self.expected(
2721                        "either filler, WITH, or WITHOUT in LISTAGG",
2722                        self.peek_token(),
2723                    )?,
2724                };
2725                let with_count = self.parse_keyword(Keyword::WITH);
2726                if !with_count && !self.parse_keyword(Keyword::WITHOUT) {
2727                    self.expected("either WITH or WITHOUT in LISTAGG", self.peek_token())?;
2728                }
2729                self.expect_keyword_is(Keyword::COUNT)?;
2730                Ok(Some(ListAggOnOverflow::Truncate { filler, with_count }))
2731            }
2732        } else {
2733            Ok(None)
2734        }
2735    }
2736
2737    // This function parses date/time fields for the EXTRACT function-like
2738    // operator, interval qualifiers, and the ceil/floor operations.
2739    // EXTRACT supports a wider set of date/time fields than interval qualifiers,
2740    // so this function may need to be split in two.
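    /// Parses a date/time field such as `YEAR` or `DAY`, as used by `EXTRACT`,
    /// interval qualifiers, and `CEIL`/`FLOOR`.
    ///
    /// A minimal sketch exercising this through `EXTRACT` via the public parser API;
    /// the SQL text, dialect, and column name are illustrative assumptions:
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::ast::Expr;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect)
    ///     .try_with_sql("EXTRACT(YEAR FROM hire_date)").unwrap();
    /// let expr = parser.parse_expr().unwrap();
    /// assert!(matches!(expr, Expr::Extract { .. }));
    /// ```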
2741    pub fn parse_date_time_field(&mut self) -> Result<DateTimeField, ParserError> {
2742        let next_token = self.next_token();
2743        match &next_token.token {
2744            Token::Word(w) => match w.keyword {
2745                Keyword::YEAR => Ok(DateTimeField::Year),
2746                Keyword::YEARS => Ok(DateTimeField::Years),
2747                Keyword::MONTH => Ok(DateTimeField::Month),
2748                Keyword::MONTHS => Ok(DateTimeField::Months),
2749                Keyword::WEEK => {
2750                    let week_day = if dialect_of!(self is BigQueryDialect | GenericDialect)
2751                        && self.consume_token(&Token::LParen)
2752                    {
2753                        let week_day = self.parse_identifier()?;
2754                        self.expect_token(&Token::RParen)?;
2755                        Some(week_day)
2756                    } else {
2757                        None
2758                    };
2759                    Ok(DateTimeField::Week(week_day))
2760                }
2761                Keyword::WEEKS => Ok(DateTimeField::Weeks),
2762                Keyword::DAY => Ok(DateTimeField::Day),
2763                Keyword::DAYOFWEEK => Ok(DateTimeField::DayOfWeek),
2764                Keyword::DAYOFYEAR => Ok(DateTimeField::DayOfYear),
2765                Keyword::DAYS => Ok(DateTimeField::Days),
2766                Keyword::DATE => Ok(DateTimeField::Date),
2767                Keyword::DATETIME => Ok(DateTimeField::Datetime),
2768                Keyword::HOUR => Ok(DateTimeField::Hour),
2769                Keyword::HOURS => Ok(DateTimeField::Hours),
2770                Keyword::MINUTE => Ok(DateTimeField::Minute),
2771                Keyword::MINUTES => Ok(DateTimeField::Minutes),
2772                Keyword::SECOND => Ok(DateTimeField::Second),
2773                Keyword::SECONDS => Ok(DateTimeField::Seconds),
2774                Keyword::CENTURY => Ok(DateTimeField::Century),
2775                Keyword::DECADE => Ok(DateTimeField::Decade),
2776                Keyword::DOY => Ok(DateTimeField::Doy),
2777                Keyword::DOW => Ok(DateTimeField::Dow),
2778                Keyword::EPOCH => Ok(DateTimeField::Epoch),
2779                Keyword::ISODOW => Ok(DateTimeField::Isodow),
2780                Keyword::ISOYEAR => Ok(DateTimeField::Isoyear),
2781                Keyword::ISOWEEK => Ok(DateTimeField::IsoWeek),
2782                Keyword::JULIAN => Ok(DateTimeField::Julian),
2783                Keyword::MICROSECOND => Ok(DateTimeField::Microsecond),
2784                Keyword::MICROSECONDS => Ok(DateTimeField::Microseconds),
2785                Keyword::MILLENIUM => Ok(DateTimeField::Millenium),
2786                Keyword::MILLENNIUM => Ok(DateTimeField::Millennium),
2787                Keyword::MILLISECOND => Ok(DateTimeField::Millisecond),
2788                Keyword::MILLISECONDS => Ok(DateTimeField::Milliseconds),
2789                Keyword::NANOSECOND => Ok(DateTimeField::Nanosecond),
2790                Keyword::NANOSECONDS => Ok(DateTimeField::Nanoseconds),
2791                Keyword::QUARTER => Ok(DateTimeField::Quarter),
2792                Keyword::TIME => Ok(DateTimeField::Time),
2793                Keyword::TIMEZONE => Ok(DateTimeField::Timezone),
2794                Keyword::TIMEZONE_ABBR => Ok(DateTimeField::TimezoneAbbr),
2795                Keyword::TIMEZONE_HOUR => Ok(DateTimeField::TimezoneHour),
2796                Keyword::TIMEZONE_MINUTE => Ok(DateTimeField::TimezoneMinute),
2797                Keyword::TIMEZONE_REGION => Ok(DateTimeField::TimezoneRegion),
2798                _ if self.dialect.allow_extract_custom() => {
2799                    self.prev_token();
2800                    let custom = self.parse_identifier()?;
2801                    Ok(DateTimeField::Custom(custom))
2802                }
2803                _ => self.expected("date/time field", next_token),
2804            },
2805            Token::SingleQuotedString(_) if self.dialect.allow_extract_single_quotes() => {
2806                self.prev_token();
2807                let custom = self.parse_identifier()?;
2808                Ok(DateTimeField::Custom(custom))
2809            }
2810            _ => self.expected("date/time field", next_token),
2811        }
2812    }
2813
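    /// Parses a `NOT` prefix expression, handling `NOT EXISTS (<subquery>)` as a special case.
    ///
    /// A minimal usage sketch through the public parser API; the SQL text,
    /// identifier, and dialect are illustrative assumptions:
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::ast::{Expr, UnaryOperator};
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect)
    ///     .try_with_sql("NOT is_active").unwrap();
    /// let expr = parser.parse_expr().unwrap();
    /// assert!(matches!(expr, Expr::UnaryOp { op: UnaryOperator::Not, .. }));
    /// ```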
2814    pub fn parse_not(&mut self) -> Result<Expr, ParserError> {
2815        match self.peek_token().token {
2816            Token::Word(w) => match w.keyword {
2817                Keyword::EXISTS => {
2818                    let negated = true;
2819                    let _ = self.parse_keyword(Keyword::EXISTS);
2820                    self.parse_exists_expr(negated)
2821                }
2822                _ => Ok(Expr::UnaryOp {
2823                    op: UnaryOperator::Not,
2824                    expr: Box::new(
2825                        self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?,
2826                    ),
2827                }),
2828            },
2829            _ => Ok(Expr::UnaryOp {
2830                op: UnaryOperator::Not,
2831                expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
2832            }),
2833        }
2834    }
2835
2836    /// Parse expression types that start with a left brace '{'.
2837    /// Examples:
2838    /// ```sql
2839    /// -- Dictionary expr.
2840    /// {'key1': 'value1', 'key2': 'value2'}
2841    ///
2842    /// -- Function call using the ODBC syntax.
2843    /// { fn CONCAT('foo', 'bar') }
2844    /// ```
2845    fn parse_lbrace_expr(&mut self) -> Result<Expr, ParserError> {
2846        let token = self.expect_token(&Token::LBrace)?;
2847
2848        if let Some(fn_expr) = self.maybe_parse_odbc_body()? {
2849            self.expect_token(&Token::RBrace)?;
2850            return Ok(fn_expr);
2851        }
2852
2853        if self.dialect.supports_dictionary_syntax() {
2854            self.prev_token(); // Put back the '{'
2855            return self.parse_dictionary();
2856        }
2857
2858        self.expected("an expression", token)
2859    }
2860
2861    /// Parses fulltext expressions [`sqlparser::ast::Expr::MatchAgainst`]
2862    ///
2863    /// # Errors
2864    /// This method will raise an error if the column list is empty or contains invalid identifiers,
2865    /// the match expression is not a literal string, or if the search modifier is not valid.
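    ///
    /// A minimal usage sketch through the public parser API; the SQL text, column
    /// names, and the choice of `MySqlDialect` below are illustrative assumptions:
    /// ```rust
    /// # use sqlparser::dialect::MySqlDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::ast::Expr;
    /// let dialect = MySqlDialect {};
    /// let mut parser = Parser::new(&dialect)
    ///     .try_with_sql("MATCH (title, body) AGAINST ('database')").unwrap();
    /// let expr = parser.parse_expr().unwrap();
    /// assert!(matches!(expr, Expr::MatchAgainst { .. }));
    /// ```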
2866    pub fn parse_match_against(&mut self) -> Result<Expr, ParserError> {
2867        let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
2868
2869        self.expect_keyword_is(Keyword::AGAINST)?;
2870
2871        self.expect_token(&Token::LParen)?;
2872
2873        // MySQL is permissive about the match value, so we can't fully validate it at the syntax level.
2874        let match_value = self.parse_value()?.value;
2875
2876        let in_natural_language_mode_keywords = &[
2877            Keyword::IN,
2878            Keyword::NATURAL,
2879            Keyword::LANGUAGE,
2880            Keyword::MODE,
2881        ];
2882
2883        let with_query_expansion_keywords = &[Keyword::WITH, Keyword::QUERY, Keyword::EXPANSION];
2884
2885        let in_boolean_mode_keywords = &[Keyword::IN, Keyword::BOOLEAN, Keyword::MODE];
2886
2887        let opt_search_modifier = if self.parse_keywords(in_natural_language_mode_keywords) {
2888            if self.parse_keywords(with_query_expansion_keywords) {
2889                Some(SearchModifier::InNaturalLanguageModeWithQueryExpansion)
2890            } else {
2891                Some(SearchModifier::InNaturalLanguageMode)
2892            }
2893        } else if self.parse_keywords(in_boolean_mode_keywords) {
2894            Some(SearchModifier::InBooleanMode)
2895        } else if self.parse_keywords(with_query_expansion_keywords) {
2896            Some(SearchModifier::WithQueryExpansion)
2897        } else {
2898            None
2899        };
2900
2901        self.expect_token(&Token::RParen)?;
2902
2903        Ok(Expr::MatchAgainst {
2904            columns,
2905            match_value,
2906            opt_search_modifier,
2907        })
2908    }
2909
2910    /// Parse an `INTERVAL` expression.
2911    ///
2912    /// Some syntactically valid intervals:
2913    ///
2914    /// ```sql
2915    ///   1. INTERVAL '1' DAY
2916    ///   2. INTERVAL '1-1' YEAR TO MONTH
2917    ///   3. INTERVAL '1' SECOND
2918    ///   4. INTERVAL '1:1:1.1' HOUR (5) TO SECOND (5)
2919    ///   5. INTERVAL '1.1' SECOND (2, 2)
2920    ///   6. INTERVAL '1:1' HOUR (5) TO MINUTE (5)
2921    ///   7. (MySQL & BigQuery only): INTERVAL 1 DAY
2922    /// ```
2923    ///
2924    /// Note that we do not currently attempt to parse the quoted value.
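    ///
    /// A minimal usage sketch through the public parser API; the SQL text and
    /// dialect below are illustrative assumptions:
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::ast::Expr;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect)
    ///     .try_with_sql("INTERVAL '1' DAY").unwrap();
    /// let expr = parser.parse_expr().unwrap();
    /// assert!(matches!(expr, Expr::Interval(_)));
    /// ```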
2925    pub fn parse_interval(&mut self) -> Result<Expr, ParserError> {
2926        // The SQL standard allows an optional sign before the value string, but
2927        // it is not clear if any implementations support that syntax, so we
2928        // don't currently try to parse it. (The sign can instead be included
2929        // inside the value string.)
2930
2931        // to match the different flavours of INTERVAL syntax, we only allow expressions
2932        // if the dialect requires an interval qualifier,
2933        // see https://github.com/sqlparser-rs/sqlparser-rs/pull/1398 for more details
2934        let value = if self.dialect.require_interval_qualifier() {
2935            // parse a whole expression so `INTERVAL 1 + 1 DAY` is valid
2936            self.parse_expr()?
2937        } else {
2938            // parse a prefix expression so `INTERVAL 1 DAY` is valid, but `INTERVAL 1 + 1 DAY` is not
2939            // this also means that `INTERVAL '5 days' > INTERVAL '1 day'` is treated properly
2940            self.parse_prefix()?
2941        };
2942
2943        // Following the string literal is a qualifier which indicates the units
2944        // of the duration specified in the string literal.
2945        //
2946        // Note that PostgreSQL allows omitting the qualifier, so we provide
2947        // this more general implementation.
2948        let leading_field = if self.next_token_is_temporal_unit() {
2949            Some(self.parse_date_time_field()?)
2950        } else if self.dialect.require_interval_qualifier() {
2951            return parser_err!(
2952                "INTERVAL requires a unit after the literal value",
2953                self.peek_token().span.start
2954            );
2955        } else {
2956            None
2957        };
2958
2959        let (leading_precision, last_field, fsec_precision) =
2960            if leading_field == Some(DateTimeField::Second) {
2961                // SQL mandates special syntax for `SECOND TO SECOND` literals.
2962                // Instead of
2963                //     `SECOND [(<leading precision>)] TO SECOND[(<fractional seconds precision>)]`
2964                // one must use the special format:
2965                //     `SECOND [( <leading precision> [ , <fractional seconds precision>] )]`
2966                let last_field = None;
2967                let (leading_precision, fsec_precision) = self.parse_optional_precision_scale()?;
2968                (leading_precision, last_field, fsec_precision)
2969            } else {
2970                let leading_precision = self.parse_optional_precision()?;
2971                if self.parse_keyword(Keyword::TO) {
2972                    let last_field = Some(self.parse_date_time_field()?);
2973                    let fsec_precision = if last_field == Some(DateTimeField::Second) {
2974                        self.parse_optional_precision()?
2975                    } else {
2976                        None
2977                    };
2978                    (leading_precision, last_field, fsec_precision)
2979                } else {
2980                    (leading_precision, None, None)
2981                }
2982            };
2983
2984        Ok(Expr::Interval(Interval {
2985            value: Box::new(value),
2986            leading_field,
2987            leading_precision,
2988            last_field,
2989            fractional_seconds_precision: fsec_precision,
2990        }))
2991    }
2992
2993    /// Peek at the next token and determine if it is a temporal unit
2994    /// like `second`.
2995    pub fn next_token_is_temporal_unit(&mut self) -> bool {
2996        if let Token::Word(word) = self.peek_token().token {
2997            matches!(
2998                word.keyword,
2999                Keyword::YEAR
3000                    | Keyword::YEARS
3001                    | Keyword::MONTH
3002                    | Keyword::MONTHS
3003                    | Keyword::WEEK
3004                    | Keyword::WEEKS
3005                    | Keyword::DAY
3006                    | Keyword::DAYS
3007                    | Keyword::HOUR
3008                    | Keyword::HOURS
3009                    | Keyword::MINUTE
3010                    | Keyword::MINUTES
3011                    | Keyword::SECOND
3012                    | Keyword::SECONDS
3013                    | Keyword::CENTURY
3014                    | Keyword::DECADE
3015                    | Keyword::DOW
3016                    | Keyword::DOY
3017                    | Keyword::EPOCH
3018                    | Keyword::ISODOW
3019                    | Keyword::ISOYEAR
3020                    | Keyword::JULIAN
3021                    | Keyword::MICROSECOND
3022                    | Keyword::MICROSECONDS
3023                    | Keyword::MILLENIUM
3024                    | Keyword::MILLENNIUM
3025                    | Keyword::MILLISECOND
3026                    | Keyword::MILLISECONDS
3027                    | Keyword::NANOSECOND
3028                    | Keyword::NANOSECONDS
3029                    | Keyword::QUARTER
3030                    | Keyword::TIMEZONE
3031                    | Keyword::TIMEZONE_HOUR
3032                    | Keyword::TIMEZONE_MINUTE
3033            )
3034        } else {
3035            false
3036        }
3037    }
3038
3039    /// Syntax
3040    /// ```sql
3041    /// -- typed
3042    /// STRUCT<[field_name] field_type, ...>( expr1 [, ... ])
3043    /// -- typeless
3044    /// STRUCT( expr1 [AS field_name] [, ... ])
3045    /// ```
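    ///
    /// A minimal usage sketch of the typeless form through the public parser API;
    /// the SQL text and the choice of `BigQueryDialect` are illustrative assumptions:
    /// ```rust
    /// # use sqlparser::dialect::BigQueryDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::ast::Expr;
    /// let dialect = BigQueryDialect {};
    /// let mut parser = Parser::new(&dialect)
    ///     .try_with_sql("STRUCT(1 AS a, 'x' AS b)").unwrap();
    /// let expr = parser.parse_expr().unwrap();
    /// assert!(matches!(expr, Expr::Struct { .. }));
    /// ```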
3046    fn parse_struct_literal(&mut self) -> Result<Expr, ParserError> {
3047        // Parse the field definitions if they exist: `<[field_name] field_type, ...>`
3048        self.prev_token();
3049        let (fields, trailing_bracket) =
3050            self.parse_struct_type_def(Self::parse_struct_field_def)?;
3051        if trailing_bracket.0 {
3052            return parser_err!(
3053                "unmatched > in STRUCT literal",
3054                self.peek_token().span.start
3055            );
3056        }
3057
3058        // Parse the struct values `(expr1 [, ... ])`
3059        self.expect_token(&Token::LParen)?;
3060        let values = self
3061            .parse_comma_separated(|parser| parser.parse_struct_field_expr(!fields.is_empty()))?;
3062        self.expect_token(&Token::RParen)?;
3063
3064        Ok(Expr::Struct { values, fields })
3065    }
3066
3067    /// Parse an expression value for a struct literal
3068    /// Syntax
3069    /// ```sql
3070    /// expr [AS name]
3071    /// ```
3072    ///
3073    /// For BigQuery [1], the `typed_syntax` parameter is set to `true` if the expression
3074    /// is to be parsed as a field expression declared using the typed
3075    /// struct syntax [2], and `false` if using the typeless struct syntax [3].
3076    ///
3077    /// [1]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#constructing_a_struct
3078    /// [2]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#typed_struct_syntax
3079    /// [3]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#typeless_struct_syntax
3080    fn parse_struct_field_expr(&mut self, typed_syntax: bool) -> Result<Expr, ParserError> {
3081        let expr = self.parse_expr()?;
3082        if self.parse_keyword(Keyword::AS) {
3083            if typed_syntax {
3084                return parser_err!("Typed syntax does not allow AS", {
3085                    self.prev_token();
3086                    self.peek_token().span.start
3087                });
3088            }
3089            let field_name = self.parse_identifier()?;
3090            Ok(Expr::Named {
3091                expr: expr.into(),
3092                name: field_name,
3093            })
3094        } else {
3095            Ok(expr)
3096        }
3097    }
3098
3099    /// Parse a Struct type definition as a sequence of field-value pairs.
3100    /// The syntax of the struct elements differs by dialect, so it is customised
3101    /// by the `elem_parser` argument.
3102    ///
3103    /// Syntax
3104    /// ```sql
3105    /// Hive:
3106    /// STRUCT<field_name: field_type>
3107    ///
3108    /// BigQuery:
3109    /// STRUCT<[field_name] field_type>
3110    /// ```
3111    fn parse_struct_type_def<F>(
3112        &mut self,
3113        mut elem_parser: F,
3114    ) -> Result<(Vec<StructField>, MatchedTrailingBracket), ParserError>
3115    where
3116        F: FnMut(&mut Parser<'a>) -> Result<(StructField, MatchedTrailingBracket), ParserError>,
3117    {
3118        self.expect_keyword_is(Keyword::STRUCT)?;
3119
3120        // Nothing to do if we have no type information.
3121        if Token::Lt != self.peek_token() {
3122            return Ok((Default::default(), false.into()));
3123        }
3124        self.next_token();
3125
3126        let mut field_defs = vec![];
3127        let trailing_bracket = loop {
3128            let (def, trailing_bracket) = elem_parser(self)?;
3129            field_defs.push(def);
3130            // The list of field definitions is complete once a `>>` has been consumed or no comma follows.
3131            if trailing_bracket.0 || !self.consume_token(&Token::Comma) {
3132                break trailing_bracket;
3133            }
3134        };
3135
3136        Ok((
3137            field_defs,
3138            self.expect_closing_angle_bracket(trailing_bracket)?,
3139        ))
3140    }
3141
3142    /// Duckdb Struct Data Type <https://duckdb.org/docs/sql/data_types/struct.html#retrieving-from-structs>
3143    fn parse_duckdb_struct_type_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3144        self.expect_keyword_is(Keyword::STRUCT)?;
3145        self.expect_token(&Token::LParen)?;
3146        let struct_body = self.parse_comma_separated(|parser| {
3147            let field_name = parser.parse_identifier()?;
3148            let field_type = parser.parse_data_type()?;
3149
3150            Ok(StructField {
3151                field_name: Some(field_name),
3152                field_type,
3153                options: None,
3154            })
3155        });
3156        self.expect_token(&Token::RParen)?;
3157        struct_body
3158    }
3159
3160    /// Parse a field definition in a [struct] or [tuple].
3161    /// Syntax:
3162    ///
3163    /// ```sql
3164    /// [field_name] field_type
3165    /// ```
3166    ///
3167    /// [struct]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#declaring_a_struct_type
3168    /// [tuple]: https://clickhouse.com/docs/en/sql-reference/data-types/tuple
3169    fn parse_struct_field_def(
3170        &mut self,
3171    ) -> Result<(StructField, MatchedTrailingBracket), ParserError> {
3172        // Look beyond the next item to infer whether both field name
3173        // and type are specified.
3174        let is_anonymous_field = !matches!(
3175            (self.peek_nth_token(0).token, self.peek_nth_token(1).token),
3176            (Token::Word(_), Token::Word(_))
3177        );
3178
3179        let field_name = if is_anonymous_field {
3180            None
3181        } else {
3182            Some(self.parse_identifier()?)
3183        };
3184
3185        let (field_type, trailing_bracket) = self.parse_data_type_helper()?;
3186
3187        let options = self.maybe_parse_options(Keyword::OPTIONS)?;
3188        Ok((
3189            StructField {
3190                field_name,
3191                field_type,
3192                options,
3193            },
3194            trailing_bracket,
3195        ))
3196    }
3197
3198    /// DuckDB specific: Parse a Union type definition as a sequence of field-value pairs.
3199    ///
3200    /// Syntax:
3201    ///
3202    /// ```sql
3203    /// UNION(field_name field_type[,...])
3204    /// ```
3205    ///
3206    /// [1]: https://duckdb.org/docs/sql/data_types/union.html
3207    fn parse_union_type_def(&mut self) -> Result<Vec<UnionField>, ParserError> {
3208        self.expect_keyword_is(Keyword::UNION)?;
3209
3210        self.expect_token(&Token::LParen)?;
3211
3212        let fields = self.parse_comma_separated(|p| {
3213            Ok(UnionField {
3214                field_name: p.parse_identifier()?,
3215                field_type: p.parse_data_type()?,
3216            })
3217        })?;
3218
3219        self.expect_token(&Token::RParen)?;
3220
3221        Ok(fields)
3222    }
3223
3224    /// DuckDB and ClickHouse specific: Parse a duckdb [dictionary] or a clickhouse [map] setting
3225    ///
3226    /// Syntax:
3227    ///
3228    /// ```sql
3229    /// {'field_name': expr1[, ... ]}
3230    /// ```
3231    ///
3232    /// [dictionary]: https://duckdb.org/docs/sql/data_types/struct#creating-structs
3233    /// [map]: https://clickhouse.com/docs/operations/settings/settings#additional_table_filters
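    ///
    /// A minimal usage sketch through the public parser API; the SQL text and the
    /// choice of `DuckDbDialect` below are illustrative assumptions:
    /// ```rust
    /// # use sqlparser::dialect::DuckDbDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::ast::Expr;
    /// let dialect = DuckDbDialect {};
    /// let mut parser = Parser::new(&dialect)
    ///     .try_with_sql("{'a': 1, 'b': 2}").unwrap();
    /// let expr = parser.parse_expr().unwrap();
    /// assert!(matches!(expr, Expr::Dictionary(_)));
    /// ```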
3234    fn parse_dictionary(&mut self) -> Result<Expr, ParserError> {
3235        self.expect_token(&Token::LBrace)?;
3236
3237        let fields = self.parse_comma_separated0(Self::parse_dictionary_field, Token::RBrace)?;
3238
3239        self.expect_token(&Token::RBrace)?;
3240
3241        Ok(Expr::Dictionary(fields))
3242    }
3243
3244    /// Parse a field for a duckdb [dictionary] or a clickhouse [map] setting
3245    ///
3246    /// Syntax
3247    ///
3248    /// ```sql
3249    /// 'name': expr
3250    /// ```
3251    ///
3252    /// [dictionary]: https://duckdb.org/docs/sql/data_types/struct#creating-structs
3253    /// [map]: https://clickhouse.com/docs/operations/settings/settings#additional_table_filters
3254    fn parse_dictionary_field(&mut self) -> Result<DictionaryField, ParserError> {
3255        let key = self.parse_identifier()?;
3256
3257        self.expect_token(&Token::Colon)?;
3258
3259        let expr = self.parse_expr()?;
3260
3261        Ok(DictionaryField {
3262            key,
3263            value: Box::new(expr),
3264        })
3265    }
3266
3267    /// DuckDB specific: Parse a duckdb [map]
3268    ///
3269    /// Syntax:
3270    ///
3271    /// ```sql
3272    /// Map {key1: value1[, ... ]}
3273    /// ```
3274    ///
3275    /// [map]: https://duckdb.org/docs/sql/data_types/map.html#creating-maps
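    ///
    /// A minimal usage sketch through the public parser API; the SQL text and the
    /// choice of `DuckDbDialect` below are illustrative assumptions:
    /// ```rust
    /// # use sqlparser::dialect::DuckDbDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::ast::Expr;
    /// let dialect = DuckDbDialect {};
    /// let mut parser = Parser::new(&dialect)
    ///     .try_with_sql("MAP {'k': 10, 'j': 20}").unwrap();
    /// let expr = parser.parse_expr().unwrap();
    /// assert!(matches!(expr, Expr::Map(_)));
    /// ```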
3276    fn parse_duckdb_map_literal(&mut self) -> Result<Expr, ParserError> {
3277        self.expect_token(&Token::LBrace)?;
3278        let fields = self.parse_comma_separated0(Self::parse_duckdb_map_field, Token::RBrace)?;
3279        self.expect_token(&Token::RBrace)?;
3280        Ok(Expr::Map(Map { entries: fields }))
3281    }
3282
3283    /// Parse a field for a duckdb [map]
3284    ///
3285    /// Syntax
3286    ///
3287    /// ```sql
3288    /// key: value
3289    /// ```
3290    ///
3291    /// [map]: https://duckdb.org/docs/sql/data_types/map.html#creating-maps
3292    fn parse_duckdb_map_field(&mut self) -> Result<MapEntry, ParserError> {
3293        let key = self.parse_expr()?;
3294
3295        self.expect_token(&Token::Colon)?;
3296
3297        let value = self.parse_expr()?;
3298
3299        Ok(MapEntry {
3300            key: Box::new(key),
3301            value: Box::new(value),
3302        })
3303    }
3304
3305    /// Parse clickhouse [map]
3306    ///
3307    /// Syntax
3308    ///
3309    /// ```sql
3310    /// Map(key_data_type, value_data_type)
3311    /// ```
3312    ///
3313    /// [map]: https://clickhouse.com/docs/en/sql-reference/data-types/map
3314    fn parse_click_house_map_def(&mut self) -> Result<(DataType, DataType), ParserError> {
3315        self.expect_keyword_is(Keyword::MAP)?;
3316        self.expect_token(&Token::LParen)?;
3317        let key_data_type = self.parse_data_type()?;
3318        self.expect_token(&Token::Comma)?;
3319        let value_data_type = self.parse_data_type()?;
3320        self.expect_token(&Token::RParen)?;
3321
3322        Ok((key_data_type, value_data_type))
3323    }
3324
3325    /// Parse clickhouse [tuple]
3326    ///
3327    /// Syntax
3328    ///
3329    /// ```sql
3330    /// Tuple([field_name] field_type, ...)
3331    /// ```
3332    ///
3333    /// [tuple]: https://clickhouse.com/docs/en/sql-reference/data-types/tuple
3334    fn parse_click_house_tuple_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3335        self.expect_keyword_is(Keyword::TUPLE)?;
3336        self.expect_token(&Token::LParen)?;
3337        let mut field_defs = vec![];
3338        loop {
3339            let (def, _) = self.parse_struct_field_def()?;
3340            field_defs.push(def);
3341            if !self.consume_token(&Token::Comma) {
3342                break;
3343            }
3344        }
3345        self.expect_token(&Token::RParen)?;
3346
3347        Ok(field_defs)
3348    }
3349
3350    /// For nested types that use the angle bracket syntax, this matches `>`,
3351    /// `>>`, or nothing, depending on which variant is expected (specified by the previously
3352    /// matched `trailing_bracket` argument). It returns whether there is a trailing
3353    /// `>` left to be matched (i.e. whether `>>` was matched).
3354    fn expect_closing_angle_bracket(
3355        &mut self,
3356        trailing_bracket: MatchedTrailingBracket,
3357    ) -> Result<MatchedTrailingBracket, ParserError> {
3358        let trailing_bracket = if !trailing_bracket.0 {
3359            match self.peek_token().token {
3360                Token::Gt => {
3361                    self.next_token();
3362                    false.into()
3363                }
3364                Token::ShiftRight => {
3365                    self.next_token();
3366                    true.into()
3367                }
3368                _ => return self.expected(">", self.peek_token()),
3369            }
3370        } else {
3371            false.into()
3372        };
3373
3374        Ok(trailing_bracket)
3375    }
3376
3377    /// Parse an operator following an expression
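    ///
    /// A minimal sketch of how infix parsing surfaces through the public API; the
    /// SQL text and dialect below are illustrative assumptions:
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::ast::{BinaryOperator, Expr};
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect)
    ///     .try_with_sql("1 + 2 * 3").unwrap();
    /// let expr = parser.parse_expr().unwrap();
    /// // `*` binds tighter than `+`, so the top-level operator is `+`.
    /// assert!(matches!(expr, Expr::BinaryOp { op: BinaryOperator::Plus, .. }));
    /// ```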
3378    pub fn parse_infix(&mut self, expr: Expr, precedence: u8) -> Result<Expr, ParserError> {
3379        // allow the dialect to override infix parsing
3380        if let Some(infix) = self.dialect.parse_infix(self, &expr, precedence) {
3381            return infix;
3382        }
3383
3384        let dialect = self.dialect;
3385
3386        self.advance_token();
3387        let tok = self.get_current_token();
3388        debug!("infix: {tok:?}");
3389        let tok_index = self.get_current_index();
3390        let span = tok.span;
3391        let regular_binary_operator = match &tok.token {
3392            Token::Spaceship => Some(BinaryOperator::Spaceship),
3393            Token::DoubleEq => Some(BinaryOperator::Eq),
3394            Token::Assignment => Some(BinaryOperator::Assignment),
3395            Token::Eq => Some(BinaryOperator::Eq),
3396            Token::Neq => Some(BinaryOperator::NotEq),
3397            Token::Gt => Some(BinaryOperator::Gt),
3398            Token::GtEq => Some(BinaryOperator::GtEq),
3399            Token::Lt => Some(BinaryOperator::Lt),
3400            Token::LtEq => Some(BinaryOperator::LtEq),
3401            Token::Plus => Some(BinaryOperator::Plus),
3402            Token::Minus => Some(BinaryOperator::Minus),
3403            Token::Mul => Some(BinaryOperator::Multiply),
3404            Token::Mod => Some(BinaryOperator::Modulo),
3405            Token::StringConcat => Some(BinaryOperator::StringConcat),
3406            Token::Pipe => Some(BinaryOperator::BitwiseOr),
3407            Token::Caret => {
3408                // In PostgreSQL, ^ stands for the exponentiation operation,
3409                // and # stands for XOR. See https://www.postgresql.org/docs/current/functions-math.html
3410                if dialect_is!(dialect is PostgreSqlDialect) {
3411                    Some(BinaryOperator::PGExp)
3412                } else {
3413                    Some(BinaryOperator::BitwiseXor)
3414                }
3415            }
3416            Token::Ampersand => Some(BinaryOperator::BitwiseAnd),
3417            Token::Div => Some(BinaryOperator::Divide),
3418            Token::DuckIntDiv if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
3419                Some(BinaryOperator::DuckIntegerDivide)
3420            }
3421            Token::ShiftLeft if dialect_is!(dialect is PostgreSqlDialect | DuckDbDialect | GenericDialect | RedshiftSqlDialect) => {
3422                Some(BinaryOperator::PGBitwiseShiftLeft)
3423            }
3424            Token::ShiftRight if dialect_is!(dialect is PostgreSqlDialect | DuckDbDialect | GenericDialect | RedshiftSqlDialect) => {
3425                Some(BinaryOperator::PGBitwiseShiftRight)
3426            }
3427            Token::Sharp if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3428                Some(BinaryOperator::PGBitwiseXor)
3429            }
3430            Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3431                Some(BinaryOperator::PGOverlap)
3432            }
3433            Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3434                Some(BinaryOperator::PGOverlap)
3435            }
3436            Token::CaretAt if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3437                Some(BinaryOperator::PGStartsWith)
3438            }
3439            Token::Tilde => Some(BinaryOperator::PGRegexMatch),
3440            Token::TildeAsterisk => Some(BinaryOperator::PGRegexIMatch),
3441            Token::ExclamationMarkTilde => Some(BinaryOperator::PGRegexNotMatch),
3442            Token::ExclamationMarkTildeAsterisk => Some(BinaryOperator::PGRegexNotIMatch),
3443            Token::DoubleTilde => Some(BinaryOperator::PGLikeMatch),
3444            Token::DoubleTildeAsterisk => Some(BinaryOperator::PGILikeMatch),
3445            Token::ExclamationMarkDoubleTilde => Some(BinaryOperator::PGNotLikeMatch),
3446            Token::ExclamationMarkDoubleTildeAsterisk => Some(BinaryOperator::PGNotILikeMatch),
3447            Token::Arrow => Some(BinaryOperator::Arrow),
3448            Token::LongArrow => Some(BinaryOperator::LongArrow),
3449            Token::HashArrow => Some(BinaryOperator::HashArrow),
3450            Token::HashLongArrow => Some(BinaryOperator::HashLongArrow),
3451            Token::AtArrow => Some(BinaryOperator::AtArrow),
3452            Token::ArrowAt => Some(BinaryOperator::ArrowAt),
3453            Token::HashMinus => Some(BinaryOperator::HashMinus),
3454            Token::AtQuestion => Some(BinaryOperator::AtQuestion),
3455            Token::AtAt => Some(BinaryOperator::AtAt),
3456            Token::Question => Some(BinaryOperator::Question),
3457            Token::QuestionAnd => Some(BinaryOperator::QuestionAnd),
3458            Token::QuestionPipe => Some(BinaryOperator::QuestionPipe),
3459            Token::CustomBinaryOperator(s) => Some(BinaryOperator::Custom(s.clone())),
3460            Token::DoubleSharp if self.dialect.supports_geometric_types() => {
3461                Some(BinaryOperator::DoubleHash)
3462            }
3463
3464            Token::AmpersandLeftAngleBracket if self.dialect.supports_geometric_types() => {
3465                Some(BinaryOperator::AndLt)
3466            }
3467            Token::AmpersandRightAngleBracket if self.dialect.supports_geometric_types() => {
3468                Some(BinaryOperator::AndGt)
3469            }
3470            Token::QuestionMarkDash if self.dialect.supports_geometric_types() => {
3471                Some(BinaryOperator::QuestionDash)
3472            }
3473            Token::AmpersandLeftAngleBracketVerticalBar
3474                if self.dialect.supports_geometric_types() =>
3475            {
3476                Some(BinaryOperator::AndLtPipe)
3477            }
3478            Token::VerticalBarAmpersandRightAngleBracket
3479                if self.dialect.supports_geometric_types() =>
3480            {
3481                Some(BinaryOperator::PipeAndGt)
3482            }
3483            Token::TwoWayArrow if self.dialect.supports_geometric_types() => {
3484                Some(BinaryOperator::LtDashGt)
3485            }
3486            Token::LeftAngleBracketCaret if self.dialect.supports_geometric_types() => {
3487                Some(BinaryOperator::LtCaret)
3488            }
3489            Token::RightAngleBracketCaret if self.dialect.supports_geometric_types() => {
3490                Some(BinaryOperator::GtCaret)
3491            }
3492            Token::QuestionMarkSharp if self.dialect.supports_geometric_types() => {
3493                Some(BinaryOperator::QuestionHash)
3494            }
3495            Token::QuestionMarkDoubleVerticalBar if self.dialect.supports_geometric_types() => {
3496                Some(BinaryOperator::QuestionDoublePipe)
3497            }
3498            Token::QuestionMarkDashVerticalBar if self.dialect.supports_geometric_types() => {
3499                Some(BinaryOperator::QuestionDashPipe)
3500            }
3501            Token::TildeEqual if self.dialect.supports_geometric_types() => {
3502                Some(BinaryOperator::TildeEq)
3503            }
3504            Token::ShiftLeftVerticalBar if self.dialect.supports_geometric_types() => {
3505                Some(BinaryOperator::LtLtPipe)
3506            }
3507            Token::VerticalBarShiftRight if self.dialect.supports_geometric_types() => {
3508                Some(BinaryOperator::PipeGtGt)
3509            }
3510            Token::AtSign if self.dialect.supports_geometric_types() => Some(BinaryOperator::At),
3511
3512            Token::Word(w) => match w.keyword {
3513                Keyword::AND => Some(BinaryOperator::And),
3514                Keyword::OR => Some(BinaryOperator::Or),
3515                Keyword::XOR => Some(BinaryOperator::Xor),
3516                Keyword::OVERLAPS => Some(BinaryOperator::Overlaps),
3517                Keyword::OPERATOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3518                    self.expect_token(&Token::LParen)?;
3519                    // there are special rules for operator names in
3520                    // Postgres, so we cannot use 'parse_object'
3521                    // or similar.
3522                    // See https://www.postgresql.org/docs/current/sql-createoperator.html
3523                    let mut idents = vec![];
3524                    loop {
3525                        self.advance_token();
3526                        idents.push(self.get_current_token().to_string());
3527                        if !self.consume_token(&Token::Period) {
3528                            break;
3529                        }
3530                    }
3531                    self.expect_token(&Token::RParen)?;
3532                    Some(BinaryOperator::PGCustomBinaryOperator(idents))
3533                }
3534                _ => None,
3535            },
3536            _ => None,
3537        };
3538
3539        let tok = self.token_at(tok_index);
3540        if let Some(op) = regular_binary_operator {
3541            if let Some(keyword) =
3542                self.parse_one_of_keywords(&[Keyword::ANY, Keyword::ALL, Keyword::SOME])
3543            {
3544                self.expect_token(&Token::LParen)?;
3545                let right = if self.peek_sub_query() {
3546                    // We have a subquery ahead (SELECT/WITH ...); we need to rewind and
3547                    // use the parenthesis for parsing the subquery as an expression.
3548                    self.prev_token(); // LParen
3549                    self.parse_subexpr(precedence)?
3550                } else {
3551                    // Non-subquery expression
3552                    let right = self.parse_subexpr(precedence)?;
3553                    self.expect_token(&Token::RParen)?;
3554                    right
3555                };
3556
3557                if !matches!(
3558                    op,
3559                    BinaryOperator::Gt
3560                        | BinaryOperator::Lt
3561                        | BinaryOperator::GtEq
3562                        | BinaryOperator::LtEq
3563                        | BinaryOperator::Eq
3564                        | BinaryOperator::NotEq
3565                        | BinaryOperator::PGRegexMatch
3566                        | BinaryOperator::PGRegexIMatch
3567                        | BinaryOperator::PGRegexNotMatch
3568                        | BinaryOperator::PGRegexNotIMatch
3569                        | BinaryOperator::PGLikeMatch
3570                        | BinaryOperator::PGILikeMatch
3571                        | BinaryOperator::PGNotLikeMatch
3572                        | BinaryOperator::PGNotILikeMatch
3573                ) {
3574                    return parser_err!(
3575                        format!(
3576                        "Expected one of [=, >, <, >=, <=, !=, ~, ~*, !~, !~*, ~~, ~~*, !~~, !~~*] as comparison operator, found: {op}"
3577                    ),
3578                        span.start
3579                    );
3580                };
3581
3582                Ok(match keyword {
3583                    Keyword::ALL => Expr::AllOp {
3584                        left: Box::new(expr),
3585                        compare_op: op,
3586                        right: Box::new(right),
3587                    },
3588                    Keyword::ANY | Keyword::SOME => Expr::AnyOp {
3589                        left: Box::new(expr),
3590                        compare_op: op,
3591                        right: Box::new(right),
3592                        is_some: keyword == Keyword::SOME,
3593                    },
3594                    _ => unreachable!(),
3595                })
3596            } else {
3597                Ok(Expr::BinaryOp {
3598                    left: Box::new(expr),
3599                    op,
3600                    right: Box::new(self.parse_subexpr(precedence)?),
3601                })
3602            }
3603        } else if let Token::Word(w) = &tok.token {
3604            match w.keyword {
3605                Keyword::IS => {
3606                    if self.parse_keyword(Keyword::NULL) {
3607                        Ok(Expr::IsNull(Box::new(expr)))
3608                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
3609                        Ok(Expr::IsNotNull(Box::new(expr)))
3610                    } else if self.parse_keywords(&[Keyword::TRUE]) {
3611                        Ok(Expr::IsTrue(Box::new(expr)))
3612                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::TRUE]) {
3613                        Ok(Expr::IsNotTrue(Box::new(expr)))
3614                    } else if self.parse_keywords(&[Keyword::FALSE]) {
3615                        Ok(Expr::IsFalse(Box::new(expr)))
3616                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::FALSE]) {
3617                        Ok(Expr::IsNotFalse(Box::new(expr)))
3618                    } else if self.parse_keywords(&[Keyword::UNKNOWN]) {
3619                        Ok(Expr::IsUnknown(Box::new(expr)))
3620                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::UNKNOWN]) {
3621                        Ok(Expr::IsNotUnknown(Box::new(expr)))
3622                    } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::FROM]) {
3623                        let expr2 = self.parse_expr()?;
3624                        Ok(Expr::IsDistinctFrom(Box::new(expr), Box::new(expr2)))
3625                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::DISTINCT, Keyword::FROM])
3626                    {
3627                        let expr2 = self.parse_expr()?;
3628                        Ok(Expr::IsNotDistinctFrom(Box::new(expr), Box::new(expr2)))
3629                    } else if let Ok(is_normalized) = self.parse_unicode_is_normalized(expr) {
3630                        Ok(is_normalized)
3631                    } else {
3632                        self.expected(
3633                            "[NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS",
3634                            self.peek_token(),
3635                        )
3636                    }
3637                }
3638                Keyword::AT => {
3639                    self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
3640                    Ok(Expr::AtTimeZone {
3641                        timestamp: Box::new(expr),
3642                        time_zone: Box::new(self.parse_subexpr(precedence)?),
3643                    })
3644                }
3645                Keyword::NOT
3646                | Keyword::IN
3647                | Keyword::BETWEEN
3648                | Keyword::LIKE
3649                | Keyword::ILIKE
3650                | Keyword::SIMILAR
3651                | Keyword::REGEXP
3652                | Keyword::RLIKE => {
3653                    self.prev_token();
3654                    let negated = self.parse_keyword(Keyword::NOT);
3655                    let regexp = self.parse_keyword(Keyword::REGEXP);
3656                    let rlike = self.parse_keyword(Keyword::RLIKE);
3657                    let null = if !self.in_column_definition_state() {
3658                        self.parse_keyword(Keyword::NULL)
3659                    } else {
3660                        false
3661                    };
3662                    if regexp || rlike {
3663                        Ok(Expr::RLike {
3664                            negated,
3665                            expr: Box::new(expr),
3666                            pattern: Box::new(
3667                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3668                            ),
3669                            regexp,
3670                        })
3671                    } else if negated && null {
3672                        Ok(Expr::IsNotNull(Box::new(expr)))
3673                    } else if self.parse_keyword(Keyword::IN) {
3674                        self.parse_in(expr, negated)
3675                    } else if self.parse_keyword(Keyword::BETWEEN) {
3676                        self.parse_between(expr, negated)
3677                    } else if self.parse_keyword(Keyword::LIKE) {
3678                        Ok(Expr::Like {
3679                            negated,
3680                            any: self.parse_keyword(Keyword::ANY),
3681                            expr: Box::new(expr),
3682                            pattern: Box::new(
3683                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3684                            ),
3685                            escape_char: self.parse_escape_char()?,
3686                        })
3687                    } else if self.parse_keyword(Keyword::ILIKE) {
3688                        Ok(Expr::ILike {
3689                            negated,
3690                            any: self.parse_keyword(Keyword::ANY),
3691                            expr: Box::new(expr),
3692                            pattern: Box::new(
3693                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3694                            ),
3695                            escape_char: self.parse_escape_char()?,
3696                        })
3697                    } else if self.parse_keywords(&[Keyword::SIMILAR, Keyword::TO]) {
3698                        Ok(Expr::SimilarTo {
3699                            negated,
3700                            expr: Box::new(expr),
3701                            pattern: Box::new(
3702                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3703                            ),
3704                            escape_char: self.parse_escape_char()?,
3705                        })
3706                    } else {
3707                        self.expected("IN or BETWEEN after NOT", self.peek_token())
3708                    }
3709                }
3710                Keyword::NOTNULL if dialect.supports_notnull_operator() => {
3711                    Ok(Expr::IsNotNull(Box::new(expr)))
3712                }
3713                Keyword::MEMBER => {
3714                    if self.parse_keyword(Keyword::OF) {
3715                        self.expect_token(&Token::LParen)?;
3716                        let array = self.parse_expr()?;
3717                        self.expect_token(&Token::RParen)?;
3718                        Ok(Expr::MemberOf(MemberOf {
3719                            value: Box::new(expr),
3720                            array: Box::new(array),
3721                        }))
3722                    } else {
3723                        self.expected("OF after MEMBER", self.peek_token())
3724                    }
3725                }
3726                // Can only happen if `get_next_precedence` got out of sync with this function
3727                _ => parser_err!(
3728                    format!("No infix parser for token {:?}", tok.token),
3729                    tok.span.start
3730                ),
3731            }
3732        } else if Token::DoubleColon == *tok {
3733            Ok(Expr::Cast {
3734                kind: CastKind::DoubleColon,
3735                expr: Box::new(expr),
3736                data_type: self.parse_data_type()?,
3737                format: None,
3738            })
3739        } else if Token::ExclamationMark == *tok && self.dialect.supports_factorial_operator() {
3740            Ok(Expr::UnaryOp {
3741                op: UnaryOperator::PGPostfixFactorial,
3742                expr: Box::new(expr),
3743            })
3744        } else if Token::LBracket == *tok && self.dialect.supports_partiql()
3745            || (dialect_of!(self is SnowflakeDialect | GenericDialect) && Token::Colon == *tok)
3746        {
3747            self.prev_token();
3748            self.parse_json_access(expr)
3749        } else {
3750            // Can only happen if `get_next_precedence` got out of sync with this function
3751            parser_err!(
3752                format!("No infix parser for token {:?}", tok.token),
3753                tok.span.start
3754            )
3755        }
3756    }
3757
3758    /// Parse the `ESCAPE CHAR` portion of `LIKE`, `ILIKE`, and `SIMILAR TO`
3759    pub fn parse_escape_char(&mut self) -> Result<Option<Value>, ParserError> {
3760        if self.parse_keyword(Keyword::ESCAPE) {
3761            Ok(Some(self.parse_value()?.into()))
3762        } else {
3763            Ok(None)
3764        }
3765    }
3766
3767    /// Parses an array subscript like
3768    /// * `[:]`
3769    /// * `[l]`
3770    /// * `[l:]`
3771    /// * `[:u]`
3772    /// * `[l:u]`
3773    /// * `[l:u:s]`
3774    ///
3775    /// Parser is right after `[`
3776    fn parse_subscript_inner(&mut self) -> Result<Subscript, ParserError> {
3777        // at either `<lower>:(rest)` or `:(rest)]`
3778        let lower_bound = if self.consume_token(&Token::Colon) {
3779            None
3780        } else {
3781            Some(self.parse_expr()?)
3782        };
3783
3784        // check for end
3785        if self.consume_token(&Token::RBracket) {
3786            if let Some(lower_bound) = lower_bound {
3787                return Ok(Subscript::Index { index: lower_bound });
3788            };
3789            return Ok(Subscript::Slice {
3790                lower_bound,
3791                upper_bound: None,
3792                stride: None,
3793            });
3794        }
3795
3796        // consume the `:`
3797        if lower_bound.is_some() {
3798            self.expect_token(&Token::Colon)?;
3799        }
3800
3801        // we are now at either `]` or `<upper>(rest)]`
3802        let upper_bound = if self.consume_token(&Token::RBracket) {
3803            return Ok(Subscript::Slice {
3804                lower_bound,
3805                upper_bound: None,
3806                stride: None,
3807            });
3808        } else {
3809            Some(self.parse_expr()?)
3810        };
3811
3812        // check for end
3813        if self.consume_token(&Token::RBracket) {
3814            return Ok(Subscript::Slice {
3815                lower_bound,
3816                upper_bound,
3817                stride: None,
3818            });
3819        }
3820
3821        // we are now at `:]` or `:stride]`
3822        self.expect_token(&Token::Colon)?;
3823        let stride = if self.consume_token(&Token::RBracket) {
3824            None
3825        } else {
3826            Some(self.parse_expr()?)
3827        };
3828
3829        if stride.is_some() {
3830            self.expect_token(&Token::RBracket)?;
3831        }
3832
3833        Ok(Subscript::Slice {
3834            lower_bound,
3835            upper_bound,
3836            stride,
3837        })
3838    }
3839
3840    /// Parse a multi-dimensional array access like `[1:3][1][1]`
3841    pub fn parse_multi_dim_subscript(
3842        &mut self,
3843        chain: &mut Vec<AccessExpr>,
3844    ) -> Result<(), ParserError> {
3845        while self.consume_token(&Token::LBracket) {
3846            self.parse_subscript(chain)?;
3847        }
3848        Ok(())
3849    }
3850
3851    /// Parses an array subscript like `[1:3]`
3852    ///
3853    /// Parser is right after `[`
3854    fn parse_subscript(&mut self, chain: &mut Vec<AccessExpr>) -> Result<(), ParserError> {
3855        let subscript = self.parse_subscript_inner()?;
3856        chain.push(AccessExpr::Subscript(subscript));
3857        Ok(())
3858    }
3859
3860    fn parse_json_path_object_key(&mut self) -> Result<JsonPathElem, ParserError> {
3861        let token = self.next_token();
3862        match token.token {
3863            Token::Word(Word {
3864                value,
3865                // path segments in Snowflake dot notation can be unquoted or double-quoted
3866                quote_style: quote_style @ (Some('"') | None),
3867                // some experimentation suggests that snowflake permits
3868                // any keyword here unquoted.
3869                keyword: _,
3870            }) => Ok(JsonPathElem::Dot {
3871                key: value,
3872                quoted: quote_style.is_some(),
3873            }),
3874
3875            // This token should never be generated on snowflake or generic
3876            // dialects, but we handle it just in case this is used on future
3877            // dialects.
3878            Token::DoubleQuotedString(key) => Ok(JsonPathElem::Dot { key, quoted: true }),
3879
3880            _ => self.expected("variant object key name", token),
3881        }
3882    }
3883
3884    fn parse_json_access(&mut self, expr: Expr) -> Result<Expr, ParserError> {
3885        let path = self.parse_json_path()?;
3886        Ok(Expr::JsonAccess {
3887            value: Box::new(expr),
3888            path,
3889        })
3890    }
3891
3892    fn parse_json_path(&mut self) -> Result<JsonPath, ParserError> {
3893        let mut path = Vec::new();
3894        loop {
3895            match self.next_token().token {
3896                Token::Colon if path.is_empty() => {
3897                    path.push(self.parse_json_path_object_key()?);
3898                }
3899                Token::Period if !path.is_empty() => {
3900                    path.push(self.parse_json_path_object_key()?);
3901                }
3902                Token::LBracket => {
3903                    let key = self.parse_expr()?;
3904                    self.expect_token(&Token::RBracket)?;
3905
3906                    path.push(JsonPathElem::Bracket { key });
3907                }
3908                _ => {
3909                    self.prev_token();
3910                    break;
3911                }
3912            };
3913        }
3914
3915        debug_assert!(!path.is_empty());
3916        Ok(JsonPath { path })
3917    }
3918
3919    /// Parses the parens following the `[ NOT ] IN` operator.
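    ///
    /// A minimal sketch of how this surfaces through the public API; the SQL text,
    /// identifier, and dialect are illustrative assumptions:
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::ast::Expr;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect)
    ///     .try_with_sql("x IN (1, 2, 3)").unwrap();
    /// let expr = parser.parse_expr().unwrap();
    /// assert!(matches!(expr, Expr::InList { negated: false, .. }));
    /// ```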
3920    pub fn parse_in(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
3921        // BigQuery allows `IN UNNEST(array_expression)`
3922        // https://cloud.google.com/bigquery/docs/reference/standard-sql/operators#in_operators
3923        if self.parse_keyword(Keyword::UNNEST) {
3924            self.expect_token(&Token::LParen)?;
3925            let array_expr = self.parse_expr()?;
3926            self.expect_token(&Token::RParen)?;
3927            return Ok(Expr::InUnnest {
3928                expr: Box::new(expr),
3929                array_expr: Box::new(array_expr),
3930                negated,
3931            });
3932        }
3933        self.expect_token(&Token::LParen)?;
3934        let in_op = match self.maybe_parse(|p| p.parse_query())? {
3935            Some(subquery) => Expr::InSubquery {
3936                expr: Box::new(expr),
3937                subquery,
3938                negated,
3939            },
3940            None => Expr::InList {
3941                expr: Box::new(expr),
3942                list: if self.dialect.supports_in_empty_list() {
3943                    self.parse_comma_separated0(Parser::parse_expr, Token::RParen)?
3944                } else {
3945                    self.parse_comma_separated(Parser::parse_expr)?
3946                },
3947                negated,
3948            },
3949        };
3950        self.expect_token(&Token::RParen)?;
3951        Ok(in_op)
3952    }
3953
3954    /// Parses `BETWEEN <low> AND <high>`, assuming the `BETWEEN` keyword was already consumed.
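    ///
    /// A minimal sketch of how this surfaces through the public API; the SQL text,
    /// identifier, and dialect are illustrative assumptions:
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::ast::Expr;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect)
    ///     .try_with_sql("age BETWEEN 18 AND 65").unwrap();
    /// let expr = parser.parse_expr().unwrap();
    /// assert!(matches!(expr, Expr::Between { negated: false, .. }));
    /// ```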
3955    pub fn parse_between(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
3956        // Stop parsing subexpressions for <low> and <high> on tokens with
3957        // precedence lower than that of `BETWEEN`, such as `AND`, `IS`, etc.
3958        let low = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
3959        self.expect_keyword_is(Keyword::AND)?;
3960        let high = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
3961        Ok(Expr::Between {
3962            expr: Box::new(expr),
3963            negated,
3964            low: Box::new(low),
3965            high: Box::new(high),
3966        })
3967    }
3968
3969    /// Parse a PostgreSQL-style cast, which has the form `expr::datatype`.
3970    pub fn parse_pg_cast(&mut self, expr: Expr) -> Result<Expr, ParserError> {
3971        Ok(Expr::Cast {
3972            kind: CastKind::DoubleColon,
3973            expr: Box::new(expr),
3974            data_type: self.parse_data_type()?,
3975            format: None,
3976        })
3977    }
3978
3979    /// Get the precedence of the next token
3980    pub fn get_next_precedence(&self) -> Result<u8, ParserError> {
3981        self.dialect.get_next_precedence_default(self)
3982    }
3983
3984    /// Return the token at the given index, or EOF if the index is beyond
3985    /// the length of the current set of tokens.
3986    pub fn token_at(&self, index: usize) -> &TokenWithSpan {
3987        self.tokens.get(index).unwrap_or(&EOF_TOKEN)
3988    }
3989
3990    /// Return the first non-whitespace token that has not yet been processed
3991    /// or Token::EOF
3992    ///
3993    /// See [`Self::peek_token_ref`] to avoid the copy.
3994    pub fn peek_token(&self) -> TokenWithSpan {
3995        self.peek_nth_token(0)
3996    }
3997
3998    /// Return a reference to the first non-whitespace token that has not yet
3999    /// been processed or Token::EOF
4000    pub fn peek_token_ref(&self) -> &TokenWithSpan {
4001        self.peek_nth_token_ref(0)
4002    }
4003
4004    /// Returns the `N` next non-whitespace tokens that have not yet been
4005    /// processed.
4006    ///
4007    /// Example:
4008    /// ```rust
4009    /// # use sqlparser::dialect::GenericDialect;
4010    /// # use sqlparser::parser::Parser;
4011    /// # use sqlparser::keywords::Keyword;
4012    /// # use sqlparser::tokenizer::{Token, Word};
4013    /// let dialect = GenericDialect {};
4014    /// let mut parser = Parser::new(&dialect).try_with_sql("ORDER BY foo, bar").unwrap();
4015    ///
4016    /// // Note that Rust infers the number of tokens to peek based on the
4017    /// // length of the slice pattern!
4018    /// assert!(matches!(
4019    ///     parser.peek_tokens(),
4020    ///     [
4021    ///         Token::Word(Word { keyword: Keyword::ORDER, .. }),
4022    ///         Token::Word(Word { keyword: Keyword::BY, .. }),
4023    ///     ]
4024    /// ));
4025    /// ```
4026    pub fn peek_tokens<const N: usize>(&self) -> [Token; N] {
4027        self.peek_tokens_with_location()
4028            .map(|with_loc| with_loc.token)
4029    }
4030
4031    /// Returns the `N` next non-whitespace tokens with locations that have not
4032    /// yet been processed.
4033    ///
4034    /// See [`Self::peek_token`] for an example.
4035    pub fn peek_tokens_with_location<const N: usize>(&self) -> [TokenWithSpan; N] {
4036        let mut index = self.index;
4037        core::array::from_fn(|_| loop {
4038            let token = self.tokens.get(index);
4039            index += 1;
4040            if let Some(TokenWithSpan {
4041                token: Token::Whitespace(_),
4042                span: _,
4043            }) = token
4044            {
4045                continue;
4046            }
4047            break token.cloned().unwrap_or(TokenWithSpan {
4048                token: Token::EOF,
4049                span: Span::empty(),
4050            });
4051        })
4052    }
4053
4054    /// Returns references to the `N` next non-whitespace tokens
4055    /// that have not yet been processed.
4056    ///
4057    /// See [`Self::peek_tokens`] for an example.
4058    pub fn peek_tokens_ref<const N: usize>(&self) -> [&TokenWithSpan; N] {
4059        let mut index = self.index;
4060        core::array::from_fn(|_| loop {
4061            let token = self.tokens.get(index);
4062            index += 1;
4063            if let Some(TokenWithSpan {
4064                token: Token::Whitespace(_),
4065                span: _,
4066            }) = token
4067            {
4068                continue;
4069            }
4070            break token.unwrap_or(&EOF_TOKEN);
4071        })
4072    }
4073
4074    /// Return the nth non-whitespace token that has not yet been processed
4075    pub fn peek_nth_token(&self, n: usize) -> TokenWithSpan {
4076        self.peek_nth_token_ref(n).clone()
4077    }
4078
4079    /// Return the nth non-whitespace token that has not yet been processed
4080    pub fn peek_nth_token_ref(&self, mut n: usize) -> &TokenWithSpan {
4081        let mut index = self.index;
4082        loop {
4083            index += 1;
4084            match self.tokens.get(index - 1) {
4085                Some(TokenWithSpan {
4086                    token: Token::Whitespace(_),
4087                    span: _,
4088                }) => continue,
4089                non_whitespace => {
4090                    if n == 0 {
4091                        return non_whitespace.unwrap_or(&EOF_TOKEN);
4092                    }
4093                    n -= 1;
4094                }
4095            }
4096        }
4097    }
4098
4099    /// Return the first token, possibly whitespace, that has not yet been processed
4100    /// (or [`Token::EOF`] if the end of the token stream has been reached).
4101    pub fn peek_token_no_skip(&self) -> TokenWithSpan {
4102        self.peek_nth_token_no_skip(0)
4103    }
4104
4105    /// Return the nth token, possibly whitespace, that has not yet been processed.
4106    pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithSpan {
4107        self.tokens
4108            .get(self.index + n)
4109            .cloned()
4110            .unwrap_or(TokenWithSpan {
4111                token: Token::EOF,
4112                span: Span::empty(),
4113            })
4114    }
4115
4116    /// Return true if the next tokens exactly match `expected`
4117    ///
4118    /// Does not advance the current token.
4119    fn peek_keywords(&mut self, expected: &[Keyword]) -> bool {
4120        let index = self.index;
4121        let matched = self.parse_keywords(expected);
4122        self.index = index;
4123        matched
4124    }
4125
4126    /// Advances to the next non-whitespace token and returns a copy.
4127    ///
4128    /// Please use [`Self::advance_token`] and [`Self::get_current_token`] to
4129    /// avoid the copy.
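    ///
    /// A short sketch (illustrative) showing that [`Self::prev_token`] undoes
    /// the advance:
    ///
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("SELECT 1").unwrap();
    /// let first = parser.next_token();
    /// parser.prev_token();
    /// assert_eq!(parser.next_token(), first);
    /// ```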
4130    pub fn next_token(&mut self) -> TokenWithSpan {
4131        self.advance_token();
4132        self.get_current_token().clone()
4133    }
4134
4135    /// Returns the index of the current token
4136    ///
4137    /// This can be used with APIs that expect an index, such as
4138    /// [`Self::token_at`]
4139    pub fn get_current_index(&self) -> usize {
4140        self.index.saturating_sub(1)
4141    }
4142
4143    /// Return the next unprocessed token, possibly whitespace.
4144    pub fn next_token_no_skip(&mut self) -> Option<&TokenWithSpan> {
4145        self.index += 1;
4146        self.tokens.get(self.index - 1)
4147    }
4148
4149    /// Advances the current token to the next non-whitespace token
4150    ///
4151    /// See [`Self::get_current_token`] to get the current token after advancing
4152    pub fn advance_token(&mut self) {
4153        loop {
4154            self.index += 1;
4155            match self.tokens.get(self.index - 1) {
4156                Some(TokenWithSpan {
4157                    token: Token::Whitespace(_),
4158                    span: _,
4159                }) => continue,
4160                _ => break,
4161            }
4162        }
4163    }
4164
4165    /// Returns a reference to the current token
4166    ///
4167    /// Does not advance the current token.
4168    pub fn get_current_token(&self) -> &TokenWithSpan {
4169        self.token_at(self.index.saturating_sub(1))
4170    }
4171
4172    /// Returns a reference to the previous token
4173    ///
4174    /// Does not advance the current token.
4175    pub fn get_previous_token(&self) -> &TokenWithSpan {
4176        self.token_at(self.index.saturating_sub(2))
4177    }
4178
4179    /// Returns a reference to the next token
4180    ///
4181    /// Does not advance the current token.
4182    pub fn get_next_token(&self) -> &TokenWithSpan {
4183        self.token_at(self.index)
4184    }
4185
4186    /// Seek back to the last non-whitespace token.
4187    ///
4188    /// Must be called after `next_token()`, otherwise it might panic. It is OK
4189    /// to call it after `next_token()` has returned an EOF.
4190    ///
4191    // TODO rename to backup_token and deprecate prev_token?
4192    pub fn prev_token(&mut self) {
4193        loop {
4194            assert!(self.index > 0);
4195            self.index -= 1;
4196            if let Some(TokenWithSpan {
4197                token: Token::Whitespace(_),
4198                span: _,
4199            }) = self.tokens.get(self.index)
4200            {
4201                continue;
4202            }
4203            return;
4204        }
4205    }
4206
4207    /// Report `found` was encountered instead of `expected`
4208    pub fn expected<T>(&self, expected: &str, found: TokenWithSpan) -> Result<T, ParserError> {
4209        parser_err!(
4210            format!("Expected: {expected}, found: {found}"),
4211            found.span.start
4212        )
4213    }
4214
4215    /// Report `found` was encountered instead of `expected`
4216    pub fn expected_ref<T>(&self, expected: &str, found: &TokenWithSpan) -> Result<T, ParserError> {
4217        parser_err!(
4218            format!("Expected: {expected}, found: {found}"),
4219            found.span.start
4220        )
4221    }
4222
4223    /// Report that the token at `index` was found instead of `expected`.
4224    pub fn expected_at<T>(&self, expected: &str, index: usize) -> Result<T, ParserError> {
4225        let found = self.tokens.get(index).unwrap_or(&EOF_TOKEN);
4226        parser_err!(
4227            format!("Expected: {expected}, found: {found}"),
4228            found.span.start
4229        )
4230    }
4231
4232    /// If the current token is the `expected` keyword, consume it and return
4233    /// true. Otherwise, no tokens are consumed and false is returned.
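    ///
    /// A short usage sketch (illustrative, using the `GenericDialect`):
    ///
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::keywords::Keyword;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("ORDER BY foo").unwrap();
    /// assert!(parser.parse_keyword(Keyword::ORDER));  // consumed
    /// assert!(!parser.parse_keyword(Keyword::ORDER)); // next token is `BY`, nothing consumed
    /// assert!(parser.parse_keyword(Keyword::BY));
    /// ```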
4234    #[must_use]
4235    pub fn parse_keyword(&mut self, expected: Keyword) -> bool {
4236        if self.peek_keyword(expected) {
4237            self.advance_token();
4238            true
4239        } else {
4240            false
4241        }
4242    }
4243
4244    #[must_use]
4245    pub fn peek_keyword(&self, expected: Keyword) -> bool {
4246        matches!(&self.peek_token_ref().token, Token::Word(w) if expected == w.keyword)
4247    }
4248
4249    /// If the current token is the `expected` keyword followed by the
4250    /// specified tokens, consume them and return true.
4251    /// Otherwise, no tokens are consumed and false is returned.
4252    ///
4253    /// Note that this function is inefficient when `tokens` is long, as each
4254    /// element is checked via `peek_nth_token`, which re-scans from the current
4255    /// position every time.
4256    pub fn parse_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
4257        self.keyword_with_tokens(expected, tokens, true)
4258    }
4259
4260    /// Peeks to see if the current token is the `expected` keyword followed by specified tokens
4261    /// without consuming them.
4262    ///
4263    /// See [Self::parse_keyword_with_tokens] for details.
4264    pub(crate) fn peek_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
4265        self.keyword_with_tokens(expected, tokens, false)
4266    }
4267
4268    fn keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token], consume: bool) -> bool {
4269        match &self.peek_token_ref().token {
4270            Token::Word(w) if expected == w.keyword => {
4271                for (idx, token) in tokens.iter().enumerate() {
4272                    if self.peek_nth_token_ref(idx + 1).token != *token {
4273                        return false;
4274                    }
4275                }
4276
4277                if consume {
4278                    for _ in 0..(tokens.len() + 1) {
4279                        self.advance_token();
4280                    }
4281                }
4282
4283                true
4284            }
4285            _ => false,
4286        }
4287    }
4288
4289    /// If the current and subsequent tokens exactly match the `keywords`
4290    /// sequence, consume them and return true. Otherwise, no tokens are
4291    /// consumed and false is returned.
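    ///
    /// A short usage sketch (illustrative):
    ///
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::keywords::Keyword;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("GROUP BY x").unwrap();
    /// assert!(!parser.parse_keywords(&[Keyword::ORDER, Keyword::BY])); // no match, nothing consumed
    /// assert!(parser.parse_keywords(&[Keyword::GROUP, Keyword::BY]));
    /// ```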
4292    #[must_use]
4293    pub fn parse_keywords(&mut self, keywords: &[Keyword]) -> bool {
4294        let index = self.index;
4295        for &keyword in keywords {
4296            if !self.parse_keyword(keyword) {
4297                // println!("parse_keywords aborting .. did not find {:?}", keyword);
4298                // reset index and return immediately
4299                self.index = index;
4300                return false;
4301            }
4302        }
4303        true
4304    }
4305
4306    /// If the current token is one of the given `keywords`, returns the keyword
4307    /// that matches, without consuming the token. Otherwise, returns [`None`].
4308    #[must_use]
4309    pub fn peek_one_of_keywords(&self, keywords: &[Keyword]) -> Option<Keyword> {
4310        for keyword in keywords {
4311            if self.peek_keyword(*keyword) {
4312                return Some(*keyword);
4313            }
4314        }
4315        None
4316    }
4317
4318    /// If the current token is one of the given `keywords`, consume the token
4319    /// and return the keyword that matches. Otherwise, no tokens are consumed
4320    /// and returns [`None`].
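    ///
    /// A short usage sketch (illustrative):
    ///
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::keywords::Keyword;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("DESC NULLS FIRST").unwrap();
    /// assert_eq!(
    ///     parser.parse_one_of_keywords(&[Keyword::ASC, Keyword::DESC]),
    ///     Some(Keyword::DESC)
    /// );
    /// ```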
4321    #[must_use]
4322    pub fn parse_one_of_keywords(&mut self, keywords: &[Keyword]) -> Option<Keyword> {
4323        match &self.peek_token_ref().token {
4324            Token::Word(w) => {
4325                keywords
4326                    .iter()
4327                    .find(|keyword| **keyword == w.keyword)
4328                    .map(|keyword| {
4329                        self.advance_token();
4330                        *keyword
4331                    })
4332            }
4333            _ => None,
4334        }
4335    }
4336
4337    /// If the current token is one of the expected keywords, consume the token
4338    /// and return the keyword that matches. Otherwise, return an error.
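    ///
    /// A short usage sketch (illustrative):
    ///
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::keywords::Keyword;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("LEFT JOIN t").unwrap();
    /// assert_eq!(
    ///     parser.expect_one_of_keywords(&[Keyword::LEFT, Keyword::RIGHT]).unwrap(),
    ///     Keyword::LEFT
    /// );
    /// // `JOIN` is not in the list, so an error is returned
    /// assert!(parser.expect_one_of_keywords(&[Keyword::LEFT, Keyword::RIGHT]).is_err());
    /// ```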
4339    pub fn expect_one_of_keywords(&mut self, keywords: &[Keyword]) -> Result<Keyword, ParserError> {
4340        if let Some(keyword) = self.parse_one_of_keywords(keywords) {
4341            Ok(keyword)
4342        } else {
4343            let keywords: Vec<String> = keywords.iter().map(|x| format!("{x:?}")).collect();
4344            self.expected_ref(
4345                &format!("one of {}", keywords.join(" or ")),
4346                self.peek_token_ref(),
4347            )
4348        }
4349    }
4350
4351    /// If the current token is the `expected` keyword, consume the token.
4352    /// Otherwise, return an error.
4353    ///
4354    // todo deprecate in favor of expect_keyword_is
4355    pub fn expect_keyword(&mut self, expected: Keyword) -> Result<TokenWithSpan, ParserError> {
4356        if self.parse_keyword(expected) {
4357            Ok(self.get_current_token().clone())
4358        } else {
4359            self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4360        }
4361    }
4362
4363    /// If the current token is the `expected` keyword, consume the token.
4364    /// Otherwise, return an error.
4365    ///
4366    /// This differs from expect_keyword only in that the matched keyword
4367    /// token is not returned.
4368    pub fn expect_keyword_is(&mut self, expected: Keyword) -> Result<(), ParserError> {
4369        if self.parse_keyword(expected) {
4370            Ok(())
4371        } else {
4372            self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4373        }
4374    }
4375
4376    /// If the current and subsequent tokens exactly match the `keywords`
4377    /// sequence, consume them and return `Ok(())`. Otherwise, return an error.
4378    pub fn expect_keywords(&mut self, expected: &[Keyword]) -> Result<(), ParserError> {
4379        for &kw in expected {
4380            self.expect_keyword_is(kw)?;
4381        }
4382        Ok(())
4383    }
4384
4385    /// Consume the next token if it matches the expected token, otherwise return false
4386    ///
4387    /// See [Self::advance_token] to consume the token unconditionally
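    ///
    /// A short usage sketch (illustrative):
    ///
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::tokenizer::Token;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql(", 1").unwrap();
    /// assert!(parser.consume_token(&Token::Comma));
    /// assert!(!parser.consume_token(&Token::Comma)); // next token is `1`
    /// ```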
4388    #[must_use]
4389    pub fn consume_token(&mut self, expected: &Token) -> bool {
4390        if self.peek_token_ref() == expected {
4391            self.advance_token();
4392            true
4393        } else {
4394            false
4395        }
4396    }
4397
4398    /// If the current and subsequent tokens exactly match the `tokens`
4399    /// sequence, consume them and return true. Otherwise, no tokens are
4400    /// consumed and false is returned.
4401    #[must_use]
4402    pub fn consume_tokens(&mut self, tokens: &[Token]) -> bool {
4403        let index = self.index;
4404        for token in tokens {
4405            if !self.consume_token(token) {
4406                self.index = index;
4407                return false;
4408            }
4409        }
4410        true
4411    }
4412
4413    /// Bail out if the current token is not the expected token, or consume it if it is
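    ///
    /// A short usage sketch (illustrative):
    ///
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::tokenizer::Token;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("( 1").unwrap();
    /// assert!(parser.expect_token(&Token::LParen).is_ok());
    /// assert!(parser.expect_token(&Token::RParen).is_err()); // found `1` instead
    /// ```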
4414    pub fn expect_token(&mut self, expected: &Token) -> Result<TokenWithSpan, ParserError> {
4415        if self.peek_token_ref() == expected {
4416            Ok(self.next_token())
4417        } else {
4418            self.expected_ref(&expected.to_string(), self.peek_token_ref())
4419        }
4420    }
4421
4422    fn parse<T: FromStr>(s: String, loc: Location) -> Result<T, ParserError>
4423    where
4424        <T as FromStr>::Err: Display,
4425    {
4426        s.parse::<T>().map_err(|e| {
4427            ParserError::ParserError(format!(
4428                "Could not parse '{s}' as {}: {e}{loc}",
4429                core::any::type_name::<T>()
4430            ))
4431        })
4432    }
4433
4434    /// Parse a comma-separated list of 1+ SelectItem
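    ///
    /// A short sketch (illustrative; a projection is normally parsed as part of
    /// a full `SELECT`):
    ///
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("a, b FROM t").unwrap();
    /// assert_eq!(parser.parse_projection().unwrap().len(), 2);
    /// ```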
4435    pub fn parse_projection(&mut self) -> Result<Vec<SelectItem>, ParserError> {
4436        // BigQuery and Snowflake allow trailing commas, but only in projection lists
4437        // e.g. `SELECT 1, 2, FROM t`
4438        // https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#trailing_commas
4439        // https://docs.snowflake.com/en/release-notes/2024/8_11#select-supports-trailing-commas
4440
4441        let trailing_commas =
4442            self.options.trailing_commas | self.dialect.supports_projection_trailing_commas();
4443
4444        self.parse_comma_separated_with_trailing_commas(
4445            |p| p.parse_select_item(),
4446            trailing_commas,
4447            Self::is_reserved_for_column_alias,
4448        )
4449    }
4450
4451    pub fn parse_actions_list(&mut self) -> Result<Vec<Action>, ParserError> {
4452        let mut values = vec![];
4453        loop {
4454            values.push(self.parse_grant_permission()?);
4455            if !self.consume_token(&Token::Comma) {
4456                break;
4457            } else if self.options.trailing_commas {
4458                match self.peek_token().token {
4459                    Token::Word(kw) if kw.keyword == Keyword::ON => {
4460                        break;
4461                    }
4462                    Token::RParen
4463                    | Token::SemiColon
4464                    | Token::EOF
4465                    | Token::RBracket
4466                    | Token::RBrace => break,
4467                    _ => continue,
4468                }
4469            }
4470        }
4471        Ok(values)
4472    }
4473
4474    /// Parse a list of [TableWithJoins]
4475    fn parse_table_with_joins(&mut self) -> Result<Vec<TableWithJoins>, ParserError> {
4476        let trailing_commas = self.dialect.supports_from_trailing_commas();
4477
4478        self.parse_comma_separated_with_trailing_commas(
4479            Parser::parse_table_and_joins,
4480            trailing_commas,
4481            |kw, parser| !parser.dialect.is_table_factor(kw, parser),
4482        )
4483    }
4484
4485    /// Parse the comma of a comma-separated syntax element.
4486    /// `R` is a predicate that should return true if the next
4487    /// keyword is a reserved keyword.
4488    /// Allows for control over trailing commas.
4489    ///
4490    /// Returns true if the end of the list has been reached (no next element).
4491    fn is_parse_comma_separated_end_with_trailing_commas<R>(
4492        &mut self,
4493        trailing_commas: bool,
4494        is_reserved_keyword: &R,
4495    ) -> bool
4496    where
4497        R: Fn(&Keyword, &mut Parser) -> bool,
4498    {
4499        if !self.consume_token(&Token::Comma) {
4500            true
4501        } else if trailing_commas {
4502            let token = self.next_token().token;
4503            let is_end = match token {
4504                Token::Word(ref kw) if is_reserved_keyword(&kw.keyword, self) => true,
4505                Token::RParen | Token::SemiColon | Token::EOF | Token::RBracket | Token::RBrace => {
4506                    true
4507                }
4508                _ => false,
4509            };
4510            self.prev_token();
4511
4512            is_end
4513        } else {
4514            false
4515        }
4516    }
4517
4518    /// Parse the comma of a comma-separated syntax element.
4519    /// Returns true if the end of the list has been reached (no next element).
4520    fn is_parse_comma_separated_end(&mut self) -> bool {
4521        self.is_parse_comma_separated_end_with_trailing_commas(
4522            self.options.trailing_commas,
4523            &Self::is_reserved_for_column_alias,
4524        )
4525    }
4526
4527    /// Parse a comma-separated list of 1+ items accepted by `F`
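    ///
    /// A short usage sketch (illustrative):
    ///
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("a, b, c FROM t").unwrap();
    /// let idents = parser.parse_comma_separated(|p| p.parse_identifier()).unwrap();
    /// assert_eq!(idents.len(), 3); // stops at `FROM`, which is not part of the list
    /// ```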
4528    pub fn parse_comma_separated<T, F>(&mut self, f: F) -> Result<Vec<T>, ParserError>
4529    where
4530        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4531    {
4532        self.parse_comma_separated_with_trailing_commas(
4533            f,
4534            self.options.trailing_commas,
4535            Self::is_reserved_for_column_alias,
4536        )
4537    }
4538
4539    /// Parse a comma-separated list of 1+ items accepted by `F`.
4540    /// `R` is a predicate that should return true if the next
4541    /// keyword is a reserved keyword.
4542    /// Allows for control over trailing commas.
4543    fn parse_comma_separated_with_trailing_commas<T, F, R>(
4544        &mut self,
4545        mut f: F,
4546        trailing_commas: bool,
4547        is_reserved_keyword: R,
4548    ) -> Result<Vec<T>, ParserError>
4549    where
4550        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4551        R: Fn(&Keyword, &mut Parser) -> bool,
4552    {
4553        let mut values = vec![];
4554        loop {
4555            values.push(f(self)?);
4556            if self.is_parse_comma_separated_end_with_trailing_commas(
4557                trailing_commas,
4558                &is_reserved_keyword,
4559            ) {
4560                break;
4561            }
4562        }
4563        Ok(values)
4564    }
4565
4566    /// Parse a period-separated list of 1+ items accepted by `F`
4567    fn parse_period_separated<T, F>(&mut self, mut f: F) -> Result<Vec<T>, ParserError>
4568    where
4569        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4570    {
4571        let mut values = vec![];
4572        loop {
4573            values.push(f(self)?);
4574            if !self.consume_token(&Token::Period) {
4575                break;
4576            }
4577        }
4578        Ok(values)
4579    }
4580
4581    /// Parse a keyword-separated list of 1+ items accepted by `F`
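    ///
    /// A short sketch (illustrative; `TO` here is just an arbitrary separator
    /// keyword):
    ///
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::keywords::Keyword;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("a TO b TO c").unwrap();
    /// let idents = parser
    ///     .parse_keyword_separated(Keyword::TO, |p| p.parse_identifier())
    ///     .unwrap();
    /// assert_eq!(idents.len(), 3);
    /// ```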
4582    pub fn parse_keyword_separated<T, F>(
4583        &mut self,
4584        keyword: Keyword,
4585        mut f: F,
4586    ) -> Result<Vec<T>, ParserError>
4587    where
4588        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4589    {
4590        let mut values = vec![];
4591        loop {
4592            values.push(f(self)?);
4593            if !self.parse_keyword(keyword) {
4594                break;
4595            }
4596        }
4597        Ok(values)
4598    }
4599
4600    pub fn parse_parenthesized<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
4601    where
4602        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4603    {
4604        self.expect_token(&Token::LParen)?;
4605        let res = f(self)?;
4606        self.expect_token(&Token::RParen)?;
4607        Ok(res)
4608    }
4609
4610    /// Parse a comma-separated list of 0+ items accepted by `F`
4611    /// * `end_token` - the token expected to terminate the list (e.g. [Token::RParen], [Token::RBrace], ...)
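    ///
    /// A short sketch (illustrative) of parsing a possibly-empty parenthesized
    /// list:
    ///
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::tokenizer::Token;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("()").unwrap();
    /// parser.expect_token(&Token::LParen).unwrap();
    /// let args = parser
    ///     .parse_comma_separated0(Parser::parse_expr, Token::RParen)
    ///     .unwrap();
    /// assert!(args.is_empty());
    /// ```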
4612    pub fn parse_comma_separated0<T, F>(
4613        &mut self,
4614        f: F,
4615        end_token: Token,
4616    ) -> Result<Vec<T>, ParserError>
4617    where
4618        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4619    {
4620        if self.peek_token().token == end_token {
4621            return Ok(vec![]);
4622        }
4623
4624        if self.options.trailing_commas && self.peek_tokens() == [Token::Comma, end_token] {
4625            let _ = self.consume_token(&Token::Comma);
4626            return Ok(vec![]);
4627        }
4628
4629        self.parse_comma_separated(f)
4630    }
4631
4632    /// Parses 0 or more statements, each followed by a semicolon.
4633    /// If the next token is any of `terminal_keywords` then no more
4634    /// statements will be parsed.
4635    pub(crate) fn parse_statement_list(
4636        &mut self,
4637        terminal_keywords: &[Keyword],
4638    ) -> Result<Vec<Statement>, ParserError> {
4639        let mut values = vec![];
4640        loop {
4641            match &self.peek_nth_token_ref(0).token {
4642                Token::EOF => break,
4643                Token::Word(w) => {
4644                    if w.quote_style.is_none() && terminal_keywords.contains(&w.keyword) {
4645                        break;
4646                    }
4647                }
4648                _ => {}
4649            }
4650
4651            values.push(self.parse_statement()?);
4652            self.expect_token(&Token::SemiColon)?;
4653        }
4654        Ok(values)
4655    }
4656
4657    /// Default implementation of a predicate that returns true if
4658    /// the specified keyword is reserved for column alias.
4659    /// See [Dialect::is_column_alias]
4660    fn is_reserved_for_column_alias(kw: &Keyword, parser: &mut Parser) -> bool {
4661        !parser.dialect.is_column_alias(kw, parser)
4662    }
4663
4664    /// Run a parser method `f`, reverting to the current position if unsuccessful.
4665    /// Returns `ParserError::RecursionLimitExceeded` if `f` returns a `RecursionLimitExceeded`.
4666    /// Returns `Ok(None)` if `f` returns any other error.
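    ///
    /// A short sketch (illustrative):
    ///
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::keywords::Keyword;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("1 + 2").unwrap();
    /// // The closure fails (there is no `SELECT` here), so `None` is returned
    /// // and the parser position is restored.
    /// let kw = parser.maybe_parse(|p| p.expect_keyword(Keyword::SELECT)).unwrap();
    /// assert!(kw.is_none());
    /// assert!(parser.parse_expr().is_ok()); // `1 + 2` can still be parsed
    /// ```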
4667    pub fn maybe_parse<T, F>(&mut self, f: F) -> Result<Option<T>, ParserError>
4668    where
4669        F: FnMut(&mut Parser) -> Result<T, ParserError>,
4670    {
4671        match self.try_parse(f) {
4672            Ok(t) => Ok(Some(t)),
4673            Err(ParserError::RecursionLimitExceeded) => Err(ParserError::RecursionLimitExceeded),
4674            _ => Ok(None),
4675        }
4676    }
4677
4678    /// Run a parser method `f`, reverting to the current position if unsuccessful.
4679    pub fn try_parse<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
4680    where
4681        F: FnMut(&mut Parser) -> Result<T, ParserError>,
4682    {
4683        let index = self.index;
4684        match f(self) {
4685            Ok(t) => Ok(t),
4686            Err(e) => {
4687                // Reset the parser to its original position on failure
4688                self.index = index;
4689                Err(e)
4690            }
4691        }
4692    }
4693
4694    /// Parse either `ALL`, `DISTINCT` or `DISTINCT ON (...)`. Returns [`None`] if `ALL` (or
4695    /// neither) is parsed, and returns a [`ParserError`] if both `ALL` and `DISTINCT` are found.
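    ///
    /// A short sketch (illustrative):
    ///
    /// ```rust
    /// # use sqlparser::ast::Distinct;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("DISTINCT a").unwrap();
    /// assert!(matches!(
    ///     parser.parse_all_or_distinct().unwrap(),
    ///     Some(Distinct::Distinct)
    /// ));
    /// ```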
4696    pub fn parse_all_or_distinct(&mut self) -> Result<Option<Distinct>, ParserError> {
4697        let loc = self.peek_token().span.start;
4698        let all = self.parse_keyword(Keyword::ALL);
4699        let distinct = self.parse_keyword(Keyword::DISTINCT);
4700        if !distinct {
4701            return Ok(None);
4702        }
4703        if all {
4704            return parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc);
4705        }
4706        let on = self.parse_keyword(Keyword::ON);
4707        if !on {
4708            return Ok(Some(Distinct::Distinct));
4709        }
4710
4711        self.expect_token(&Token::LParen)?;
4712        let col_names = if self.consume_token(&Token::RParen) {
4713            self.prev_token();
4714            Vec::new()
4715        } else {
4716            self.parse_comma_separated(Parser::parse_expr)?
4717        };
4718        self.expect_token(&Token::RParen)?;
4719        Ok(Some(Distinct::On(col_names)))
4720    }
4721
4722    /// Parse a SQL CREATE statement
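    ///
    /// A minimal sketch (illustrative; the leading `CREATE` keyword is consumed
    /// by the caller before this method is invoked):
    ///
    /// ```rust
    /// # use sqlparser::ast::Statement;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::keywords::Keyword;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect)
    ///     .try_with_sql("CREATE TABLE t (a INT)")
    ///     .unwrap();
    /// assert!(parser.parse_keyword(Keyword::CREATE));
    /// let stmt = parser.parse_create().unwrap();
    /// assert!(matches!(stmt, Statement::CreateTable(_)));
    /// ```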
4723    pub fn parse_create(&mut self) -> Result<Statement, ParserError> {
4724        let or_replace = self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]);
4725        let or_alter = self.parse_keywords(&[Keyword::OR, Keyword::ALTER]);
4726        let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
4727        let global = self.parse_one_of_keywords(&[Keyword::GLOBAL]).is_some();
4728        let transient = self.parse_one_of_keywords(&[Keyword::TRANSIENT]).is_some();
4729        let global: Option<bool> = if global {
4730            Some(true)
4731        } else if local {
4732            Some(false)
4733        } else {
4734            None
4735        };
4736        let temporary = self
4737            .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
4738            .is_some();
4739        let persistent = dialect_of!(self is DuckDbDialect)
4740            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
4741        let create_view_params = self.parse_create_view_params()?;
4742        if self.parse_keyword(Keyword::TABLE) {
4743            self.parse_create_table(or_replace, temporary, global, transient)
4744        } else if self.peek_keyword(Keyword::MATERIALIZED)
4745            || self.peek_keyword(Keyword::VIEW)
4746            || self.peek_keywords(&[Keyword::SECURE, Keyword::MATERIALIZED, Keyword::VIEW])
4747            || self.peek_keywords(&[Keyword::SECURE, Keyword::VIEW])
4748        {
4749            self.parse_create_view(or_alter, or_replace, temporary, create_view_params)
4750        } else if self.parse_keyword(Keyword::POLICY) {
4751            self.parse_create_policy()
4752        } else if self.parse_keyword(Keyword::EXTERNAL) {
4753            self.parse_create_external_table(or_replace)
4754        } else if self.parse_keyword(Keyword::FUNCTION) {
4755            self.parse_create_function(or_alter, or_replace, temporary)
4756        } else if self.parse_keyword(Keyword::DOMAIN) {
4757            self.parse_create_domain()
4758        } else if self.parse_keyword(Keyword::TRIGGER) {
4759            self.parse_create_trigger(temporary, or_alter, or_replace, false)
4760        } else if self.parse_keywords(&[Keyword::CONSTRAINT, Keyword::TRIGGER]) {
4761            self.parse_create_trigger(temporary, or_alter, or_replace, true)
4762        } else if self.parse_keyword(Keyword::MACRO) {
4763            self.parse_create_macro(or_replace, temporary)
4764        } else if self.parse_keyword(Keyword::SECRET) {
4765            self.parse_create_secret(or_replace, temporary, persistent)
4766        } else if self.parse_keyword(Keyword::USER) {
4767            self.parse_create_user(or_replace)
4768        } else if or_replace {
4769            self.expected(
4770                "[EXTERNAL] TABLE or [MATERIALIZED] VIEW or FUNCTION after CREATE OR REPLACE",
4771                self.peek_token(),
4772            )
4773        } else if self.parse_keyword(Keyword::EXTENSION) {
4774            self.parse_create_extension()
4775        } else if self.parse_keyword(Keyword::INDEX) {
4776            self.parse_create_index(false)
4777        } else if self.parse_keywords(&[Keyword::UNIQUE, Keyword::INDEX]) {
4778            self.parse_create_index(true)
4779        } else if self.parse_keyword(Keyword::VIRTUAL) {
4780            self.parse_create_virtual_table()
4781        } else if self.parse_keyword(Keyword::SCHEMA) {
4782            self.parse_create_schema()
4783        } else if self.parse_keyword(Keyword::DATABASE) {
4784            self.parse_create_database()
4785        } else if self.parse_keyword(Keyword::ROLE) {
4786            self.parse_create_role()
4787        } else if self.parse_keyword(Keyword::SEQUENCE) {
4788            self.parse_create_sequence(temporary)
4789        } else if self.parse_keyword(Keyword::TYPE) {
4790            self.parse_create_type()
4791        } else if self.parse_keyword(Keyword::PROCEDURE) {
4792            self.parse_create_procedure(or_alter)
4793        } else if self.parse_keyword(Keyword::CONNECTOR) {
4794            self.parse_create_connector()
4795        } else if self.parse_keyword(Keyword::OPERATOR) {
4796            // Check if this is CREATE OPERATOR FAMILY or CREATE OPERATOR CLASS
4797            if self.parse_keyword(Keyword::FAMILY) {
4798                self.parse_create_operator_family()
4799            } else if self.parse_keyword(Keyword::CLASS) {
4800                self.parse_create_operator_class()
4801            } else {
4802                self.parse_create_operator()
4803            }
4804        } else if self.parse_keyword(Keyword::SERVER) {
4805            self.parse_pg_create_server()
4806        } else {
4807            self.expected("an object type after CREATE", self.peek_token())
4808        }
4809    }
4810
4811    fn parse_create_user(&mut self, or_replace: bool) -> Result<Statement, ParserError> {
4812        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
4813        let name = self.parse_identifier()?;
4814        let options = self
4815            .parse_key_value_options(false, &[Keyword::WITH, Keyword::TAG])?
4816            .options;
4817        let with_tags = self.parse_keyword(Keyword::WITH);
4818        let tags = if self.parse_keyword(Keyword::TAG) {
4819            self.parse_key_value_options(true, &[])?.options
4820        } else {
4821            vec![]
4822        };
4823        Ok(Statement::CreateUser(CreateUser {
4824            or_replace,
4825            if_not_exists,
4826            name,
4827            options: KeyValueOptions {
4828                options,
4829                delimiter: KeyValueOptionsDelimiter::Space,
4830            },
4831            with_tags,
4832            tags: KeyValueOptions {
4833                options: tags,
4834                delimiter: KeyValueOptionsDelimiter::Comma,
4835            },
4836        }))
4837    }
4838
4839    /// See [DuckDB Docs](https://duckdb.org/docs/sql/statements/create_secret.html) for more details.
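    ///
    /// A sketch of the DuckDB form (illustrative; the secret name, type and
    /// option key/value pairs below are placeholders):
    ///
    /// ```rust
    /// # use sqlparser::dialect::DuckDbDialect;
    /// # use sqlparser::parser::Parser;
    /// let sql = "CREATE SECRET my_secret (TYPE S3, KEY_ID abc, REGION xyz)";
    /// let stmts = Parser::parse_sql(&DuckDbDialect {}, sql).unwrap();
    /// assert_eq!(stmts.len(), 1);
    /// ```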
4840    pub fn parse_create_secret(
4841        &mut self,
4842        or_replace: bool,
4843        temporary: bool,
4844        persistent: bool,
4845    ) -> Result<Statement, ParserError> {
4846        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
4847
4848        let mut storage_specifier = None;
4849        let mut name = None;
4850        if self.peek_token() != Token::LParen {
4851            if self.parse_keyword(Keyword::IN) {
4852                storage_specifier = self.parse_identifier().ok()
4853            } else {
4854                name = self.parse_identifier().ok();
4855            }
4856
4857            // Storage specifier may follow the name
4858            if storage_specifier.is_none()
4859                && self.peek_token() != Token::LParen
4860                && self.parse_keyword(Keyword::IN)
4861            {
4862                storage_specifier = self.parse_identifier().ok();
4863            }
4864        }
4865
4866        self.expect_token(&Token::LParen)?;
4867        self.expect_keyword_is(Keyword::TYPE)?;
4868        let secret_type = self.parse_identifier()?;
4869
4870        let mut options = Vec::new();
4871        if self.consume_token(&Token::Comma) {
4872            options.append(&mut self.parse_comma_separated(|p| {
4873                let key = p.parse_identifier()?;
4874                let value = p.parse_identifier()?;
4875                Ok(SecretOption { key, value })
4876            })?);
4877        }
4878        self.expect_token(&Token::RParen)?;
4879
4880        let temp = match (temporary, persistent) {
4881            (true, false) => Some(true),
4882            (false, true) => Some(false),
4883            (false, false) => None,
4884            _ => self.expected("TEMPORARY or PERSISTENT", self.peek_token())?,
4885        };
4886
4887        Ok(Statement::CreateSecret {
4888            or_replace,
4889            temporary: temp,
4890            if_not_exists,
4891            name,
4892            storage_specifier,
4893            secret_type,
4894            options,
4895        })
4896    }
4897
4898    /// Parse a CACHE TABLE statement
4899    pub fn parse_cache_table(&mut self) -> Result<Statement, ParserError> {
4900        let (mut table_flag, mut options, mut has_as, mut query) = (None, vec![], false, None);
4901        if self.parse_keyword(Keyword::TABLE) {
4902            let table_name = self.parse_object_name(false)?;
4903            if self.peek_token().token != Token::EOF {
4904                if let Token::Word(word) = self.peek_token().token {
4905                    if word.keyword == Keyword::OPTIONS {
4906                        options = self.parse_options(Keyword::OPTIONS)?
4907                    }
4908                };
4909
4910                if self.peek_token().token != Token::EOF {
4911                    let (a, q) = self.parse_as_query()?;
4912                    has_as = a;
4913                    query = Some(q);
4914                }
4915
4916                Ok(Statement::Cache {
4917                    table_flag,
4918                    table_name,
4919                    has_as,
4920                    options,
4921                    query,
4922                })
4923            } else {
4924                Ok(Statement::Cache {
4925                    table_flag,
4926                    table_name,
4927                    has_as,
4928                    options,
4929                    query,
4930                })
4931            }
4932        } else {
4933            table_flag = Some(self.parse_object_name(false)?);
4934            if self.parse_keyword(Keyword::TABLE) {
4935                let table_name = self.parse_object_name(false)?;
4936                if self.peek_token() != Token::EOF {
4937                    if let Token::Word(word) = self.peek_token().token {
4938                        if word.keyword == Keyword::OPTIONS {
4939                            options = self.parse_options(Keyword::OPTIONS)?
4940                        }
4941                    };
4942
4943                    if self.peek_token() != Token::EOF {
4944                        let (a, q) = self.parse_as_query()?;
4945                        has_as = a;
4946                        query = Some(q);
4947                    }
4948
4949                    Ok(Statement::Cache {
4950                        table_flag,
4951                        table_name,
4952                        has_as,
4953                        options,
4954                        query,
4955                    })
4956                } else {
4957                    Ok(Statement::Cache {
4958                        table_flag,
4959                        table_name,
4960                        has_as,
4961                        options,
4962                        query,
4963                    })
4964                }
4965            } else {
4966                if self.peek_token() == Token::EOF {
4967                    self.prev_token();
4968                }
4969                self.expected("a `TABLE` keyword", self.peek_token())
4970            }
4971        }
4972    }
4973
4974    /// Parse an optional `AS` followed by a query, such as `WITH XXX AS SELECT XXX` or `CACHE TABLE AS SELECT XXX`
4975    pub fn parse_as_query(&mut self) -> Result<(bool, Box<Query>), ParserError> {
4976        match self.peek_token().token {
4977            Token::Word(word) => match word.keyword {
4978                Keyword::AS => {
4979                    self.next_token();
4980                    Ok((true, self.parse_query()?))
4981                }
4982                _ => Ok((false, self.parse_query()?)),
4983            },
4984            _ => self.expected("a QUERY statement", self.peek_token()),
4985        }
4986    }
4987
4988    /// Parse an UNCACHE TABLE statement
4989    pub fn parse_uncache_table(&mut self) -> Result<Statement, ParserError> {
4990        self.expect_keyword_is(Keyword::TABLE)?;
4991        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
4992        let table_name = self.parse_object_name(false)?;
4993        Ok(Statement::UNCache {
4994            table_name,
4995            if_exists,
4996        })
4997    }
4998
4999    /// SQLite-specific `CREATE VIRTUAL TABLE`
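    ///
    /// A short sketch (illustrative; `fts5` is just an example module name):
    ///
    /// ```rust
    /// # use sqlparser::dialect::SQLiteDialect;
    /// # use sqlparser::parser::Parser;
    /// let sql = "CREATE VIRTUAL TABLE t USING fts5(a, b)";
    /// let stmts = Parser::parse_sql(&SQLiteDialect {}, sql).unwrap();
    /// assert_eq!(stmts.len(), 1);
    /// ```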
5000    pub fn parse_create_virtual_table(&mut self) -> Result<Statement, ParserError> {
5001        self.expect_keyword_is(Keyword::TABLE)?;
5002        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5003        let table_name = self.parse_object_name(false)?;
5004        self.expect_keyword_is(Keyword::USING)?;
5005        let module_name = self.parse_identifier()?;
5006        // SQLite docs note that module "arguments syntax is sufficiently
5007        // general that the arguments can be made to appear as column
5008        // definitions in a traditional CREATE TABLE statement", but
5009        // we don't implement that.
5010        let module_args = self.parse_parenthesized_column_list(Optional, false)?;
5011        Ok(Statement::CreateVirtualTable {
5012            name: table_name,
5013            if_not_exists,
5014            module_name,
5015            module_args,
5016        })
5017    }
5018
5019    pub fn parse_create_schema(&mut self) -> Result<Statement, ParserError> {
5020        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5021
5022        let schema_name = self.parse_schema_name()?;
5023
5024        let default_collate_spec = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
5025            Some(self.parse_expr()?)
5026        } else {
5027            None
5028        };
5029
5030        let with = if self.peek_keyword(Keyword::WITH) {
5031            Some(self.parse_options(Keyword::WITH)?)
5032        } else {
5033            None
5034        };
5035
5036        let options = if self.peek_keyword(Keyword::OPTIONS) {
5037            Some(self.parse_options(Keyword::OPTIONS)?)
5038        } else {
5039            None
5040        };
5041
5042        let clone = if self.parse_keyword(Keyword::CLONE) {
5043            Some(self.parse_object_name(false)?)
5044        } else {
5045            None
5046        };
5047
5048        Ok(Statement::CreateSchema {
5049            schema_name,
5050            if_not_exists,
5051            with,
5052            options,
5053            default_collate_spec,
5054            clone,
5055        })
5056    }
5057
5058    fn parse_schema_name(&mut self) -> Result<SchemaName, ParserError> {
5059        if self.parse_keyword(Keyword::AUTHORIZATION) {
5060            Ok(SchemaName::UnnamedAuthorization(self.parse_identifier()?))
5061        } else {
5062            let name = self.parse_object_name(false)?;
5063
5064            if self.parse_keyword(Keyword::AUTHORIZATION) {
5065                Ok(SchemaName::NamedAuthorization(
5066                    name,
5067                    self.parse_identifier()?,
5068                ))
5069            } else {
5070                Ok(SchemaName::Simple(name))
5071            }
5072        }
5073    }
5074
5075    pub fn parse_create_database(&mut self) -> Result<Statement, ParserError> {
5076        let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5077        let db_name = self.parse_object_name(false)?;
5078        let mut location = None;
5079        let mut managed_location = None;
5080        loop {
5081            match self.parse_one_of_keywords(&[Keyword::LOCATION, Keyword::MANAGEDLOCATION]) {
5082                Some(Keyword::LOCATION) => location = Some(self.parse_literal_string()?),
5083                Some(Keyword::MANAGEDLOCATION) => {
5084                    managed_location = Some(self.parse_literal_string()?)
5085                }
5086                _ => break,
5087            }
5088        }
5089        let clone = if self.parse_keyword(Keyword::CLONE) {
5090            Some(self.parse_object_name(false)?)
5091        } else {
5092            None
5093        };
5094
5095        Ok(Statement::CreateDatabase {
5096            db_name,
5097            if_not_exists: ine,
5098            location,
5099            managed_location,
5100            or_replace: false,
5101            transient: false,
5102            clone,
5103            data_retention_time_in_days: None,
5104            max_data_extension_time_in_days: None,
5105            external_volume: None,
5106            catalog: None,
5107            replace_invalid_characters: None,
5108            default_ddl_collation: None,
5109            storage_serialization_policy: None,
5110            comment: None,
5111            catalog_sync: None,
5112            catalog_sync_namespace_mode: None,
5113            catalog_sync_namespace_flatten_delimiter: None,
5114            with_tags: None,
5115            with_contacts: None,
5116        })
5117    }
5118
5119    pub fn parse_optional_create_function_using(
5120        &mut self,
5121    ) -> Result<Option<CreateFunctionUsing>, ParserError> {
5122        if !self.parse_keyword(Keyword::USING) {
5123            return Ok(None);
5124        };
5125        let keyword =
5126            self.expect_one_of_keywords(&[Keyword::JAR, Keyword::FILE, Keyword::ARCHIVE])?;
5127
5128        let uri = self.parse_literal_string()?;
5129
5130        match keyword {
5131            Keyword::JAR => Ok(Some(CreateFunctionUsing::Jar(uri))),
5132            Keyword::FILE => Ok(Some(CreateFunctionUsing::File(uri))),
5133            Keyword::ARCHIVE => Ok(Some(CreateFunctionUsing::Archive(uri))),
5134            _ => self.expected(
5135                "JAR, FILE or ARCHIVE",
5136                TokenWithSpan::wrap(Token::make_keyword(format!("{keyword:?}").as_str())),
5137            ),
5138        }
5139    }
5140
5141    pub fn parse_create_function(
5142        &mut self,
5143        or_alter: bool,
5144        or_replace: bool,
5145        temporary: bool,
5146    ) -> Result<Statement, ParserError> {
5147        if dialect_of!(self is HiveDialect) {
5148            self.parse_hive_create_function(or_replace, temporary)
5149        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
5150            self.parse_postgres_create_function(or_replace, temporary)
5151        } else if dialect_of!(self is DuckDbDialect) {
5152            self.parse_create_macro(or_replace, temporary)
5153        } else if dialect_of!(self is BigQueryDialect) {
5154            self.parse_bigquery_create_function(or_replace, temporary)
5155        } else if dialect_of!(self is MsSqlDialect) {
5156            self.parse_mssql_create_function(or_alter, or_replace, temporary)
5157        } else {
5158            self.prev_token();
5159            self.expected("an object type after CREATE", self.peek_token())
5160        }
5161    }
5162
5163    /// Parse `CREATE FUNCTION` for [PostgreSQL]
5164    ///
5165    /// [PostgreSQL]: https://www.postgresql.org/docs/15/sql-createfunction.html
5166    fn parse_postgres_create_function(
5167        &mut self,
5168        or_replace: bool,
5169        temporary: bool,
5170    ) -> Result<Statement, ParserError> {
5171        let name = self.parse_object_name(false)?;
5172
5173        self.expect_token(&Token::LParen)?;
5174        let args = if Token::RParen != self.peek_token_ref().token {
5175            self.parse_comma_separated(Parser::parse_function_arg)?
5176        } else {
5177            vec![]
5178        };
5179        self.expect_token(&Token::RParen)?;
5180
5181        let return_type = if self.parse_keyword(Keyword::RETURNS) {
5182            Some(self.parse_data_type()?)
5183        } else {
5184            None
5185        };
5186
5187        #[derive(Default)]
5188        struct Body {
5189            language: Option<Ident>,
5190            behavior: Option<FunctionBehavior>,
5191            function_body: Option<CreateFunctionBody>,
5192            called_on_null: Option<FunctionCalledOnNull>,
5193            parallel: Option<FunctionParallel>,
5194        }
5195        let mut body = Body::default();
5196        loop {
5197            fn ensure_not_set<T>(field: &Option<T>, name: &str) -> Result<(), ParserError> {
5198                if field.is_some() {
5199                    return Err(ParserError::ParserError(format!(
5200                        "{name} specified more than once",
5201                    )));
5202                }
5203                Ok(())
5204            }
5205            if self.parse_keyword(Keyword::AS) {
5206                ensure_not_set(&body.function_body, "AS")?;
5207                body.function_body = Some(self.parse_create_function_body_string()?);
5208            } else if self.parse_keyword(Keyword::LANGUAGE) {
5209                ensure_not_set(&body.language, "LANGUAGE")?;
5210                body.language = Some(self.parse_identifier()?);
5211            } else if self.parse_keyword(Keyword::IMMUTABLE) {
5212                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
5213                body.behavior = Some(FunctionBehavior::Immutable);
5214            } else if self.parse_keyword(Keyword::STABLE) {
5215                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
5216                body.behavior = Some(FunctionBehavior::Stable);
5217            } else if self.parse_keyword(Keyword::VOLATILE) {
5218                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
5219                body.behavior = Some(FunctionBehavior::Volatile);
5220            } else if self.parse_keywords(&[
5221                Keyword::CALLED,
5222                Keyword::ON,
5223                Keyword::NULL,
5224                Keyword::INPUT,
5225            ]) {
5226                ensure_not_set(
5227                    &body.called_on_null,
5228                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
5229                )?;
5230                body.called_on_null = Some(FunctionCalledOnNull::CalledOnNullInput);
5231            } else if self.parse_keywords(&[
5232                Keyword::RETURNS,
5233                Keyword::NULL,
5234                Keyword::ON,
5235                Keyword::NULL,
5236                Keyword::INPUT,
5237            ]) {
5238                ensure_not_set(
5239                    &body.called_on_null,
5240                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
5241                )?;
5242                body.called_on_null = Some(FunctionCalledOnNull::ReturnsNullOnNullInput);
5243            } else if self.parse_keyword(Keyword::STRICT) {
5244                ensure_not_set(
5245                    &body.called_on_null,
5246                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
5247                )?;
5248                body.called_on_null = Some(FunctionCalledOnNull::Strict);
5249            } else if self.parse_keyword(Keyword::PARALLEL) {
5250                ensure_not_set(&body.parallel, "PARALLEL { UNSAFE | RESTRICTED | SAFE }")?;
5251                if self.parse_keyword(Keyword::UNSAFE) {
5252                    body.parallel = Some(FunctionParallel::Unsafe);
5253                } else if self.parse_keyword(Keyword::RESTRICTED) {
5254                    body.parallel = Some(FunctionParallel::Restricted);
5255                } else if self.parse_keyword(Keyword::SAFE) {
5256                    body.parallel = Some(FunctionParallel::Safe);
5257                } else {
5258                    return self.expected("one of UNSAFE | RESTRICTED | SAFE", self.peek_token());
5259                }
5260            } else if self.parse_keyword(Keyword::RETURN) {
5261                ensure_not_set(&body.function_body, "RETURN")?;
5262                body.function_body = Some(CreateFunctionBody::Return(self.parse_expr()?));
5263            } else {
5264                break;
5265            }
5266        }
5267
5268        Ok(Statement::CreateFunction(CreateFunction {
5269            or_alter: false,
5270            or_replace,
5271            temporary,
5272            name,
5273            args: Some(args),
5274            return_type,
5275            behavior: body.behavior,
5276            called_on_null: body.called_on_null,
5277            parallel: body.parallel,
5278            language: body.language,
5279            function_body: body.function_body,
5280            if_not_exists: false,
5281            using: None,
5282            determinism_specifier: None,
5283            options: None,
5284            remote_connection: None,
5285        }))
5286    }
5287
5288    /// Parse `CREATE FUNCTION` for [Hive]
5289    ///
5290    /// [Hive]: https://cwiki.apache.org/confluence/display/hive/languagemanual+ddl#LanguageManualDDL-Create/Drop/ReloadFunction
5291    fn parse_hive_create_function(
5292        &mut self,
5293        or_replace: bool,
5294        temporary: bool,
5295    ) -> Result<Statement, ParserError> {
5296        let name = self.parse_object_name(false)?;
5297        self.expect_keyword_is(Keyword::AS)?;
5298
5299        let body = self.parse_create_function_body_string()?;
5300        let using = self.parse_optional_create_function_using()?;
5301
5302        Ok(Statement::CreateFunction(CreateFunction {
5303            or_alter: false,
5304            or_replace,
5305            temporary,
5306            name,
5307            function_body: Some(body),
5308            using,
5309            if_not_exists: false,
5310            args: None,
5311            return_type: None,
5312            behavior: None,
5313            called_on_null: None,
5314            parallel: None,
5315            language: None,
5316            determinism_specifier: None,
5317            options: None,
5318            remote_connection: None,
5319        }))
5320    }
5321
5322    /// Parse `CREATE FUNCTION` for [BigQuery]
5323    ///
5324    /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_function_statement
5325    fn parse_bigquery_create_function(
5326        &mut self,
5327        or_replace: bool,
5328        temporary: bool,
5329    ) -> Result<Statement, ParserError> {
5330        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5331        let (name, args) = self.parse_create_function_name_and_params()?;
5332
5333        let return_type = if self.parse_keyword(Keyword::RETURNS) {
5334            Some(self.parse_data_type()?)
5335        } else {
5336            None
5337        };
5338
5339        let determinism_specifier = if self.parse_keyword(Keyword::DETERMINISTIC) {
5340            Some(FunctionDeterminismSpecifier::Deterministic)
5341        } else if self.parse_keywords(&[Keyword::NOT, Keyword::DETERMINISTIC]) {
5342            Some(FunctionDeterminismSpecifier::NotDeterministic)
5343        } else {
5344            None
5345        };
5346
5347        let language = if self.parse_keyword(Keyword::LANGUAGE) {
5348            Some(self.parse_identifier()?)
5349        } else {
5350            None
5351        };
5352
5353        let remote_connection =
5354            if self.parse_keywords(&[Keyword::REMOTE, Keyword::WITH, Keyword::CONNECTION]) {
5355                Some(self.parse_object_name(false)?)
5356            } else {
5357                None
5358            };
5359
5360        // `OPTIONS` may come before or after the function body but
5361        // may be specified at most once.
5362        let mut options = self.maybe_parse_options(Keyword::OPTIONS)?;
5363
5364        let function_body = if remote_connection.is_none() {
5365            self.expect_keyword_is(Keyword::AS)?;
5366            let expr = self.parse_expr()?;
5367            if options.is_none() {
5368                options = self.maybe_parse_options(Keyword::OPTIONS)?;
5369                Some(CreateFunctionBody::AsBeforeOptions {
5370                    body: expr,
5371                    link_symbol: None,
5372                })
5373            } else {
5374                Some(CreateFunctionBody::AsAfterOptions(expr))
5375            }
5376        } else {
5377            None
5378        };
5379
5380        Ok(Statement::CreateFunction(CreateFunction {
5381            or_alter: false,
5382            or_replace,
5383            temporary,
5384            if_not_exists,
5385            name,
5386            args: Some(args),
5387            return_type,
5388            function_body,
5389            language,
5390            determinism_specifier,
5391            options,
5392            remote_connection,
5393            using: None,
5394            behavior: None,
5395            called_on_null: None,
5396            parallel: None,
5397        }))
5398    }
5399
5400    /// Parse `CREATE FUNCTION` for [MsSql]
5401    ///
5402    /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/statements/create-function-transact-sql
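    ///
    /// A rough sketch of the forms handled below; scalar and table-valued
    /// functions are distinguished by the `RETURNS` clause and the body:
    ///
    /// ```sql
    /// CREATE [OR ALTER] FUNCTION name(@param data_type [= default], ...)
    ///     RETURNS { data_type | @return_var TABLE (column_definitions) }
    ///     [AS]
    ///     { BEGIN statements END | RETURN (subquery) | RETURN select_statement }
    /// ```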
5403    fn parse_mssql_create_function(
5404        &mut self,
5405        or_alter: bool,
5406        or_replace: bool,
5407        temporary: bool,
5408    ) -> Result<Statement, ParserError> {
5409        let (name, args) = self.parse_create_function_name_and_params()?;
5410
5411        self.expect_keyword(Keyword::RETURNS)?;
5412
5413        let return_table = self.maybe_parse(|p| {
5414            let return_table_name = p.parse_identifier()?;
5415
5416            p.expect_keyword_is(Keyword::TABLE)?;
5417            p.prev_token();
5418
5419            let table_column_defs = match p.parse_data_type()? {
5420                DataType::Table(Some(table_column_defs)) if !table_column_defs.is_empty() => {
5421                    table_column_defs
5422                }
5423                _ => parser_err!(
5424                    "Expected table column definitions after TABLE keyword",
5425                    p.peek_token().span.start
5426                )?,
5427            };
5428
5429            Ok(DataType::NamedTable {
5430                name: ObjectName(vec![ObjectNamePart::Identifier(return_table_name)]),
5431                columns: table_column_defs,
5432            })
5433        })?;
5434
5435        let return_type = if return_table.is_some() {
5436            return_table
5437        } else {
5438            Some(self.parse_data_type()?)
5439        };
5440
5441        let _ = self.parse_keyword(Keyword::AS);
5442
5443        let function_body = if self.peek_keyword(Keyword::BEGIN) {
5444            let begin_token = self.expect_keyword(Keyword::BEGIN)?;
5445            let statements = self.parse_statement_list(&[Keyword::END])?;
5446            let end_token = self.expect_keyword(Keyword::END)?;
5447
5448            Some(CreateFunctionBody::AsBeginEnd(BeginEndStatements {
5449                begin_token: AttachedToken(begin_token),
5450                statements,
5451                end_token: AttachedToken(end_token),
5452            }))
5453        } else if self.parse_keyword(Keyword::RETURN) {
5454            if self.peek_token() == Token::LParen {
5455                Some(CreateFunctionBody::AsReturnExpr(self.parse_expr()?))
5456            } else if self.peek_keyword(Keyword::SELECT) {
5457                let select = self.parse_select()?;
5458                Some(CreateFunctionBody::AsReturnSelect(select))
5459            } else {
5460                parser_err!(
5461                    "Expected a subquery (or bare SELECT statement) after RETURN",
5462                    self.peek_token().span.start
5463                )?
5464            }
5465        } else {
5466            parser_err!("Unparsable function body", self.peek_token().span.start)?
5467        };
5468
5469        Ok(Statement::CreateFunction(CreateFunction {
5470            or_alter,
5471            or_replace,
5472            temporary,
5473            if_not_exists: false,
5474            name,
5475            args: Some(args),
5476            return_type,
5477            function_body,
5478            language: None,
5479            determinism_specifier: None,
5480            options: None,
5481            remote_connection: None,
5482            using: None,
5483            behavior: None,
5484            called_on_null: None,
5485            parallel: None,
5486        }))
5487    }
5488
5489    fn parse_create_function_name_and_params(
5490        &mut self,
5491    ) -> Result<(ObjectName, Vec<OperateFunctionArg>), ParserError> {
5492        let name = self.parse_object_name(false)?;
5493        let parse_function_param =
5494            |parser: &mut Parser| -> Result<OperateFunctionArg, ParserError> {
5495                let name = parser.parse_identifier()?;
5496                let data_type = parser.parse_data_type()?;
5497                let default_expr = if parser.consume_token(&Token::Eq) {
5498                    Some(parser.parse_expr()?)
5499                } else {
5500                    None
5501                };
5502
5503                Ok(OperateFunctionArg {
5504                    mode: None,
5505                    name: Some(name),
5506                    data_type,
5507                    default_expr,
5508                })
5509            };
5510        self.expect_token(&Token::LParen)?;
5511        let args = self.parse_comma_separated0(parse_function_param, Token::RParen)?;
5512        self.expect_token(&Token::RParen)?;
5513        Ok((name, args))
5514    }
5515
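    /// Parse a single function argument as used in `CREATE FUNCTION` /
    /// `CREATE PROCEDURE` argument lists. A sketch of the accepted form,
    /// derived from the parsing below:
    ///
    /// ```sql
    /// [ IN | OUT | INOUT ] [ argname ] argtype [ { DEFAULT | = } default_expr ]
    /// ```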
5516    fn parse_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
5517        let mode = if self.parse_keyword(Keyword::IN) {
5518            Some(ArgMode::In)
5519        } else if self.parse_keyword(Keyword::OUT) {
5520            Some(ArgMode::Out)
5521        } else if self.parse_keyword(Keyword::INOUT) {
5522            Some(ArgMode::InOut)
5523        } else {
5524            None
5525        };
5526
5527        // parse: [ argname ] argtype
5528        let mut name = None;
5529        let mut data_type = self.parse_data_type()?;
5530
5531        // To decide whether the first token is a name or a type, we try to
5532        // parse what follows as a type as well: if that succeeds, the first
5533        // token must have been the argument name rather than its type.
5534        let data_type_idx = self.get_current_index();
5535
5536        // DEFAULT will be parsed as `DataType::Custom`, which is undesirable in this context
5537        fn parse_data_type_no_default(parser: &mut Parser) -> Result<DataType, ParserError> {
5538            if parser.peek_keyword(Keyword::DEFAULT) {
5539                // This dummy error is ignored in `maybe_parse`
5540                parser_err!(
5541                    "The DEFAULT keyword is not a type",
5542                    parser.peek_token().span.start
5543                )
5544            } else {
5545                parser.parse_data_type()
5546            }
5547        }
5548
5549        if let Some(next_data_type) = self.maybe_parse(parse_data_type_no_default)? {
5550            let token = self.token_at(data_type_idx);
5551
5552            // Ensure the token is a `Word` token and not some other special token.
5553            if !matches!(token.token, Token::Word(_)) {
5554                return self.expected("a name or type", token.clone());
5555            }
5556
5557            name = Some(Ident::new(token.to_string()));
5558            data_type = next_data_type;
5559        }
5560
5561        let default_expr = if self.parse_keyword(Keyword::DEFAULT) || self.consume_token(&Token::Eq)
5562        {
5563            Some(self.parse_expr()?)
5564        } else {
5565            None
5566        };
5567        Ok(OperateFunctionArg {
5568            mode,
5569            name,
5570            data_type,
5571            default_expr,
5572        })
5573    }
5574
5575    /// Parse statements of the DropTrigger type such as:
5576    ///
5577    /// ```sql
5578    /// DROP TRIGGER [ IF EXISTS ] name ON table_name [ CASCADE | RESTRICT ]
5579    /// ```
5580    pub fn parse_drop_trigger(&mut self) -> Result<Statement, ParserError> {
5581        if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
5582        {
5583            self.prev_token();
5584            return self.expected("an object type after DROP", self.peek_token());
5585        }
5586        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5587        let trigger_name = self.parse_object_name(false)?;
5588        let table_name = if self.parse_keyword(Keyword::ON) {
5589            Some(self.parse_object_name(false)?)
5590        } else {
5591            None
5592        };
5593        let option = self
5594            .parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT])
5595            .map(|keyword| match keyword {
5596                Keyword::CASCADE => ReferentialAction::Cascade,
5597                Keyword::RESTRICT => ReferentialAction::Restrict,
5598                _ => unreachable!(),
5599            });
5600        Ok(Statement::DropTrigger(DropTrigger {
5601            if_exists,
5602            trigger_name,
5603            table_name,
5604            option,
5605        }))
5606    }
5607
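    /// Parse a `CREATE TRIGGER` statement.
    ///
    /// A rough sketch of the PostgreSQL-style form handled below; dialects that
    /// use a statement body instead of `EXECUTE` fall through to
    /// `parse_conditional_statements`:
    ///
    /// ```sql
    /// CREATE [OR REPLACE] [CONSTRAINT] TRIGGER name
    ///     [{ BEFORE | AFTER | INSTEAD OF }]
    ///     { INSERT | UPDATE [OF column [, ...]] | DELETE | TRUNCATE } [OR ...]
    ///     ON table_name
    ///     [FROM referenced_table_name]
    ///     [REFERENCING { OLD | NEW } TABLE [AS] transition_relation_name [...]]
    ///     [FOR [EACH] { ROW | STATEMENT }]
    ///     [WHEN (condition)]
    ///     EXECUTE { FUNCTION | PROCEDURE } function_name(arguments)
    /// ```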
5608    pub fn parse_create_trigger(
5609        &mut self,
5610        temporary: bool,
5611        or_alter: bool,
5612        or_replace: bool,
5613        is_constraint: bool,
5614    ) -> Result<Statement, ParserError> {
5615        if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
5616        {
5617            self.prev_token();
5618            return self.expected("an object type after CREATE", self.peek_token());
5619        }
5620
5621        let name = self.parse_object_name(false)?;
5622        let period = self.maybe_parse(|parser| parser.parse_trigger_period())?;
5623
5624        let events = self.parse_keyword_separated(Keyword::OR, Parser::parse_trigger_event)?;
5625        self.expect_keyword_is(Keyword::ON)?;
5626        let table_name = self.parse_object_name(false)?;
5627
5628        let referenced_table_name = if self.parse_keyword(Keyword::FROM) {
5629            self.parse_object_name(true).ok()
5630        } else {
5631            None
5632        };
5633
5634        let characteristics = self.parse_constraint_characteristics()?;
5635
5636        let mut referencing = vec![];
5637        if self.parse_keyword(Keyword::REFERENCING) {
5638            while let Some(refer) = self.parse_trigger_referencing()? {
5639                referencing.push(refer);
5640            }
5641        }
5642
5643        let trigger_object = if self.parse_keyword(Keyword::FOR) {
5644            let include_each = self.parse_keyword(Keyword::EACH);
5645            let trigger_object =
5646                match self.expect_one_of_keywords(&[Keyword::ROW, Keyword::STATEMENT])? {
5647                    Keyword::ROW => TriggerObject::Row,
5648                    Keyword::STATEMENT => TriggerObject::Statement,
5649                    _ => unreachable!(),
5650                };
5651
5652            Some(if include_each {
5653                TriggerObjectKind::ForEach(trigger_object)
5654            } else {
5655                TriggerObjectKind::For(trigger_object)
5656            })
5657        } else {
5658            let _ = self.parse_keyword(Keyword::FOR);
5659
5660            None
5661        };
5662
5663        let condition = self
5664            .parse_keyword(Keyword::WHEN)
5665            .then(|| self.parse_expr())
5666            .transpose()?;
5667
5668        let mut exec_body = None;
5669        let mut statements = None;
5670        if self.parse_keyword(Keyword::EXECUTE) {
5671            exec_body = Some(self.parse_trigger_exec_body()?);
5672        } else {
5673            statements = Some(self.parse_conditional_statements(&[Keyword::END])?);
5674        }
5675
5676        Ok(CreateTrigger {
5677            or_alter,
5678            temporary,
5679            or_replace,
5680            is_constraint,
5681            name,
5682            period,
5683            period_before_table: true,
5684            events,
5685            table_name,
5686            referenced_table_name,
5687            referencing,
5688            trigger_object,
5689            condition,
5690            exec_body,
5691            statements_as: false,
5692            statements,
5693            characteristics,
5694        }
5695        .into())
5696    }
5697
5698    pub fn parse_trigger_period(&mut self) -> Result<TriggerPeriod, ParserError> {
5699        Ok(
5700            match self.expect_one_of_keywords(&[
5701                Keyword::FOR,
5702                Keyword::BEFORE,
5703                Keyword::AFTER,
5704                Keyword::INSTEAD,
5705            ])? {
5706                Keyword::FOR => TriggerPeriod::For,
5707                Keyword::BEFORE => TriggerPeriod::Before,
5708                Keyword::AFTER => TriggerPeriod::After,
5709                Keyword::INSTEAD => self
5710                    .expect_keyword_is(Keyword::OF)
5711                    .map(|_| TriggerPeriod::InsteadOf)?,
5712                _ => unreachable!(),
5713            },
5714        )
5715    }
5716
5717    pub fn parse_trigger_event(&mut self) -> Result<TriggerEvent, ParserError> {
5718        Ok(
5719            match self.expect_one_of_keywords(&[
5720                Keyword::INSERT,
5721                Keyword::UPDATE,
5722                Keyword::DELETE,
5723                Keyword::TRUNCATE,
5724            ])? {
5725                Keyword::INSERT => TriggerEvent::Insert,
5726                Keyword::UPDATE => {
5727                    if self.parse_keyword(Keyword::OF) {
5728                        let cols = self.parse_comma_separated(Parser::parse_identifier)?;
5729                        TriggerEvent::Update(cols)
5730                    } else {
5731                        TriggerEvent::Update(vec![])
5732                    }
5733                }
5734                Keyword::DELETE => TriggerEvent::Delete,
5735                Keyword::TRUNCATE => TriggerEvent::Truncate,
5736                _ => unreachable!(),
5737            },
5738        )
5739    }
5740
5741    pub fn parse_trigger_referencing(&mut self) -> Result<Option<TriggerReferencing>, ParserError> {
5742        let refer_type = match self.parse_one_of_keywords(&[Keyword::OLD, Keyword::NEW]) {
5743            Some(Keyword::OLD) if self.parse_keyword(Keyword::TABLE) => {
5744                TriggerReferencingType::OldTable
5745            }
5746            Some(Keyword::NEW) if self.parse_keyword(Keyword::TABLE) => {
5747                TriggerReferencingType::NewTable
5748            }
5749            _ => {
5750                return Ok(None);
5751            }
5752        };
5753
5754        let is_as = self.parse_keyword(Keyword::AS);
5755        let transition_relation_name = self.parse_object_name(false)?;
5756        Ok(Some(TriggerReferencing {
5757            refer_type,
5758            is_as,
5759            transition_relation_name,
5760        }))
5761    }
5762
5763    pub fn parse_trigger_exec_body(&mut self) -> Result<TriggerExecBody, ParserError> {
5764        Ok(TriggerExecBody {
5765            exec_type: match self
5766                .expect_one_of_keywords(&[Keyword::FUNCTION, Keyword::PROCEDURE])?
5767            {
5768                Keyword::FUNCTION => TriggerExecBodyType::Function,
5769                Keyword::PROCEDURE => TriggerExecBodyType::Procedure,
5770                _ => unreachable!(),
5771            },
5772            func_desc: self.parse_function_desc()?,
5773        })
5774    }
5775
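    /// Parse a `CREATE MACRO` statement (DuckDB).
    ///
    /// A sketch of the shape handled below:
    ///
    /// ```sql
    /// CREATE [OR REPLACE] [TEMPORARY] MACRO name([arg [:= default], ...]) AS
    ///     { TABLE query | expr }
    /// ```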
5776    pub fn parse_create_macro(
5777        &mut self,
5778        or_replace: bool,
5779        temporary: bool,
5780    ) -> Result<Statement, ParserError> {
5781        if dialect_of!(self is DuckDbDialect | GenericDialect) {
5782            let name = self.parse_object_name(false)?;
5783            self.expect_token(&Token::LParen)?;
5784            let args = if self.consume_token(&Token::RParen) {
5785                self.prev_token();
5786                None
5787            } else {
5788                Some(self.parse_comma_separated(Parser::parse_macro_arg)?)
5789            };
5790
5791            self.expect_token(&Token::RParen)?;
5792            self.expect_keyword_is(Keyword::AS)?;
5793
5794            Ok(Statement::CreateMacro {
5795                or_replace,
5796                temporary,
5797                name,
5798                args,
5799                definition: if self.parse_keyword(Keyword::TABLE) {
5800                    MacroDefinition::Table(self.parse_query()?)
5801                } else {
5802                    MacroDefinition::Expr(self.parse_expr()?)
5803                },
5804            })
5805        } else {
5806            self.prev_token();
5807            self.expected("an object type after CREATE", self.peek_token())
5808        }
5809    }
5810
5811    fn parse_macro_arg(&mut self) -> Result<MacroArg, ParserError> {
5812        let name = self.parse_identifier()?;
5813
5814        let default_expr =
5815            if self.consume_token(&Token::Assignment) || self.consume_token(&Token::RArrow) {
5816                Some(self.parse_expr()?)
5817            } else {
5818                None
5819            };
5820        Ok(MacroArg { name, default_expr })
5821    }
5822
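    /// Parse a Hive-style `CREATE EXTERNAL TABLE` statement.
    ///
    /// A rough sketch of the shape handled below (the distribution, row format
    /// and storage clauses are parsed by `parse_hive_distribution` and
    /// `parse_hive_formats`):
    ///
    /// ```sql
    /// CREATE EXTERNAL TABLE [IF NOT EXISTS] name (columns)
    ///     [hive_distribution_clauses] [hive_format_clauses]
    ///     [TBLPROPERTIES (...)]
    /// ```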
5823    pub fn parse_create_external_table(
5824        &mut self,
5825        or_replace: bool,
5826    ) -> Result<Statement, ParserError> {
5827        self.expect_keyword_is(Keyword::TABLE)?;
5828        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5829        let table_name = self.parse_object_name(false)?;
5830        let (columns, constraints) = self.parse_columns()?;
5831
5832        let hive_distribution = self.parse_hive_distribution()?;
5833        let hive_formats = self.parse_hive_formats()?;
5834
5835        let file_format = if let Some(ref hf) = hive_formats {
5836            if let Some(ref ff) = hf.storage {
5837                match ff {
5838                    HiveIOFormat::FileFormat { format } => Some(*format),
5839                    _ => None,
5840                }
5841            } else {
5842                None
5843            }
5844        } else {
5845            None
5846        };
5847        let location = hive_formats.as_ref().and_then(|hf| hf.location.clone());
5848        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
5849        let table_options = if !table_properties.is_empty() {
5850            CreateTableOptions::TableProperties(table_properties)
5851        } else {
5852            CreateTableOptions::None
5853        };
5854        Ok(CreateTableBuilder::new(table_name)
5855            .columns(columns)
5856            .constraints(constraints)
5857            .hive_distribution(hive_distribution)
5858            .hive_formats(hive_formats)
5859            .table_options(table_options)
5860            .or_replace(or_replace)
5861            .if_not_exists(if_not_exists)
5862            .external(true)
5863            .file_format(file_format)
5864            .location(location)
5865            .build())
5866    }
5867
5868    pub fn parse_file_format(&mut self) -> Result<FileFormat, ParserError> {
5869        let next_token = self.next_token();
5870        match &next_token.token {
5871            Token::Word(w) => match w.keyword {
5872                Keyword::AVRO => Ok(FileFormat::AVRO),
5873                Keyword::JSONFILE => Ok(FileFormat::JSONFILE),
5874                Keyword::ORC => Ok(FileFormat::ORC),
5875                Keyword::PARQUET => Ok(FileFormat::PARQUET),
5876                Keyword::RCFILE => Ok(FileFormat::RCFILE),
5877                Keyword::SEQUENCEFILE => Ok(FileFormat::SEQUENCEFILE),
5878                Keyword::TEXTFILE => Ok(FileFormat::TEXTFILE),
5879                _ => self.expected("fileformat", next_token),
5880            },
5881            _ => self.expected("fileformat", next_token),
5882        }
5883    }
5884
5885    fn parse_analyze_format_kind(&mut self) -> Result<AnalyzeFormatKind, ParserError> {
5886        if self.consume_token(&Token::Eq) {
5887            Ok(AnalyzeFormatKind::Assignment(self.parse_analyze_format()?))
5888        } else {
5889            Ok(AnalyzeFormatKind::Keyword(self.parse_analyze_format()?))
5890        }
5891    }
5892
5893    pub fn parse_analyze_format(&mut self) -> Result<AnalyzeFormat, ParserError> {
5894        let next_token = self.next_token();
5895        match &next_token.token {
5896            Token::Word(w) => match w.keyword {
5897                Keyword::TEXT => Ok(AnalyzeFormat::TEXT),
5898                Keyword::GRAPHVIZ => Ok(AnalyzeFormat::GRAPHVIZ),
5899                Keyword::JSON => Ok(AnalyzeFormat::JSON),
5900                _ => self.expected("TEXT, GRAPHVIZ or JSON", next_token),
5901            },
5902            _ => self.expected("TEXT, GRAPHVIZ or JSON", next_token),
5903        }
5904    }
5905
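    /// Parse a `CREATE VIEW` statement.
    ///
    /// A rough sketch of the clause order handled below; support for the
    /// individual clauses is dialect-dependent:
    ///
    /// ```sql
    /// CREATE [SECURE] [MATERIALIZED] VIEW [IF NOT EXISTS] name [(columns)]
    ///     [WITH (options)] [CLUSTER BY (columns)] [OPTIONS (...)]
    ///     [TO table_name] [COMMENT = '...']
    ///     AS query
    ///     [WITH NO SCHEMA BINDING]
    /// ```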
5906    pub fn parse_create_view(
5907        &mut self,
5908        or_alter: bool,
5909        or_replace: bool,
5910        temporary: bool,
5911        create_view_params: Option<CreateViewParams>,
5912    ) -> Result<Statement, ParserError> {
5913        let secure = self.parse_keyword(Keyword::SECURE);
5914        let materialized = self.parse_keyword(Keyword::MATERIALIZED);
5915        self.expect_keyword_is(Keyword::VIEW)?;
5916        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
5917        // Try to parse IF NOT EXISTS either before or after the view name.
5918        // The name-before-IF NOT EXISTS order is accepted by Snowflake but undocumented.
5919        let if_not_exists_first =
5920            self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5921        let name = self.parse_object_name(allow_unquoted_hyphen)?;
5922        let name_before_not_exists = !if_not_exists_first
5923            && self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5924        let if_not_exists = if_not_exists_first || name_before_not_exists;
5925        // `OR ALTER` right after `CREATE` is parsed by the caller and passed in via `or_alter`.
5926        // ANSI SQL and Postgres support RECURSIVE here, but we don't support it yet.
5927        let columns = self.parse_view_columns()?;
5928        let mut options = CreateTableOptions::None;
5929        let with_options = self.parse_options(Keyword::WITH)?;
5930        if !with_options.is_empty() {
5931            options = CreateTableOptions::With(with_options);
5932        }
5933
5934        let cluster_by = if self.parse_keyword(Keyword::CLUSTER) {
5935            self.expect_keyword_is(Keyword::BY)?;
5936            self.parse_parenthesized_column_list(Optional, false)?
5937        } else {
5938            vec![]
5939        };
5940
5941        if dialect_of!(self is BigQueryDialect | GenericDialect) {
5942            if let Some(opts) = self.maybe_parse_options(Keyword::OPTIONS)? {
5943                if !opts.is_empty() {
5944                    options = CreateTableOptions::Options(opts);
5945                }
5946            };
5947        }
5948
5949        let to = if dialect_of!(self is ClickHouseDialect | GenericDialect)
5950            && self.parse_keyword(Keyword::TO)
5951        {
5952            Some(self.parse_object_name(false)?)
5953        } else {
5954            None
5955        };
5956
5957        let comment = if dialect_of!(self is SnowflakeDialect | GenericDialect)
5958            && self.parse_keyword(Keyword::COMMENT)
5959        {
5960            self.expect_token(&Token::Eq)?;
5961            Some(self.parse_comment_value()?)
5962        } else {
5963            None
5964        };
5965
5966        self.expect_keyword_is(Keyword::AS)?;
5967        let query = self.parse_query()?;
5968        // Optional `WITH [ CASCADED | LOCAL ] CHECK OPTION` is widely supported here.
5969
5970        let with_no_schema_binding = dialect_of!(self is RedshiftSqlDialect | GenericDialect)
5971            && self.parse_keywords(&[
5972                Keyword::WITH,
5973                Keyword::NO,
5974                Keyword::SCHEMA,
5975                Keyword::BINDING,
5976            ]);
5977
5978        Ok(CreateView {
5979            or_alter,
5980            name,
5981            columns,
5982            query,
5983            materialized,
5984            secure,
5985            or_replace,
5986            options,
5987            cluster_by,
5988            comment,
5989            with_no_schema_binding,
5990            if_not_exists,
5991            temporary,
5992            to,
5993            params: create_view_params,
5994            name_before_not_exists,
5995        }
5996        .into())
5997    }
5998
5999    /// Parse optional parameters for the `CREATE VIEW` statement supported by [MySQL].
6000    ///
6001    /// [MySQL]: https://dev.mysql.com/doc/refman/9.1/en/create-view.html
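    ///
    /// A sketch of the parameters recognized below (any subset may appear, in
    /// this order):
    ///
    /// ```sql
    /// [ALGORITHM = {UNDEFINED | MERGE | TEMPTABLE}]
    /// [DEFINER = user]
    /// [SQL SECURITY {DEFINER | INVOKER}]
    /// ```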
6002    fn parse_create_view_params(&mut self) -> Result<Option<CreateViewParams>, ParserError> {
6003        let algorithm = if self.parse_keyword(Keyword::ALGORITHM) {
6004            self.expect_token(&Token::Eq)?;
6005            Some(
6006                match self.expect_one_of_keywords(&[
6007                    Keyword::UNDEFINED,
6008                    Keyword::MERGE,
6009                    Keyword::TEMPTABLE,
6010                ])? {
6011                    Keyword::UNDEFINED => CreateViewAlgorithm::Undefined,
6012                    Keyword::MERGE => CreateViewAlgorithm::Merge,
6013                    Keyword::TEMPTABLE => CreateViewAlgorithm::TempTable,
6014                    _ => {
6015                        self.prev_token();
6016                        let found = self.next_token();
6017                        return self
6018                            .expected("UNDEFINED or MERGE or TEMPTABLE after ALGORITHM =", found);
6019                    }
6020                },
6021            )
6022        } else {
6023            None
6024        };
6025        let definer = if self.parse_keyword(Keyword::DEFINER) {
6026            self.expect_token(&Token::Eq)?;
6027            Some(self.parse_grantee_name()?)
6028        } else {
6029            None
6030        };
6031        let security = if self.parse_keywords(&[Keyword::SQL, Keyword::SECURITY]) {
6032            Some(
6033                match self.expect_one_of_keywords(&[Keyword::DEFINER, Keyword::INVOKER])? {
6034                    Keyword::DEFINER => CreateViewSecurity::Definer,
6035                    Keyword::INVOKER => CreateViewSecurity::Invoker,
6036                    _ => {
6037                        self.prev_token();
6038                        let found = self.next_token();
6039                        return self.expected("DEFINER or INVOKER after SQL SECURITY", found);
6040                    }
6041                },
6042            )
6043        } else {
6044            None
6045        };
6046        if algorithm.is_some() || definer.is_some() || security.is_some() {
6047            Ok(Some(CreateViewParams {
6048                algorithm,
6049                definer,
6050                security,
6051            }))
6052        } else {
6053            Ok(None)
6054        }
6055    }
6056
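    /// Parse a `CREATE ROLE` statement.
    ///
    /// A rough sketch of the PostgreSQL-style options recognized below (MsSql
    /// only accepts `AUTHORIZATION owner_name`); each option may appear at
    /// most once:
    ///
    /// ```sql
    /// CREATE ROLE [IF NOT EXISTS] name [, ...] [WITH]
    ///     [LOGIN | NOLOGIN] [INHERIT | NOINHERIT] [BYPASSRLS | NOBYPASSRLS]
    ///     [CREATEDB | NOCREATEDB] [CREATEROLE | NOCREATEROLE]
    ///     [SUPERUSER | NOSUPERUSER] [REPLICATION | NOREPLICATION]
    ///     [PASSWORD {'password' | NULL}] [CONNECTION LIMIT n] [VALID UNTIL 'timestamp']
    ///     [IN ROLE name [, ...]] [IN GROUP name [, ...]]
    ///     [ROLE name [, ...]] [USER name [, ...]] [ADMIN name [, ...]]
    /// ```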
6057    pub fn parse_create_role(&mut self) -> Result<Statement, ParserError> {
6058        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6059        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
6060
6061        let _ = self.parse_keyword(Keyword::WITH); // [ WITH ]
6062
6063        let optional_keywords = if dialect_of!(self is MsSqlDialect) {
6064            vec![Keyword::AUTHORIZATION]
6065        } else if dialect_of!(self is PostgreSqlDialect) {
6066            vec![
6067                Keyword::LOGIN,
6068                Keyword::NOLOGIN,
6069                Keyword::INHERIT,
6070                Keyword::NOINHERIT,
6071                Keyword::BYPASSRLS,
6072                Keyword::NOBYPASSRLS,
6073                Keyword::PASSWORD,
6074                Keyword::CREATEDB,
6075                Keyword::NOCREATEDB,
6076                Keyword::CREATEROLE,
6077                Keyword::NOCREATEROLE,
6078                Keyword::SUPERUSER,
6079                Keyword::NOSUPERUSER,
6080                Keyword::REPLICATION,
6081                Keyword::NOREPLICATION,
6082                Keyword::CONNECTION,
6083                Keyword::VALID,
6084                Keyword::IN,
6085                Keyword::ROLE,
6086                Keyword::ADMIN,
6087                Keyword::USER,
6088            ]
6089        } else {
6090            vec![]
6091        };
6092
6093        // MSSQL
6094        let mut authorization_owner = None;
6095        // Postgres
6096        let mut login = None;
6097        let mut inherit = None;
6098        let mut bypassrls = None;
6099        let mut password = None;
6100        let mut create_db = None;
6101        let mut create_role = None;
6102        let mut superuser = None;
6103        let mut replication = None;
6104        let mut connection_limit = None;
6105        let mut valid_until = None;
6106        let mut in_role = vec![];
6107        let mut in_group = vec![];
6108        let mut role = vec![];
6109        let mut user = vec![];
6110        let mut admin = vec![];
6111
6112        while let Some(keyword) = self.parse_one_of_keywords(&optional_keywords) {
6113            let loc = self
6114                .tokens
6115                .get(self.index - 1)
6116                .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
6117            match keyword {
6118                Keyword::AUTHORIZATION => {
6119                    if authorization_owner.is_some() {
6120                        parser_err!("Found multiple AUTHORIZATION", loc)
6121                    } else {
6122                        authorization_owner = Some(self.parse_object_name(false)?);
6123                        Ok(())
6124                    }
6125                }
6126                Keyword::LOGIN | Keyword::NOLOGIN => {
6127                    if login.is_some() {
6128                        parser_err!("Found multiple LOGIN or NOLOGIN", loc)
6129                    } else {
6130                        login = Some(keyword == Keyword::LOGIN);
6131                        Ok(())
6132                    }
6133                }
6134                Keyword::INHERIT | Keyword::NOINHERIT => {
6135                    if inherit.is_some() {
6136                        parser_err!("Found multiple INHERIT or NOINHERIT", loc)
6137                    } else {
6138                        inherit = Some(keyword == Keyword::INHERIT);
6139                        Ok(())
6140                    }
6141                }
6142                Keyword::BYPASSRLS | Keyword::NOBYPASSRLS => {
6143                    if bypassrls.is_some() {
6144                        parser_err!("Found multiple BYPASSRLS or NOBYPASSRLS", loc)
6145                    } else {
6146                        bypassrls = Some(keyword == Keyword::BYPASSRLS);
6147                        Ok(())
6148                    }
6149                }
6150                Keyword::CREATEDB | Keyword::NOCREATEDB => {
6151                    if create_db.is_some() {
6152                        parser_err!("Found multiple CREATEDB or NOCREATEDB", loc)
6153                    } else {
6154                        create_db = Some(keyword == Keyword::CREATEDB);
6155                        Ok(())
6156                    }
6157                }
6158                Keyword::CREATEROLE | Keyword::NOCREATEROLE => {
6159                    if create_role.is_some() {
6160                        parser_err!("Found multiple CREATEROLE or NOCREATEROLE", loc)
6161                    } else {
6162                        create_role = Some(keyword == Keyword::CREATEROLE);
6163                        Ok(())
6164                    }
6165                }
6166                Keyword::SUPERUSER | Keyword::NOSUPERUSER => {
6167                    if superuser.is_some() {
6168                        parser_err!("Found multiple SUPERUSER or NOSUPERUSER", loc)
6169                    } else {
6170                        superuser = Some(keyword == Keyword::SUPERUSER);
6171                        Ok(())
6172                    }
6173                }
6174                Keyword::REPLICATION | Keyword::NOREPLICATION => {
6175                    if replication.is_some() {
6176                        parser_err!("Found multiple REPLICATION or NOREPLICATION", loc)
6177                    } else {
6178                        replication = Some(keyword == Keyword::REPLICATION);
6179                        Ok(())
6180                    }
6181                }
6182                Keyword::PASSWORD => {
6183                    if password.is_some() {
6184                        parser_err!("Found multiple PASSWORD", loc)
6185                    } else {
6186                        password = if self.parse_keyword(Keyword::NULL) {
6187                            Some(Password::NullPassword)
6188                        } else {
6189                            Some(Password::Password(Expr::Value(self.parse_value()?)))
6190                        };
6191                        Ok(())
6192                    }
6193                }
6194                Keyword::CONNECTION => {
6195                    self.expect_keyword_is(Keyword::LIMIT)?;
6196                    if connection_limit.is_some() {
6197                        parser_err!("Found multiple CONNECTION LIMIT", loc)
6198                    } else {
6199                        connection_limit = Some(Expr::Value(self.parse_number_value()?));
6200                        Ok(())
6201                    }
6202                }
6203                Keyword::VALID => {
6204                    self.expect_keyword_is(Keyword::UNTIL)?;
6205                    if valid_until.is_some() {
6206                        parser_err!("Found multiple VALID UNTIL", loc)
6207                    } else {
6208                        valid_until = Some(Expr::Value(self.parse_value()?));
6209                        Ok(())
6210                    }
6211                }
6212                Keyword::IN => {
6213                    if self.parse_keyword(Keyword::ROLE) {
6214                        if !in_role.is_empty() {
6215                            parser_err!("Found multiple IN ROLE", loc)
6216                        } else {
6217                            in_role = self.parse_comma_separated(|p| p.parse_identifier())?;
6218                            Ok(())
6219                        }
6220                    } else if self.parse_keyword(Keyword::GROUP) {
6221                        if !in_group.is_empty() {
6222                            parser_err!("Found multiple IN GROUP", loc)
6223                        } else {
6224                            in_group = self.parse_comma_separated(|p| p.parse_identifier())?;
6225                            Ok(())
6226                        }
6227                    } else {
6228                        self.expected("ROLE or GROUP after IN", self.peek_token())
6229                    }
6230                }
6231                Keyword::ROLE => {
6232                    if !role.is_empty() {
6233                        parser_err!("Found multiple ROLE", loc)
6234                    } else {
6235                        role = self.parse_comma_separated(|p| p.parse_identifier())?;
6236                        Ok(())
6237                    }
6238                }
6239                Keyword::USER => {
6240                    if !user.is_empty() {
6241                        parser_err!("Found multiple USER", loc)
6242                    } else {
6243                        user = self.parse_comma_separated(|p| p.parse_identifier())?;
6244                        Ok(())
6245                    }
6246                }
6247                Keyword::ADMIN => {
6248                    if !admin.is_empty() {
6249                        parser_err!("Found multiple ADMIN", loc)
6250                    } else {
6251                        admin = self.parse_comma_separated(|p| p.parse_identifier())?;
6252                        Ok(())
6253                    }
6254                }
6255                _ => break,
6256            }?
6257        }
6258
6259        Ok(CreateRole {
6260            names,
6261            if_not_exists,
6262            login,
6263            inherit,
6264            bypassrls,
6265            password,
6266            create_db,
6267            create_role,
6268            replication,
6269            superuser,
6270            connection_limit,
6271            valid_until,
6272            in_role,
6273            in_group,
6274            role,
6275            user,
6276            admin,
6277            authorization_owner,
6278        }
6279        .into())
6280    }
6281
6282    pub fn parse_owner(&mut self) -> Result<Owner, ParserError> {
6283        let owner = match self.parse_one_of_keywords(&[Keyword::CURRENT_USER, Keyword::CURRENT_ROLE, Keyword::SESSION_USER]) {
6284            Some(Keyword::CURRENT_USER) => Owner::CurrentUser,
6285            Some(Keyword::CURRENT_ROLE) => Owner::CurrentRole,
6286            Some(Keyword::SESSION_USER) => Owner::SessionUser,
6287            Some(_) => unreachable!(),
6288            None => {
6289                match self.parse_identifier() {
6290                    Ok(ident) => Owner::Ident(ident),
6291                    Err(e) => {
6292                        return Err(ParserError::ParserError(format!("Expected: CURRENT_USER, CURRENT_ROLE, SESSION_USER or identifier after OWNER TO. {e}")))
6293                    }
6294                }
6295            }
6296        };
6297        Ok(owner)
6298    }
6299
6300    /// Parses a [Statement::CreateDomain] statement.
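    ///
    /// A sketch of the shape handled below:
    ///
    /// ```sql
    /// CREATE DOMAIN name AS data_type
    ///     [COLLATE collation] [DEFAULT expr] [constraint [...]]
    /// ```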
6301    fn parse_create_domain(&mut self) -> Result<Statement, ParserError> {
6302        let name = self.parse_object_name(false)?;
6303        self.expect_keyword_is(Keyword::AS)?;
6304        let data_type = self.parse_data_type()?;
6305        let collation = if self.parse_keyword(Keyword::COLLATE) {
6306            Some(self.parse_identifier()?)
6307        } else {
6308            None
6309        };
6310        let default = if self.parse_keyword(Keyword::DEFAULT) {
6311            Some(self.parse_expr()?)
6312        } else {
6313            None
6314        };
6315        let mut constraints = Vec::new();
6316        while let Some(constraint) = self.parse_optional_table_constraint()? {
6317            constraints.push(constraint);
6318        }
6319
6320        Ok(Statement::CreateDomain(CreateDomain {
6321            name,
6322            data_type,
6323            collation,
6324            default,
6325            constraints,
6326        }))
6327    }
6328
6329    /// ```sql
6330    ///     CREATE POLICY name ON table_name [ AS { PERMISSIVE | RESTRICTIVE } ]
6331    ///     [ FOR { ALL | SELECT | INSERT | UPDATE | DELETE } ]
6332    ///     [ TO { role_name | PUBLIC | CURRENT_USER | CURRENT_ROLE | SESSION_USER } [, ...] ]
6333    ///     [ USING ( using_expression ) ]
6334    ///     [ WITH CHECK ( with_check_expression ) ]
6335    /// ```
6336    ///
6337    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createpolicy.html)
6338    pub fn parse_create_policy(&mut self) -> Result<Statement, ParserError> {
6339        let name = self.parse_identifier()?;
6340        self.expect_keyword_is(Keyword::ON)?;
6341        let table_name = self.parse_object_name(false)?;
6342
6343        let policy_type = if self.parse_keyword(Keyword::AS) {
6344            let keyword =
6345                self.expect_one_of_keywords(&[Keyword::PERMISSIVE, Keyword::RESTRICTIVE])?;
6346            Some(match keyword {
6347                Keyword::PERMISSIVE => CreatePolicyType::Permissive,
6348                Keyword::RESTRICTIVE => CreatePolicyType::Restrictive,
6349                _ => unreachable!(),
6350            })
6351        } else {
6352            None
6353        };
6354
6355        let command = if self.parse_keyword(Keyword::FOR) {
6356            let keyword = self.expect_one_of_keywords(&[
6357                Keyword::ALL,
6358                Keyword::SELECT,
6359                Keyword::INSERT,
6360                Keyword::UPDATE,
6361                Keyword::DELETE,
6362            ])?;
6363            Some(match keyword {
6364                Keyword::ALL => CreatePolicyCommand::All,
6365                Keyword::SELECT => CreatePolicyCommand::Select,
6366                Keyword::INSERT => CreatePolicyCommand::Insert,
6367                Keyword::UPDATE => CreatePolicyCommand::Update,
6368                Keyword::DELETE => CreatePolicyCommand::Delete,
6369                _ => unreachable!(),
6370            })
6371        } else {
6372            None
6373        };
6374
6375        let to = if self.parse_keyword(Keyword::TO) {
6376            Some(self.parse_comma_separated(|p| p.parse_owner())?)
6377        } else {
6378            None
6379        };
6380
6381        let using = if self.parse_keyword(Keyword::USING) {
6382            self.expect_token(&Token::LParen)?;
6383            let expr = self.parse_expr()?;
6384            self.expect_token(&Token::RParen)?;
6385            Some(expr)
6386        } else {
6387            None
6388        };
6389
6390        let with_check = if self.parse_keywords(&[Keyword::WITH, Keyword::CHECK]) {
6391            self.expect_token(&Token::LParen)?;
6392            let expr = self.parse_expr()?;
6393            self.expect_token(&Token::RParen)?;
6394            Some(expr)
6395        } else {
6396            None
6397        };
6398
6399        Ok(CreatePolicy {
6400            name,
6401            table_name,
6402            policy_type,
6403            command,
6404            to,
6405            using,
6406            with_check,
6407        })
6408    }
6409
6410    /// ```sql
6411    /// CREATE CONNECTOR [IF NOT EXISTS] connector_name
6412    /// [TYPE datasource_type]
6413    /// [URL datasource_url]
6414    /// [COMMENT connector_comment]
6415    /// [WITH DCPROPERTIES(property_name=property_value, ...)]
6416    /// ```
6417    ///
6418    /// [Hive Documentation](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362034#LanguageManualDDL-CreateDataConnectorCreateConnector)
6419    pub fn parse_create_connector(&mut self) -> Result<Statement, ParserError> {
6420        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6421        let name = self.parse_identifier()?;
6422
6423        let connector_type = if self.parse_keyword(Keyword::TYPE) {
6424            Some(self.parse_literal_string()?)
6425        } else {
6426            None
6427        };
6428
6429        let url = if self.parse_keyword(Keyword::URL) {
6430            Some(self.parse_literal_string()?)
6431        } else {
6432            None
6433        };
6434
6435        let comment = self.parse_optional_inline_comment()?;
6436
6437        let with_dcproperties =
6438            match self.parse_options_with_keywords(&[Keyword::WITH, Keyword::DCPROPERTIES])? {
6439                properties if !properties.is_empty() => Some(properties),
6440                _ => None,
6441            };
6442
6443        Ok(Statement::CreateConnector(CreateConnector {
6444            name,
6445            if_not_exists,
6446            connector_type,
6447            url,
6448            comment,
6449            with_dcproperties,
6450        }))
6451    }
6452
6453    /// Parse an operator name, which can contain special characters like +, -, <, >, =
6454    /// that are tokenized as operator tokens rather than identifiers.
6455    /// This is used for PostgreSQL CREATE OPERATOR statements.
6456    ///
6457    /// Examples: `+`, `myschema.+`, `pg_catalog.<=`
6458    fn parse_operator_name(&mut self) -> Result<ObjectName, ParserError> {
6459        let mut parts = vec![];
6460        loop {
6461            parts.push(ObjectNamePart::Identifier(Ident::new(
6462                self.next_token().to_string(),
6463            )));
6464            if !self.consume_token(&Token::Period) {
6465                break;
6466            }
6467        }
6468        Ok(ObjectName(parts))
6469    }
6470
6471    /// Parse a [Statement::CreateOperator]
6472    ///
6473    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createoperator.html)
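    ///
    /// A sketch of the option list handled below; `FUNCTION` (or `PROCEDURE`)
    /// is required and each option may appear at most once:
    ///
    /// ```sql
    /// CREATE OPERATOR name (
    ///     {FUNCTION | PROCEDURE} = function_name
    ///     [, LEFTARG = left_type] [, RIGHTARG = right_type]
    ///     [, COMMUTATOR = com_op] [, NEGATOR = neg_op]
    ///     [, RESTRICT = res_proc] [, JOIN = join_proc]
    ///     [, HASHES] [, MERGES]
    /// )
    /// ```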
6474    pub fn parse_create_operator(&mut self) -> Result<Statement, ParserError> {
6475        let name = self.parse_operator_name()?;
6476        self.expect_token(&Token::LParen)?;
6477
6478        let mut function: Option<ObjectName> = None;
6479        let mut is_procedure = false;
6480        let mut left_arg: Option<DataType> = None;
6481        let mut right_arg: Option<DataType> = None;
6482        let mut commutator: Option<ObjectName> = None;
6483        let mut negator: Option<ObjectName> = None;
6484        let mut restrict: Option<ObjectName> = None;
6485        let mut join: Option<ObjectName> = None;
6486        let mut hashes = false;
6487        let mut merges = false;
6488
6489        loop {
6490            let keyword = self.expect_one_of_keywords(&[
6491                Keyword::FUNCTION,
6492                Keyword::PROCEDURE,
6493                Keyword::LEFTARG,
6494                Keyword::RIGHTARG,
6495                Keyword::COMMUTATOR,
6496                Keyword::NEGATOR,
6497                Keyword::RESTRICT,
6498                Keyword::JOIN,
6499                Keyword::HASHES,
6500                Keyword::MERGES,
6501            ])?;
6502
6503            match keyword {
6504                Keyword::HASHES if !hashes => {
6505                    hashes = true;
6506                }
6507                Keyword::MERGES if !merges => {
6508                    merges = true;
6509                }
6510                Keyword::FUNCTION | Keyword::PROCEDURE if function.is_none() => {
6511                    self.expect_token(&Token::Eq)?;
6512                    function = Some(self.parse_object_name(false)?);
6513                    is_procedure = keyword == Keyword::PROCEDURE;
6514                }
6515                Keyword::LEFTARG if left_arg.is_none() => {
6516                    self.expect_token(&Token::Eq)?;
6517                    left_arg = Some(self.parse_data_type()?);
6518                }
6519                Keyword::RIGHTARG if right_arg.is_none() => {
6520                    self.expect_token(&Token::Eq)?;
6521                    right_arg = Some(self.parse_data_type()?);
6522                }
6523                Keyword::COMMUTATOR if commutator.is_none() => {
6524                    self.expect_token(&Token::Eq)?;
6525                    if self.parse_keyword(Keyword::OPERATOR) {
6526                        self.expect_token(&Token::LParen)?;
6527                        commutator = Some(self.parse_operator_name()?);
6528                        self.expect_token(&Token::RParen)?;
6529                    } else {
6530                        commutator = Some(self.parse_operator_name()?);
6531                    }
6532                }
6533                Keyword::NEGATOR if negator.is_none() => {
6534                    self.expect_token(&Token::Eq)?;
6535                    if self.parse_keyword(Keyword::OPERATOR) {
6536                        self.expect_token(&Token::LParen)?;
6537                        negator = Some(self.parse_operator_name()?);
6538                        self.expect_token(&Token::RParen)?;
6539                    } else {
6540                        negator = Some(self.parse_operator_name()?);
6541                    }
6542                }
6543                Keyword::RESTRICT if restrict.is_none() => {
6544                    self.expect_token(&Token::Eq)?;
6545                    restrict = Some(self.parse_object_name(false)?);
6546                }
6547                Keyword::JOIN if join.is_none() => {
6548                    self.expect_token(&Token::Eq)?;
6549                    join = Some(self.parse_object_name(false)?);
6550                }
6551                _ => {
6552                    return Err(ParserError::ParserError(format!(
6553                        "Duplicate or unexpected keyword {:?} in CREATE OPERATOR",
6554                        keyword
6555                    )))
6556                }
6557            }
6558
6559            if !self.consume_token(&Token::Comma) {
6560                break;
6561            }
6562        }
6563
6564        // Expect closing parenthesis
6565        self.expect_token(&Token::RParen)?;
6566
6567        // FUNCTION is required
6568        let function = function.ok_or_else(|| {
6569            ParserError::ParserError("CREATE OPERATOR requires FUNCTION parameter".to_string())
6570        })?;
6571
6572        Ok(Statement::CreateOperator(CreateOperator {
6573            name,
6574            function,
6575            is_procedure,
6576            left_arg,
6577            right_arg,
6578            commutator,
6579            negator,
6580            restrict,
6581            join,
6582            hashes,
6583            merges,
6584        }))
6585    }
6586
6587    /// Parse a [Statement::CreateOperatorFamily]
6588    ///
6589    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createopfamily.html)
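    ///
    /// A sketch of the shape handled below:
    ///
    /// ```sql
    /// CREATE OPERATOR FAMILY name USING index_method
    /// ```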
6590    pub fn parse_create_operator_family(&mut self) -> Result<Statement, ParserError> {
6591        let name = self.parse_object_name(false)?;
6592        self.expect_keyword(Keyword::USING)?;
6593        let using = self.parse_identifier()?;
6594
6595        Ok(Statement::CreateOperatorFamily(CreateOperatorFamily {
6596            name,
6597            using,
6598        }))
6599    }
6600
6601    /// Parse a [Statement::CreateOperatorClass]
6602    ///
6603    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createopclass.html)
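    ///
    /// A rough sketch of the shape handled below:
    ///
    /// ```sql
    /// CREATE OPERATOR CLASS name [DEFAULT] FOR TYPE data_type
    ///     USING index_method [FAMILY family_name] AS
    ///     { OPERATOR strategy_number operator_name [(op_type, op_type)]
    ///           [FOR SEARCH | FOR ORDER BY sort_family_name]
    ///     | FUNCTION support_number [(op_type [, ...])]
    ///           function_name (argument_type [, ...])
    ///     | STORAGE storage_type
    ///     } [, ...]
    /// ```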
6604    pub fn parse_create_operator_class(&mut self) -> Result<Statement, ParserError> {
6605        let name = self.parse_object_name(false)?;
6606        let default = self.parse_keyword(Keyword::DEFAULT);
6607        self.expect_keywords(&[Keyword::FOR, Keyword::TYPE])?;
6608        let for_type = self.parse_data_type()?;
6609        self.expect_keyword(Keyword::USING)?;
6610        let using = self.parse_identifier()?;
6611
6612        let family = if self.parse_keyword(Keyword::FAMILY) {
6613            Some(self.parse_object_name(false)?)
6614        } else {
6615            None
6616        };
6617
6618        self.expect_keyword(Keyword::AS)?;
6619
6620        let mut items = vec![];
6621        loop {
6622            if self.parse_keyword(Keyword::OPERATOR) {
6623                let strategy_number = self.parse_literal_uint()? as u32;
6624                let operator_name = self.parse_operator_name()?;
6625
6626                // Optional operator argument types
6627                let op_types = if self.consume_token(&Token::LParen) {
6628                    let left = self.parse_data_type()?;
6629                    self.expect_token(&Token::Comma)?;
6630                    let right = self.parse_data_type()?;
6631                    self.expect_token(&Token::RParen)?;
6632                    Some(OperatorArgTypes { left, right })
6633                } else {
6634                    None
6635                };
6636
6637                // Optional purpose
6638                let purpose = if self.parse_keyword(Keyword::FOR) {
6639                    if self.parse_keyword(Keyword::SEARCH) {
6640                        Some(OperatorPurpose::ForSearch)
6641                    } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
6642                        let sort_family = self.parse_object_name(false)?;
6643                        Some(OperatorPurpose::ForOrderBy { sort_family })
6644                    } else {
6645                        return self.expected("SEARCH or ORDER BY after FOR", self.peek_token());
6646                    }
6647                } else {
6648                    None
6649                };
6650
6651                items.push(OperatorClassItem::Operator {
6652                    strategy_number,
6653                    operator_name,
6654                    op_types,
6655                    purpose,
6656                });
6657            } else if self.parse_keyword(Keyword::FUNCTION) {
6658                let support_number = self.parse_literal_uint()? as u32;
6659
6660                // Optional operator types
6661                let op_types = if self.consume_token(&Token::LParen) {
6662                    // An empty list `()` yields `Some(vec![])`; otherwise parse a
6663                    // comma-separated list of types before the closing parenthesis.
6664                    let mut types = vec![];
6665                    if self.peek_token() != Token::RParen {
6666                        loop {
6667                            types.push(self.parse_data_type()?);
6668                            if !self.consume_token(&Token::Comma) {
6669                                break;
6670                            }
6671                        }
6672                    }
6673                    self.expect_token(&Token::RParen)?;
6674                    Some(types)
6675                } else {
6676                    None
6677                };
6678
6679                let function_name = self.parse_object_name(false)?;
6680
6681                // Function argument types
6682                let argument_types = if self.consume_token(&Token::LParen) {
6683                    let mut types = vec![];
6684                    loop {
6685                        if self.peek_token() == Token::RParen {
6686                            break;
6687                        }
6688                        types.push(self.parse_data_type()?);
6689                        if !self.consume_token(&Token::Comma) {
6690                            break;
6691                        }
6692                    }
6693                    self.expect_token(&Token::RParen)?;
6694                    types
6695                } else {
6696                    vec![]
6697                };
6698
6699                items.push(OperatorClassItem::Function {
6700                    support_number,
6701                    op_types,
6702                    function_name,
6703                    argument_types,
6704                });
6705            } else if self.parse_keyword(Keyword::STORAGE) {
6706                let storage_type = self.parse_data_type()?;
6707                items.push(OperatorClassItem::Storage { storage_type });
6708            } else {
6709                break;
6710            }
6711
6712            // Check for comma separator
6713            if !self.consume_token(&Token::Comma) {
6714                break;
6715            }
6716        }
6717
6718        Ok(Statement::CreateOperatorClass(CreateOperatorClass {
6719            name,
6720            default,
6721            for_type,
6722            using,
6723            family,
6724            items,
6725        }))
6726    }
6727
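    /// Parse a `DROP` statement.
    ///
    /// A rough sketch of the generic form handled below; `FUNCTION`, `POLICY`,
    /// `TRIGGER`, `PROCEDURE` and several other object types are dispatched to
    /// dedicated parsers instead:
    ///
    /// ```sql
    /// DROP [TEMPORARY] object_type [IF EXISTS] name [, ...]
    ///     [CASCADE | RESTRICT] [PURGE] [ON table_name]
    /// ```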
6728    pub fn parse_drop(&mut self) -> Result<Statement, ParserError> {
6729        // MySQL dialect supports `TEMPORARY`
6730        let temporary = dialect_of!(self is MySqlDialect | GenericDialect | DuckDbDialect)
6731            && self.parse_keyword(Keyword::TEMPORARY);
6732        let persistent = dialect_of!(self is DuckDbDialect)
6733            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
6734
6735        let object_type = if self.parse_keyword(Keyword::TABLE) {
6736            ObjectType::Table
6737        } else if self.parse_keyword(Keyword::VIEW) {
6738            ObjectType::View
6739        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEW]) {
6740            ObjectType::MaterializedView
6741        } else if self.parse_keyword(Keyword::INDEX) {
6742            ObjectType::Index
6743        } else if self.parse_keyword(Keyword::ROLE) {
6744            ObjectType::Role
6745        } else if self.parse_keyword(Keyword::SCHEMA) {
6746            ObjectType::Schema
6747        } else if self.parse_keyword(Keyword::DATABASE) {
6748            ObjectType::Database
6749        } else if self.parse_keyword(Keyword::SEQUENCE) {
6750            ObjectType::Sequence
6751        } else if self.parse_keyword(Keyword::STAGE) {
6752            ObjectType::Stage
6753        } else if self.parse_keyword(Keyword::TYPE) {
6754            ObjectType::Type
6755        } else if self.parse_keyword(Keyword::USER) {
6756            ObjectType::User
6757        } else if self.parse_keyword(Keyword::STREAM) {
6758            ObjectType::Stream
6759        } else if self.parse_keyword(Keyword::FUNCTION) {
6760            return self.parse_drop_function();
6761        } else if self.parse_keyword(Keyword::POLICY) {
6762            return self.parse_drop_policy();
6763        } else if self.parse_keyword(Keyword::CONNECTOR) {
6764            return self.parse_drop_connector();
6765        } else if self.parse_keyword(Keyword::DOMAIN) {
6766            return self.parse_drop_domain();
6767        } else if self.parse_keyword(Keyword::PROCEDURE) {
6768            return self.parse_drop_procedure();
6769        } else if self.parse_keyword(Keyword::SECRET) {
6770            return self.parse_drop_secret(temporary, persistent);
6771        } else if self.parse_keyword(Keyword::TRIGGER) {
6772            return self.parse_drop_trigger();
6773        } else if self.parse_keyword(Keyword::EXTENSION) {
6774            return self.parse_drop_extension();
6775        } else if self.parse_keyword(Keyword::OPERATOR) {
6776            // Check if this is DROP OPERATOR FAMILY or DROP OPERATOR CLASS
6777            return if self.parse_keyword(Keyword::FAMILY) {
6778                self.parse_drop_operator_family()
6779            } else if self.parse_keyword(Keyword::CLASS) {
6780                self.parse_drop_operator_class()
6781            } else {
6782                self.parse_drop_operator()
6783            };
6784        } else {
6785            return self.expected(
6786                "CONNECTOR, DATABASE, DOMAIN, EXTENSION, FUNCTION, INDEX, OPERATOR, POLICY, PROCEDURE, ROLE, SCHEMA, SECRET, SEQUENCE, STAGE, STREAM, TABLE, TRIGGER, TYPE, VIEW, MATERIALIZED VIEW or USER after DROP",
6787                self.peek_token(),
6788            );
6789        };
6790        // Many dialects support the non-standard `IF EXISTS` clause and allow
6791        // specifying multiple objects to delete in a single statement
6792        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6793        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
6794
6795        let loc = self.peek_token().span.start;
6796        let cascade = self.parse_keyword(Keyword::CASCADE);
6797        let restrict = self.parse_keyword(Keyword::RESTRICT);
6798        let purge = self.parse_keyword(Keyword::PURGE);
6799        if cascade && restrict {
6800            return parser_err!("Cannot specify both CASCADE and RESTRICT in DROP", loc);
6801        }
6802        if object_type == ObjectType::Role && (cascade || restrict || purge) {
6803            return parser_err!(
6804                "Cannot specify CASCADE, RESTRICT, or PURGE in DROP ROLE",
6805                loc
6806            );
6807        }
6808        let table = if self.parse_keyword(Keyword::ON) {
6809            Some(self.parse_object_name(false)?)
6810        } else {
6811            None
6812        };
6813        Ok(Statement::Drop {
6814            object_type,
6815            if_exists,
6816            names,
6817            cascade,
6818            restrict,
6819            purge,
6820            temporary,
6821            table,
6822        })
6823    }
6824
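    /// Parse an optional `CASCADE` or `RESTRICT` drop behavior, returning
    /// `None` if neither keyword is present.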
6825    fn parse_optional_drop_behavior(&mut self) -> Option<DropBehavior> {
6826        match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
6827            Some(Keyword::CASCADE) => Some(DropBehavior::Cascade),
6828            Some(Keyword::RESTRICT) => Some(DropBehavior::Restrict),
6829            _ => None,
6830        }
6831    }
6832
6833    /// ```sql
6834    /// DROP FUNCTION [ IF EXISTS ] name [ ( [ [ argmode ] [ argname ] argtype [, ...] ] ) ] [, ...]
6835    /// [ CASCADE | RESTRICT ]
6836    /// ```
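    ///
    /// For example (names and types are placeholders):
    /// ```sql
    /// DROP FUNCTION IF EXISTS my_func(INTEGER, TEXT) CASCADE;
    /// ```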
6837    fn parse_drop_function(&mut self) -> Result<Statement, ParserError> {
6838        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6839        let func_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
6840        let drop_behavior = self.parse_optional_drop_behavior();
6841        Ok(Statement::DropFunction(DropFunction {
6842            if_exists,
6843            func_desc,
6844            drop_behavior,
6845        }))
6846    }
6847
6848    /// ```sql
6849    /// DROP POLICY [ IF EXISTS ] name ON table_name [ CASCADE | RESTRICT ]
6850    /// ```
6851    ///
6852    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-droppolicy.html)
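    ///
    /// For example (names are placeholders):
    /// ```sql
    /// DROP POLICY IF EXISTS my_policy ON my_table RESTRICT;
    /// ```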
6853    fn parse_drop_policy(&mut self) -> Result<Statement, ParserError> {
6854        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6855        let name = self.parse_identifier()?;
6856        self.expect_keyword_is(Keyword::ON)?;
6857        let table_name = self.parse_object_name(false)?;
6858        let drop_behavior = self.parse_optional_drop_behavior();
6859        Ok(Statement::DropPolicy {
6860            if_exists,
6861            name,
6862            table_name,
6863            drop_behavior,
6864        })
6865    }
6866    /// ```sql
6867    /// DROP CONNECTOR [IF EXISTS] name
6868    /// ```
6869    ///
6870    /// See [Hive](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362034#LanguageManualDDL-DropConnector)
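    ///
    /// For example (the connector name is a placeholder):
    /// ```sql
    /// DROP CONNECTOR IF EXISTS my_connector;
    /// ```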
6871    fn parse_drop_connector(&mut self) -> Result<Statement, ParserError> {
6872        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6873        let name = self.parse_identifier()?;
6874        Ok(Statement::DropConnector { if_exists, name })
6875    }
6876
6877    /// ```sql
6878    /// DROP DOMAIN [ IF EXISTS ] name [ CASCADE | RESTRICT ]
6879    /// ```
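    ///
    /// For example (the domain name is a placeholder):
    /// ```sql
    /// DROP DOMAIN IF EXISTS my_domain CASCADE;
    /// ```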
6880    fn parse_drop_domain(&mut self) -> Result<Statement, ParserError> {
6881        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6882        let name = self.parse_object_name(false)?;
6883        let drop_behavior = self.parse_optional_drop_behavior();
6884        Ok(Statement::DropDomain(DropDomain {
6885            if_exists,
6886            name,
6887            drop_behavior,
6888        }))
6889    }
6890
6891    /// ```sql
6892    /// DROP PROCEDURE [ IF EXISTS ] name [ ( [ [ argmode ] [ argname ] argtype [, ...] ] ) ] [, ...]
6893    /// [ CASCADE | RESTRICT ]
6894    /// ```
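    ///
    /// For example (names and types are placeholders):
    /// ```sql
    /// DROP PROCEDURE IF EXISTS my_proc(INTEGER) RESTRICT;
    /// ```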
6895    fn parse_drop_procedure(&mut self) -> Result<Statement, ParserError> {
6896        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6897        let proc_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
6898        let drop_behavior = self.parse_optional_drop_behavior();
6899        Ok(Statement::DropProcedure {
6900            if_exists,
6901            proc_desc,
6902            drop_behavior,
6903        })
6904    }
6905
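    /// Parse a function or procedure name with an optional parenthesized
    /// argument list, as used by `DROP FUNCTION` and `DROP PROCEDURE`,
    /// e.g. `my_func(INTEGER, TEXT)` (names and types are placeholders).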
6906    fn parse_function_desc(&mut self) -> Result<FunctionDesc, ParserError> {
6907        let name = self.parse_object_name(false)?;
6908
6909        let args = if self.consume_token(&Token::LParen) {
6910            if self.consume_token(&Token::RParen) {
6911                Some(vec![])
6912            } else {
6913                let args = self.parse_comma_separated(Parser::parse_function_arg)?;
6914                self.expect_token(&Token::RParen)?;
6915                Some(args)
6916            }
6917        } else {
6918            None
6919        };
6920
6921        Ok(FunctionDesc { name, args })
6922    }
6923
6924    /// See [DuckDB Docs](https://duckdb.org/docs/sql/statements/create_secret.html) for more details.
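    ///
    /// For example (names are placeholders):
    /// ```sql
    /// DROP TEMPORARY SECRET IF EXISTS my_secret FROM my_storage;
    /// ```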
6925    fn parse_drop_secret(
6926        &mut self,
6927        temporary: bool,
6928        persistent: bool,
6929    ) -> Result<Statement, ParserError> {
6930        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6931        let name = self.parse_identifier()?;
6932        let storage_specifier = if self.parse_keyword(Keyword::FROM) {
6933            self.parse_identifier().ok()
6934        } else {
6935            None
6936        };
6937        let temp = match (temporary, persistent) {
6938            (true, false) => Some(true),
6939            (false, true) => Some(false),
6940            (false, false) => None,
6941            _ => self.expected("TEMPORARY or PERSISTENT", self.peek_token())?,
6942        };
6943
6944        Ok(Statement::DropSecret {
6945            if_exists,
6946            temporary: temp,
6947            name,
6948            storage_specifier,
6949        })
6950    }
6951
6952    /// Parse a `DECLARE` statement.
6953    ///
6954    /// ```sql
6955    /// DECLARE name [ BINARY ] [ ASENSITIVE | INSENSITIVE ] [ [ NO ] SCROLL ]
6956    ///     CURSOR [ { WITH | WITHOUT } HOLD ] FOR query
6957    /// ```
6958    ///
6959    /// The syntax can vary significantly between dialects. See the grammar
6960    /// documented on the dialect-specific `parse_*_declare` methods in such cases.
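    ///
    /// For example (names are placeholders):
    /// ```sql
    /// DECLARE my_cursor INSENSITIVE SCROLL CURSOR WITH HOLD FOR SELECT * FROM my_table;
    /// ```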
6961    pub fn parse_declare(&mut self) -> Result<Statement, ParserError> {
6962        if dialect_of!(self is BigQueryDialect) {
6963            return self.parse_big_query_declare();
6964        }
6965        if dialect_of!(self is SnowflakeDialect) {
6966            return self.parse_snowflake_declare();
6967        }
6968        if dialect_of!(self is MsSqlDialect) {
6969            return self.parse_mssql_declare();
6970        }
6971
6972        let name = self.parse_identifier()?;
6973
6974        let binary = Some(self.parse_keyword(Keyword::BINARY));
6975        let sensitive = if self.parse_keyword(Keyword::INSENSITIVE) {
6976            Some(true)
6977        } else if self.parse_keyword(Keyword::ASENSITIVE) {
6978            Some(false)
6979        } else {
6980            None
6981        };
6982        let scroll = if self.parse_keyword(Keyword::SCROLL) {
6983            Some(true)
6984        } else if self.parse_keywords(&[Keyword::NO, Keyword::SCROLL]) {
6985            Some(false)
6986        } else {
6987            None
6988        };
6989
6990        self.expect_keyword_is(Keyword::CURSOR)?;
6991        let declare_type = Some(DeclareType::Cursor);
6992
6993        let hold = match self.parse_one_of_keywords(&[Keyword::WITH, Keyword::WITHOUT]) {
6994            Some(keyword) => {
6995                self.expect_keyword_is(Keyword::HOLD)?;
6996
6997                match keyword {
6998                    Keyword::WITH => Some(true),
6999                    Keyword::WITHOUT => Some(false),
7000                    _ => unreachable!(),
7001                }
7002            }
7003            None => None,
7004        };
7005
7006        self.expect_keyword_is(Keyword::FOR)?;
7007
7008        let query = Some(self.parse_query()?);
7009
7010        Ok(Statement::Declare {
7011            stmts: vec![Declare {
7012                names: vec![name],
7013                data_type: None,
7014                assignment: None,
7015                declare_type,
7016                binary,
7017                sensitive,
7018                scroll,
7019                hold,
7020                for_query: query,
7021            }],
7022        })
7023    }
7024
7025    /// Parse a [BigQuery] `DECLARE` statement.
7026    ///
7027    /// Syntax:
7028    /// ```text
7029    /// DECLARE variable_name[, ...] [{ <variable_type> | <DEFAULT expression> }];
7030    /// ```
7031    /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#declare
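    ///
    /// For example (variable names are placeholders):
    /// ```sql
    /// DECLARE x, y INT64 DEFAULT 0;
    /// ```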
7032    pub fn parse_big_query_declare(&mut self) -> Result<Statement, ParserError> {
7033        let names = self.parse_comma_separated(Parser::parse_identifier)?;
7034
7035        let data_type = match self.peek_token().token {
7036            Token::Word(w) if w.keyword == Keyword::DEFAULT => None,
7037            _ => Some(self.parse_data_type()?),
7038        };
7039
7040        let expr = if data_type.is_some() {
7041            if self.parse_keyword(Keyword::DEFAULT) {
7042                Some(self.parse_expr()?)
7043            } else {
7044                None
7045            }
7046        } else {
7047            // If no variable type is given, a default expression must be specified,
7048            // per the BigQuery docs, i.e. `DECLARE foo;` is invalid.
7049            self.expect_keyword_is(Keyword::DEFAULT)?;
7050            Some(self.parse_expr()?)
7051        };
7052
7053        Ok(Statement::Declare {
7054            stmts: vec![Declare {
7055                names,
7056                data_type,
7057                assignment: expr.map(|expr| DeclareAssignment::Default(Box::new(expr))),
7058                declare_type: None,
7059                binary: None,
7060                sensitive: None,
7061                scroll: None,
7062                hold: None,
7063                for_query: None,
7064            }],
7065        })
7066    }
7067
7068    /// Parse a [Snowflake] `DECLARE` statement.
7069    ///
7070    /// Syntax:
7071    /// ```text
7072    /// DECLARE
7073    ///   [{ <variable_declaration>
7074    ///      | <cursor_declaration>
7075    ///      | <resultset_declaration>
7076    ///      | <exception_declaration> }; ... ]
7077    ///
7078    /// <variable_declaration>
7079    /// <variable_name> [<type>] [ { DEFAULT | := } <expression>]
7080    ///
7081    /// <cursor_declaration>
7082    /// <cursor_name> CURSOR FOR <query>
7083    ///
7084    /// <resultset_declaration>
7085    /// <resultset_name> RESULTSET [ { DEFAULT | := } ( <query> ) ] ;
7086    ///
7087    /// <exception_declaration>
7088    /// <exception_name> EXCEPTION [ ( <exception_number> , '<exception_message>' ) ] ;
7089    /// ```
7090    ///
7091    /// [Snowflake]: https://docs.snowflake.com/en/sql-reference/snowflake-scripting/declare
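    ///
    /// For example (names are placeholders):
    /// ```sql
    /// DECLARE
    ///   profit NUMBER(38, 2) DEFAULT 0.0;
    ///   c1 CURSOR FOR SELECT id FROM invoices;
    /// ```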
7092    pub fn parse_snowflake_declare(&mut self) -> Result<Statement, ParserError> {
7093        let mut stmts = vec![];
7094        loop {
7095            let name = self.parse_identifier()?;
7096            let (declare_type, for_query, assigned_expr, data_type) =
7097                if self.parse_keyword(Keyword::CURSOR) {
7098                    self.expect_keyword_is(Keyword::FOR)?;
7099                    match self.peek_token().token {
7100                        Token::Word(w) if w.keyword == Keyword::SELECT => (
7101                            Some(DeclareType::Cursor),
7102                            Some(self.parse_query()?),
7103                            None,
7104                            None,
7105                        ),
7106                        _ => (
7107                            Some(DeclareType::Cursor),
7108                            None,
7109                            Some(DeclareAssignment::For(Box::new(self.parse_expr()?))),
7110                            None,
7111                        ),
7112                    }
7113                } else if self.parse_keyword(Keyword::RESULTSET) {
7114                    let assigned_expr = if self.peek_token().token != Token::SemiColon {
7115                        self.parse_snowflake_variable_declaration_expression()?
7116                    } else {
7117                        // Nothing more to do. The statement has no further parameters.
7118                        None
7119                    };
7120
7121                    (Some(DeclareType::ResultSet), None, assigned_expr, None)
7122                } else if self.parse_keyword(Keyword::EXCEPTION) {
7123                    let assigned_expr = if self.peek_token().token == Token::LParen {
7124                        Some(DeclareAssignment::Expr(Box::new(self.parse_expr()?)))
7125                    } else {
7126                        // Nothing more to do. The statement has no further parameters.
7127                        None
7128                    };
7129
7130                    (Some(DeclareType::Exception), None, assigned_expr, None)
7131                } else {
7132                    // Without an explicit keyword, the only valid option is variable declaration.
7133                    let (assigned_expr, data_type) = if let Some(assigned_expr) =
7134                        self.parse_snowflake_variable_declaration_expression()?
7135                    {
7136                        (Some(assigned_expr), None)
7137                    } else if let Token::Word(_) = self.peek_token().token {
7138                        let data_type = self.parse_data_type()?;
7139                        (
7140                            self.parse_snowflake_variable_declaration_expression()?,
7141                            Some(data_type),
7142                        )
7143                    } else {
7144                        (None, None)
7145                    };
7146                    (None, None, assigned_expr, data_type)
7147                };
7148            let stmt = Declare {
7149                names: vec![name],
7150                data_type,
7151                assignment: assigned_expr,
7152                declare_type,
7153                binary: None,
7154                sensitive: None,
7155                scroll: None,
7156                hold: None,
7157                for_query,
7158            };
7159
7160            stmts.push(stmt);
7161            if self.consume_token(&Token::SemiColon) {
7162                match self.peek_token().token {
7163                    Token::Word(w)
7164                        if ALL_KEYWORDS
7165                            .binary_search(&w.value.to_uppercase().as_str())
7166                            .is_err() =>
7167                    {
7168                        // Not a keyword - start of a new declaration.
7169                        continue;
7170                    }
7171                    _ => {
7172                        // Put back the semicolon; this is the end of the DECLARE statement.
7173                        self.prev_token();
7174                    }
7175                }
7176            }
7177
7178            break;
7179        }
7180
7181        Ok(Statement::Declare { stmts })
7182    }
7183
7184    /// Parse a [MsSql] `DECLARE` statement.
7185    ///
7186    /// Syntax:
7187    /// ```text
7188    /// DECLARE
7189    /// {
7190    ///   { @local_variable [AS] data_type [ = value ] }
7191    ///   | { @cursor_variable_name CURSOR [ FOR ] }
7192    /// } [ ,...n ]
7193    /// ```
7194    /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/language-elements/declare-local-variable-transact-sql?view=sql-server-ver16
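    ///
    /// For example (variable names are placeholders):
    /// ```sql
    /// DECLARE @start_date DATE = '2024-01-01', @counter INT;
    /// ```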
7195    pub fn parse_mssql_declare(&mut self) -> Result<Statement, ParserError> {
7196        let stmts = self.parse_comma_separated(Parser::parse_mssql_declare_stmt)?;
7197
7198        Ok(Statement::Declare { stmts })
7199    }
7200
7201    /// Parse the body of a [MsSql] `DECLARE` statement.
7202    ///
7203    /// Syntax:
7204    /// ```text
7205    /// {
7206    ///   { @local_variable [AS] data_type [ = value ] }
7207    ///   | { @cursor_variable_name CURSOR [ FOR ] }
7208    /// } [ ,...n ]
7209    /// ```
7210    /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/language-elements/declare-local-variable-transact-sql?view=sql-server-ver16
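    ///
    /// For example, a single entry such as `@x INT = 0` (the name and value are
    /// placeholders).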
7211    pub fn parse_mssql_declare_stmt(&mut self) -> Result<Declare, ParserError> {
7212        let name = {
7213            let ident = self.parse_identifier()?;
7214            if !ident.value.starts_with('@')
7215                && !matches!(
7216                    self.peek_token().token,
7217                    Token::Word(w) if w.keyword == Keyword::CURSOR
7218                )
7219            {
7220                Err(ParserError::TokenizerError(
7221                    "Invalid MsSql variable declaration.".to_string(),
7222                ))
7223            } else {
7224                Ok(ident)
7225            }
7226        }?;
7227
7228        let (declare_type, data_type) = match self.peek_token().token {
7229            Token::Word(w) => match w.keyword {
7230                Keyword::CURSOR => {
7231                    self.next_token();
7232                    (Some(DeclareType::Cursor), None)
7233                }
7234                Keyword::AS => {
7235                    self.next_token();
7236                    (None, Some(self.parse_data_type()?))
7237                }
7238                _ => (None, Some(self.parse_data_type()?)),
7239            },
7240            _ => (None, Some(self.parse_data_type()?)),
7241        };
7242
7243        let (for_query, assignment) = if self.peek_keyword(Keyword::FOR) {
7244            self.next_token();
7245            let query = Some(self.parse_query()?);
7246            (query, None)
7247        } else {
7248            let assignment = self.parse_mssql_variable_declaration_expression()?;
7249            (None, assignment)
7250        };
7251
7252        Ok(Declare {
7253            names: vec![name],
7254            data_type,
7255            assignment,
7256            declare_type,
7257            binary: None,
7258            sensitive: None,
7259            scroll: None,
7260            hold: None,
7261            for_query,
7262        })
7263    }
7264
7265    /// Parses the assigned expression in a variable declaration.
7266    ///
7267    /// Syntax:
7268    /// ```text
7269    /// [ { DEFAULT | := } <expression>]
7270    /// ```
7271    /// <https://docs.snowflake.com/en/sql-reference/snowflake-scripting/declare#variable-declaration-syntax>
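    ///
    /// For example, this matches fragments such as `DEFAULT 0` or `:= 0`
    /// (the value is a placeholder) and returns `None` when neither form is present.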
7272    pub fn parse_snowflake_variable_declaration_expression(
7273        &mut self,
7274    ) -> Result<Option<DeclareAssignment>, ParserError> {
7275        Ok(match self.peek_token().token {
7276            Token::Word(w) if w.keyword == Keyword::DEFAULT => {
7277                self.next_token(); // Skip `DEFAULT`
7278                Some(DeclareAssignment::Default(Box::new(self.parse_expr()?)))
7279            }
7280            Token::Assignment => {
7281                self.next_token(); // Skip `:=`
7282                Some(DeclareAssignment::DuckAssignment(Box::new(
7283                    self.parse_expr()?,
7284                )))
7285            }
7286            _ => None,
7287        })
7288    }
7289
7290    /// Parses the assigned expression in a variable declaration.
7291    ///
7292    /// Syntax:
7293    /// ```text
7294    /// [ = <expression>]
7295    /// ```
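    ///
    /// For example, this matches fragments such as `= 42` (the value is a
    /// placeholder) and returns `None` when no assignment is present.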
7296    pub fn parse_mssql_variable_declaration_expression(
7297        &mut self,
7298    ) -> Result<Option<DeclareAssignment>, ParserError> {
7299        Ok(match self.peek_token().token {
7300            Token::Eq => {
7301                self.next_token(); // Skip `=`
7302                Some(DeclareAssignment::MsSqlAssignment(Box::new(
7303                    self.parse_expr()?,
7304                )))
7305            }
7306            _ => None,
7307        })
7308    }
7309
7310    /// Parse a `FETCH [ direction { FROM | IN } ] cursor INTO target` statement.
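    ///
    /// For example (names are placeholders):
    /// ```sql
    /// FETCH NEXT FROM my_cursor INTO my_target;
    /// ```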
7311    pub fn parse_fetch_statement(&mut self) -> Result<Statement, ParserError> {
7312        let direction = if self.parse_keyword(Keyword::NEXT) {
7313            FetchDirection::Next
7314        } else if self.parse_keyword(Keyword::PRIOR) {
7315            FetchDirection::Prior
7316        } else if self.parse_keyword(Keyword::FIRST) {
7317            FetchDirection::First
7318        } else if self.parse_keyword(Keyword::LAST) {
7319            FetchDirection::Last
7320        } else if self.parse_keyword(Keyword::ABSOLUTE) {
7321            FetchDirection::Absolute {
7322                limit: self.parse_number_value()?.value,
7323            }
7324        } else if self.parse_keyword(Keyword::RELATIVE) {
7325            FetchDirection::Relative {
7326                limit: self.parse_number_value()?.value,
7327            }
7328        } else if self.parse_keyword(Keyword::FORWARD) {
7329            if self.parse_keyword(Keyword::ALL) {
7330                FetchDirection::ForwardAll
7331            } else {
7332                FetchDirection::Forward {
7333                    // TODO: Support an optional limit
7334                    limit: Some(self.parse_number_value()?.value),
7335                }
7336            }
7337        } else if self.parse_keyword(Keyword::BACKWARD) {
7338            if self.parse_keyword(Keyword::ALL) {
7339                FetchDirection::BackwardAll
7340            } else {
7341                FetchDirection::Backward {
7342                    // TODO: Support an optional limit
7343                    limit: Some(self.parse_number_value()?.value),
7344                }
7345            }
7346        } else if self.parse_keyword(Keyword::ALL) {
7347            FetchDirection::All
7348        } else {
7349            FetchDirection::Count {
7350                limit: self.parse_number_value()?.value,
7351            }
7352        };
7353
7354        let position = if self.peek_keyword(Keyword::FROM) {
7355            self.expect_keyword(Keyword::FROM)?;
7356            FetchPosition::From
7357        } else if self.peek_keyword(Keyword::IN) {
7358            self.expect_keyword(Keyword::IN)?;
7359            FetchPosition::In
7360        } else {
7361            return parser_err!("Expected FROM or IN", self.peek_token().span.start);
7362        };
7363
7364        let name = self.parse_identifier()?;
7365
7366        let into = if self.parse_keyword(Keyword::INTO) {
7367            Some(self.parse_object_name(false)?)
7368        } else {
7369            None
7370        };
7371
7372        Ok(Statement::Fetch {
7373            name,
7374            direction,
7375            position,
7376            into,
7377        })
7378    }
7379
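    /// Parse a PostgreSQL `DISCARD` statement, e.g. `DISCARD PLANS;`.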
7380    pub fn parse_discard(&mut self) -> Result<Statement, ParserError> {
7381        let object_type = if self.parse_keyword(Keyword::ALL) {
7382            DiscardObject::ALL
7383        } else if self.parse_keyword(Keyword::PLANS) {
7384            DiscardObject::PLANS
7385        } else if self.parse_keyword(Keyword::SEQUENCES) {
7386            DiscardObject::SEQUENCES
7387        } else if self.parse_keyword(Keyword::TEMP) || self.parse_keyword(Keyword::TEMPORARY) {
7388            DiscardObject::TEMP
7389        } else {
7390            return self.expected(
7391                "ALL, PLANS, SEQUENCES, TEMP or TEMPORARY after DISCARD",
7392                self.peek_token(),
7393            );
7394        };
7395        Ok(Statement::Discard { object_type })
7396    }
7397
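    /// Parse the body of a `CREATE [UNIQUE] INDEX` statement; the leading
    /// `CREATE [UNIQUE] INDEX` keywords are expected to have been consumed
    /// by the caller. For example (names are placeholders):
    /// ```sql
    /// CREATE UNIQUE INDEX IF NOT EXISTS my_idx ON my_table (col1, col2) WHERE col1 > 0;
    /// ```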
7398    pub fn parse_create_index(&mut self, unique: bool) -> Result<Statement, ParserError> {
7399        let concurrently = self.parse_keyword(Keyword::CONCURRENTLY);
7400        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7401
7402        let mut using = None;
7403
7404        let index_name = if if_not_exists || !self.parse_keyword(Keyword::ON) {
7405            let index_name = self.parse_object_name(false)?;
7406            // MySQL allows `USING index_type` either before or after `ON table_name`
7407            using = self.parse_optional_using_then_index_type()?;
7408            self.expect_keyword_is(Keyword::ON)?;
7409            Some(index_name)
7410        } else {
7411            None
7412        };
7413
7414        let table_name = self.parse_object_name(false)?;
7415
7416        // MySQL allows having two `USING` clauses.
7417        // In that case, the second clause overwrites the first.
7418        using = self.parse_optional_using_then_index_type()?.or(using);
7419
7420        let columns = self.parse_parenthesized_index_column_list()?;
7421
7422        let include = if self.parse_keyword(Keyword::INCLUDE) {
7423            self.expect_token(&Token::LParen)?;
7424            let columns = self.parse_comma_separated(|p| p.parse_identifier())?;
7425            self.expect_token(&Token::RParen)?;
7426            columns
7427        } else {
7428            vec![]
7429        };
7430
7431        let nulls_distinct = if self.parse_keyword(Keyword::NULLS) {
7432            let not = self.parse_keyword(Keyword::NOT);
7433            self.expect_keyword_is(Keyword::DISTINCT)?;
7434            Some(!not)
7435        } else {
7436            None
7437        };
7438
7439        let with = if self.dialect.supports_create_index_with_clause()
7440            && self.parse_keyword(Keyword::WITH)
7441        {
7442            self.expect_token(&Token::LParen)?;
7443            let with_params = self.parse_comma_separated(Parser::parse_expr)?;
7444            self.expect_token(&Token::RParen)?;
7445            with_params
7446        } else {
7447            Vec::new()
7448        };
7449
7450        let predicate = if self.parse_keyword(Keyword::WHERE) {
7451            Some(self.parse_expr()?)
7452        } else {
7453            None
7454        };
7455
7456        // MySQL options (including the modern style of `USING` after the column list instead of
7457        // before, which is deprecated) shouldn't conflict with other preceding options (e.g. `WITH
7458        // PARSER` won't be caught by the above `WITH` clause parsing because MySQL doesn't set that
7459        // support flag). This is probably invalid syntax for other dialects, but it is simpler to
7460        // parse it anyway (as we do inside `ALTER TABLE` and `CREATE TABLE` parsing).
7461        let index_options = self.parse_index_options()?;
7462
7463        // MySQL allows `ALGORITHM` and `LOCK` options. Unlike in `ALTER TABLE`, they need not be comma separated.
7464        let mut alter_options = Vec::new();
7465        while self
7466            .peek_one_of_keywords(&[Keyword::ALGORITHM, Keyword::LOCK])
7467            .is_some()
7468        {
7469            alter_options.push(self.parse_alter_table_operation()?)
7470        }
7471
7472        Ok(Statement::CreateIndex(CreateIndex {
7473            name: index_name,
7474            table_name,
7475            using,
7476            columns,
7477            unique,
7478            concurrently,
7479            if_not_exists,
7480            include,
7481            nulls_distinct,
7482            with,
7483            predicate,
7484            index_options,
7485            alter_options,
7486        }))
7487    }
7488
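    /// Parse a PostgreSQL `CREATE EXTENSION` statement (with the leading
    /// `CREATE EXTENSION` keywords already consumed), e.g.
    /// `CREATE EXTENSION IF NOT EXISTS my_extension WITH SCHEMA my_schema CASCADE`.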
7489    pub fn parse_create_extension(&mut self) -> Result<Statement, ParserError> {
7490        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7491        let name = self.parse_identifier()?;
7492
7493        let (schema, version, cascade) = if self.parse_keyword(Keyword::WITH) {
7494            let schema = if self.parse_keyword(Keyword::SCHEMA) {
7495                Some(self.parse_identifier()?)
7496            } else {
7497                None
7498            };
7499
7500            let version = if self.parse_keyword(Keyword::VERSION) {
7501                Some(self.parse_identifier()?)
7502            } else {
7503                None
7504            };
7505
7506            let cascade = self.parse_keyword(Keyword::CASCADE);
7507
7508            (schema, version, cascade)
7509        } else {
7510            (None, None, false)
7511        };
7512
7513        Ok(CreateExtension {
7514            name,
7515            if_not_exists,
7516            schema,
7517            version,
7518            cascade,
7519        }
7520        .into())
7521    }
7522
7523    /// Parse a PostgreSQL-specific [Statement::DropExtension] statement.
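    ///
    /// For example (extension names are placeholders):
    /// ```sql
    /// DROP EXTENSION IF EXISTS ext_one, ext_two CASCADE;
    /// ```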
7524    pub fn parse_drop_extension(&mut self) -> Result<Statement, ParserError> {
7525        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7526        let names = self.parse_comma_separated(|p| p.parse_identifier())?;
7527        let cascade_or_restrict =
7528            self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]);
7529        Ok(Statement::DropExtension(DropExtension {
7530            names,
7531            if_exists,
7532            cascade_or_restrict: cascade_or_restrict
7533                .map(|k| match k {
7534                    Keyword::CASCADE => Ok(ReferentialAction::Cascade),
7535                    Keyword::RESTRICT => Ok(ReferentialAction::Restrict),
7536                    _ => self.expected("CASCADE or RESTRICT", self.peek_token()),
7537                })
7538                .transpose()?,
7539        }))
7540    }
7541
7542    /// Parse a [Statement::DropOperator] statement.
7543    ///
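    /// For example (the operator and types are placeholders):
    /// ```sql
    /// DROP OPERATOR IF EXISTS ^ (INTEGER, INTEGER) CASCADE;
    /// ```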
7544    pub fn parse_drop_operator(&mut self) -> Result<Statement, ParserError> {
7545        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7546        let operators = self.parse_comma_separated(|p| p.parse_drop_operator_signature())?;
7547        let drop_behavior = self.parse_optional_drop_behavior();
7548        Ok(Statement::DropOperator(DropOperator {
7549            if_exists,
7550            operators,
7551            drop_behavior,
7552        }))
7553    }
7554
7555    /// Parse an operator signature for a [Statement::DropOperator]
7556    /// Format: `name ( { left_type | NONE } , right_type )`
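    /// For example, `+ (NONE, INTEGER)` describes a prefix `+` operator.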
7557    fn parse_drop_operator_signature(&mut self) -> Result<DropOperatorSignature, ParserError> {
7558        let name = self.parse_operator_name()?;
7559        self.expect_token(&Token::LParen)?;
7560
7561        // Parse left operand type (or NONE for prefix operators)
7562        let left_type = if self.parse_keyword(Keyword::NONE) {
7563            None
7564        } else {
7565            Some(self.parse_data_type()?)
7566        };
7567
7568        self.expect_token(&Token::Comma)?;
7569
7570        // Parse right operand type (always required)
7571        let right_type = self.parse_data_type()?;
7572
7573        self.expect_token(&Token::RParen)?;
7574
7575        Ok(DropOperatorSignature {
7576            name,
7577            left_type,
7578            right_type,
7579        })
7580    }
7581
7582    /// Parse a [Statement::DropOperatorFamily]
7583    ///
7584    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-dropopfamily.html)
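    ///
    /// For example (names are placeholders):
    /// ```sql
    /// DROP OPERATOR FAMILY IF EXISTS my_opfamily USING btree CASCADE;
    /// ```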
7585    pub fn parse_drop_operator_family(&mut self) -> Result<Statement, ParserError> {
7586        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7587        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
7588        self.expect_keyword(Keyword::USING)?;
7589        let using = self.parse_identifier()?;
7590        let drop_behavior = self.parse_optional_drop_behavior();
7591        Ok(Statement::DropOperatorFamily(DropOperatorFamily {
7592            if_exists,
7593            names,
7594            using,
7595            drop_behavior,
7596        }))
7597    }
7598
7599    /// Parse a [Statement::DropOperatorClass]
7600    ///
7601    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-dropopclass.html)
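    ///
    /// For example (names are placeholders):
    /// ```sql
    /// DROP OPERATOR CLASS IF EXISTS my_opclass USING gin RESTRICT;
    /// ```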
7602    pub fn parse_drop_operator_class(&mut self) -> Result<Statement, ParserError> {
7603        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7604        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
7605        self.expect_keyword(Keyword::USING)?;
7606        let using = self.parse_identifier()?;
7607        let drop_behavior = self.parse_optional_drop_behavior();
7608        Ok(Statement::DropOperatorClass(DropOperatorClass {
7609            if_exists,
7610            names,
7611            using,
7612            drop_behavior,
7613        }))
7614    }
7615
7616    // TODO: Implement parsing for Hive `SKEWED BY`
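    /// Parse a Hive `PARTITIONED BY (col type, ...)` clause, returning
    /// `HiveDistributionStyle::NONE` if it is not present.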
7617    pub fn parse_hive_distribution(&mut self) -> Result<HiveDistributionStyle, ParserError> {
7618        if self.parse_keywords(&[Keyword::PARTITIONED, Keyword::BY]) {
7619            self.expect_token(&Token::LParen)?;
7620            let columns = self.parse_comma_separated(Parser::parse_column_def)?;
7621            self.expect_token(&Token::RParen)?;
7622            Ok(HiveDistributionStyle::PARTITIONED { columns })
7623        } else {
7624            Ok(HiveDistributionStyle::NONE)
7625        }
7626    }
7627
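    /// Parse Hive table format clauses (`ROW FORMAT`, `STORED AS`, `LOCATION`,
    /// `WITH SERDEPROPERTIES`), returning `None` when none are present.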
7628    pub fn parse_hive_formats(&mut self) -> Result<Option<HiveFormat>, ParserError> {
7629        let mut hive_format: Option<HiveFormat> = None;
7630        loop {
7631            match self.parse_one_of_keywords(&[
7632                Keyword::ROW,
7633                Keyword::STORED,
7634                Keyword::LOCATION,
7635                Keyword::WITH,
7636            ]) {
7637                Some(Keyword::ROW) => {
7638                    hive_format
7639                        .get_or_insert_with(HiveFormat::default)
7640                        .row_format = Some(self.parse_row_format()?);
7641                }
7642                Some(Keyword::STORED) => {
7643                    self.expect_keyword_is(Keyword::AS)?;
7644                    if self.parse_keyword(Keyword::INPUTFORMAT) {
7645                        let input_format = self.parse_expr()?;
7646                        self.expect_keyword_is(Keyword::OUTPUTFORMAT)?;
7647                        let output_format = self.parse_expr()?;
7648                        hive_format.get_or_insert_with(HiveFormat::default).storage =
7649                            Some(HiveIOFormat::IOF {
7650                                input_format,
7651                                output_format,
7652                            });
7653                    } else {
7654                        let format = self.parse_file_format()?;
7655                        hive_format.get_or_insert_with(HiveFormat::default).storage =
7656                            Some(HiveIOFormat::FileFormat { format });
7657                    }
7658                }
7659                Some(Keyword::LOCATION) => {
7660                    hive_format.get_or_insert_with(HiveFormat::default).location =
7661                        Some(self.parse_literal_string()?);
7662                }
7663                Some(Keyword::WITH) => {
7664                    self.prev_token();
7665                    let properties = self
7666                        .parse_options_with_keywords(&[Keyword::WITH, Keyword::SERDEPROPERTIES])?;
7667                    if !properties.is_empty() {
7668                        hive_format
7669                            .get_or_insert_with(HiveFormat::default)
7670                            .serde_properties = Some(properties);
7671                    } else {
7672                        break;
7673                    }
7674                }
7675                None => break,
7676                _ => break,
7677            }
7678        }
7679
7680        Ok(hive_format)
7681    }
7682
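    /// Parse a Hive `ROW FORMAT` clause (the `ROW` keyword has already been
    /// consumed), e.g. `ROW FORMAT DELIMITED FIELDS TERMINATED BY ','`
    /// or `ROW FORMAT SERDE 'serde.class.Name'` (the class name is a placeholder).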
7683    pub fn parse_row_format(&mut self) -> Result<HiveRowFormat, ParserError> {
7684        self.expect_keyword_is(Keyword::FORMAT)?;
7685        match self.parse_one_of_keywords(&[Keyword::SERDE, Keyword::DELIMITED]) {
7686            Some(Keyword::SERDE) => {
7687                let class = self.parse_literal_string()?;
7688                Ok(HiveRowFormat::SERDE { class })
7689            }
7690            _ => {
7691                let mut row_delimiters = vec![];
7692
7693                loop {
7694                    match self.parse_one_of_keywords(&[
7695                        Keyword::FIELDS,
7696                        Keyword::COLLECTION,
7697                        Keyword::MAP,
7698                        Keyword::LINES,
7699                        Keyword::NULL,
7700                    ]) {
7701                        Some(Keyword::FIELDS) => {
7702                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
7703                                row_delimiters.push(HiveRowDelimiter {
7704                                    delimiter: HiveDelimiter::FieldsTerminatedBy,
7705                                    char: self.parse_identifier()?,
7706                                });
7707
7708                                if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) {
7709                                    row_delimiters.push(HiveRowDelimiter {
7710                                        delimiter: HiveDelimiter::FieldsEscapedBy,
7711                                        char: self.parse_identifier()?,
7712                                    });
7713                                }
7714                            } else {
7715                                break;
7716                            }
7717                        }
7718                        Some(Keyword::COLLECTION) => {
7719                            if self.parse_keywords(&[
7720                                Keyword::ITEMS,
7721                                Keyword::TERMINATED,
7722                                Keyword::BY,
7723                            ]) {
7724                                row_delimiters.push(HiveRowDelimiter {
7725                                    delimiter: HiveDelimiter::CollectionItemsTerminatedBy,
7726                                    char: self.parse_identifier()?,
7727                                });
7728                            } else {
7729                                break;
7730                            }
7731                        }
7732                        Some(Keyword::MAP) => {
7733                            if self.parse_keywords(&[
7734                                Keyword::KEYS,
7735                                Keyword::TERMINATED,
7736                                Keyword::BY,
7737                            ]) {
7738                                row_delimiters.push(HiveRowDelimiter {
7739                                    delimiter: HiveDelimiter::MapKeysTerminatedBy,
7740                                    char: self.parse_identifier()?,
7741                                });
7742                            } else {
7743                                break;
7744                            }
7745                        }
7746                        Some(Keyword::LINES) => {
7747                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
7748                                row_delimiters.push(HiveRowDelimiter {
7749                                    delimiter: HiveDelimiter::LinesTerminatedBy,
7750                                    char: self.parse_identifier()?,
7751                                });
7752                            } else {
7753                                break;
7754                            }
7755                        }
7756                        Some(Keyword::NULL) => {
7757                            if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) {
7758                                row_delimiters.push(HiveRowDelimiter {
7759                                    delimiter: HiveDelimiter::NullDefinedAs,
7760                                    char: self.parse_identifier()?,
7761                                });
7762                            } else {
7763                                break;
7764                            }
7765                        }
7766                        _ => {
7767                            break;
7768                        }
7769                    }
7770                }
7771
7772                Ok(HiveRowFormat::DELIMITED {
7773                    delimiters: row_delimiters,
7774                })
7775            }
7776        }
7777    }
7778
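    /// Parse an optional ClickHouse `ON CLUSTER cluster_name` clause.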
7779    fn parse_optional_on_cluster(&mut self) -> Result<Option<Ident>, ParserError> {
7780        if self.parse_keywords(&[Keyword::ON, Keyword::CLUSTER]) {
7781            Ok(Some(self.parse_identifier()?))
7782        } else {
7783            Ok(None)
7784        }
7785    }
7786
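    /// Parse the body of a `CREATE TABLE` statement; the leading
    /// `CREATE [OR REPLACE] [TEMPORARY | GLOBAL | TRANSIENT] TABLE` keywords are
    /// expected to have been consumed by the caller (hence the flag parameters).
    /// For example (names are placeholders):
    /// ```sql
    /// CREATE TABLE IF NOT EXISTS my_table (id INT PRIMARY KEY, name TEXT);
    /// ```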
7787    pub fn parse_create_table(
7788        &mut self,
7789        or_replace: bool,
7790        temporary: bool,
7791        global: Option<bool>,
7792        transient: bool,
7793    ) -> Result<Statement, ParserError> {
7794        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
7795        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7796        let table_name = self.parse_object_name(allow_unquoted_hyphen)?;
7797
7798        // ClickHouse has `ON CLUSTER 'cluster'` syntax for DDLs
7799        let on_cluster = self.parse_optional_on_cluster()?;
7800
7801        let like = self.maybe_parse_create_table_like(allow_unquoted_hyphen)?;
7802
7803        let clone = if self.parse_keyword(Keyword::CLONE) {
7804            self.parse_object_name(allow_unquoted_hyphen).ok()
7805        } else {
7806            None
7807        };
7808
7809        // parse optional column list (schema)
7810        let (columns, constraints) = self.parse_columns()?;
7811        let comment_after_column_def =
7812            if dialect_of!(self is HiveDialect) && self.parse_keyword(Keyword::COMMENT) {
7813                let next_token = self.next_token();
7814                match next_token.token {
7815                    Token::SingleQuotedString(str) => Some(CommentDef::WithoutEq(str)),
7816                    _ => self.expected("comment", next_token)?,
7817                }
7818            } else {
7819                None
7820            };
7821
7822        // SQLite supports `WITHOUT ROWID` at the end of `CREATE TABLE`
7823        let without_rowid = self.parse_keywords(&[Keyword::WITHOUT, Keyword::ROWID]);
7824
7825        let hive_distribution = self.parse_hive_distribution()?;
7826        let clustered_by = self.parse_optional_clustered_by()?;
7827        let hive_formats = self.parse_hive_formats()?;
7828
7829        let create_table_config = self.parse_optional_create_table_config()?;
7830
7831        // ClickHouse supports `PRIMARY KEY` before `ORDER BY`
7832        // https://clickhouse.com/docs/en/sql-reference/statements/create/table#primary-key
7833        let primary_key = if dialect_of!(self is ClickHouseDialect | GenericDialect)
7834            && self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY])
7835        {
7836            Some(Box::new(self.parse_expr()?))
7837        } else {
7838            None
7839        };
7840
7841        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
7842            if self.consume_token(&Token::LParen) {
7843                let columns = if self.peek_token() != Token::RParen {
7844                    self.parse_comma_separated(|p| p.parse_expr())?
7845                } else {
7846                    vec![]
7847                };
7848                self.expect_token(&Token::RParen)?;
7849                Some(OneOrManyWithParens::Many(columns))
7850            } else {
7851                Some(OneOrManyWithParens::One(self.parse_expr()?))
7852            }
7853        } else {
7854            None
7855        };
7856
7857        let on_commit = if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT]) {
7858            Some(self.parse_create_table_on_commit()?)
7859        } else {
7860            None
7861        };
7862
7863        let strict = self.parse_keyword(Keyword::STRICT);
7864
7865        // Parse optional `AS ( query )`
7866        let query = if self.parse_keyword(Keyword::AS) {
7867            Some(self.parse_query()?)
7868        } else if self.dialect.supports_create_table_select() && self.parse_keyword(Keyword::SELECT)
7869        {
7870            // rewind the SELECT keyword
7871            self.prev_token();
7872            Some(self.parse_query()?)
7873        } else {
7874            None
7875        };
7876
7877        Ok(CreateTableBuilder::new(table_name)
7878            .temporary(temporary)
7879            .columns(columns)
7880            .constraints(constraints)
7881            .or_replace(or_replace)
7882            .if_not_exists(if_not_exists)
7883            .transient(transient)
7884            .hive_distribution(hive_distribution)
7885            .hive_formats(hive_formats)
7886            .global(global)
7887            .query(query)
7888            .without_rowid(without_rowid)
7889            .like(like)
7890            .clone_clause(clone)
7891            .comment_after_column_def(comment_after_column_def)
7892            .order_by(order_by)
7893            .on_commit(on_commit)
7894            .on_cluster(on_cluster)
7895            .clustered_by(clustered_by)
7896            .partition_by(create_table_config.partition_by)
7897            .cluster_by(create_table_config.cluster_by)
7898            .inherits(create_table_config.inherits)
7899            .table_options(create_table_config.table_options)
7900            .primary_key(primary_key)
7901            .strict(strict)
7902            .build())
7903    }
7904
7905    fn maybe_parse_create_table_like(
7906        &mut self,
7907        allow_unquoted_hyphen: bool,
7908    ) -> Result<Option<CreateTableLikeKind>, ParserError> {
7909        let like = if self.dialect.supports_create_table_like_parenthesized()
7910            && self.consume_token(&Token::LParen)
7911        {
7912            if self.parse_keyword(Keyword::LIKE) {
7913                let name = self.parse_object_name(allow_unquoted_hyphen)?;
7914                let defaults = if self.parse_keywords(&[Keyword::INCLUDING, Keyword::DEFAULTS]) {
7915                    Some(CreateTableLikeDefaults::Including)
7916                } else if self.parse_keywords(&[Keyword::EXCLUDING, Keyword::DEFAULTS]) {
7917                    Some(CreateTableLikeDefaults::Excluding)
7918                } else {
7919                    None
7920                };
7921                self.expect_token(&Token::RParen)?;
7922                Some(CreateTableLikeKind::Parenthesized(CreateTableLike {
7923                    name,
7924                    defaults,
7925                }))
7926            } else {
7927                // Roll back the '(': it's probably the start of the column list
7928                self.prev_token();
7929                None
7930            }
7931        } else if self.parse_keyword(Keyword::LIKE) || self.parse_keyword(Keyword::ILIKE) {
7932            let name = self.parse_object_name(allow_unquoted_hyphen)?;
7933            Some(CreateTableLikeKind::Plain(CreateTableLike {
7934                name,
7935                defaults: None,
7936            }))
7937        } else {
7938            None
7939        };
7940        Ok(like)
7941    }
7942
7943    pub(crate) fn parse_create_table_on_commit(&mut self) -> Result<OnCommit, ParserError> {
7944        if self.parse_keywords(&[Keyword::DELETE, Keyword::ROWS]) {
7945            Ok(OnCommit::DeleteRows)
7946        } else if self.parse_keywords(&[Keyword::PRESERVE, Keyword::ROWS]) {
7947            Ok(OnCommit::PreserveRows)
7948        } else if self.parse_keywords(&[Keyword::DROP]) {
7949            Ok(OnCommit::Drop)
7950        } else {
7951            parser_err!(
7952                "Expecting DELETE ROWS, PRESERVE ROWS or DROP",
7953                self.peek_token()
7954            )
7955        }
7956    }
7957
7958    /// Parse configuration like inheritance, partitioning, clustering information during the table creation.
7959    ///
7960    /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#syntax_2)
7961    /// [PostgreSQL](https://www.postgresql.org/docs/current/ddl-partitioning.html)
7962    /// [MySql](https://dev.mysql.com/doc/refman/8.4/en/create-table.html)
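    ///
    /// For example (BigQuery-style, names are placeholders), this accepts trailing
    /// clauses such as:
    /// ```sql
    /// PARTITION BY created_date CLUSTER BY user_id OPTIONS(description = 'my table')
    /// ```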
7963    fn parse_optional_create_table_config(
7964        &mut self,
7965    ) -> Result<CreateTableConfiguration, ParserError> {
7966        let mut table_options = CreateTableOptions::None;
7967
7968        let inherits = if self.parse_keyword(Keyword::INHERITS) {
7969            Some(self.parse_parenthesized_qualified_column_list(IsOptional::Mandatory, false)?)
7970        } else {
7971            None
7972        };
7973
7974        // PostgreSQL supports `WITH ( options )` before `AS`
7975        let with_options = self.parse_options(Keyword::WITH)?;
7976        if !with_options.is_empty() {
7977            table_options = CreateTableOptions::With(with_options)
7978        }
7979
7980        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
7981        if !table_properties.is_empty() {
7982            table_options = CreateTableOptions::TableProperties(table_properties);
7983        }
7984        let partition_by = if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
7985            && self.parse_keywords(&[Keyword::PARTITION, Keyword::BY])
7986        {
7987            Some(Box::new(self.parse_expr()?))
7988        } else {
7989            None
7990        };
7991
7992        let mut cluster_by = None;
7993        if dialect_of!(self is BigQueryDialect | GenericDialect) {
7994            if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
7995                cluster_by = Some(WrappedCollection::NoWrapping(
7996                    self.parse_comma_separated(|p| p.parse_expr())?,
7997                ));
7998            };
7999
8000            if let Token::Word(word) = self.peek_token().token {
8001                if word.keyword == Keyword::OPTIONS {
8002                    table_options =
8003                        CreateTableOptions::Options(self.parse_options(Keyword::OPTIONS)?)
8004                }
8005            };
8006        }
8007
8008        if !dialect_of!(self is HiveDialect) && table_options == CreateTableOptions::None {
8009            let plain_options = self.parse_plain_options()?;
8010            if !plain_options.is_empty() {
8011                table_options = CreateTableOptions::Plain(plain_options)
8012            }
8013        };
8014
8015        Ok(CreateTableConfiguration {
8016            partition_by,
8017            cluster_by,
8018            inherits,
8019            table_options,
8020        })
8021    }
8022
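    /// Parse a single "plain" table option as used by MySQL and ClickHouse
    /// `CREATE TABLE`, e.g. `ENGINE = InnoDB`, `COMMENT 'a comment'` or
    /// `DEFAULT CHARSET = utf8mb4` (values are illustrative).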
8023    fn parse_plain_option(&mut self) -> Result<Option<SqlOption>, ParserError> {
8024        // Single parameter option
8025        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
8026        if self.parse_keywords(&[Keyword::START, Keyword::TRANSACTION]) {
8027            return Ok(Some(SqlOption::Ident(Ident::new("START TRANSACTION"))));
8028        }
8029
8030        // Custom option
8031        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
8032        if self.parse_keywords(&[Keyword::COMMENT]) {
8033            let has_eq = self.consume_token(&Token::Eq);
8034            let value = self.next_token();
8035
8036            let comment = match (has_eq, value.token) {
8037                (true, Token::SingleQuotedString(s)) => {
8038                    Ok(Some(SqlOption::Comment(CommentDef::WithEq(s))))
8039                }
8040                (false, Token::SingleQuotedString(s)) => {
8041                    Ok(Some(SqlOption::Comment(CommentDef::WithoutEq(s))))
8042                }
8043                (_, token) => {
8044                    self.expected("Token::SingleQuotedString", TokenWithSpan::wrap(token))
8045                }
8046            };
8047            return comment;
8048        }
8049
8050        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
8051        // <https://clickhouse.com/docs/sql-reference/statements/create/table>
8052        if self.parse_keywords(&[Keyword::ENGINE]) {
8053            let _ = self.consume_token(&Token::Eq);
8054            let value = self.next_token();
8055
8056            let engine = match value.token {
8057                Token::Word(w) => {
8058                    let parameters = if self.peek_token() == Token::LParen {
8059                        self.parse_parenthesized_identifiers()?
8060                    } else {
8061                        vec![]
8062                    };
8063
8064                    Ok(Some(SqlOption::NamedParenthesizedList(
8065                        NamedParenthesizedList {
8066                            key: Ident::new("ENGINE"),
8067                            name: Some(Ident::new(w.value)),
8068                            values: parameters,
8069                        },
8070                    )))
8071                }
8072                _ => {
8073                    return self.expected("Token::Word", value)?;
8074                }
8075            };
8076
8077            return engine;
8078        }
8079
8080        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
8081        if self.parse_keywords(&[Keyword::TABLESPACE]) {
8082            let _ = self.consume_token(&Token::Eq);
8083            let value = self.next_token();
8084
8085            let tablespace = match value.token {
8086                Token::Word(Word { value: name, .. }) | Token::SingleQuotedString(name) => {
8087                    let storage = match self.parse_keyword(Keyword::STORAGE) {
8088                        true => {
8089                            let _ = self.consume_token(&Token::Eq);
8090                            let storage_token = self.next_token();
8091                            match &storage_token.token {
8092                                Token::Word(w) => match w.value.to_uppercase().as_str() {
8093                                    "DISK" => Some(StorageType::Disk),
8094                                    "MEMORY" => Some(StorageType::Memory),
8095                                    _ => self
8096                                        .expected("Storage type (DISK or MEMORY)", storage_token)?,
8097                                },
8098                                _ => self.expected("Token::Word", storage_token)?,
8099                            }
8100                        }
8101                        false => None,
8102                    };
8103
8104                    Ok(Some(SqlOption::TableSpace(TablespaceOption {
8105                        name,
8106                        storage,
8107                    })))
8108                }
8109                _ => {
8110                    return self.expected("Token::Word", value)?;
8111                }
8112            };
8113
8114            return tablespace;
8115        }
8116
8117        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
8118        if self.parse_keyword(Keyword::UNION) {
8119            let _ = self.consume_token(&Token::Eq);
8120            let value = self.next_token();
8121
8122            match value.token {
8123                Token::LParen => {
8124                    let tables: Vec<Ident> =
8125                        self.parse_comma_separated0(Parser::parse_identifier, Token::RParen)?;
8126                    self.expect_token(&Token::RParen)?;
8127
8128                    return Ok(Some(SqlOption::NamedParenthesizedList(
8129                        NamedParenthesizedList {
8130                            key: Ident::new("UNION"),
8131                            name: None,
8132                            values: tables,
8133                        },
8134                    )));
8135                }
8136                _ => {
8137                    return self.expected("Token::LParen", value)?;
8138                }
8139            }
8140        }
8141
8142        // Key/Value parameter option
8143        let key = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARSET]) {
8144            Ident::new("DEFAULT CHARSET")
8145        } else if self.parse_keyword(Keyword::CHARSET) {
8146            Ident::new("CHARSET")
8147        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARACTER, Keyword::SET]) {
8148            Ident::new("DEFAULT CHARACTER SET")
8149        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
8150            Ident::new("CHARACTER SET")
8151        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
8152            Ident::new("DEFAULT COLLATE")
8153        } else if self.parse_keyword(Keyword::COLLATE) {
8154            Ident::new("COLLATE")
8155        } else if self.parse_keywords(&[Keyword::DATA, Keyword::DIRECTORY]) {
8156            Ident::new("DATA DIRECTORY")
8157        } else if self.parse_keywords(&[Keyword::INDEX, Keyword::DIRECTORY]) {
8158            Ident::new("INDEX DIRECTORY")
8159        } else if self.parse_keyword(Keyword::KEY_BLOCK_SIZE) {
8160            Ident::new("KEY_BLOCK_SIZE")
8161        } else if self.parse_keyword(Keyword::ROW_FORMAT) {
8162            Ident::new("ROW_FORMAT")
8163        } else if self.parse_keyword(Keyword::PACK_KEYS) {
8164            Ident::new("PACK_KEYS")
8165        } else if self.parse_keyword(Keyword::STATS_AUTO_RECALC) {
8166            Ident::new("STATS_AUTO_RECALC")
8167        } else if self.parse_keyword(Keyword::STATS_PERSISTENT) {
8168            Ident::new("STATS_PERSISTENT")
8169        } else if self.parse_keyword(Keyword::STATS_SAMPLE_PAGES) {
8170            Ident::new("STATS_SAMPLE_PAGES")
8171        } else if self.parse_keyword(Keyword::DELAY_KEY_WRITE) {
8172            Ident::new("DELAY_KEY_WRITE")
8173        } else if self.parse_keyword(Keyword::COMPRESSION) {
8174            Ident::new("COMPRESSION")
8175        } else if self.parse_keyword(Keyword::ENCRYPTION) {
8176            Ident::new("ENCRYPTION")
8177        } else if self.parse_keyword(Keyword::MAX_ROWS) {
8178            Ident::new("MAX_ROWS")
8179        } else if self.parse_keyword(Keyword::MIN_ROWS) {
8180            Ident::new("MIN_ROWS")
8181        } else if self.parse_keyword(Keyword::AUTOEXTEND_SIZE) {
8182            Ident::new("AUTOEXTEND_SIZE")
8183        } else if self.parse_keyword(Keyword::AVG_ROW_LENGTH) {
8184            Ident::new("AVG_ROW_LENGTH")
8185        } else if self.parse_keyword(Keyword::CHECKSUM) {
8186            Ident::new("CHECKSUM")
8187        } else if self.parse_keyword(Keyword::CONNECTION) {
8188            Ident::new("CONNECTION")
8189        } else if self.parse_keyword(Keyword::ENGINE_ATTRIBUTE) {
8190            Ident::new("ENGINE_ATTRIBUTE")
8191        } else if self.parse_keyword(Keyword::PASSWORD) {
8192            Ident::new("PASSWORD")
8193        } else if self.parse_keyword(Keyword::SECONDARY_ENGINE_ATTRIBUTE) {
8194            Ident::new("SECONDARY_ENGINE_ATTRIBUTE")
8195        } else if self.parse_keyword(Keyword::INSERT_METHOD) {
8196            Ident::new("INSERT_METHOD")
8197        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
8198            Ident::new("AUTO_INCREMENT")
8199        } else {
8200            return Ok(None);
8201        };
8202
8203        let _ = self.consume_token(&Token::Eq);
8204
8205        let value = match self
8206            .maybe_parse(|parser| parser.parse_value())?
8207            .map(Expr::Value)
8208        {
8209            Some(expr) => expr,
8210            None => Expr::Identifier(self.parse_identifier()?),
8211        };
8212
8213        Ok(Some(SqlOption::KeyValue { key, value }))
8214    }
8215
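    /// Parses a sequence of plain `CREATE TABLE` options (see `parse_plain_option`),
    /// optionally separated by commas, for example (illustrative only) the trailing
    /// portion of:
    /// ```sql
    /// CREATE TABLE t (a INT) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4 COMMENT 'demo'
    /// ```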
8216    pub fn parse_plain_options(&mut self) -> Result<Vec<SqlOption>, ParserError> {
8217        let mut options = Vec::new();
8218
8219        while let Some(option) = self.parse_plain_option()? {
8220            options.push(option);
8221            // Some dialects allow the options to be comma-separated; consuming an
8222            // optional comma here for all dialects shouldn't introduce ambiguity.
8223            let _ = self.consume_token(&Token::Comma);
8224        }
8225
8226        Ok(options)
8227    }
8228
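    /// Parses an optional inline `COMMENT [=] '...'` clause, e.g.:
    /// ```sql
    /// COMMENT = 'stores customer records'
    /// ```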
8229    pub fn parse_optional_inline_comment(&mut self) -> Result<Option<CommentDef>, ParserError> {
8230        let comment = if self.parse_keyword(Keyword::COMMENT) {
8231            let has_eq = self.consume_token(&Token::Eq);
8232            let comment = self.parse_comment_value()?;
8233            Some(if has_eq {
8234                CommentDef::WithEq(comment)
8235            } else {
8236                CommentDef::WithoutEq(comment)
8237            })
8238        } else {
8239            None
8240        };
8241        Ok(comment)
8242    }
8243
8244    pub fn parse_comment_value(&mut self) -> Result<String, ParserError> {
8245        let next_token = self.next_token();
8246        let value = match next_token.token {
8247            Token::SingleQuotedString(str) => str,
8248            Token::DollarQuotedString(str) => str.value,
8249            _ => self.expected("string literal", next_token)?,
8250        };
8251        Ok(value)
8252    }
8253
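    /// Parses an optional parenthesized list of procedure parameters, for example
    /// (an illustrative sketch):
    /// ```sql
    /// (IN customer_id INT, OUT total DECIMAL(10, 2) = 0)
    /// ```
    /// Returns `Ok(Some(vec![]))` when no parameter list (or an empty `()`) is present.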
8254    pub fn parse_optional_procedure_parameters(
8255        &mut self,
8256    ) -> Result<Option<Vec<ProcedureParam>>, ParserError> {
8257        let mut params = vec![];
8258        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
8259            return Ok(Some(params));
8260        }
8261        loop {
8262            if let Token::Word(_) = self.peek_token().token {
8263                params.push(self.parse_procedure_param()?)
8264            }
8265            let comma = self.consume_token(&Token::Comma);
8266            if self.consume_token(&Token::RParen) {
8267                // allow a trailing comma, even though it's not in the standard
8268                break;
8269            } else if !comma {
8270                return self.expected("',' or ')' after parameter definition", self.peek_token());
8271            }
8272        }
8273        Ok(Some(params))
8274    }
8275
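    /// Parses the parenthesized column and constraint definitions of a `CREATE TABLE`
    /// statement, i.e. the part after the table name. A minimal illustrative example:
    /// ```sql
    /// (
    ///     id INT PRIMARY KEY,
    ///     name VARCHAR(100) NOT NULL,
    ///     CONSTRAINT uniq_name UNIQUE (name)
    /// )
    /// ```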
8276    pub fn parse_columns(&mut self) -> Result<(Vec<ColumnDef>, Vec<TableConstraint>), ParserError> {
8277        let mut columns = vec![];
8278        let mut constraints = vec![];
8279        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
8280            return Ok((columns, constraints));
8281        }
8282
8283        loop {
8284            if let Some(constraint) = self.parse_optional_table_constraint()? {
8285                constraints.push(constraint);
8286            } else if let Token::Word(_) = self.peek_token().token {
8287                columns.push(self.parse_column_def()?);
8288            } else {
8289                return self.expected("column name or constraint definition", self.peek_token());
8290            }
8291
8292            let comma = self.consume_token(&Token::Comma);
8293            let rparen = self.peek_token().token == Token::RParen;
8294
8295            if !comma && !rparen {
8296                return self.expected("',' or ')' after column definition", self.peek_token());
8297            };
8298
8299            if rparen
8300                && (!comma
8301                    || self.dialect.supports_column_definition_trailing_commas()
8302                    || self.options.trailing_commas)
8303            {
8304                let _ = self.consume_token(&Token::RParen);
8305                break;
8306            }
8307        }
8308
8309        Ok((columns, constraints))
8310    }
8311
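    /// Parses a single procedure parameter: an optional `IN`/`OUT`/`INOUT` mode,
    /// a name, a data type, and an optional `= <expr>` default, e.g.:
    /// ```sql
    /// INOUT counter BIGINT = 0
    /// ```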
8312    pub fn parse_procedure_param(&mut self) -> Result<ProcedureParam, ParserError> {
8313        let mode = if self.parse_keyword(Keyword::IN) {
8314            Some(ArgMode::In)
8315        } else if self.parse_keyword(Keyword::OUT) {
8316            Some(ArgMode::Out)
8317        } else if self.parse_keyword(Keyword::INOUT) {
8318            Some(ArgMode::InOut)
8319        } else {
8320            None
8321        };
8322        let name = self.parse_identifier()?;
8323        let data_type = self.parse_data_type()?;
8324        let default = if self.consume_token(&Token::Eq) {
8325            Some(self.parse_expr()?)
8326        } else {
8327            None
8328        };
8329
8330        Ok(ProcedureParam {
8331            name,
8332            data_type,
8333            mode,
8334            default,
8335        })
8336    }
8337
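    /// Parses a single column definition: a column name, a data type (which SQLite
    /// allows to be omitted), and any number of column options, e.g.:
    /// ```sql
    /// email VARCHAR(255) NOT NULL UNIQUE DEFAULT 'unknown'
    /// ```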
8338    pub fn parse_column_def(&mut self) -> Result<ColumnDef, ParserError> {
8339        let col_name = self.parse_identifier()?;
8340        let data_type = if self.is_column_type_sqlite_unspecified() {
8341            DataType::Unspecified
8342        } else {
8343            self.parse_data_type()?
8344        };
8345        let mut options = vec![];
8346        loop {
8347            if self.parse_keyword(Keyword::CONSTRAINT) {
8348                let name = Some(self.parse_identifier()?);
8349                if let Some(option) = self.parse_optional_column_option()? {
8350                    options.push(ColumnOptionDef { name, option });
8351                } else {
8352                    return self.expected(
8353                        "constraint details after CONSTRAINT <name>",
8354                        self.peek_token(),
8355                    );
8356                }
8357            } else if let Some(option) = self.parse_optional_column_option()? {
8358                options.push(ColumnOptionDef { name: None, option });
8359            } else {
8360                break;
8361            };
8362        }
8363        Ok(ColumnDef {
8364            name: col_name,
8365            data_type,
8366            options,
8367        })
8368    }
8369
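    /// Returns true when the SQLite dialect is in use and the column type has been
    /// omitted, which SQLite permits, e.g. for both columns in:
    /// ```sql
    /// CREATE TABLE t (a, b PRIMARY KEY)
    /// ```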
8370    fn is_column_type_sqlite_unspecified(&mut self) -> bool {
8371        if dialect_of!(self is SQLiteDialect) {
8372            match self.peek_token().token {
8373                Token::Word(word) => matches!(
8374                    word.keyword,
8375                    Keyword::CONSTRAINT
8376                        | Keyword::PRIMARY
8377                        | Keyword::NOT
8378                        | Keyword::UNIQUE
8379                        | Keyword::CHECK
8380                        | Keyword::DEFAULT
8381                        | Keyword::COLLATE
8382                        | Keyword::REFERENCES
8383                        | Keyword::GENERATED
8384                        | Keyword::AS
8385                ),
8386                _ => true, // e.g. comma immediately after column name
8387            }
8388        } else {
8389            false
8390        }
8391    }
8392
8393    pub fn parse_optional_column_option(&mut self) -> Result<Option<ColumnOption>, ParserError> {
8394        if let Some(option) = self.dialect.parse_column_option(self)? {
8395            return option;
8396        }
8397
8398        self.with_state(
8399            ColumnDefinition,
8400            |parser| -> Result<Option<ColumnOption>, ParserError> {
8401                parser.parse_optional_column_option_inner()
8402            },
8403        )
8404    }
8405
8406    fn parse_optional_column_option_inner(&mut self) -> Result<Option<ColumnOption>, ParserError> {
8407        if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
8408            Ok(Some(ColumnOption::CharacterSet(
8409                self.parse_object_name(false)?,
8410            )))
8411        } else if self.parse_keywords(&[Keyword::COLLATE]) {
8412            Ok(Some(ColumnOption::Collation(
8413                self.parse_object_name(false)?,
8414            )))
8415        } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
8416            Ok(Some(ColumnOption::NotNull))
8417        } else if self.parse_keywords(&[Keyword::COMMENT]) {
8418            Ok(Some(ColumnOption::Comment(self.parse_comment_value()?)))
8419        } else if self.parse_keyword(Keyword::NULL) {
8420            Ok(Some(ColumnOption::Null))
8421        } else if self.parse_keyword(Keyword::DEFAULT) {
8422            Ok(Some(ColumnOption::Default(
8423                self.parse_column_option_expr()?,
8424            )))
8425        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
8426            && self.parse_keyword(Keyword::MATERIALIZED)
8427        {
8428            Ok(Some(ColumnOption::Materialized(
8429                self.parse_column_option_expr()?,
8430            )))
8431        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
8432            && self.parse_keyword(Keyword::ALIAS)
8433        {
8434            Ok(Some(ColumnOption::Alias(self.parse_column_option_expr()?)))
8435        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
8436            && self.parse_keyword(Keyword::EPHEMERAL)
8437        {
8438            // The expression is optional for the EPHEMERAL syntax, so we need to check
8439            // if the column definition has remaining tokens before parsing the expression.
8440            if matches!(self.peek_token().token, Token::Comma | Token::RParen) {
8441                Ok(Some(ColumnOption::Ephemeral(None)))
8442            } else {
8443                Ok(Some(ColumnOption::Ephemeral(Some(
8444                    self.parse_column_option_expr()?,
8445                ))))
8446            }
8447        } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
8448            let characteristics = self.parse_constraint_characteristics()?;
8449            Ok(Some(
8450                PrimaryKeyConstraint {
8451                    name: None,
8452                    index_name: None,
8453                    index_type: None,
8454                    columns: vec![],
8455                    index_options: vec![],
8456                    characteristics,
8457                }
8458                .into(),
8459            ))
8460        } else if self.parse_keyword(Keyword::UNIQUE) {
8461            let characteristics = self.parse_constraint_characteristics()?;
8462            Ok(Some(
8463                UniqueConstraint {
8464                    name: None,
8465                    index_name: None,
8466                    index_type_display: KeyOrIndexDisplay::None,
8467                    index_type: None,
8468                    columns: vec![],
8469                    index_options: vec![],
8470                    characteristics,
8471                    nulls_distinct: NullsDistinctOption::None,
8472                }
8473                .into(),
8474            ))
8475        } else if self.parse_keyword(Keyword::REFERENCES) {
8476            let foreign_table = self.parse_object_name(false)?;
8477            // PostgreSQL allows omitting the column list and
8478            // uses the primary key column of the foreign table by default
8479            let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
8480            let mut match_kind = None;
8481            let mut on_delete = None;
8482            let mut on_update = None;
8483            loop {
8484                if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
8485                    match_kind = Some(self.parse_match_kind()?);
8486                } else if on_delete.is_none()
8487                    && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
8488                {
8489                    on_delete = Some(self.parse_referential_action()?);
8490                } else if on_update.is_none()
8491                    && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
8492                {
8493                    on_update = Some(self.parse_referential_action()?);
8494                } else {
8495                    break;
8496                }
8497            }
8498            let characteristics = self.parse_constraint_characteristics()?;
8499
8500            Ok(Some(
8501                ForeignKeyConstraint {
8502                    name: None,       // Column-level constraints don't have names
8503                    index_name: None, // Not applicable for column-level constraints
8504                    columns: vec![],  // Not applicable for column-level constraints
8505                    foreign_table,
8506                    referred_columns,
8507                    on_delete,
8508                    on_update,
8509                    match_kind,
8510                    characteristics,
8511                }
8512                .into(),
8513            ))
8514        } else if self.parse_keyword(Keyword::CHECK) {
8515            self.expect_token(&Token::LParen)?;
8516            // since `CHECK` requires parentheses, we can parse the inner expression in ParserState::Normal
8517            let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
8518            self.expect_token(&Token::RParen)?;
8519            Ok(Some(
8520                CheckConstraint {
8521                    name: None, // Column-level check constraints don't have names
8522                    expr: Box::new(expr),
8523                    enforced: None, // Could be extended later to support MySQL ENFORCED/NOT ENFORCED
8524                }
8525                .into(),
8526            ))
8527        } else if self.parse_keyword(Keyword::AUTO_INCREMENT)
8528            && dialect_of!(self is MySqlDialect | GenericDialect)
8529        {
8530            // Support AUTO_INCREMENT for MySQL
8531            Ok(Some(ColumnOption::DialectSpecific(vec![
8532                Token::make_keyword("AUTO_INCREMENT"),
8533            ])))
8534        } else if self.parse_keyword(Keyword::AUTOINCREMENT)
8535            && dialect_of!(self is SQLiteDialect | GenericDialect)
8536        {
8537            // Support AUTOINCREMENT for SQLite
8538            Ok(Some(ColumnOption::DialectSpecific(vec![
8539                Token::make_keyword("AUTOINCREMENT"),
8540            ])))
8541        } else if self.parse_keyword(Keyword::ASC)
8542            && self.dialect.supports_asc_desc_in_column_definition()
8543        {
8544            // Support ASC for SQLite
8545            Ok(Some(ColumnOption::DialectSpecific(vec![
8546                Token::make_keyword("ASC"),
8547            ])))
8548        } else if self.parse_keyword(Keyword::DESC)
8549            && self.dialect.supports_asc_desc_in_column_definition()
8550        {
8551            // Support DESC for SQLite
8552            Ok(Some(ColumnOption::DialectSpecific(vec![
8553                Token::make_keyword("DESC"),
8554            ])))
8555        } else if self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
8556            && dialect_of!(self is MySqlDialect | GenericDialect)
8557        {
8558            let expr = self.parse_column_option_expr()?;
8559            Ok(Some(ColumnOption::OnUpdate(expr)))
8560        } else if self.parse_keyword(Keyword::GENERATED) {
8561            self.parse_optional_column_option_generated()
8562        } else if dialect_of!(self is BigQueryDialect | GenericDialect)
8563            && self.parse_keyword(Keyword::OPTIONS)
8564        {
8565            self.prev_token();
8566            Ok(Some(ColumnOption::Options(
8567                self.parse_options(Keyword::OPTIONS)?,
8568            )))
8569        } else if self.parse_keyword(Keyword::AS)
8570            && dialect_of!(self is MySqlDialect | SQLiteDialect | DuckDbDialect | GenericDialect)
8571        {
8572            self.parse_optional_column_option_as()
8573        } else if self.parse_keyword(Keyword::SRID)
8574            && dialect_of!(self is MySqlDialect | GenericDialect)
8575        {
8576            Ok(Some(ColumnOption::Srid(Box::new(
8577                self.parse_column_option_expr()?,
8578            ))))
8579        } else if self.parse_keyword(Keyword::IDENTITY)
8580            && dialect_of!(self is MsSqlDialect | GenericDialect)
8581        {
8582            let parameters = if self.consume_token(&Token::LParen) {
8583                let seed = self.parse_number()?;
8584                self.expect_token(&Token::Comma)?;
8585                let increment = self.parse_number()?;
8586                self.expect_token(&Token::RParen)?;
8587
8588                Some(IdentityPropertyFormatKind::FunctionCall(
8589                    IdentityParameters { seed, increment },
8590                ))
8591            } else {
8592                None
8593            };
8594            Ok(Some(ColumnOption::Identity(
8595                IdentityPropertyKind::Identity(IdentityProperty {
8596                    parameters,
8597                    order: None,
8598                }),
8599            )))
8600        } else if dialect_of!(self is SQLiteDialect | GenericDialect)
8601            && self.parse_keywords(&[Keyword::ON, Keyword::CONFLICT])
8602        {
8603            // Support ON CONFLICT for SQLite
8604            Ok(Some(ColumnOption::OnConflict(
8605                self.expect_one_of_keywords(&[
8606                    Keyword::ROLLBACK,
8607                    Keyword::ABORT,
8608                    Keyword::FAIL,
8609                    Keyword::IGNORE,
8610                    Keyword::REPLACE,
8611                ])?,
8612            )))
8613        } else if self.parse_keyword(Keyword::INVISIBLE) {
8614            Ok(Some(ColumnOption::Invisible))
8615        } else {
8616            Ok(None)
8617        }
8618    }
8619
8620    /// When parsing some column option expressions we need to revert to [ParserState::Normal] since
8621    /// `NOT NULL` is allowed as an alias for `IS NOT NULL`.
8622    /// In those cases we use this helper instead of calling [Parser::parse_expr] directly.
8623    ///
8624    /// For example, consider these `CREATE TABLE` statements:
8625    /// ```sql
8626    /// CREATE TABLE foo (abc BOOL DEFAULT (42 NOT NULL) NOT NULL);
8627    /// ```
8628    /// vs
8629    /// ```sql
8630    /// CREATE TABLE foo (abc BOOL NOT NULL);
8631    /// ```
8632    ///
8633    /// In the first statement we should parse the inner portion of `(42 NOT NULL)` as [Expr::IsNotNull],
8634    /// whereas in both statements the trailing `NOT NULL` should only be parsed as a
8635    /// [ColumnOption::NotNull].
8636    fn parse_column_option_expr(&mut self) -> Result<Expr, ParserError> {
8637        if self.peek_token_ref().token == Token::LParen {
8638            let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_prefix())?;
8639            Ok(expr)
8640        } else {
8641            Ok(self.parse_expr()?)
8642        }
8643    }
8644
8645    pub(crate) fn parse_tag(&mut self) -> Result<Tag, ParserError> {
8646        let name = self.parse_object_name(false)?;
8647        self.expect_token(&Token::Eq)?;
8648        let value = self.parse_literal_string()?;
8649
8650        Ok(Tag::new(name, value))
8651    }
8652
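    /// Parses the remainder of a `GENERATED ...` column option; the caller has already
    /// consumed the `GENERATED` keyword. Illustrative inputs (minus that keyword):
    /// ```sql
    /// ALWAYS AS IDENTITY (START WITH 1 INCREMENT BY 1)
    /// BY DEFAULT AS IDENTITY
    /// ALWAYS AS (price * quantity) STORED
    /// ```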
8653    fn parse_optional_column_option_generated(
8654        &mut self,
8655    ) -> Result<Option<ColumnOption>, ParserError> {
8656        if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS, Keyword::IDENTITY]) {
8657            let mut sequence_options = vec![];
8658            if self.expect_token(&Token::LParen).is_ok() {
8659                sequence_options = self.parse_create_sequence_options()?;
8660                self.expect_token(&Token::RParen)?;
8661            }
8662            Ok(Some(ColumnOption::Generated {
8663                generated_as: GeneratedAs::Always,
8664                sequence_options: Some(sequence_options),
8665                generation_expr: None,
8666                generation_expr_mode: None,
8667                generated_keyword: true,
8668            }))
8669        } else if self.parse_keywords(&[
8670            Keyword::BY,
8671            Keyword::DEFAULT,
8672            Keyword::AS,
8673            Keyword::IDENTITY,
8674        ]) {
8675            let mut sequence_options = vec![];
8676            if self.expect_token(&Token::LParen).is_ok() {
8677                sequence_options = self.parse_create_sequence_options()?;
8678                self.expect_token(&Token::RParen)?;
8679            }
8680            Ok(Some(ColumnOption::Generated {
8681                generated_as: GeneratedAs::ByDefault,
8682                sequence_options: Some(sequence_options),
8683                generation_expr: None,
8684                generation_expr_mode: None,
8685                generated_keyword: true,
8686            }))
8687        } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS]) {
8688            if self.expect_token(&Token::LParen).is_ok() {
8689                let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
8690                self.expect_token(&Token::RParen)?;
8691                let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
8692                    Ok((
8693                        GeneratedAs::ExpStored,
8694                        Some(GeneratedExpressionMode::Stored),
8695                    ))
8696                } else if dialect_of!(self is PostgreSqlDialect) {
8697                    // Postgres' `AS IDENTITY` branches are handled above; this branch requires STORED
8698                    self.expected("STORED", self.peek_token())
8699                } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
8700                    Ok((GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual)))
8701                } else {
8702                    Ok((GeneratedAs::Always, None))
8703                }?;
8704
8705                Ok(Some(ColumnOption::Generated {
8706                    generated_as: gen_as,
8707                    sequence_options: None,
8708                    generation_expr: Some(expr),
8709                    generation_expr_mode: expr_mode,
8710                    generated_keyword: true,
8711                }))
8712            } else {
8713                Ok(None)
8714            }
8715        } else {
8716            Ok(None)
8717        }
8718    }
8719
8720    fn parse_optional_column_option_as(&mut self) -> Result<Option<ColumnOption>, ParserError> {
8721        // Some DBs allow 'AS (expr)', shorthand for GENERATED ALWAYS AS
8722        self.expect_token(&Token::LParen)?;
8723        let expr = self.parse_expr()?;
8724        self.expect_token(&Token::RParen)?;
8725
8726        let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
8727            (
8728                GeneratedAs::ExpStored,
8729                Some(GeneratedExpressionMode::Stored),
8730            )
8731        } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
8732            (GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual))
8733        } else {
8734            (GeneratedAs::Always, None)
8735        };
8736
8737        Ok(Some(ColumnOption::Generated {
8738            generated_as: gen_as,
8739            sequence_options: None,
8740            generation_expr: Some(expr),
8741            generation_expr_mode: expr_mode,
8742            generated_keyword: false,
8743        }))
8744    }
8745
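    /// Parses an optional Hive-style `CLUSTERED BY` clause, e.g.:
    /// ```sql
    /// CLUSTERED BY (user_id) SORTED BY (created_at ASC) INTO 16 BUCKETS
    /// ```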
8746    pub fn parse_optional_clustered_by(&mut self) -> Result<Option<ClusteredBy>, ParserError> {
8747        let clustered_by = if dialect_of!(self is HiveDialect|GenericDialect)
8748            && self.parse_keywords(&[Keyword::CLUSTERED, Keyword::BY])
8749        {
8750            let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
8751
8752            let sorted_by = if self.parse_keywords(&[Keyword::SORTED, Keyword::BY]) {
8753                self.expect_token(&Token::LParen)?;
8754                let sorted_by_columns = self.parse_comma_separated(|p| p.parse_order_by_expr())?;
8755                self.expect_token(&Token::RParen)?;
8756                Some(sorted_by_columns)
8757            } else {
8758                None
8759            };
8760
8761            self.expect_keyword_is(Keyword::INTO)?;
8762            let num_buckets = self.parse_number_value()?.value;
8763            self.expect_keyword_is(Keyword::BUCKETS)?;
8764            Some(ClusteredBy {
8765                columns,
8766                sorted_by,
8767                num_buckets,
8768            })
8769        } else {
8770            None
8771        };
8772        Ok(clustered_by)
8773    }
8774
8775    pub fn parse_referential_action(&mut self) -> Result<ReferentialAction, ParserError> {
8776        if self.parse_keyword(Keyword::RESTRICT) {
8777            Ok(ReferentialAction::Restrict)
8778        } else if self.parse_keyword(Keyword::CASCADE) {
8779            Ok(ReferentialAction::Cascade)
8780        } else if self.parse_keywords(&[Keyword::SET, Keyword::NULL]) {
8781            Ok(ReferentialAction::SetNull)
8782        } else if self.parse_keywords(&[Keyword::NO, Keyword::ACTION]) {
8783            Ok(ReferentialAction::NoAction)
8784        } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
8785            Ok(ReferentialAction::SetDefault)
8786        } else {
8787            self.expected(
8788                "one of RESTRICT, CASCADE, SET NULL, NO ACTION or SET DEFAULT",
8789                self.peek_token(),
8790            )
8791        }
8792    }
8793
8794    pub fn parse_match_kind(&mut self) -> Result<ConstraintReferenceMatchKind, ParserError> {
8795        if self.parse_keyword(Keyword::FULL) {
8796            Ok(ConstraintReferenceMatchKind::Full)
8797        } else if self.parse_keyword(Keyword::PARTIAL) {
8798            Ok(ConstraintReferenceMatchKind::Partial)
8799        } else if self.parse_keyword(Keyword::SIMPLE) {
8800            Ok(ConstraintReferenceMatchKind::Simple)
8801        } else {
8802            self.expected("one of FULL, PARTIAL or SIMPLE", self.peek_token())
8803        }
8804    }
8805
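    /// Parses optional constraint characteristics in any order, returning `None` if
    /// none are present. For example:
    /// ```sql
    /// DEFERRABLE INITIALLY DEFERRED
    /// NOT ENFORCED
    /// ```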
8806    pub fn parse_constraint_characteristics(
8807        &mut self,
8808    ) -> Result<Option<ConstraintCharacteristics>, ParserError> {
8809        let mut cc = ConstraintCharacteristics::default();
8810
8811        loop {
8812            if cc.deferrable.is_none() && self.parse_keywords(&[Keyword::NOT, Keyword::DEFERRABLE])
8813            {
8814                cc.deferrable = Some(false);
8815            } else if cc.deferrable.is_none() && self.parse_keyword(Keyword::DEFERRABLE) {
8816                cc.deferrable = Some(true);
8817            } else if cc.initially.is_none() && self.parse_keyword(Keyword::INITIALLY) {
8818                if self.parse_keyword(Keyword::DEFERRED) {
8819                    cc.initially = Some(DeferrableInitial::Deferred);
8820                } else if self.parse_keyword(Keyword::IMMEDIATE) {
8821                    cc.initially = Some(DeferrableInitial::Immediate);
8822                } else {
8823                    self.expected("one of DEFERRED or IMMEDIATE", self.peek_token())?;
8824                }
8825            } else if cc.enforced.is_none() && self.parse_keyword(Keyword::ENFORCED) {
8826                cc.enforced = Some(true);
8827            } else if cc.enforced.is_none()
8828                && self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED])
8829            {
8830                cc.enforced = Some(false);
8831            } else {
8832                break;
8833            }
8834        }
8835
8836        if cc.deferrable.is_some() || cc.initially.is_some() || cc.enforced.is_some() {
8837            Ok(Some(cc))
8838        } else {
8839            Ok(None)
8840        }
8841    }
8842
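    /// Parses an optional table-level constraint, with or without a leading
    /// `CONSTRAINT <name>`. A few illustrative examples of accepted constraints:
    /// ```sql
    /// CONSTRAINT fk_orders FOREIGN KEY (customer_id) REFERENCES customers (id) ON DELETE CASCADE
    /// PRIMARY KEY (id)
    /// UNIQUE (email)
    /// CHECK (amount > 0)
    /// ```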
8843    pub fn parse_optional_table_constraint(
8844        &mut self,
8845    ) -> Result<Option<TableConstraint>, ParserError> {
8846        let name = if self.parse_keyword(Keyword::CONSTRAINT) {
8847            Some(self.parse_identifier()?)
8848        } else {
8849            None
8850        };
8851
8852        let next_token = self.next_token();
8853        match next_token.token {
8854            Token::Word(w) if w.keyword == Keyword::UNIQUE => {
8855                let index_type_display = self.parse_index_type_display();
8856                if !dialect_of!(self is GenericDialect | MySqlDialect)
8857                    && !index_type_display.is_none()
8858                {
8859                    return self
8860                        .expected("`index_name` or `(column_name [, ...])`", self.peek_token());
8861                }
8862
8863                let nulls_distinct = self.parse_optional_nulls_distinct()?;
8864
8865                // optional index name
8866                let index_name = self.parse_optional_ident()?;
8867                let index_type = self.parse_optional_using_then_index_type()?;
8868
8869                let columns = self.parse_parenthesized_index_column_list()?;
8870                let index_options = self.parse_index_options()?;
8871                let characteristics = self.parse_constraint_characteristics()?;
8872                Ok(Some(
8873                    UniqueConstraint {
8874                        name,
8875                        index_name,
8876                        index_type_display,
8877                        index_type,
8878                        columns,
8879                        index_options,
8880                        characteristics,
8881                        nulls_distinct,
8882                    }
8883                    .into(),
8884                ))
8885            }
8886            Token::Word(w) if w.keyword == Keyword::PRIMARY => {
8887                // `PRIMARY` is always followed by `KEY`
8888                self.expect_keyword_is(Keyword::KEY)?;
8889
8890                // optional index name
8891                let index_name = self.parse_optional_ident()?;
8892                let index_type = self.parse_optional_using_then_index_type()?;
8893
8894                let columns = self.parse_parenthesized_index_column_list()?;
8895                let index_options = self.parse_index_options()?;
8896                let characteristics = self.parse_constraint_characteristics()?;
8897                Ok(Some(
8898                    PrimaryKeyConstraint {
8899                        name,
8900                        index_name,
8901                        index_type,
8902                        columns,
8903                        index_options,
8904                        characteristics,
8905                    }
8906                    .into(),
8907                ))
8908            }
8909            Token::Word(w) if w.keyword == Keyword::FOREIGN => {
8910                self.expect_keyword_is(Keyword::KEY)?;
8911                let index_name = self.parse_optional_ident()?;
8912                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
8913                self.expect_keyword_is(Keyword::REFERENCES)?;
8914                let foreign_table = self.parse_object_name(false)?;
8915                let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
8916                let mut match_kind = None;
8917                let mut on_delete = None;
8918                let mut on_update = None;
8919                loop {
8920                    if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
8921                        match_kind = Some(self.parse_match_kind()?);
8922                    } else if on_delete.is_none()
8923                        && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
8924                    {
8925                        on_delete = Some(self.parse_referential_action()?);
8926                    } else if on_update.is_none()
8927                        && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
8928                    {
8929                        on_update = Some(self.parse_referential_action()?);
8930                    } else {
8931                        break;
8932                    }
8933                }
8934
8935                let characteristics = self.parse_constraint_characteristics()?;
8936
8937                Ok(Some(
8938                    ForeignKeyConstraint {
8939                        name,
8940                        index_name,
8941                        columns,
8942                        foreign_table,
8943                        referred_columns,
8944                        on_delete,
8945                        on_update,
8946                        match_kind,
8947                        characteristics,
8948                    }
8949                    .into(),
8950                ))
8951            }
8952            Token::Word(w) if w.keyword == Keyword::CHECK => {
8953                self.expect_token(&Token::LParen)?;
8954                let expr = Box::new(self.parse_expr()?);
8955                self.expect_token(&Token::RParen)?;
8956
8957                let enforced = if self.parse_keyword(Keyword::ENFORCED) {
8958                    Some(true)
8959                } else if self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED]) {
8960                    Some(false)
8961                } else {
8962                    None
8963                };
8964
8965                Ok(Some(
8966                    CheckConstraint {
8967                        name,
8968                        expr,
8969                        enforced,
8970                    }
8971                    .into(),
8972                ))
8973            }
8974            Token::Word(w)
8975                if (w.keyword == Keyword::INDEX || w.keyword == Keyword::KEY)
8976                    && dialect_of!(self is GenericDialect | MySqlDialect)
8977                    && name.is_none() =>
8978            {
8979                let display_as_key = w.keyword == Keyword::KEY;
8980
8981                let name = match self.peek_token().token {
8982                    Token::Word(word) if word.keyword == Keyword::USING => None,
8983                    _ => self.parse_optional_ident()?,
8984                };
8985
8986                let index_type = self.parse_optional_using_then_index_type()?;
8987                let columns = self.parse_parenthesized_index_column_list()?;
8988                let index_options = self.parse_index_options()?;
8989
8990                Ok(Some(
8991                    IndexConstraint {
8992                        display_as_key,
8993                        name,
8994                        index_type,
8995                        columns,
8996                        index_options,
8997                    }
8998                    .into(),
8999                ))
9000            }
9001            Token::Word(w)
9002                if (w.keyword == Keyword::FULLTEXT || w.keyword == Keyword::SPATIAL)
9003                    && dialect_of!(self is GenericDialect | MySqlDialect) =>
9004            {
9005                if let Some(name) = name {
9006                    return self.expected(
9007                        "FULLTEXT or SPATIAL option without constraint name",
9008                        TokenWithSpan {
9009                            token: Token::make_keyword(&name.to_string()),
9010                            span: next_token.span,
9011                        },
9012                    );
9013                }
9014
9015                let fulltext = w.keyword == Keyword::FULLTEXT;
9016
9017                let index_type_display = self.parse_index_type_display();
9018
9019                let opt_index_name = self.parse_optional_ident()?;
9020
9021                let columns = self.parse_parenthesized_index_column_list()?;
9022
9023                Ok(Some(
9024                    FullTextOrSpatialConstraint {
9025                        fulltext,
9026                        index_type_display,
9027                        opt_index_name,
9028                        columns,
9029                    }
9030                    .into(),
9031                ))
9032            }
9033            _ => {
9034                if name.is_some() {
9035                    self.expected("PRIMARY, UNIQUE, FOREIGN, or CHECK", next_token)
9036                } else {
9037                    self.prev_token();
9038                    Ok(None)
9039                }
9040            }
9041        }
9042    }
9043
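    /// Parses the optional PostgreSQL-style `NULLS [NOT] DISTINCT` modifier of a
    /// `UNIQUE` constraint, e.g.:
    /// ```sql
    /// UNIQUE NULLS NOT DISTINCT (email)
    /// ```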
9044    fn parse_optional_nulls_distinct(&mut self) -> Result<NullsDistinctOption, ParserError> {
9045        Ok(if self.parse_keyword(Keyword::NULLS) {
9046            let not = self.parse_keyword(Keyword::NOT);
9047            self.expect_keyword_is(Keyword::DISTINCT)?;
9048            if not {
9049                NullsDistinctOption::NotDistinct
9050            } else {
9051                NullsDistinctOption::Distinct
9052            }
9053        } else {
9054            NullsDistinctOption::None
9055        })
9056    }
9057
9058    pub fn maybe_parse_options(
9059        &mut self,
9060        keyword: Keyword,
9061    ) -> Result<Option<Vec<SqlOption>>, ParserError> {
9062        if let Token::Word(word) = self.peek_token().token {
9063            if word.keyword == keyword {
9064                return Ok(Some(self.parse_options(keyword)?));
9065            }
9066        };
9067        Ok(None)
9068    }
9069
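    /// Parses `<keyword> ( <option> [, ...] )`, returning an empty list when the
    /// keyword is absent. For example, with `Keyword::OPTIONS`:
    /// ```sql
    /// OPTIONS (description = 'customer table', max_staleness = 10)
    /// ```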
9070    pub fn parse_options(&mut self, keyword: Keyword) -> Result<Vec<SqlOption>, ParserError> {
9071        if self.parse_keyword(keyword) {
9072            self.expect_token(&Token::LParen)?;
9073            let options = self.parse_comma_separated0(Parser::parse_sql_option, Token::RParen)?;
9074            self.expect_token(&Token::RParen)?;
9075            Ok(options)
9076        } else {
9077            Ok(vec![])
9078        }
9079    }
9080
9081    pub fn parse_options_with_keywords(
9082        &mut self,
9083        keywords: &[Keyword],
9084    ) -> Result<Vec<SqlOption>, ParserError> {
9085        if self.parse_keywords(keywords) {
9086            self.expect_token(&Token::LParen)?;
9087            let options = self.parse_comma_separated(Parser::parse_sql_option)?;
9088            self.expect_token(&Token::RParen)?;
9089            Ok(options)
9090        } else {
9091            Ok(vec![])
9092        }
9093    }
9094
9095    pub fn parse_index_type(&mut self) -> Result<IndexType, ParserError> {
9096        Ok(if self.parse_keyword(Keyword::BTREE) {
9097            IndexType::BTree
9098        } else if self.parse_keyword(Keyword::HASH) {
9099            IndexType::Hash
9100        } else if self.parse_keyword(Keyword::GIN) {
9101            IndexType::GIN
9102        } else if self.parse_keyword(Keyword::GIST) {
9103            IndexType::GiST
9104        } else if self.parse_keyword(Keyword::SPGIST) {
9105            IndexType::SPGiST
9106        } else if self.parse_keyword(Keyword::BRIN) {
9107            IndexType::BRIN
9108        } else if self.parse_keyword(Keyword::BLOOM) {
9109            IndexType::Bloom
9110        } else {
9111            IndexType::Custom(self.parse_identifier()?)
9112        })
9113    }
9114
9115    /// Optionally parse the `USING` keyword, followed by an [IndexType]
9116    /// Example:
9117    /// ```sql
9118    /// USING BTREE (name, age DESC)
9119    /// ```
9120    pub fn parse_optional_using_then_index_type(
9121        &mut self,
9122    ) -> Result<Option<IndexType>, ParserError> {
9123        if self.parse_keyword(Keyword::USING) {
9124            Ok(Some(self.parse_index_type()?))
9125        } else {
9126            Ok(None)
9127        }
9128    }
9129
9130    /// Parse an optional `ident`, which is typically a name such as
9131    /// `window_name` or `index_name`.
9132    pub fn parse_optional_ident(&mut self) -> Result<Option<Ident>, ParserError> {
9133        self.maybe_parse(|parser| parser.parse_identifier())
9134    }
9135
9136    #[must_use]
9137    pub fn parse_index_type_display(&mut self) -> KeyOrIndexDisplay {
9138        if self.parse_keyword(Keyword::KEY) {
9139            KeyOrIndexDisplay::Key
9140        } else if self.parse_keyword(Keyword::INDEX) {
9141            KeyOrIndexDisplay::Index
9142        } else {
9143            KeyOrIndexDisplay::None
9144        }
9145    }
9146
9147    pub fn parse_optional_index_option(&mut self) -> Result<Option<IndexOption>, ParserError> {
9148        if let Some(index_type) = self.parse_optional_using_then_index_type()? {
9149            Ok(Some(IndexOption::Using(index_type)))
9150        } else if self.parse_keyword(Keyword::COMMENT) {
9151            let s = self.parse_literal_string()?;
9152            Ok(Some(IndexOption::Comment(s)))
9153        } else {
9154            Ok(None)
9155        }
9156    }
9157
9158    pub fn parse_index_options(&mut self) -> Result<Vec<IndexOption>, ParserError> {
9159        let mut options = Vec::new();
9160
9161        loop {
9162            match self.parse_optional_index_option()? {
9163                Some(index_option) => options.push(index_option),
9164                None => return Ok(options),
9165            }
9166        }
9167    }
9168
9169    pub fn parse_sql_option(&mut self) -> Result<SqlOption, ParserError> {
9170        let is_mssql = dialect_of!(self is MsSqlDialect|GenericDialect);
9171
9172        match self.peek_token().token {
9173            Token::Word(w) if w.keyword == Keyword::HEAP && is_mssql => {
9174                Ok(SqlOption::Ident(self.parse_identifier()?))
9175            }
9176            Token::Word(w) if w.keyword == Keyword::PARTITION && is_mssql => {
9177                self.parse_option_partition()
9178            }
9179            Token::Word(w) if w.keyword == Keyword::CLUSTERED && is_mssql => {
9180                self.parse_option_clustered()
9181            }
9182            _ => {
9183                let name = self.parse_identifier()?;
9184                self.expect_token(&Token::Eq)?;
9185                let value = self.parse_expr()?;
9186
9187                Ok(SqlOption::KeyValue { key: name, value })
9188            }
9189        }
9190    }
9191
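    /// Parses an MSSQL-style `CLUSTERED ...` table option. Illustrative forms:
    /// ```sql
    /// CLUSTERED COLUMNSTORE INDEX
    /// CLUSTERED COLUMNSTORE INDEX ORDER (a, b)
    /// CLUSTERED INDEX (a ASC, b DESC)
    /// ```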
9192    pub fn parse_option_clustered(&mut self) -> Result<SqlOption, ParserError> {
9193        if self.parse_keywords(&[
9194            Keyword::CLUSTERED,
9195            Keyword::COLUMNSTORE,
9196            Keyword::INDEX,
9197            Keyword::ORDER,
9198        ]) {
9199            Ok(SqlOption::Clustered(
9200                TableOptionsClustered::ColumnstoreIndexOrder(
9201                    self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
9202                ),
9203            ))
9204        } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::COLUMNSTORE, Keyword::INDEX]) {
9205            Ok(SqlOption::Clustered(
9206                TableOptionsClustered::ColumnstoreIndex,
9207            ))
9208        } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::INDEX]) {
9209            self.expect_token(&Token::LParen)?;
9210
9211            let columns = self.parse_comma_separated(|p| {
9212                let name = p.parse_identifier()?;
9213                let asc = p.parse_asc_desc();
9214
9215                Ok(ClusteredIndex { name, asc })
9216            })?;
9217
9218            self.expect_token(&Token::RParen)?;
9219
9220            Ok(SqlOption::Clustered(TableOptionsClustered::Index(columns)))
9221        } else {
9222            Err(ParserError::ParserError(
9223                "invalid CLUSTERED sequence".to_string(),
9224            ))
9225        }
9226    }
9227
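    /// Parses an MSSQL-style `PARTITION` table option, e.g.:
    /// ```sql
    /// PARTITION (id RANGE LEFT FOR VALUES (10, 20, 30))
    /// ```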
9228    pub fn parse_option_partition(&mut self) -> Result<SqlOption, ParserError> {
9229        self.expect_keyword_is(Keyword::PARTITION)?;
9230        self.expect_token(&Token::LParen)?;
9231        let column_name = self.parse_identifier()?;
9232
9233        self.expect_keyword_is(Keyword::RANGE)?;
9234        let range_direction = if self.parse_keyword(Keyword::LEFT) {
9235            Some(PartitionRangeDirection::Left)
9236        } else if self.parse_keyword(Keyword::RIGHT) {
9237            Some(PartitionRangeDirection::Right)
9238        } else {
9239            None
9240        };
9241
9242        self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;
9243        self.expect_token(&Token::LParen)?;
9244
9245        let for_values = self.parse_comma_separated(Parser::parse_expr)?;
9246
9247        self.expect_token(&Token::RParen)?;
9248        self.expect_token(&Token::RParen)?;
9249
9250        Ok(SqlOption::Partition {
9251            column_name,
9252            range_direction,
9253            for_values,
9254        })
9255    }
9256
9257    pub fn parse_partition(&mut self) -> Result<Partition, ParserError> {
9258        self.expect_token(&Token::LParen)?;
9259        let partitions = self.parse_comma_separated(Parser::parse_expr)?;
9260        self.expect_token(&Token::RParen)?;
9261        Ok(Partition::Partitions(partitions))
9262    }
9263
9264    pub fn parse_projection_select(&mut self) -> Result<ProjectionSelect, ParserError> {
9265        self.expect_token(&Token::LParen)?;
9266        self.expect_keyword_is(Keyword::SELECT)?;
9267        let projection = self.parse_projection()?;
9268        let group_by = self.parse_optional_group_by()?;
9269        let order_by = self.parse_optional_order_by()?;
9270        self.expect_token(&Token::RParen)?;
9271        Ok(ProjectionSelect {
9272            projection,
9273            group_by,
9274            order_by,
9275        })
9276    }
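
    /// Parses the body of a ClickHouse-style `ALTER TABLE ... ADD PROJECTION` operation;
    /// the caller has already consumed the `ADD` and `PROJECTION` keywords. For example,
    /// the remainder of:
    /// ```sql
    /// ALTER TABLE t ADD PROJECTION IF NOT EXISTS p (SELECT a, COUNT(*) GROUP BY a ORDER BY a)
    /// ```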
9277    pub fn parse_alter_table_add_projection(&mut self) -> Result<AlterTableOperation, ParserError> {
9278        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
9279        let name = self.parse_identifier()?;
9280        let query = self.parse_projection_select()?;
9281        Ok(AlterTableOperation::AddProjection {
9282            if_not_exists,
9283            name,
9284            select: query,
9285        })
9286    }
9287
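    /// Parses a single `ALTER TABLE` operation (the part after `ALTER TABLE <name>`).
    /// A few illustrative examples of accepted operations:
    /// ```sql
    /// ADD COLUMN is_active BOOL DEFAULT true
    /// RENAME COLUMN old_name TO new_name
    /// DROP PARTITION (p2024)
    /// ```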
9288    pub fn parse_alter_table_operation(&mut self) -> Result<AlterTableOperation, ParserError> {
9289        let operation = if self.parse_keyword(Keyword::ADD) {
9290            if let Some(constraint) = self.parse_optional_table_constraint()? {
9291                let not_valid = self.parse_keywords(&[Keyword::NOT, Keyword::VALID]);
9292                AlterTableOperation::AddConstraint {
9293                    constraint,
9294                    not_valid,
9295                }
9296            } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
9297                && self.parse_keyword(Keyword::PROJECTION)
9298            {
9299                return self.parse_alter_table_add_projection();
9300            } else {
9301                let if_not_exists =
9302                    self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
9303                let mut new_partitions = vec![];
9304                loop {
9305                    if self.parse_keyword(Keyword::PARTITION) {
9306                        new_partitions.push(self.parse_partition()?);
9307                    } else {
9308                        break;
9309                    }
9310                }
9311                if !new_partitions.is_empty() {
9312                    AlterTableOperation::AddPartitions {
9313                        if_not_exists,
9314                        new_partitions,
9315                    }
9316                } else {
9317                    let column_keyword = self.parse_keyword(Keyword::COLUMN);
9318
9319                    let if_not_exists = if dialect_of!(self is PostgreSqlDialect | BigQueryDialect | DuckDbDialect | GenericDialect)
9320                    {
9321                        self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS])
9322                            || if_not_exists
9323                    } else {
9324                        false
9325                    };
9326
9327                    let column_def = self.parse_column_def()?;
9328
9329                    let column_position = self.parse_column_position()?;
9330
9331                    AlterTableOperation::AddColumn {
9332                        column_keyword,
9333                        if_not_exists,
9334                        column_def,
9335                        column_position,
9336                    }
9337                }
9338            }
9339        } else if self.parse_keyword(Keyword::RENAME) {
9340            if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::CONSTRAINT) {
9341                let old_name = self.parse_identifier()?;
9342                self.expect_keyword_is(Keyword::TO)?;
9343                let new_name = self.parse_identifier()?;
9344                AlterTableOperation::RenameConstraint { old_name, new_name }
9345            } else if self.parse_keyword(Keyword::TO) {
9346                let table_name = self.parse_object_name(false)?;
9347                AlterTableOperation::RenameTable {
9348                    table_name: RenameTableNameKind::To(table_name),
9349                }
9350            } else if self.parse_keyword(Keyword::AS) {
9351                let table_name = self.parse_object_name(false)?;
9352                AlterTableOperation::RenameTable {
9353                    table_name: RenameTableNameKind::As(table_name),
9354                }
9355            } else {
9356                let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
9357                let old_column_name = self.parse_identifier()?;
9358                self.expect_keyword_is(Keyword::TO)?;
9359                let new_column_name = self.parse_identifier()?;
9360                AlterTableOperation::RenameColumn {
9361                    old_column_name,
9362                    new_column_name,
9363                }
9364            }
9365        } else if self.parse_keyword(Keyword::DISABLE) {
9366            if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
9367                AlterTableOperation::DisableRowLevelSecurity {}
9368            } else if self.parse_keyword(Keyword::RULE) {
9369                let name = self.parse_identifier()?;
9370                AlterTableOperation::DisableRule { name }
9371            } else if self.parse_keyword(Keyword::TRIGGER) {
9372                let name = self.parse_identifier()?;
9373                AlterTableOperation::DisableTrigger { name }
9374            } else {
9375                return self.expected(
9376                    "ROW LEVEL SECURITY, RULE, or TRIGGER after DISABLE",
9377                    self.peek_token(),
9378                );
9379            }
9380        } else if self.parse_keyword(Keyword::ENABLE) {
9381            if self.parse_keywords(&[Keyword::ALWAYS, Keyword::RULE]) {
9382                let name = self.parse_identifier()?;
9383                AlterTableOperation::EnableAlwaysRule { name }
9384            } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::TRIGGER]) {
9385                let name = self.parse_identifier()?;
9386                AlterTableOperation::EnableAlwaysTrigger { name }
9387            } else if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
9388                AlterTableOperation::EnableRowLevelSecurity {}
9389            } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::RULE]) {
9390                let name = self.parse_identifier()?;
9391                AlterTableOperation::EnableReplicaRule { name }
9392            } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::TRIGGER]) {
9393                let name = self.parse_identifier()?;
9394                AlterTableOperation::EnableReplicaTrigger { name }
9395            } else if self.parse_keyword(Keyword::RULE) {
9396                let name = self.parse_identifier()?;
9397                AlterTableOperation::EnableRule { name }
9398            } else if self.parse_keyword(Keyword::TRIGGER) {
9399                let name = self.parse_identifier()?;
9400                AlterTableOperation::EnableTrigger { name }
9401            } else {
9402                return self.expected(
9403                    "ALWAYS, REPLICA, ROW LEVEL SECURITY, RULE, or TRIGGER after ENABLE",
9404                    self.peek_token(),
9405                );
9406            }
9407        } else if self.parse_keywords(&[Keyword::CLEAR, Keyword::PROJECTION])
9408            && dialect_of!(self is ClickHouseDialect|GenericDialect)
9409        {
9410            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9411            let name = self.parse_identifier()?;
9412            let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
9413                Some(self.parse_identifier()?)
9414            } else {
9415                None
9416            };
9417            AlterTableOperation::ClearProjection {
9418                if_exists,
9419                name,
9420                partition,
9421            }
9422        } else if self.parse_keywords(&[Keyword::MATERIALIZE, Keyword::PROJECTION])
9423            && dialect_of!(self is ClickHouseDialect|GenericDialect)
9424        {
9425            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9426            let name = self.parse_identifier()?;
9427            let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
9428                Some(self.parse_identifier()?)
9429            } else {
9430                None
9431            };
9432            AlterTableOperation::MaterializeProjection {
9433                if_exists,
9434                name,
9435                partition,
9436            }
9437        } else if self.parse_keyword(Keyword::DROP) {
9438            if self.parse_keywords(&[Keyword::IF, Keyword::EXISTS, Keyword::PARTITION]) {
9439                self.expect_token(&Token::LParen)?;
9440                let partitions = self.parse_comma_separated(Parser::parse_expr)?;
9441                self.expect_token(&Token::RParen)?;
9442                AlterTableOperation::DropPartitions {
9443                    partitions,
9444                    if_exists: true,
9445                }
9446            } else if self.parse_keyword(Keyword::PARTITION) {
9447                self.expect_token(&Token::LParen)?;
9448                let partitions = self.parse_comma_separated(Parser::parse_expr)?;
9449                self.expect_token(&Token::RParen)?;
9450                AlterTableOperation::DropPartitions {
9451                    partitions,
9452                    if_exists: false,
9453                }
9454            } else if self.parse_keyword(Keyword::CONSTRAINT) {
9455                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9456                let name = self.parse_identifier()?;
9457                let drop_behavior = self.parse_optional_drop_behavior();
9458                AlterTableOperation::DropConstraint {
9459                    if_exists,
9460                    name,
9461                    drop_behavior,
9462                }
9463            } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
9464                let drop_behavior = self.parse_optional_drop_behavior();
9465                AlterTableOperation::DropPrimaryKey { drop_behavior }
9466            } else if self.parse_keywords(&[Keyword::FOREIGN, Keyword::KEY]) {
9467                let name = self.parse_identifier()?;
9468                let drop_behavior = self.parse_optional_drop_behavior();
9469                AlterTableOperation::DropForeignKey {
9470                    name,
9471                    drop_behavior,
9472                }
9473            } else if self.parse_keyword(Keyword::INDEX) {
9474                let name = self.parse_identifier()?;
9475                AlterTableOperation::DropIndex { name }
9476            } else if self.parse_keyword(Keyword::PROJECTION)
9477                && dialect_of!(self is ClickHouseDialect|GenericDialect)
9478            {
9479                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9480                let name = self.parse_identifier()?;
9481                AlterTableOperation::DropProjection { if_exists, name }
9482            } else if self.parse_keywords(&[Keyword::CLUSTERING, Keyword::KEY]) {
9483                AlterTableOperation::DropClusteringKey
9484            } else {
9485                let has_column_keyword = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
9486                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9487                let column_names = if self.dialect.supports_comma_separated_drop_column_list() {
9488                    self.parse_comma_separated(Parser::parse_identifier)?
9489                } else {
9490                    vec![self.parse_identifier()?]
9491                };
9492                let drop_behavior = self.parse_optional_drop_behavior();
9493                AlterTableOperation::DropColumn {
9494                    has_column_keyword,
9495                    column_names,
9496                    if_exists,
9497                    drop_behavior,
9498                }
9499            }
9500        } else if self.parse_keyword(Keyword::PARTITION) {
9501            self.expect_token(&Token::LParen)?;
9502            let before = self.parse_comma_separated(Parser::parse_expr)?;
9503            self.expect_token(&Token::RParen)?;
9504            self.expect_keyword_is(Keyword::RENAME)?;
9505            self.expect_keywords(&[Keyword::TO, Keyword::PARTITION])?;
9506            self.expect_token(&Token::LParen)?;
9507            let renames = self.parse_comma_separated(Parser::parse_expr)?;
9508            self.expect_token(&Token::RParen)?;
9509            AlterTableOperation::RenamePartitions {
9510                old_partitions: before,
9511                new_partitions: renames,
9512            }
9513        } else if self.parse_keyword(Keyword::CHANGE) {
9514            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
9515            let old_name = self.parse_identifier()?;
9516            let new_name = self.parse_identifier()?;
9517            let data_type = self.parse_data_type()?;
9518            let mut options = vec![];
9519            while let Some(option) = self.parse_optional_column_option()? {
9520                options.push(option);
9521            }
9522
9523            let column_position = self.parse_column_position()?;
9524
9525            AlterTableOperation::ChangeColumn {
9526                old_name,
9527                new_name,
9528                data_type,
9529                options,
9530                column_position,
9531            }
9532        } else if self.parse_keyword(Keyword::MODIFY) {
9533            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
9534            let col_name = self.parse_identifier()?;
9535            let data_type = self.parse_data_type()?;
9536            let mut options = vec![];
9537            while let Some(option) = self.parse_optional_column_option()? {
9538                options.push(option);
9539            }
9540
9541            let column_position = self.parse_column_position()?;
9542
9543            AlterTableOperation::ModifyColumn {
9544                col_name,
9545                data_type,
9546                options,
9547                column_position,
9548            }
9549        } else if self.parse_keyword(Keyword::ALTER) {
9550            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
9551            let column_name = self.parse_identifier()?;
9552            let is_postgresql = dialect_of!(self is PostgreSqlDialect);
9553
9554            let op: AlterColumnOperation = if self.parse_keywords(&[
9555                Keyword::SET,
9556                Keyword::NOT,
9557                Keyword::NULL,
9558            ]) {
9559                AlterColumnOperation::SetNotNull {}
9560            } else if self.parse_keywords(&[Keyword::DROP, Keyword::NOT, Keyword::NULL]) {
9561                AlterColumnOperation::DropNotNull {}
9562            } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
9563                AlterColumnOperation::SetDefault {
9564                    value: self.parse_expr()?,
9565                }
9566            } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
9567                AlterColumnOperation::DropDefault {}
9568            } else if self.parse_keywords(&[Keyword::SET, Keyword::DATA, Keyword::TYPE]) {
9569                self.parse_set_data_type(true)?
9570            } else if self.parse_keyword(Keyword::TYPE) {
9571                self.parse_set_data_type(false)?
9572            } else if self.parse_keywords(&[Keyword::ADD, Keyword::GENERATED]) {
9573                let generated_as = if self.parse_keyword(Keyword::ALWAYS) {
9574                    Some(GeneratedAs::Always)
9575                } else if self.parse_keywords(&[Keyword::BY, Keyword::DEFAULT]) {
9576                    Some(GeneratedAs::ByDefault)
9577                } else {
9578                    None
9579                };
9580
9581                self.expect_keywords(&[Keyword::AS, Keyword::IDENTITY])?;
9582
9583                let mut sequence_options: Option<Vec<SequenceOptions>> = None;
9584
9585                if self.peek_token().token == Token::LParen {
9586                    self.expect_token(&Token::LParen)?;
9587                    sequence_options = Some(self.parse_create_sequence_options()?);
9588                    self.expect_token(&Token::RParen)?;
9589                }
9590
9591                AlterColumnOperation::AddGenerated {
9592                    generated_as,
9593                    sequence_options,
9594                }
9595            } else {
9596                let message = if is_postgresql {
9597                    "SET/DROP NOT NULL, SET DEFAULT, SET DATA TYPE, or ADD GENERATED after ALTER COLUMN"
9598                } else {
9599                    "SET/DROP NOT NULL, SET DEFAULT, or SET DATA TYPE after ALTER COLUMN"
9600                };
9601
9602                return self.expected(message, self.peek_token());
9603            };
9604            AlterTableOperation::AlterColumn { column_name, op }
9605        } else if self.parse_keyword(Keyword::SWAP) {
9606            self.expect_keyword_is(Keyword::WITH)?;
9607            let table_name = self.parse_object_name(false)?;
9608            AlterTableOperation::SwapWith { table_name }
9609        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect)
9610            && self.parse_keywords(&[Keyword::OWNER, Keyword::TO])
9611        {
9612            let new_owner = self.parse_owner()?;
9613            AlterTableOperation::OwnerTo { new_owner }
9614        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
9615            && self.parse_keyword(Keyword::ATTACH)
9616        {
9617            AlterTableOperation::AttachPartition {
9618                partition: self.parse_part_or_partition()?,
9619            }
9620        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
9621            && self.parse_keyword(Keyword::DETACH)
9622        {
9623            AlterTableOperation::DetachPartition {
9624                partition: self.parse_part_or_partition()?,
9625            }
9626        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
9627            && self.parse_keyword(Keyword::FREEZE)
9628        {
9629            let partition = self.parse_part_or_partition()?;
9630            let with_name = if self.parse_keyword(Keyword::WITH) {
9631                self.expect_keyword_is(Keyword::NAME)?;
9632                Some(self.parse_identifier()?)
9633            } else {
9634                None
9635            };
9636            AlterTableOperation::FreezePartition {
9637                partition,
9638                with_name,
9639            }
9640        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
9641            && self.parse_keyword(Keyword::UNFREEZE)
9642        {
9643            let partition = self.parse_part_or_partition()?;
9644            let with_name = if self.parse_keyword(Keyword::WITH) {
9645                self.expect_keyword_is(Keyword::NAME)?;
9646                Some(self.parse_identifier()?)
9647            } else {
9648                None
9649            };
9650            AlterTableOperation::UnfreezePartition {
9651                partition,
9652                with_name,
9653            }
9654        } else if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
9655            self.expect_token(&Token::LParen)?;
9656            let exprs = self.parse_comma_separated(|parser| parser.parse_expr())?;
9657            self.expect_token(&Token::RParen)?;
9658            AlterTableOperation::ClusterBy { exprs }
9659        } else if self.parse_keywords(&[Keyword::SUSPEND, Keyword::RECLUSTER]) {
9660            AlterTableOperation::SuspendRecluster
9661        } else if self.parse_keywords(&[Keyword::RESUME, Keyword::RECLUSTER]) {
9662            AlterTableOperation::ResumeRecluster
9663        } else if self.parse_keyword(Keyword::LOCK) {
9664            let equals = self.consume_token(&Token::Eq);
9665            let lock = match self.parse_one_of_keywords(&[
9666                Keyword::DEFAULT,
9667                Keyword::EXCLUSIVE,
9668                Keyword::NONE,
9669                Keyword::SHARED,
9670            ]) {
9671                Some(Keyword::DEFAULT) => AlterTableLock::Default,
9672                Some(Keyword::EXCLUSIVE) => AlterTableLock::Exclusive,
9673                Some(Keyword::NONE) => AlterTableLock::None,
9674                Some(Keyword::SHARED) => AlterTableLock::Shared,
9675                _ => self.expected(
9676                    "DEFAULT, EXCLUSIVE, NONE or SHARED after LOCK [=]",
9677                    self.peek_token(),
9678                )?,
9679            };
9680            AlterTableOperation::Lock { equals, lock }
9681        } else if self.parse_keyword(Keyword::ALGORITHM) {
9682            let equals = self.consume_token(&Token::Eq);
9683            let algorithm = match self.parse_one_of_keywords(&[
9684                Keyword::DEFAULT,
9685                Keyword::INSTANT,
9686                Keyword::INPLACE,
9687                Keyword::COPY,
9688            ]) {
9689                Some(Keyword::DEFAULT) => AlterTableAlgorithm::Default,
9690                Some(Keyword::INSTANT) => AlterTableAlgorithm::Instant,
9691                Some(Keyword::INPLACE) => AlterTableAlgorithm::Inplace,
9692                Some(Keyword::COPY) => AlterTableAlgorithm::Copy,
9693                _ => self.expected(
9694                    "DEFAULT, INSTANT, INPLACE, or COPY after ALGORITHM [=]",
9695                    self.peek_token(),
9696                )?,
9697            };
9698            AlterTableOperation::Algorithm { equals, algorithm }
9699        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
9700            let equals = self.consume_token(&Token::Eq);
9701            let value = self.parse_number_value()?;
9702            AlterTableOperation::AutoIncrement { equals, value }
9703        } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::IDENTITY]) {
9704            let identity = if self.parse_keyword(Keyword::NONE) {
9705                ReplicaIdentity::None
9706            } else if self.parse_keyword(Keyword::FULL) {
9707                ReplicaIdentity::Full
9708            } else if self.parse_keyword(Keyword::DEFAULT) {
9709                ReplicaIdentity::Default
9710            } else if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
9711                ReplicaIdentity::Index(self.parse_identifier()?)
9712            } else {
9713                return self.expected(
9714                    "NONE, FULL, DEFAULT, or USING INDEX index_name after REPLICA IDENTITY",
9715                    self.peek_token(),
9716                );
9717            };
9718
9719            AlterTableOperation::ReplicaIdentity { identity }
9720        } else if self.parse_keywords(&[Keyword::VALIDATE, Keyword::CONSTRAINT]) {
9721            let name = self.parse_identifier()?;
9722            AlterTableOperation::ValidateConstraint { name }
9723        } else {
9724            let mut options =
9725                self.parse_options_with_keywords(&[Keyword::SET, Keyword::TBLPROPERTIES])?;
9726            if !options.is_empty() {
9727                AlterTableOperation::SetTblProperties {
9728                    table_properties: options,
9729                }
9730            } else {
9731                options = self.parse_options(Keyword::SET)?;
9732                if !options.is_empty() {
9733                    AlterTableOperation::SetOptionsParens { options }
9734                } else {
9735                    return self.expected(
9736                        "ADD, RENAME, PARTITION, SWAP, DROP, REPLICA IDENTITY, SET, or SET TBLPROPERTIES after ALTER TABLE",
9737                        self.peek_token(),
9738                    );
9739                }
9740            }
9741        };
9742        Ok(operation)
9743    }
9744
9745    fn parse_set_data_type(&mut self, had_set: bool) -> Result<AlterColumnOperation, ParserError> {
9746        let data_type = self.parse_data_type()?;
9747        let using = if self.dialect.supports_alter_column_type_using()
9748            && self.parse_keyword(Keyword::USING)
9749        {
9750            Some(self.parse_expr()?)
9751        } else {
9752            None
9753        };
9754        Ok(AlterColumnOperation::SetDataType {
9755            data_type,
9756            using,
9757            had_set,
9758        })
9759    }
9760
9761    fn parse_part_or_partition(&mut self) -> Result<Partition, ParserError> {
9762        let keyword = self.expect_one_of_keywords(&[Keyword::PART, Keyword::PARTITION])?;
9763        match keyword {
9764            Keyword::PART => Ok(Partition::Part(self.parse_expr()?)),
9765            Keyword::PARTITION => Ok(Partition::Expr(self.parse_expr()?)),
9766            // unreachable because `expect_one_of_keywords` is used above
9767            _ => unreachable!(),
9768        }
9769    }
9770
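    /// Parse the object type following `ALTER` (`TABLE`, `VIEW`, `TYPE`, `INDEX`,
    /// `ROLE`, `POLICY`, `CONNECTOR`, `ICEBERG TABLE`, `SCHEMA`, or `USER`) and
    /// dispatch to the corresponding parser.
    ///
    /// A minimal usage sketch, illustrative only (assumes the public
    /// `Parser::parse_sql` entry point; not compiled as a doctest):
    ///
    /// ```ignore
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "ALTER INDEX idx RENAME TO idx2";
    /// let statements = Parser::parse_sql(&GenericDialect {}, sql).unwrap();
    /// assert_eq!(statements.len(), 1);
    /// ```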
9771    pub fn parse_alter(&mut self) -> Result<Statement, ParserError> {
9772        let object_type = self.expect_one_of_keywords(&[
9773            Keyword::VIEW,
9774            Keyword::TYPE,
9775            Keyword::TABLE,
9776            Keyword::INDEX,
9777            Keyword::ROLE,
9778            Keyword::POLICY,
9779            Keyword::CONNECTOR,
9780            Keyword::ICEBERG,
9781            Keyword::SCHEMA,
9782            Keyword::USER,
9783        ])?;
9784        match object_type {
9785            Keyword::SCHEMA => {
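                // Step back over the SCHEMA and ALTER keywords so that
                // parse_alter_schema can expect and consume them itself.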
9786                self.prev_token();
9787                self.prev_token();
9788                self.parse_alter_schema()
9789            }
9790            Keyword::VIEW => self.parse_alter_view(),
9791            Keyword::TYPE => self.parse_alter_type(),
9792            Keyword::TABLE => self.parse_alter_table(false),
9793            Keyword::ICEBERG => {
9794                self.expect_keyword(Keyword::TABLE)?;
9795                self.parse_alter_table(true)
9796            }
9797            Keyword::INDEX => {
9798                let index_name = self.parse_object_name(false)?;
9799                let operation = if self.parse_keyword(Keyword::RENAME) {
9800                    if self.parse_keyword(Keyword::TO) {
9801                        let index_name = self.parse_object_name(false)?;
9802                        AlterIndexOperation::RenameIndex { index_name }
9803                    } else {
9804                        return self.expected("TO after RENAME", self.peek_token());
9805                    }
9806                } else {
9807                    return self.expected("RENAME after ALTER INDEX", self.peek_token());
9808                };
9809
9810                Ok(Statement::AlterIndex {
9811                    name: index_name,
9812                    operation,
9813                })
9814            }
9815            Keyword::ROLE => self.parse_alter_role(),
9816            Keyword::POLICY => self.parse_alter_policy(),
9817            Keyword::CONNECTOR => self.parse_alter_connector(),
9818            Keyword::USER => self.parse_alter_user(),
9819            // unreachable because `expect_one_of_keywords` is used above
9820            _ => unreachable!(),
9821        }
9822    }
9823
9824    /// Parse a [Statement::AlterTable]
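    ///
    /// A minimal usage sketch, illustrative only (assumes the public
    /// `Parser::parse_sql` entry point; not compiled as a doctest):
    ///
    /// ```ignore
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "ALTER TABLE tab ADD COLUMN foo TEXT";
    /// let statements = Parser::parse_sql(&GenericDialect {}, sql).unwrap();
    /// assert_eq!(statements.len(), 1);
    /// ```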
9825    pub fn parse_alter_table(&mut self, iceberg: bool) -> Result<Statement, ParserError> {
9826        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9827        let only = self.parse_keyword(Keyword::ONLY); // [ ONLY ]
9828        let table_name = self.parse_object_name(false)?;
9829        let on_cluster = self.parse_optional_on_cluster()?;
9830        let operations = self.parse_comma_separated(Parser::parse_alter_table_operation)?;
9831
9832        let mut location = None;
9833        if self.parse_keyword(Keyword::LOCATION) {
9834            location = Some(HiveSetLocation {
9835                has_set: false,
9836                location: self.parse_identifier()?,
9837            });
9838        } else if self.parse_keywords(&[Keyword::SET, Keyword::LOCATION]) {
9839            location = Some(HiveSetLocation {
9840                has_set: true,
9841                location: self.parse_identifier()?,
9842            });
9843        }
9844
9845        let end_token = if self.peek_token_ref().token == Token::SemiColon {
9846            self.peek_token_ref().clone()
9847        } else {
9848            self.get_current_token().clone()
9849        };
9850
9851        Ok(AlterTable {
9852            name: table_name,
9853            if_exists,
9854            only,
9855            operations,
9856            location,
9857            on_cluster,
9858            table_type: if iceberg {
9859                Some(AlterTableType::Iceberg)
9860            } else {
9861                None
9862            },
9863            end_token: AttachedToken(end_token),
9864        }
9865        .into())
9866    }
9867
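    /// Parse a [Statement::AlterView]
    ///
    /// A minimal usage sketch, illustrative only (assumes the public
    /// `Parser::parse_sql` entry point; not compiled as a doctest):
    ///
    /// ```ignore
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "ALTER VIEW v AS SELECT 1";
    /// let statements = Parser::parse_sql(&GenericDialect {}, sql).unwrap();
    /// assert_eq!(statements.len(), 1);
    /// ```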
9868    pub fn parse_alter_view(&mut self) -> Result<Statement, ParserError> {
9869        let name = self.parse_object_name(false)?;
9870        let columns = self.parse_parenthesized_column_list(Optional, false)?;
9871
9872        let with_options = self.parse_options(Keyword::WITH)?;
9873
9874        self.expect_keyword_is(Keyword::AS)?;
9875        let query = self.parse_query()?;
9876
9877        Ok(Statement::AlterView {
9878            name,
9879            columns,
9880            query,
9881            with_options,
9882        })
9883    }
9884
9885    /// Parse a [Statement::AlterType]
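    ///
    /// A minimal usage sketch, illustrative only (assumes the public
    /// `Parser::parse_sql` entry point; not compiled as a doctest):
    ///
    /// ```ignore
    /// use sqlparser::dialect::PostgreSqlDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "ALTER TYPE mood ADD VALUE 'happy'";
    /// let statements = Parser::parse_sql(&PostgreSqlDialect {}, sql).unwrap();
    /// assert_eq!(statements.len(), 1);
    /// ```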
9886    pub fn parse_alter_type(&mut self) -> Result<Statement, ParserError> {
9887        let name = self.parse_object_name(false)?;
9888
9889        if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
9890            let new_name = self.parse_identifier()?;
9891            Ok(Statement::AlterType(AlterType {
9892                name,
9893                operation: AlterTypeOperation::Rename(AlterTypeRename { new_name }),
9894            }))
9895        } else if self.parse_keywords(&[Keyword::ADD, Keyword::VALUE]) {
9896            let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
9897            let new_enum_value = self.parse_identifier()?;
9898            let position = if self.parse_keyword(Keyword::BEFORE) {
9899                Some(AlterTypeAddValuePosition::Before(self.parse_identifier()?))
9900            } else if self.parse_keyword(Keyword::AFTER) {
9901                Some(AlterTypeAddValuePosition::After(self.parse_identifier()?))
9902            } else {
9903                None
9904            };
9905
9906            Ok(Statement::AlterType(AlterType {
9907                name,
9908                operation: AlterTypeOperation::AddValue(AlterTypeAddValue {
9909                    if_not_exists,
9910                    value: new_enum_value,
9911                    position,
9912                }),
9913            }))
9914        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::VALUE]) {
9915            let existing_enum_value = self.parse_identifier()?;
9916            self.expect_keyword(Keyword::TO)?;
9917            let new_enum_value = self.parse_identifier()?;
9918
9919            Ok(Statement::AlterType(AlterType {
9920                name,
9921                operation: AlterTypeOperation::RenameValue(AlterTypeRenameValue {
9922                    from: existing_enum_value,
9923                    to: new_enum_value,
9924                }),
9925            }))
9926        } else {
9927            self.expected_ref(
9928                "{RENAME TO | { RENAME | ADD } VALUE}",
9929                self.peek_token_ref(),
9930            )
9931        }
9932    }
9933
9934    /// Parse a [Statement::AlterSchema]
9935    /// `ALTER SCHEMA [ IF EXISTS ] schema_name`
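    ///
    /// A minimal usage sketch, illustrative only (assumes the public
    /// `Parser::parse_sql` entry point; not compiled as a doctest):
    ///
    /// ```ignore
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "ALTER SCHEMA myschema RENAME TO yourschema";
    /// let statements = Parser::parse_sql(&GenericDialect {}, sql).unwrap();
    /// assert_eq!(statements.len(), 1);
    /// ```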
9936    pub fn parse_alter_schema(&mut self) -> Result<Statement, ParserError> {
9937        self.expect_keywords(&[Keyword::ALTER, Keyword::SCHEMA])?;
9938        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9939        let name = self.parse_object_name(false)?;
9940        let operation = if self.parse_keywords(&[Keyword::SET, Keyword::OPTIONS]) {
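            // Step back over OPTIONS so that parse_options can consume the
            // keyword together with its parenthesized option list.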
9941            self.prev_token();
9942            let options = self.parse_options(Keyword::OPTIONS)?;
9943            AlterSchemaOperation::SetOptionsParens { options }
9944        } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT, Keyword::COLLATE]) {
9945            let collate = self.parse_expr()?;
9946            AlterSchemaOperation::SetDefaultCollate { collate }
9947        } else if self.parse_keywords(&[Keyword::ADD, Keyword::REPLICA]) {
9948            let replica = self.parse_identifier()?;
9949            let options = if self.peek_keyword(Keyword::OPTIONS) {
9950                Some(self.parse_options(Keyword::OPTIONS)?)
9951            } else {
9952                None
9953            };
9954            AlterSchemaOperation::AddReplica { replica, options }
9955        } else if self.parse_keywords(&[Keyword::DROP, Keyword::REPLICA]) {
9956            let replica = self.parse_identifier()?;
9957            AlterSchemaOperation::DropReplica { replica }
9958        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
9959            let new_name = self.parse_object_name(false)?;
9960            AlterSchemaOperation::Rename { name: new_name }
9961        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
9962            let owner = self.parse_owner()?;
9963            AlterSchemaOperation::OwnerTo { owner }
9964        } else {
9965            return self.expected_ref("ALTER SCHEMA operation", self.peek_token_ref());
9966        };
9967        Ok(Statement::AlterSchema(AlterSchema {
9968            name,
9969            if_exists,
9970            operations: vec![operation],
9971        }))
9972    }
9973
9974    /// Parse a `CALL procedure_name(arg1, arg2, ...)`
9975    /// or `CALL procedure_name` statement
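    ///
    /// A minimal usage sketch, illustrative only (assumes the public
    /// `Parser::parse_sql` entry point; not compiled as a doctest):
    ///
    /// ```ignore
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "CALL my_procedure(1, 'a')";
    /// let statements = Parser::parse_sql(&GenericDialect {}, sql).unwrap();
    /// assert_eq!(statements.len(), 1);
    /// ```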
9976    pub fn parse_call(&mut self) -> Result<Statement, ParserError> {
9977        let object_name = self.parse_object_name(false)?;
9978        if self.peek_token().token == Token::LParen {
9979            match self.parse_function(object_name)? {
9980                Expr::Function(f) => Ok(Statement::Call(f)),
9981                other => parser_err!(
9982                    format!("Expected a simple procedure call but found: {other}"),
9983                    self.peek_token().span.start
9984                ),
9985            }
9986        } else {
9987            Ok(Statement::Call(Function {
9988                name: object_name,
9989                uses_odbc_syntax: false,
9990                parameters: FunctionArguments::None,
9991                args: FunctionArguments::None,
9992                over: None,
9993                filter: None,
9994                null_treatment: None,
9995                within_group: vec![],
9996            }))
9997        }
9998    }
9999
10000    /// Parse a copy statement
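    ///
    /// A minimal usage sketch, illustrative only (assumes the public
    /// `Parser::parse_sql` entry point; not compiled as a doctest):
    ///
    /// ```ignore
    /// use sqlparser::dialect::PostgreSqlDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "COPY users (id, name) TO STDOUT (FORMAT CSV)";
    /// let statements = Parser::parse_sql(&PostgreSqlDialect {}, sql).unwrap();
    /// assert_eq!(statements.len(), 1);
    /// ```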
10001    pub fn parse_copy(&mut self) -> Result<Statement, ParserError> {
10002        let source;
10003        if self.consume_token(&Token::LParen) {
10004            source = CopySource::Query(self.parse_query()?);
10005            self.expect_token(&Token::RParen)?;
10006        } else {
10007            let table_name = self.parse_object_name(false)?;
10008            let columns = self.parse_parenthesized_column_list(Optional, false)?;
10009            source = CopySource::Table {
10010                table_name,
10011                columns,
10012            };
10013        }
10014        let to = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::TO]) {
10015            Some(Keyword::FROM) => false,
10016            Some(Keyword::TO) => true,
10017            _ => self.expected("FROM or TO", self.peek_token())?,
10018        };
10019        if !to {
10020            // Use a separate if statement to prevent the Rust compiler from complaining about
10021            // "if statement in this position is unstable: https://github.com/rust-lang/rust/issues/53667"
10022            if let CopySource::Query(_) = source {
10023                return Err(ParserError::ParserError(
10024                    "COPY ... FROM does not support query as a source".to_string(),
10025                ));
10026            }
10027        }
10028        let target = if self.parse_keyword(Keyword::STDIN) {
10029            CopyTarget::Stdin
10030        } else if self.parse_keyword(Keyword::STDOUT) {
10031            CopyTarget::Stdout
10032        } else if self.parse_keyword(Keyword::PROGRAM) {
10033            CopyTarget::Program {
10034                command: self.parse_literal_string()?,
10035            }
10036        } else {
10037            CopyTarget::File {
10038                filename: self.parse_literal_string()?,
10039            }
10040        };
10041        let _ = self.parse_keyword(Keyword::WITH); // [ WITH ]
10042        let mut options = vec![];
10043        if self.consume_token(&Token::LParen) {
10044            options = self.parse_comma_separated(Parser::parse_copy_option)?;
10045            self.expect_token(&Token::RParen)?;
10046        }
10047        let mut legacy_options = vec![];
10048        while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
10049            legacy_options.push(opt);
10050        }
10051        let values = if let CopyTarget::Stdin = target {
10052            self.expect_token(&Token::SemiColon)?;
10053            self.parse_tsv()
10054        } else {
10055            vec![]
10056        };
10057        Ok(Statement::Copy {
10058            source,
10059            to,
10060            target,
10061            options,
10062            legacy_options,
10063            values,
10064        })
10065    }
10066
10067    /// Parse [Statement::Open]
10068    fn parse_open(&mut self) -> Result<Statement, ParserError> {
10069        self.expect_keyword(Keyword::OPEN)?;
10070        Ok(Statement::Open(OpenStatement {
10071            cursor_name: self.parse_identifier()?,
10072        }))
10073    }
10074
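    /// Parse a [Statement::Close] for a cursor, e.g. `CLOSE my_cursor` or `CLOSE ALL`.
    ///
    /// A minimal usage sketch, illustrative only (assumes the public
    /// `Parser::parse_sql` entry point; not compiled as a doctest):
    ///
    /// ```ignore
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let statements = Parser::parse_sql(&GenericDialect {}, "CLOSE my_cursor").unwrap();
    /// assert_eq!(statements.len(), 1);
    /// ```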
10075    pub fn parse_close(&mut self) -> Result<Statement, ParserError> {
10076        let cursor = if self.parse_keyword(Keyword::ALL) {
10077            CloseCursor::All
10078        } else {
10079            let name = self.parse_identifier()?;
10080
10081            CloseCursor::Specific { name }
10082        };
10083
10084        Ok(Statement::Close { cursor })
10085    }
10086
10087    fn parse_copy_option(&mut self) -> Result<CopyOption, ParserError> {
10088        let ret = match self.parse_one_of_keywords(&[
10089            Keyword::FORMAT,
10090            Keyword::FREEZE,
10091            Keyword::DELIMITER,
10092            Keyword::NULL,
10093            Keyword::HEADER,
10094            Keyword::QUOTE,
10095            Keyword::ESCAPE,
10096            Keyword::FORCE_QUOTE,
10097            Keyword::FORCE_NOT_NULL,
10098            Keyword::FORCE_NULL,
10099            Keyword::ENCODING,
10100        ]) {
10101            Some(Keyword::FORMAT) => CopyOption::Format(self.parse_identifier()?),
10102            Some(Keyword::FREEZE) => CopyOption::Freeze(!matches!(
10103                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
10104                Some(Keyword::FALSE)
10105            )),
10106            Some(Keyword::DELIMITER) => CopyOption::Delimiter(self.parse_literal_char()?),
10107            Some(Keyword::NULL) => CopyOption::Null(self.parse_literal_string()?),
10108            Some(Keyword::HEADER) => CopyOption::Header(!matches!(
10109                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
10110                Some(Keyword::FALSE)
10111            )),
10112            Some(Keyword::QUOTE) => CopyOption::Quote(self.parse_literal_char()?),
10113            Some(Keyword::ESCAPE) => CopyOption::Escape(self.parse_literal_char()?),
10114            Some(Keyword::FORCE_QUOTE) => {
10115                CopyOption::ForceQuote(self.parse_parenthesized_column_list(Mandatory, false)?)
10116            }
10117            Some(Keyword::FORCE_NOT_NULL) => {
10118                CopyOption::ForceNotNull(self.parse_parenthesized_column_list(Mandatory, false)?)
10119            }
10120            Some(Keyword::FORCE_NULL) => {
10121                CopyOption::ForceNull(self.parse_parenthesized_column_list(Mandatory, false)?)
10122            }
10123            Some(Keyword::ENCODING) => CopyOption::Encoding(self.parse_literal_string()?),
10124            _ => self.expected("option", self.peek_token())?,
10125        };
10126        Ok(ret)
10127    }
10128
10129    fn parse_copy_legacy_option(&mut self) -> Result<CopyLegacyOption, ParserError> {
10130        // FORMAT [ AS ] is optional
10131        if self.parse_keyword(Keyword::FORMAT) {
10132            let _ = self.parse_keyword(Keyword::AS);
10133        }
10134
10135        let ret = match self.parse_one_of_keywords(&[
10136            Keyword::ACCEPTANYDATE,
10137            Keyword::ACCEPTINVCHARS,
10138            Keyword::ADDQUOTES,
10139            Keyword::ALLOWOVERWRITE,
10140            Keyword::BINARY,
10141            Keyword::BLANKSASNULL,
10142            Keyword::BZIP2,
10143            Keyword::CLEANPATH,
10144            Keyword::COMPUPDATE,
10145            Keyword::CSV,
10146            Keyword::DATEFORMAT,
10147            Keyword::DELIMITER,
10148            Keyword::EMPTYASNULL,
10149            Keyword::ENCRYPTED,
10150            Keyword::ESCAPE,
10151            Keyword::EXTENSION,
10152            Keyword::FIXEDWIDTH,
10153            Keyword::GZIP,
10154            Keyword::HEADER,
10155            Keyword::IAM_ROLE,
10156            Keyword::IGNOREHEADER,
10157            Keyword::JSON,
10158            Keyword::MANIFEST,
10159            Keyword::MAXFILESIZE,
10160            Keyword::NULL,
10161            Keyword::PARALLEL,
10162            Keyword::PARQUET,
10163            Keyword::PARTITION,
10164            Keyword::REGION,
10165            Keyword::REMOVEQUOTES,
10166            Keyword::ROWGROUPSIZE,
10167            Keyword::STATUPDATE,
10168            Keyword::TIMEFORMAT,
10169            Keyword::TRUNCATECOLUMNS,
10170            Keyword::ZSTD,
10171        ]) {
10172            Some(Keyword::ACCEPTANYDATE) => CopyLegacyOption::AcceptAnyDate,
10173            Some(Keyword::ACCEPTINVCHARS) => {
10174                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
10175                let ch = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) {
10176                    Some(self.parse_literal_string()?)
10177                } else {
10178                    None
10179                };
10180                CopyLegacyOption::AcceptInvChars(ch)
10181            }
10182            Some(Keyword::ADDQUOTES) => CopyLegacyOption::AddQuotes,
10183            Some(Keyword::ALLOWOVERWRITE) => CopyLegacyOption::AllowOverwrite,
10184            Some(Keyword::BINARY) => CopyLegacyOption::Binary,
10185            Some(Keyword::BLANKSASNULL) => CopyLegacyOption::BlankAsNull,
10186            Some(Keyword::BZIP2) => CopyLegacyOption::Bzip2,
10187            Some(Keyword::CLEANPATH) => CopyLegacyOption::CleanPath,
10188            Some(Keyword::COMPUPDATE) => {
10189                let preset = self.parse_keyword(Keyword::PRESET);
10190                let enabled = match self.parse_one_of_keywords(&[
10191                    Keyword::TRUE,
10192                    Keyword::FALSE,
10193                    Keyword::ON,
10194                    Keyword::OFF,
10195                ]) {
10196                    Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
10197                    Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
10198                    _ => None,
10199                };
10200                CopyLegacyOption::CompUpdate { preset, enabled }
10201            }
10202            Some(Keyword::CSV) => CopyLegacyOption::Csv({
10203                let mut opts = vec![];
10204                while let Some(opt) =
10205                    self.maybe_parse(|parser| parser.parse_copy_legacy_csv_option())?
10206                {
10207                    opts.push(opt);
10208                }
10209                opts
10210            }),
10211            Some(Keyword::DATEFORMAT) => {
10212                let _ = self.parse_keyword(Keyword::AS);
10213                let fmt = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) {
10214                    Some(self.parse_literal_string()?)
10215                } else {
10216                    None
10217                };
10218                CopyLegacyOption::DateFormat(fmt)
10219            }
10220            Some(Keyword::DELIMITER) => {
10221                let _ = self.parse_keyword(Keyword::AS);
10222                CopyLegacyOption::Delimiter(self.parse_literal_char()?)
10223            }
10224            Some(Keyword::EMPTYASNULL) => CopyLegacyOption::EmptyAsNull,
10225            Some(Keyword::ENCRYPTED) => {
10226                let auto = self.parse_keyword(Keyword::AUTO);
10227                CopyLegacyOption::Encrypted { auto }
10228            }
10229            Some(Keyword::ESCAPE) => CopyLegacyOption::Escape,
10230            Some(Keyword::EXTENSION) => {
10231                let ext = self.parse_literal_string()?;
10232                CopyLegacyOption::Extension(ext)
10233            }
10234            Some(Keyword::FIXEDWIDTH) => {
10235                let spec = self.parse_literal_string()?;
10236                CopyLegacyOption::FixedWidth(spec)
10237            }
10238            Some(Keyword::GZIP) => CopyLegacyOption::Gzip,
10239            Some(Keyword::HEADER) => CopyLegacyOption::Header,
10240            Some(Keyword::IAM_ROLE) => CopyLegacyOption::IamRole(self.parse_iam_role_kind()?),
10241            Some(Keyword::IGNOREHEADER) => {
10242                let _ = self.parse_keyword(Keyword::AS);
10243                let num_rows = self.parse_literal_uint()?;
10244                CopyLegacyOption::IgnoreHeader(num_rows)
10245            }
10246            Some(Keyword::JSON) => CopyLegacyOption::Json,
10247            Some(Keyword::MANIFEST) => {
10248                let verbose = self.parse_keyword(Keyword::VERBOSE);
10249                CopyLegacyOption::Manifest { verbose }
10250            }
10251            Some(Keyword::MAXFILESIZE) => {
10252                let _ = self.parse_keyword(Keyword::AS);
10253                let size = self.parse_number_value()?.value;
10254                let unit = match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
10255                    Some(Keyword::MB) => Some(FileSizeUnit::MB),
10256                    Some(Keyword::GB) => Some(FileSizeUnit::GB),
10257                    _ => None,
10258                };
10259                CopyLegacyOption::MaxFileSize(FileSize { size, unit })
10260            }
10261            Some(Keyword::NULL) => {
10262                let _ = self.parse_keyword(Keyword::AS);
10263                CopyLegacyOption::Null(self.parse_literal_string()?)
10264            }
10265            Some(Keyword::PARALLEL) => {
10266                let enabled = match self.parse_one_of_keywords(&[
10267                    Keyword::TRUE,
10268                    Keyword::FALSE,
10269                    Keyword::ON,
10270                    Keyword::OFF,
10271                ]) {
10272                    Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
10273                    Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
10274                    _ => None,
10275                };
10276                CopyLegacyOption::Parallel(enabled)
10277            }
10278            Some(Keyword::PARQUET) => CopyLegacyOption::Parquet,
10279            Some(Keyword::PARTITION) => {
10280                self.expect_keyword(Keyword::BY)?;
10281                let columns = self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?;
10282                let include = self.parse_keyword(Keyword::INCLUDE);
10283                CopyLegacyOption::PartitionBy(UnloadPartitionBy { columns, include })
10284            }
10285            Some(Keyword::REGION) => {
10286                let _ = self.parse_keyword(Keyword::AS);
10287                let region = self.parse_literal_string()?;
10288                CopyLegacyOption::Region(region)
10289            }
10290            Some(Keyword::REMOVEQUOTES) => CopyLegacyOption::RemoveQuotes,
10291            Some(Keyword::ROWGROUPSIZE) => {
10292                let _ = self.parse_keyword(Keyword::AS);
10293                let file_size = self.parse_file_size()?;
10294                CopyLegacyOption::RowGroupSize(file_size)
10295            }
10296            Some(Keyword::STATUPDATE) => {
10297                let enabled = match self.parse_one_of_keywords(&[
10298                    Keyword::TRUE,
10299                    Keyword::FALSE,
10300                    Keyword::ON,
10301                    Keyword::OFF,
10302                ]) {
10303                    Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
10304                    Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
10305                    _ => None,
10306                };
10307                CopyLegacyOption::StatUpdate(enabled)
10308            }
10309            Some(Keyword::TIMEFORMAT) => {
10310                let _ = self.parse_keyword(Keyword::AS);
10311                let fmt = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) {
10312                    Some(self.parse_literal_string()?)
10313                } else {
10314                    None
10315                };
10316                CopyLegacyOption::TimeFormat(fmt)
10317            }
10318            Some(Keyword::TRUNCATECOLUMNS) => CopyLegacyOption::TruncateColumns,
10319            Some(Keyword::ZSTD) => CopyLegacyOption::Zstd,
10320            _ => self.expected("option", self.peek_token())?,
10321        };
10322        Ok(ret)
10323    }
10324
10325    fn parse_file_size(&mut self) -> Result<FileSize, ParserError> {
10326        let size = self.parse_number_value()?.value;
10327        let unit = self.maybe_parse_file_size_unit();
10328        Ok(FileSize { size, unit })
10329    }
10330
10331    fn maybe_parse_file_size_unit(&mut self) -> Option<FileSizeUnit> {
10332        match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
10333            Some(Keyword::MB) => Some(FileSizeUnit::MB),
10334            Some(Keyword::GB) => Some(FileSizeUnit::GB),
10335            _ => None,
10336        }
10337    }
10338
10339    fn parse_iam_role_kind(&mut self) -> Result<IamRoleKind, ParserError> {
10340        if self.parse_keyword(Keyword::DEFAULT) {
10341            Ok(IamRoleKind::Default)
10342        } else {
10343            let arn = self.parse_literal_string()?;
10344            Ok(IamRoleKind::Arn(arn))
10345        }
10346    }
10347
10348    fn parse_copy_legacy_csv_option(&mut self) -> Result<CopyLegacyCsvOption, ParserError> {
10349        let ret = match self.parse_one_of_keywords(&[
10350            Keyword::HEADER,
10351            Keyword::QUOTE,
10352            Keyword::ESCAPE,
10353            Keyword::FORCE,
10354        ]) {
10355            Some(Keyword::HEADER) => CopyLegacyCsvOption::Header,
10356            Some(Keyword::QUOTE) => {
10357                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
10358                CopyLegacyCsvOption::Quote(self.parse_literal_char()?)
10359            }
10360            Some(Keyword::ESCAPE) => {
10361                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
10362                CopyLegacyCsvOption::Escape(self.parse_literal_char()?)
10363            }
10364            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) => {
10365                CopyLegacyCsvOption::ForceNotNull(
10366                    self.parse_comma_separated(|p| p.parse_identifier())?,
10367                )
10368            }
10369            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::QUOTE]) => {
10370                CopyLegacyCsvOption::ForceQuote(
10371                    self.parse_comma_separated(|p| p.parse_identifier())?,
10372                )
10373            }
10374            _ => self.expected("csv option", self.peek_token())?,
10375        };
10376        Ok(ret)
10377    }
10378
10379    fn parse_literal_char(&mut self) -> Result<char, ParserError> {
10380        let s = self.parse_literal_string()?;
10381        if s.len() != 1 {
10382            let loc = self
10383                .tokens
10384                .get(self.index - 1)
10385                .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
10386            return parser_err!(format!("Expect a char, found {s:?}"), loc);
10387        }
10388        Ok(s.chars().next().unwrap())
10389    }
10390
10391    /// Parse tab-separated values in a
10392    /// COPY payload
10393    pub fn parse_tsv(&mut self) -> Vec<Option<String>> {
10394        self.parse_tab_value()
10395    }
10396
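    /// Parse the raw tab-separated payload that follows `COPY ... FROM STDIN;`.
    /// Values are separated by tabs or newlines, `\N` is read as a NULL value
    /// (`None`), and a `\.` sequence terminates the payload.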
10397    pub fn parse_tab_value(&mut self) -> Vec<Option<String>> {
10398        let mut values = vec![];
10399        let mut content = String::from("");
10400        while let Some(t) = self.next_token_no_skip().map(|t| &t.token) {
10401            match t {
10402                Token::Whitespace(Whitespace::Tab) => {
10403                    values.push(Some(content.to_string()));
10404                    content.clear();
10405                }
10406                Token::Whitespace(Whitespace::Newline) => {
10407                    values.push(Some(content.to_string()));
10408                    content.clear();
10409                }
10410                Token::Backslash => {
10411                    if self.consume_token(&Token::Period) {
10412                        return values;
10413                    }
10414                    if let Token::Word(w) = self.next_token().token {
10415                        if w.value == "N" {
10416                            values.push(None);
10417                        }
10418                    }
10419                }
10420                _ => {
10421                    content.push_str(&t.to_string());
10422                }
10423            }
10424        }
10425        values
10426    }
10427
10428    /// Parse a literal value (numbers, strings, date/time, booleans)
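    ///
    /// A minimal usage sketch, illustrative only (assumes `Parser::new` and
    /// `Parser::try_with_sql`; not compiled as a doctest):
    ///
    /// ```ignore
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("42").unwrap();
    /// // The returned wrapper carries the parsed `Value::Number` plus its source span.
    /// let value = parser.parse_value().unwrap();
    /// ```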
10429    pub fn parse_value(&mut self) -> Result<ValueWithSpan, ParserError> {
10430        let next_token = self.next_token();
10431        let span = next_token.span;
10432        let ok_value = |value: Value| Ok(value.with_span(span));
10433        match next_token.token {
10434            Token::Word(w) => match w.keyword {
10435                Keyword::TRUE if self.dialect.supports_boolean_literals() => {
10436                    ok_value(Value::Boolean(true))
10437                }
10438                Keyword::FALSE if self.dialect.supports_boolean_literals() => {
10439                    ok_value(Value::Boolean(false))
10440                }
10441                Keyword::NULL => ok_value(Value::Null),
10442                Keyword::NoKeyword if w.quote_style.is_some() => match w.quote_style {
10443                    Some('"') => ok_value(Value::DoubleQuotedString(w.value)),
10444                    Some('\'') => ok_value(Value::SingleQuotedString(w.value)),
10445                    _ => self.expected(
10446                        "A value?",
10447                        TokenWithSpan {
10448                            token: Token::Word(w),
10449                            span,
10450                        },
10451                    )?,
10452                },
10453                _ => self.expected(
10454                    "a concrete value",
10455                    TokenWithSpan {
10456                        token: Token::Word(w),
10457                        span,
10458                    },
10459                ),
10460            },
10461            // The call to n.parse() returns a bigdecimal when the
10462            // bigdecimal feature is enabled, and is otherwise a no-op
10463            // (i.e., it returns the input string).
10464            Token::Number(n, l) => ok_value(Value::Number(Self::parse(n, span.start)?, l)),
10465            Token::SingleQuotedString(ref s) => ok_value(Value::SingleQuotedString(
10466                self.maybe_concat_string_literal(s.to_string()),
10467            )),
10468            Token::DoubleQuotedString(ref s) => ok_value(Value::DoubleQuotedString(
10469                self.maybe_concat_string_literal(s.to_string()),
10470            )),
10471            Token::TripleSingleQuotedString(ref s) => {
10472                ok_value(Value::TripleSingleQuotedString(s.to_string()))
10473            }
10474            Token::TripleDoubleQuotedString(ref s) => {
10475                ok_value(Value::TripleDoubleQuotedString(s.to_string()))
10476            }
10477            Token::DollarQuotedString(ref s) => ok_value(Value::DollarQuotedString(s.clone())),
10478            Token::SingleQuotedByteStringLiteral(ref s) => {
10479                ok_value(Value::SingleQuotedByteStringLiteral(s.clone()))
10480            }
10481            Token::DoubleQuotedByteStringLiteral(ref s) => {
10482                ok_value(Value::DoubleQuotedByteStringLiteral(s.clone()))
10483            }
10484            Token::TripleSingleQuotedByteStringLiteral(ref s) => {
10485                ok_value(Value::TripleSingleQuotedByteStringLiteral(s.clone()))
10486            }
10487            Token::TripleDoubleQuotedByteStringLiteral(ref s) => {
10488                ok_value(Value::TripleDoubleQuotedByteStringLiteral(s.clone()))
10489            }
10490            Token::SingleQuotedRawStringLiteral(ref s) => {
10491                ok_value(Value::SingleQuotedRawStringLiteral(s.clone()))
10492            }
10493            Token::DoubleQuotedRawStringLiteral(ref s) => {
10494                ok_value(Value::DoubleQuotedRawStringLiteral(s.clone()))
10495            }
10496            Token::TripleSingleQuotedRawStringLiteral(ref s) => {
10497                ok_value(Value::TripleSingleQuotedRawStringLiteral(s.clone()))
10498            }
10499            Token::TripleDoubleQuotedRawStringLiteral(ref s) => {
10500                ok_value(Value::TripleDoubleQuotedRawStringLiteral(s.clone()))
10501            }
10502            Token::NationalStringLiteral(ref s) => {
10503                ok_value(Value::NationalStringLiteral(s.to_string()))
10504            }
10505            Token::EscapedStringLiteral(ref s) => {
10506                ok_value(Value::EscapedStringLiteral(s.to_string()))
10507            }
10508            Token::UnicodeStringLiteral(ref s) => {
10509                ok_value(Value::UnicodeStringLiteral(s.to_string()))
10510            }
10511            Token::HexStringLiteral(ref s) => ok_value(Value::HexStringLiteral(s.to_string())),
10512            Token::Placeholder(ref s) => ok_value(Value::Placeholder(s.to_string())),
10513            tok @ Token::Colon | tok @ Token::AtSign => {
10514                // 1. Not calling self.parse_identifier(false)?
10515                //    because only for placeholders do we want to accept
10516                //    numbers as identifiers; this is because Snowflake
10517                //    allows numbers as placeholders.
10518                // 2. Not calling self.next_token() to enforce `tok`
10519                //    be followed immediately by a word/number, ie.
10520                //    without any whitespace in between
10521                let next_token = self.next_token_no_skip().unwrap_or(&EOF_TOKEN).clone();
10522                let ident = match next_token.token {
10523                    Token::Word(w) => Ok(w.into_ident(next_token.span)),
10524                    Token::Number(w, false) => Ok(Ident::with_span(next_token.span, w)),
10525                    _ => self.expected("placeholder", next_token),
10526                }?;
10527                Ok(Value::Placeholder(tok.to_string() + &ident.value)
10528                    .with_span(Span::new(span.start, ident.span.end)))
10529            }
10530            unexpected => self.expected(
10531                "a value",
10532                TokenWithSpan {
10533                    token: unexpected,
10534                    span,
10535                },
10536            ),
10537        }
10538    }
10539
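    /// If the dialect supports implicit string literal concatenation, append any
    /// immediately following string literals to `str`, so that e.g. the two
    /// literals in `'foo' 'bar'` are read back as the single string `foobar`.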
10540    fn maybe_concat_string_literal(&mut self, mut str: String) -> String {
10541        if self.dialect.supports_string_literal_concatenation() {
10542            while let Token::SingleQuotedString(ref s) | Token::DoubleQuotedString(ref s) =
10543                self.peek_token_ref().token
10544            {
10545                str.push_str(s.clone().as_str());
10546                self.advance_token();
10547            }
10548        }
10549        str
10550    }
10551
10552    /// Parse an unsigned numeric literal
10553    pub fn parse_number_value(&mut self) -> Result<ValueWithSpan, ParserError> {
10554        let value_wrapper = self.parse_value()?;
10555        match &value_wrapper.value {
10556            Value::Number(_, _) => Ok(value_wrapper),
10557            Value::Placeholder(_) => Ok(value_wrapper),
10558            _ => {
10559                self.prev_token();
10560                self.expected("literal number", self.peek_token())
10561            }
10562        }
10563    }
10564
10565    /// Parse a numeric literal as an expression. Returns an [`Expr::UnaryOp`] if the number is signed,
10566    /// otherwise returns an [`Expr::Value`].
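    ///
    /// A minimal usage sketch, illustrative only (assumes `Parser::new` and
    /// `Parser::try_with_sql`; not compiled as a doctest):
    ///
    /// ```ignore
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// // "-5" is returned as a unary minus wrapping the numeric literal 5.
    /// let expr = Parser::new(&dialect)
    ///     .try_with_sql("-5")
    ///     .unwrap()
    ///     .parse_number()
    ///     .unwrap();
    /// ```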
10567    pub fn parse_number(&mut self) -> Result<Expr, ParserError> {
10568        let next_token = self.next_token();
10569        match next_token.token {
10570            Token::Plus => Ok(Expr::UnaryOp {
10571                op: UnaryOperator::Plus,
10572                expr: Box::new(Expr::Value(self.parse_number_value()?)),
10573            }),
10574            Token::Minus => Ok(Expr::UnaryOp {
10575                op: UnaryOperator::Minus,
10576                expr: Box::new(Expr::Value(self.parse_number_value()?)),
10577            }),
10578            _ => {
10579                self.prev_token();
10580                Ok(Expr::Value(self.parse_number_value()?))
10581            }
10582        }
10583    }
10584
10585    fn parse_introduced_string_expr(&mut self) -> Result<Expr, ParserError> {
10586        let next_token = self.next_token();
10587        let span = next_token.span;
10588        match next_token.token {
10589            Token::SingleQuotedString(ref s) => Ok(Expr::Value(
10590                Value::SingleQuotedString(s.to_string()).with_span(span),
10591            )),
10592            Token::DoubleQuotedString(ref s) => Ok(Expr::Value(
10593                Value::DoubleQuotedString(s.to_string()).with_span(span),
10594            )),
10595            Token::HexStringLiteral(ref s) => Ok(Expr::Value(
10596                Value::HexStringLiteral(s.to_string()).with_span(span),
10597            )),
10598            unexpected => self.expected(
10599                "a string value",
10600                TokenWithSpan {
10601                    token: unexpected,
10602                    span,
10603                },
10604            ),
10605        }
10606    }
10607
10608    /// Parse an unsigned literal integer/long
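    ///
    /// A minimal sketch (illustrative input, using the `GenericDialect`):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("42").unwrap();
    /// assert_eq!(parser.parse_literal_uint().unwrap(), 42);
    /// ```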
10609    pub fn parse_literal_uint(&mut self) -> Result<u64, ParserError> {
10610        let next_token = self.next_token();
10611        match next_token.token {
10612            Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start),
10613            _ => self.expected("literal int", next_token),
10614        }
10615    }
10616
10617    /// Parse the body of a `CREATE FUNCTION` specified as a string.
10618    /// e.g. `CREATE FUNCTION ... AS $$ body $$`.
10619    fn parse_create_function_body_string(&mut self) -> Result<CreateFunctionBody, ParserError> {
10620        let parse_string_expr = |parser: &mut Parser| -> Result<Expr, ParserError> {
10621            let peek_token = parser.peek_token();
10622            let span = peek_token.span;
10623            match peek_token.token {
10624                Token::DollarQuotedString(s) if dialect_of!(parser is PostgreSqlDialect | GenericDialect) =>
10625                {
10626                    parser.next_token();
10627                    Ok(Expr::Value(Value::DollarQuotedString(s).with_span(span)))
10628                }
10629                _ => Ok(Expr::Value(
10630                    Value::SingleQuotedString(parser.parse_literal_string()?).with_span(span),
10631                )),
10632            }
10633        };
10634
10635        Ok(CreateFunctionBody::AsBeforeOptions {
10636            body: parse_string_expr(self)?,
10637            link_symbol: if self.consume_token(&Token::Comma) {
10638                Some(parse_string_expr(self)?)
10639            } else {
10640                None
10641            },
10642        })
10643    }
10644
10645    /// Parse a literal string
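    ///
    /// A minimal sketch (illustrative input, using the `GenericDialect`); quoted strings and
    /// unquoted non-keyword words are both accepted:
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("'hello'").unwrap();
    /// assert_eq!(parser.parse_literal_string().unwrap(), "hello");
    /// ```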
10646    pub fn parse_literal_string(&mut self) -> Result<String, ParserError> {
10647        let next_token = self.next_token();
10648        match next_token.token {
10649            Token::Word(Word {
10650                value,
10651                keyword: Keyword::NoKeyword,
10652                ..
10653            }) => Ok(value),
10654            Token::SingleQuotedString(s) => Ok(s),
10655            Token::DoubleQuotedString(s) => Ok(s),
10656            Token::EscapedStringLiteral(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
10657                Ok(s)
10658            }
10659            Token::UnicodeStringLiteral(s) => Ok(s),
10660            _ => self.expected("literal string", next_token),
10661        }
10662    }
10663
10664    /// Parse a boolean value (the `TRUE` or `FALSE` keyword)
10665    pub(crate) fn parse_boolean_string(&mut self) -> Result<bool, ParserError> {
10666        match self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]) {
10667            Some(Keyword::TRUE) => Ok(true),
10668            Some(Keyword::FALSE) => Ok(false),
10669            _ => self.expected("TRUE or FALSE", self.peek_token()),
10670        }
10671    }
10672
10673    /// Parse a Unicode normalization clause, e.g. `[NOT] NFC NORMALIZED`, producing an [`Expr::IsNormalized`]
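    ///
    /// A minimal sketch (illustrative input, using the `GenericDialect`); the caller is assumed
    /// to have already parsed the expression and the `IS` keyword, so only the remainder of the
    /// clause is fed to the parser here:
    ///
    /// ```rust
    /// use sqlparser::ast::{Expr, Ident};
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("NFC NORMALIZED").unwrap();
    /// let expr = Expr::Identifier(Ident::new("x"));
    /// assert!(matches!(
    ///     parser.parse_unicode_is_normalized(expr).unwrap(),
    ///     Expr::IsNormalized { .. }
    /// ));
    /// ```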
10674    pub fn parse_unicode_is_normalized(&mut self, expr: Expr) -> Result<Expr, ParserError> {
10675        let neg = self.parse_keyword(Keyword::NOT);
10676        let normalized_form = self.maybe_parse(|parser| {
10677            match parser.parse_one_of_keywords(&[
10678                Keyword::NFC,
10679                Keyword::NFD,
10680                Keyword::NFKC,
10681                Keyword::NFKD,
10682            ]) {
10683                Some(Keyword::NFC) => Ok(NormalizationForm::NFC),
10684                Some(Keyword::NFD) => Ok(NormalizationForm::NFD),
10685                Some(Keyword::NFKC) => Ok(NormalizationForm::NFKC),
10686                Some(Keyword::NFKD) => Ok(NormalizationForm::NFKD),
10687                _ => parser.expected("unicode normalization form", parser.peek_token()),
10688            }
10689        })?;
10690        if self.parse_keyword(Keyword::NORMALIZED) {
10691            return Ok(Expr::IsNormalized {
10692                expr: Box::new(expr),
10693                form: normalized_form,
10694                negated: neg,
10695            });
10696        }
10697        self.expected("unicode normalization form", self.peek_token())
10698    }
10699
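    /// Parse a parenthesized, comma-separated list of enum members,
    /// e.g. `('a', 'b')` or `('a' = 1, 'b' = 2)`.
    ///
    /// A minimal sketch (illustrative input, using the `GenericDialect`):
    ///
    /// ```rust
    /// use sqlparser::ast::EnumMember;
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("('a', 'b')").unwrap();
    /// assert_eq!(
    ///     parser.parse_enum_values().unwrap(),
    ///     vec![
    ///         EnumMember::Name("a".to_string()),
    ///         EnumMember::Name("b".to_string()),
    ///     ]
    /// );
    /// ```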
10700    pub fn parse_enum_values(&mut self) -> Result<Vec<EnumMember>, ParserError> {
10701        self.expect_token(&Token::LParen)?;
10702        let values = self.parse_comma_separated(|parser| {
10703            let name = parser.parse_literal_string()?;
10704            let e = if parser.consume_token(&Token::Eq) {
10705                let value = parser.parse_number()?;
10706                EnumMember::NamedValue(name, value)
10707            } else {
10708                EnumMember::Name(name)
10709            };
10710            Ok(e)
10711        })?;
10712        self.expect_token(&Token::RParen)?;
10713
10714        Ok(values)
10715    }
10716
10717    /// Parse a SQL datatype (in the context of a CREATE TABLE statement for example)
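    ///
    /// A minimal sketch (illustrative input, using the `GenericDialect`):
    ///
    /// ```rust
    /// use sqlparser::ast::DataType;
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("BOOLEAN").unwrap();
    /// assert_eq!(parser.parse_data_type().unwrap(), DataType::Boolean);
    /// ```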
10718    pub fn parse_data_type(&mut self) -> Result<DataType, ParserError> {
10719        let (ty, trailing_bracket) = self.parse_data_type_helper()?;
10720        if trailing_bracket.0 {
10721            return parser_err!(
10722                format!("unmatched > after parsing data type {ty}"),
10723                self.peek_token()
10724            );
10725        }
10726
10727        Ok(ty)
10728    }
10729
10730    fn parse_data_type_helper(
10731        &mut self,
10732    ) -> Result<(DataType, MatchedTrailingBracket), ParserError> {
10733        let dialect = self.dialect;
10734        self.advance_token();
10735        let next_token = self.get_current_token();
10736        let next_token_index = self.get_current_index();
10737
10738        let mut trailing_bracket: MatchedTrailingBracket = false.into();
10739        let mut data = match &next_token.token {
10740            Token::Word(w) => match w.keyword {
10741                Keyword::BOOLEAN => Ok(DataType::Boolean),
10742                Keyword::BOOL => Ok(DataType::Bool),
10743                Keyword::FLOAT => {
10744                    let precision = self.parse_exact_number_optional_precision_scale()?;
10745
10746                    if self.parse_keyword(Keyword::UNSIGNED) {
10747                        Ok(DataType::FloatUnsigned(precision))
10748                    } else {
10749                        Ok(DataType::Float(precision))
10750                    }
10751                }
10752                Keyword::REAL => {
10753                    if self.parse_keyword(Keyword::UNSIGNED) {
10754                        Ok(DataType::RealUnsigned)
10755                    } else {
10756                        Ok(DataType::Real)
10757                    }
10758                }
10759                Keyword::FLOAT4 => Ok(DataType::Float4),
10760                Keyword::FLOAT32 => Ok(DataType::Float32),
10761                Keyword::FLOAT64 => Ok(DataType::Float64),
10762                Keyword::FLOAT8 => Ok(DataType::Float8),
10763                Keyword::DOUBLE => {
10764                    if self.parse_keyword(Keyword::PRECISION) {
10765                        if self.parse_keyword(Keyword::UNSIGNED) {
10766                            Ok(DataType::DoublePrecisionUnsigned)
10767                        } else {
10768                            Ok(DataType::DoublePrecision)
10769                        }
10770                    } else {
10771                        let precision = self.parse_exact_number_optional_precision_scale()?;
10772
10773                        if self.parse_keyword(Keyword::UNSIGNED) {
10774                            Ok(DataType::DoubleUnsigned(precision))
10775                        } else {
10776                            Ok(DataType::Double(precision))
10777                        }
10778                    }
10779                }
10780                Keyword::TINYINT => {
10781                    let optional_precision = self.parse_optional_precision();
10782                    if self.parse_keyword(Keyword::UNSIGNED) {
10783                        Ok(DataType::TinyIntUnsigned(optional_precision?))
10784                    } else {
10785                        if dialect.supports_data_type_signed_suffix() {
10786                            let _ = self.parse_keyword(Keyword::SIGNED);
10787                        }
10788                        Ok(DataType::TinyInt(optional_precision?))
10789                    }
10790                }
10791                Keyword::INT2 => {
10792                    let optional_precision = self.parse_optional_precision();
10793                    if self.parse_keyword(Keyword::UNSIGNED) {
10794                        Ok(DataType::Int2Unsigned(optional_precision?))
10795                    } else {
10796                        Ok(DataType::Int2(optional_precision?))
10797                    }
10798                }
10799                Keyword::SMALLINT => {
10800                    let optional_precision = self.parse_optional_precision();
10801                    if self.parse_keyword(Keyword::UNSIGNED) {
10802                        Ok(DataType::SmallIntUnsigned(optional_precision?))
10803                    } else {
10804                        if dialect.supports_data_type_signed_suffix() {
10805                            let _ = self.parse_keyword(Keyword::SIGNED);
10806                        }
10807                        Ok(DataType::SmallInt(optional_precision?))
10808                    }
10809                }
10810                Keyword::MEDIUMINT => {
10811                    let optional_precision = self.parse_optional_precision();
10812                    if self.parse_keyword(Keyword::UNSIGNED) {
10813                        Ok(DataType::MediumIntUnsigned(optional_precision?))
10814                    } else {
10815                        if dialect.supports_data_type_signed_suffix() {
10816                            let _ = self.parse_keyword(Keyword::SIGNED);
10817                        }
10818                        Ok(DataType::MediumInt(optional_precision?))
10819                    }
10820                }
10821                Keyword::INT => {
10822                    let optional_precision = self.parse_optional_precision();
10823                    if self.parse_keyword(Keyword::UNSIGNED) {
10824                        Ok(DataType::IntUnsigned(optional_precision?))
10825                    } else {
10826                        if dialect.supports_data_type_signed_suffix() {
10827                            let _ = self.parse_keyword(Keyword::SIGNED);
10828                        }
10829                        Ok(DataType::Int(optional_precision?))
10830                    }
10831                }
10832                Keyword::INT4 => {
10833                    let optional_precision = self.parse_optional_precision();
10834                    if self.parse_keyword(Keyword::UNSIGNED) {
10835                        Ok(DataType::Int4Unsigned(optional_precision?))
10836                    } else {
10837                        Ok(DataType::Int4(optional_precision?))
10838                    }
10839                }
10840                Keyword::INT8 => {
10841                    let optional_precision = self.parse_optional_precision();
10842                    if self.parse_keyword(Keyword::UNSIGNED) {
10843                        Ok(DataType::Int8Unsigned(optional_precision?))
10844                    } else {
10845                        Ok(DataType::Int8(optional_precision?))
10846                    }
10847                }
10848                Keyword::INT16 => Ok(DataType::Int16),
10849                Keyword::INT32 => Ok(DataType::Int32),
10850                Keyword::INT64 => Ok(DataType::Int64),
10851                Keyword::INT128 => Ok(DataType::Int128),
10852                Keyword::INT256 => Ok(DataType::Int256),
10853                Keyword::INTEGER => {
10854                    let optional_precision = self.parse_optional_precision();
10855                    if self.parse_keyword(Keyword::UNSIGNED) {
10856                        Ok(DataType::IntegerUnsigned(optional_precision?))
10857                    } else {
10858                        if dialect.supports_data_type_signed_suffix() {
10859                            let _ = self.parse_keyword(Keyword::SIGNED);
10860                        }
10861                        Ok(DataType::Integer(optional_precision?))
10862                    }
10863                }
10864                Keyword::BIGINT => {
10865                    let optional_precision = self.parse_optional_precision();
10866                    if self.parse_keyword(Keyword::UNSIGNED) {
10867                        Ok(DataType::BigIntUnsigned(optional_precision?))
10868                    } else {
10869                        if dialect.supports_data_type_signed_suffix() {
10870                            let _ = self.parse_keyword(Keyword::SIGNED);
10871                        }
10872                        Ok(DataType::BigInt(optional_precision?))
10873                    }
10874                }
10875                Keyword::HUGEINT => Ok(DataType::HugeInt),
10876                Keyword::UBIGINT => Ok(DataType::UBigInt),
10877                Keyword::UHUGEINT => Ok(DataType::UHugeInt),
10878                Keyword::USMALLINT => Ok(DataType::USmallInt),
10879                Keyword::UTINYINT => Ok(DataType::UTinyInt),
10880                Keyword::UINT8 => Ok(DataType::UInt8),
10881                Keyword::UINT16 => Ok(DataType::UInt16),
10882                Keyword::UINT32 => Ok(DataType::UInt32),
10883                Keyword::UINT64 => Ok(DataType::UInt64),
10884                Keyword::UINT128 => Ok(DataType::UInt128),
10885                Keyword::UINT256 => Ok(DataType::UInt256),
10886                Keyword::VARCHAR => Ok(DataType::Varchar(self.parse_optional_character_length()?)),
10887                Keyword::NVARCHAR => {
10888                    Ok(DataType::Nvarchar(self.parse_optional_character_length()?))
10889                }
10890                Keyword::CHARACTER => {
10891                    if self.parse_keyword(Keyword::VARYING) {
10892                        Ok(DataType::CharacterVarying(
10893                            self.parse_optional_character_length()?,
10894                        ))
10895                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
10896                        Ok(DataType::CharacterLargeObject(
10897                            self.parse_optional_precision()?,
10898                        ))
10899                    } else {
10900                        Ok(DataType::Character(self.parse_optional_character_length()?))
10901                    }
10902                }
10903                Keyword::CHAR => {
10904                    if self.parse_keyword(Keyword::VARYING) {
10905                        Ok(DataType::CharVarying(
10906                            self.parse_optional_character_length()?,
10907                        ))
10908                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
10909                        Ok(DataType::CharLargeObject(self.parse_optional_precision()?))
10910                    } else {
10911                        Ok(DataType::Char(self.parse_optional_character_length()?))
10912                    }
10913                }
10914                Keyword::CLOB => Ok(DataType::Clob(self.parse_optional_precision()?)),
10915                Keyword::BINARY => Ok(DataType::Binary(self.parse_optional_precision()?)),
10916                Keyword::VARBINARY => Ok(DataType::Varbinary(self.parse_optional_binary_length()?)),
10917                Keyword::BLOB => Ok(DataType::Blob(self.parse_optional_precision()?)),
10918                Keyword::TINYBLOB => Ok(DataType::TinyBlob),
10919                Keyword::MEDIUMBLOB => Ok(DataType::MediumBlob),
10920                Keyword::LONGBLOB => Ok(DataType::LongBlob),
10921                Keyword::BYTES => Ok(DataType::Bytes(self.parse_optional_precision()?)),
10922                Keyword::BIT => {
10923                    if self.parse_keyword(Keyword::VARYING) {
10924                        Ok(DataType::BitVarying(self.parse_optional_precision()?))
10925                    } else {
10926                        Ok(DataType::Bit(self.parse_optional_precision()?))
10927                    }
10928                }
10929                Keyword::VARBIT => Ok(DataType::VarBit(self.parse_optional_precision()?)),
10930                Keyword::UUID => Ok(DataType::Uuid),
10931                Keyword::DATE => Ok(DataType::Date),
10932                Keyword::DATE32 => Ok(DataType::Date32),
10933                Keyword::DATETIME => Ok(DataType::Datetime(self.parse_optional_precision()?)),
10934                Keyword::DATETIME64 => {
10935                    self.prev_token();
10936                    let (precision, time_zone) = self.parse_datetime_64()?;
10937                    Ok(DataType::Datetime64(precision, time_zone))
10938                }
10939                Keyword::TIMESTAMP => {
10940                    let precision = self.parse_optional_precision()?;
10941                    let tz = if self.parse_keyword(Keyword::WITH) {
10942                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
10943                        TimezoneInfo::WithTimeZone
10944                    } else if self.parse_keyword(Keyword::WITHOUT) {
10945                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
10946                        TimezoneInfo::WithoutTimeZone
10947                    } else {
10948                        TimezoneInfo::None
10949                    };
10950                    Ok(DataType::Timestamp(precision, tz))
10951                }
10952                Keyword::TIMESTAMPTZ => Ok(DataType::Timestamp(
10953                    self.parse_optional_precision()?,
10954                    TimezoneInfo::Tz,
10955                )),
10956                Keyword::TIMESTAMP_NTZ => {
10957                    Ok(DataType::TimestampNtz(self.parse_optional_precision()?))
10958                }
10959                Keyword::TIME => {
10960                    let precision = self.parse_optional_precision()?;
10961                    let tz = if self.parse_keyword(Keyword::WITH) {
10962                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
10963                        TimezoneInfo::WithTimeZone
10964                    } else if self.parse_keyword(Keyword::WITHOUT) {
10965                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
10966                        TimezoneInfo::WithoutTimeZone
10967                    } else {
10968                        TimezoneInfo::None
10969                    };
10970                    Ok(DataType::Time(precision, tz))
10971                }
10972                Keyword::TIMETZ => Ok(DataType::Time(
10973                    self.parse_optional_precision()?,
10974                    TimezoneInfo::Tz,
10975                )),
10976                Keyword::INTERVAL => {
10977                    if self.dialect.supports_interval_options() {
10978                        let fields = self.maybe_parse_optional_interval_fields()?;
10979                        let precision = self.parse_optional_precision()?;
10980                        Ok(DataType::Interval { fields, precision })
10981                    } else {
10982                        Ok(DataType::Interval {
10983                            fields: None,
10984                            precision: None,
10985                        })
10986                    }
10987                }
10988                Keyword::JSON => Ok(DataType::JSON),
10989                Keyword::JSONB => Ok(DataType::JSONB),
10990                Keyword::REGCLASS => Ok(DataType::Regclass),
10991                Keyword::STRING => Ok(DataType::String(self.parse_optional_precision()?)),
10992                Keyword::FIXEDSTRING => {
10993                    self.expect_token(&Token::LParen)?;
10994                    let character_length = self.parse_literal_uint()?;
10995                    self.expect_token(&Token::RParen)?;
10996                    Ok(DataType::FixedString(character_length))
10997                }
10998                Keyword::TEXT => Ok(DataType::Text),
10999                Keyword::TINYTEXT => Ok(DataType::TinyText),
11000                Keyword::MEDIUMTEXT => Ok(DataType::MediumText),
11001                Keyword::LONGTEXT => Ok(DataType::LongText),
11002                Keyword::BYTEA => Ok(DataType::Bytea),
11003                Keyword::NUMERIC => Ok(DataType::Numeric(
11004                    self.parse_exact_number_optional_precision_scale()?,
11005                )),
11006                Keyword::DECIMAL => {
11007                    let precision = self.parse_exact_number_optional_precision_scale()?;
11008
11009                    if self.parse_keyword(Keyword::UNSIGNED) {
11010                        Ok(DataType::DecimalUnsigned(precision))
11011                    } else {
11012                        Ok(DataType::Decimal(precision))
11013                    }
11014                }
11015                Keyword::DEC => {
11016                    let precision = self.parse_exact_number_optional_precision_scale()?;
11017
11018                    if self.parse_keyword(Keyword::UNSIGNED) {
11019                        Ok(DataType::DecUnsigned(precision))
11020                    } else {
11021                        Ok(DataType::Dec(precision))
11022                    }
11023                }
11024                Keyword::BIGNUMERIC => Ok(DataType::BigNumeric(
11025                    self.parse_exact_number_optional_precision_scale()?,
11026                )),
11027                Keyword::BIGDECIMAL => Ok(DataType::BigDecimal(
11028                    self.parse_exact_number_optional_precision_scale()?,
11029                )),
11030                Keyword::ENUM => Ok(DataType::Enum(self.parse_enum_values()?, None)),
11031                Keyword::ENUM8 => Ok(DataType::Enum(self.parse_enum_values()?, Some(8))),
11032                Keyword::ENUM16 => Ok(DataType::Enum(self.parse_enum_values()?, Some(16))),
11033                Keyword::SET => Ok(DataType::Set(self.parse_string_values()?)),
11034                Keyword::ARRAY => {
11035                    if dialect_of!(self is SnowflakeDialect) {
11036                        Ok(DataType::Array(ArrayElemTypeDef::None))
11037                    } else if dialect_of!(self is ClickHouseDialect) {
11038                        Ok(self.parse_sub_type(|internal_type| {
11039                            DataType::Array(ArrayElemTypeDef::Parenthesis(internal_type))
11040                        })?)
11041                    } else {
11042                        self.expect_token(&Token::Lt)?;
11043                        let (inside_type, _trailing_bracket) = self.parse_data_type_helper()?;
11044                        trailing_bracket = self.expect_closing_angle_bracket(_trailing_bracket)?;
11045                        Ok(DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
11046                            inside_type,
11047                        ))))
11048                    }
11049                }
11050                Keyword::STRUCT if dialect_is!(dialect is DuckDbDialect) => {
11051                    self.prev_token();
11052                    let field_defs = self.parse_duckdb_struct_type_def()?;
11053                    Ok(DataType::Struct(field_defs, StructBracketKind::Parentheses))
11054                }
11055                Keyword::STRUCT if dialect_is!(dialect is BigQueryDialect | GenericDialect) => {
11056                    self.prev_token();
11057                    let (field_defs, _trailing_bracket) =
11058                        self.parse_struct_type_def(Self::parse_struct_field_def)?;
11059                    trailing_bracket = _trailing_bracket;
11060                    Ok(DataType::Struct(
11061                        field_defs,
11062                        StructBracketKind::AngleBrackets,
11063                    ))
11064                }
11065                Keyword::UNION if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
11066                    self.prev_token();
11067                    let fields = self.parse_union_type_def()?;
11068                    Ok(DataType::Union(fields))
11069                }
11070                Keyword::NULLABLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11071                    Ok(self.parse_sub_type(DataType::Nullable)?)
11072                }
11073                Keyword::LOWCARDINALITY if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11074                    Ok(self.parse_sub_type(DataType::LowCardinality)?)
11075                }
11076                Keyword::MAP if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11077                    self.prev_token();
11078                    let (key_data_type, value_data_type) = self.parse_click_house_map_def()?;
11079                    Ok(DataType::Map(
11080                        Box::new(key_data_type),
11081                        Box::new(value_data_type),
11082                    ))
11083                }
11084                Keyword::NESTED if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11085                    self.expect_token(&Token::LParen)?;
11086                    let field_defs = self.parse_comma_separated(Parser::parse_column_def)?;
11087                    self.expect_token(&Token::RParen)?;
11088                    Ok(DataType::Nested(field_defs))
11089                }
11090                Keyword::TUPLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11091                    self.prev_token();
11092                    let field_defs = self.parse_click_house_tuple_def()?;
11093                    Ok(DataType::Tuple(field_defs))
11094                }
11095                Keyword::TRIGGER => Ok(DataType::Trigger),
11096                Keyword::ANY if self.peek_keyword(Keyword::TYPE) => {
11097                    let _ = self.parse_keyword(Keyword::TYPE);
11098                    Ok(DataType::AnyType)
11099                }
11100                Keyword::TABLE => {
11101                    // an LParen after the TABLE keyword indicates that table columns are being defined
11102                    // whereas no LParen indicates an anonymous table expression will be returned
11103                    if self.peek_token() == Token::LParen {
11104                        let columns = self.parse_returns_table_columns()?;
11105                        Ok(DataType::Table(Some(columns)))
11106                    } else {
11107                        Ok(DataType::Table(None))
11108                    }
11109                }
11110                Keyword::SIGNED => {
11111                    if self.parse_keyword(Keyword::INTEGER) {
11112                        Ok(DataType::SignedInteger)
11113                    } else {
11114                        Ok(DataType::Signed)
11115                    }
11116                }
11117                Keyword::UNSIGNED => {
11118                    if self.parse_keyword(Keyword::INTEGER) {
11119                        Ok(DataType::UnsignedInteger)
11120                    } else {
11121                        Ok(DataType::Unsigned)
11122                    }
11123                }
11124                Keyword::TSVECTOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
11125                    Ok(DataType::TsVector)
11126                }
11127                Keyword::TSQUERY if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
11128                    Ok(DataType::TsQuery)
11129                }
11130                _ => {
11131                    self.prev_token();
11132                    let type_name = self.parse_object_name(false)?;
11133                    if let Some(modifiers) = self.parse_optional_type_modifiers()? {
11134                        Ok(DataType::Custom(type_name, modifiers))
11135                    } else {
11136                        Ok(DataType::Custom(type_name, vec![]))
11137                    }
11138                }
11139            },
11140            _ => self.expected_at("a data type name", next_token_index),
11141        }?;
11142
11143        if self.dialect.supports_array_typedef_with_brackets() {
11144            while self.consume_token(&Token::LBracket) {
11145                // Parse optional array data type size
11146                let size = self.maybe_parse(|p| p.parse_literal_uint())?;
11147                self.expect_token(&Token::RBracket)?;
11148                data = DataType::Array(ArrayElemTypeDef::SquareBracket(Box::new(data), size))
11149            }
11150        }
11151        Ok((data, trailing_bracket))
11152    }
11153
11154    fn parse_returns_table_column(&mut self) -> Result<ColumnDef, ParserError> {
11155        self.parse_column_def()
11156    }
11157
11158    fn parse_returns_table_columns(&mut self) -> Result<Vec<ColumnDef>, ParserError> {
11159        self.expect_token(&Token::LParen)?;
11160        let columns = self.parse_comma_separated(Parser::parse_returns_table_column)?;
11161        self.expect_token(&Token::RParen)?;
11162        Ok(columns)
11163    }
11164
11165    pub fn parse_string_values(&mut self) -> Result<Vec<String>, ParserError> {
11166        self.expect_token(&Token::LParen)?;
11167        let mut values = Vec::new();
11168        loop {
11169            let next_token = self.next_token();
11170            match next_token.token {
11171                Token::SingleQuotedString(value) => values.push(value),
11172                _ => self.expected("a string", next_token)?,
11173            }
11174            let next_token = self.next_token();
11175            match next_token.token {
11176                Token::Comma => (),
11177                Token::RParen => break,
11178                _ => self.expected(", or )", next_token)?,
11179            }
11180        }
11181        Ok(values)
11182    }
11183
11184    /// Strictly parse `identifier AS identifier`
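    ///
    /// A minimal sketch (illustrative input, using the `GenericDialect`); it assumes the returned
    /// struct exposes its `ident` and `alias` fields:
    ///
    /// ```rust
    /// use sqlparser::ast::Ident;
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("foo AS bar").unwrap();
    /// let with_alias = parser.parse_identifier_with_alias().unwrap();
    /// assert_eq!(with_alias.ident, Ident::new("foo"));
    /// assert_eq!(with_alias.alias, Ident::new("bar"));
    /// ```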
11185    pub fn parse_identifier_with_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
11186        let ident = self.parse_identifier()?;
11187        self.expect_keyword_is(Keyword::AS)?;
11188        let alias = self.parse_identifier()?;
11189        Ok(IdentWithAlias { ident, alias })
11190    }
11191
11192    /// Parse `identifier [AS] identifier` where the AS keyword is optional
11193    fn parse_identifier_with_optional_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
11194        let ident = self.parse_identifier()?;
11195        let _after_as = self.parse_keyword(Keyword::AS);
11196        let alias = self.parse_identifier()?;
11197        Ok(IdentWithAlias { ident, alias })
11198    }
11199
11200    /// Parse comma-separated list of parenthesized queries for pipe operators
11201    fn parse_pipe_operator_queries(&mut self) -> Result<Vec<Query>, ParserError> {
11202        self.parse_comma_separated(|parser| {
11203            parser.expect_token(&Token::LParen)?;
11204            let query = parser.parse_query()?;
11205            parser.expect_token(&Token::RParen)?;
11206            Ok(*query)
11207        })
11208    }
11209
11210    /// Parse set quantifier for pipe operators that require DISTINCT. E.g. INTERSECT and EXCEPT
11211    fn parse_distinct_required_set_quantifier(
11212        &mut self,
11213        operator_name: &str,
11214    ) -> Result<SetQuantifier, ParserError> {
11215        let quantifier = self.parse_set_quantifier(&Some(SetOperator::Intersect));
11216        match quantifier {
11217            SetQuantifier::Distinct | SetQuantifier::DistinctByName => Ok(quantifier),
11218            _ => Err(ParserError::ParserError(format!(
11219                "{operator_name} pipe operator requires DISTINCT modifier",
11220            ))),
11221        }
11222    }
11223
11224    /// Parse optional identifier alias (with or without AS keyword)
11225    fn parse_identifier_optional_alias(&mut self) -> Result<Option<Ident>, ParserError> {
11226        if self.parse_keyword(Keyword::AS) {
11227            Ok(Some(self.parse_identifier()?))
11228        } else {
11229            // Check if the next token is an identifier (implicit alias)
11230            self.maybe_parse(|parser| parser.parse_identifier())
11231        }
11232    }
11233
11234    /// Optionally parses an alias for a select list item
11235    fn maybe_parse_select_item_alias(&mut self) -> Result<Option<Ident>, ParserError> {
11236        fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
11237            parser.dialect.is_select_item_alias(explicit, kw, parser)
11238        }
11239        self.parse_optional_alias_inner(None, validator)
11240    }
11241
11242    /// Optionally parses an alias for a table like in `... FROM generate_series(1, 10) AS t (col)`.
11243    /// In this case, the alias is allowed to optionally name the columns in the table, in
11244    /// addition to the table itself.
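    ///
    /// A minimal sketch (illustrative input, using the `GenericDialect`); it assumes the returned
    /// alias exposes its `name` and `columns` fields:
    ///
    /// ```rust
    /// use sqlparser::ast::Ident;
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("AS t (col)").unwrap();
    /// let alias = parser.maybe_parse_table_alias().unwrap().unwrap();
    /// assert_eq!(alias.name, Ident::new("t"));
    /// assert_eq!(alias.columns.len(), 1);
    /// ```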
11245    pub fn maybe_parse_table_alias(&mut self) -> Result<Option<TableAlias>, ParserError> {
11246        fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
11247            parser.dialect.is_table_factor_alias(explicit, kw, parser)
11248        }
11249        let explicit = self.peek_keyword(Keyword::AS);
11250        match self.parse_optional_alias_inner(None, validator)? {
11251            Some(name) => {
11252                let columns = self.parse_table_alias_column_defs()?;
11253                Ok(Some(TableAlias {
11254                    explicit,
11255                    name,
11256                    columns,
11257                }))
11258            }
11259            None => Ok(None),
11260        }
11261    }
11262
11263    fn parse_table_index_hints(&mut self) -> Result<Vec<TableIndexHints>, ParserError> {
11264        let mut hints = vec![];
11265        while let Some(hint_type) =
11266            self.parse_one_of_keywords(&[Keyword::USE, Keyword::IGNORE, Keyword::FORCE])
11267        {
11268            let hint_type = match hint_type {
11269                Keyword::USE => TableIndexHintType::Use,
11270                Keyword::IGNORE => TableIndexHintType::Ignore,
11271                Keyword::FORCE => TableIndexHintType::Force,
11272                _ => {
11273                    return self.expected(
11274                        "expected to match USE/IGNORE/FORCE keyword",
11275                        self.peek_token(),
11276                    )
11277                }
11278            };
11279            let index_type = match self.parse_one_of_keywords(&[Keyword::INDEX, Keyword::KEY]) {
11280                Some(Keyword::INDEX) => TableIndexType::Index,
11281                Some(Keyword::KEY) => TableIndexType::Key,
11282                _ => {
11283                    return self.expected("expected to match INDEX/KEY keyword", self.peek_token())
11284                }
11285            };
11286            let for_clause = if self.parse_keyword(Keyword::FOR) {
11287                let clause = if self.parse_keyword(Keyword::JOIN) {
11288                    TableIndexHintForClause::Join
11289                } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
11290                    TableIndexHintForClause::OrderBy
11291                } else if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
11292                    TableIndexHintForClause::GroupBy
11293                } else {
11294                    return self.expected(
11295                        "expected to match JOIN/ORDER BY/GROUP BY table hint in for clause",
11296                        self.peek_token(),
11297                    );
11298                };
11299                Some(clause)
11300            } else {
11301                None
11302            };
11303
11304            self.expect_token(&Token::LParen)?;
11305            let index_names = if self.peek_token().token != Token::RParen {
11306                self.parse_comma_separated(Parser::parse_identifier)?
11307            } else {
11308                vec![]
11309            };
11310            self.expect_token(&Token::RParen)?;
11311            hints.push(TableIndexHints {
11312                hint_type,
11313                index_type,
11314                for_clause,
11315                index_names,
11316            });
11317        }
11318        Ok(hints)
11319    }
11320
11321    /// Wrapper for parse_optional_alias_inner, left for backwards-compatibility
11322    /// but new flows should use the context-specific methods such as `maybe_parse_select_item_alias`
11323    /// and `maybe_parse_table_alias`.
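    ///
    /// A minimal sketch (illustrative input, using the `GenericDialect`):
    ///
    /// ```rust
    /// use sqlparser::ast::Ident;
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::keywords::Keyword;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("t1 my_alias").unwrap();
    /// let _table = parser.parse_identifier().unwrap();
    /// // `my_alias` is not in the reserved keyword list, so it is taken as an alias.
    /// let alias = parser.parse_optional_alias(&[Keyword::FROM]).unwrap();
    /// assert_eq!(alias, Some(Ident::new("my_alias")));
    /// ```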
11324    pub fn parse_optional_alias(
11325        &mut self,
11326        reserved_kwds: &[Keyword],
11327    ) -> Result<Option<Ident>, ParserError> {
11328        fn validator(_explicit: bool, _kw: &Keyword, _parser: &mut Parser) -> bool {
11329            false
11330        }
11331        self.parse_optional_alias_inner(Some(reserved_kwds), validator)
11332    }
11333
11334    /// Parses an optional alias after a SQL element such as a select list item
11335    /// or a table name.
11336    ///
11337    /// This method accepts an optional list of reserved keywords or a function
11338    /// to call to validate if a keyword should be parsed as an alias, to allow
11339    /// callers to customize the parsing logic based on their context.
11340    fn parse_optional_alias_inner<F>(
11341        &mut self,
11342        reserved_kwds: Option<&[Keyword]>,
11343        validator: F,
11344    ) -> Result<Option<Ident>, ParserError>
11345    where
11346        F: Fn(bool, &Keyword, &mut Parser) -> bool,
11347    {
11348        let after_as = self.parse_keyword(Keyword::AS);
11349
11350        let next_token = self.next_token();
11351        match next_token.token {
11352            // By default, if a word is located after the `AS` keyword we consider it an alias
11353            // as long as it's not reserved.
11354            Token::Word(w)
11355                if after_as || reserved_kwds.is_some_and(|x| !x.contains(&w.keyword)) =>
11356            {
11357                Ok(Some(w.into_ident(next_token.span)))
11358            }
11359            // This pattern allows for customizing the acceptance of words as aliases based on the caller's
11360            // context, such as to what SQL element this word is a potential alias of (select item alias, table name
11361            // alias, etc.) or dialect-specific logic that goes beyond a simple list of reserved keywords.
11362            Token::Word(w) if validator(after_as, &w.keyword, self) => {
11363                Ok(Some(w.into_ident(next_token.span)))
11364            }
11365            // For backwards-compatibility, we accept quoted strings as aliases regardless of the context.
11366            Token::SingleQuotedString(s) => Ok(Some(Ident::with_quote('\'', s))),
11367            Token::DoubleQuotedString(s) => Ok(Some(Ident::with_quote('\"', s))),
11368            _ => {
11369                if after_as {
11370                    return self.expected("an identifier after AS", next_token);
11371                }
11372                self.prev_token();
11373                Ok(None) // no alias found
11374            }
11375        }
11376    }
11377
11378    pub fn parse_optional_group_by(&mut self) -> Result<Option<GroupByExpr>, ParserError> {
11379        if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
11380            let expressions = if self.parse_keyword(Keyword::ALL) {
11381                None
11382            } else {
11383                Some(self.parse_comma_separated(Parser::parse_group_by_expr)?)
11384            };
11385
11386            let mut modifiers = vec![];
11387            if self.dialect.supports_group_by_with_modifier() {
11388                loop {
11389                    if !self.parse_keyword(Keyword::WITH) {
11390                        break;
11391                    }
11392                    let keyword = self.expect_one_of_keywords(&[
11393                        Keyword::ROLLUP,
11394                        Keyword::CUBE,
11395                        Keyword::TOTALS,
11396                    ])?;
11397                    modifiers.push(match keyword {
11398                        Keyword::ROLLUP => GroupByWithModifier::Rollup,
11399                        Keyword::CUBE => GroupByWithModifier::Cube,
11400                        Keyword::TOTALS => GroupByWithModifier::Totals,
11401                        _ => {
11402                            return parser_err!(
11403                                "BUG: expected to match GroupBy modifier keyword",
11404                                self.peek_token().span.start
11405                            )
11406                        }
11407                    });
11408                }
11409            }
11410            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
11411                self.expect_token(&Token::LParen)?;
11412                let result = self.parse_comma_separated(|p| {
11413                    if p.peek_token_ref().token == Token::LParen {
11414                        p.parse_tuple(true, true)
11415                    } else {
11416                        Ok(vec![p.parse_expr()?])
11417                    }
11418                })?;
11419                self.expect_token(&Token::RParen)?;
11420                modifiers.push(GroupByWithModifier::GroupingSets(Expr::GroupingSets(
11421                    result,
11422                )));
11423            };
11424            let group_by = match expressions {
11425                None => GroupByExpr::All(modifiers),
11426                Some(exprs) => GroupByExpr::Expressions(exprs, modifiers),
11427            };
11428            Ok(Some(group_by))
11429        } else {
11430            Ok(None)
11431        }
11432    }
11433
11434    pub fn parse_optional_order_by(&mut self) -> Result<Option<OrderBy>, ParserError> {
11435        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
11436            let order_by =
11437                if self.dialect.supports_order_by_all() && self.parse_keyword(Keyword::ALL) {
11438                    let order_by_options = self.parse_order_by_options()?;
11439                    OrderBy {
11440                        kind: OrderByKind::All(order_by_options),
11441                        interpolate: None,
11442                    }
11443                } else {
11444                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
11445                    let interpolate = if dialect_of!(self is ClickHouseDialect | GenericDialect) {
11446                        self.parse_interpolations()?
11447                    } else {
11448                        None
11449                    };
11450                    OrderBy {
11451                        kind: OrderByKind::Expressions(exprs),
11452                        interpolate,
11453                    }
11454                };
11455            Ok(Some(order_by))
11456        } else {
11457            Ok(None)
11458        }
11459    }
11460
11461    fn parse_optional_limit_clause(&mut self) -> Result<Option<LimitClause>, ParserError> {
11462        let mut offset = if self.parse_keyword(Keyword::OFFSET) {
11463            Some(self.parse_offset()?)
11464        } else {
11465            None
11466        };
11467
11468        let (limit, limit_by) = if self.parse_keyword(Keyword::LIMIT) {
11469            let expr = self.parse_limit()?;
11470
11471            if self.dialect.supports_limit_comma()
11472                && offset.is_none()
11473                && expr.is_some() // ALL not supported with comma
11474                && self.consume_token(&Token::Comma)
11475            {
11476                let offset = expr.ok_or_else(|| {
11477                    ParserError::ParserError(
11478                        "Missing offset for LIMIT <offset>, <limit>".to_string(),
11479                    )
11480                })?;
11481                return Ok(Some(LimitClause::OffsetCommaLimit {
11482                    offset,
11483                    limit: self.parse_expr()?,
11484                }));
11485            }
11486
11487            let limit_by = if dialect_of!(self is ClickHouseDialect | GenericDialect)
11488                && self.parse_keyword(Keyword::BY)
11489            {
11490                Some(self.parse_comma_separated(Parser::parse_expr)?)
11491            } else {
11492                None
11493            };
11494
11495            (Some(expr), limit_by)
11496        } else {
11497            (None, None)
11498        };
11499
11500        if offset.is_none() && limit.is_some() && self.parse_keyword(Keyword::OFFSET) {
11501            offset = Some(self.parse_offset()?);
11502        }
11503
11504        if offset.is_some() || (limit.is_some() && limit != Some(None)) || limit_by.is_some() {
11505            Ok(Some(LimitClause::LimitOffset {
11506                limit: limit.unwrap_or_default(),
11507                offset,
11508                limit_by: limit_by.unwrap_or_default(),
11509            }))
11510        } else {
11511            Ok(None)
11512        }
11513    }
11514
11515    /// Parse a table object for insertion
11516    /// e.g. `some_database.some_table` or `FUNCTION some_table_func(...)`
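    ///
    /// A minimal sketch (illustrative input, using the `GenericDialect`):
    ///
    /// ```rust
    /// use sqlparser::ast::TableObject;
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("some_database.some_table").unwrap();
    /// assert!(matches!(
    ///     parser.parse_table_object().unwrap(),
    ///     TableObject::TableName(_)
    /// ));
    /// ```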
11517    pub fn parse_table_object(&mut self) -> Result<TableObject, ParserError> {
11518        if self.dialect.supports_insert_table_function() && self.parse_keyword(Keyword::FUNCTION) {
11519            let fn_name = self.parse_object_name(false)?;
11520            self.parse_function_call(fn_name)
11521                .map(TableObject::TableFunction)
11522        } else {
11523            self.parse_object_name(false).map(TableObject::TableName)
11524        }
11525    }
11526
11527    /// Parse a possibly qualified, possibly quoted identifier, e.g.
11528    /// `foo` or `myschema."table"`
11529    ///
11530    /// The `in_table_clause` parameter indicates whether the object name is a table in a FROM, JOIN,
11531    /// or similar table clause. Currently, this is used only to support unquoted hyphenated identifiers
11532    /// in this context on BigQuery.
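    ///
    /// A minimal sketch (illustrative input, using the `GenericDialect`):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("myschema.\"table\"").unwrap();
    /// let name = parser.parse_object_name(false).unwrap();
    /// assert_eq!(name.to_string(), "myschema.\"table\"");
    /// ```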
11533    pub fn parse_object_name(&mut self, in_table_clause: bool) -> Result<ObjectName, ParserError> {
11534        self.parse_object_name_inner(in_table_clause, false)
11535    }
11536
11537    /// Parse a possibly qualified, possibly quoted identifier, e.g.
11538    /// `foo` or `myschema."table"`
11539    ///
11540    /// The `in_table_clause` parameter indicates whether the object name is a table in a FROM, JOIN,
11541    /// or similar table clause. Currently, this is used only to support unquoted hyphenated identifiers
11542    /// in this context on BigQuery.
11543    ///
11544    /// The `allow_wildcards` parameter indicates whether to allow for wildcards in the object name
11545    /// e.g. *, *.*, `foo`.*, or "foo"."bar"
11546    fn parse_object_name_inner(
11547        &mut self,
11548        in_table_clause: bool,
11549        allow_wildcards: bool,
11550    ) -> Result<ObjectName, ParserError> {
11551        let mut parts = vec![];
11552        if dialect_of!(self is BigQueryDialect) && in_table_clause {
11553            loop {
11554                let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
11555                parts.push(ObjectNamePart::Identifier(ident));
11556                if !self.consume_token(&Token::Period) && !end_with_period {
11557                    break;
11558                }
11559            }
11560        } else {
11561            loop {
11562                if allow_wildcards && self.peek_token().token == Token::Mul {
11563                    let span = self.next_token().span;
11564                    parts.push(ObjectNamePart::Identifier(Ident {
11565                        value: Token::Mul.to_string(),
11566                        quote_style: None,
11567                        span,
11568                    }));
11569                } else if dialect_of!(self is BigQueryDialect) && in_table_clause {
11570                    let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
11571                    parts.push(ObjectNamePart::Identifier(ident));
11572                    if !self.consume_token(&Token::Period) && !end_with_period {
11573                        break;
11574                    }
11575                } else if self.dialect.supports_object_name_double_dot_notation()
11576                    && parts.len() == 1
11577                    && matches!(self.peek_token().token, Token::Period)
11578                {
11579                    // Empty string here means default schema
11580                    parts.push(ObjectNamePart::Identifier(Ident::new("")));
11581                } else {
11582                    let ident = self.parse_identifier()?;
11583                    let part = if self
11584                        .dialect
11585                        .is_identifier_generating_function_name(&ident, &parts)
11586                    {
11587                        self.expect_token(&Token::LParen)?;
11588                        let args: Vec<FunctionArg> =
11589                            self.parse_comma_separated0(Self::parse_function_args, Token::RParen)?;
11590                        self.expect_token(&Token::RParen)?;
11591                        ObjectNamePart::Function(ObjectNamePartFunction { name: ident, args })
11592                    } else {
11593                        ObjectNamePart::Identifier(ident)
11594                    };
11595                    parts.push(part);
11596                }
11597
11598                if !self.consume_token(&Token::Period) {
11599                    break;
11600                }
11601            }
11602        }
11603
11604        // BigQuery accepts any number of quoted identifiers of a table name.
11605        // https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#quoted_identifiers
11606        if dialect_of!(self is BigQueryDialect)
11607            && parts.iter().any(|part| {
11608                part.as_ident()
11609                    .is_some_and(|ident| ident.value.contains('.'))
11610            })
11611        {
11612            parts = parts
11613                .into_iter()
11614                .flat_map(|part| match part.as_ident() {
11615                    Some(ident) => ident
11616                        .value
11617                        .split('.')
11618                        .map(|value| {
11619                            ObjectNamePart::Identifier(Ident {
11620                                value: value.into(),
11621                                quote_style: ident.quote_style,
11622                                span: ident.span,
11623                            })
11624                        })
11625                        .collect::<Vec<_>>(),
11626                    None => vec![part],
11627                })
11628                .collect()
11629        }
11630
11631        Ok(ObjectName(parts))
11632    }
11633
11634    /// Parse identifiers
11635    pub fn parse_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
11636        let mut idents = vec![];
11637        loop {
11638            match &self.peek_token_ref().token {
11639                Token::Word(w) => {
11640                    idents.push(w.clone().into_ident(self.peek_token_ref().span));
11641                }
11642                Token::EOF | Token::Eq => break,
11643                _ => {}
11644            }
11645            self.advance_token();
11646        }
11647        Ok(idents)
11648    }
11649
11650    /// Parse identifiers of form ident1[.identN]*
11651    ///
11652    /// Similar in functionality to [parse_identifiers], the difference being that this function
11653    /// is much stricter about parsing a valid multipart identifier: it does not allow
11654    /// extraneous tokens to be parsed, and fails otherwise.
11655    ///
11656    /// For example:
11657    ///
11658    /// ```rust
11659    /// use sqlparser::ast::Ident;
11660    /// use sqlparser::dialect::GenericDialect;
11661    /// use sqlparser::parser::Parser;
11662    ///
11663    /// let dialect = GenericDialect {};
11664    /// let expected = vec![Ident::new("one"), Ident::new("two")];
11665    ///
11666    /// // expected usage
11667    /// let sql = "one.two";
11668    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
11669    /// let actual = parser.parse_multipart_identifier().unwrap();
11670    /// assert_eq!(&actual, &expected);
11671    ///
11672    /// // parse_identifiers is more loose on what it allows, parsing successfully
11673    /// let sql = "one + two";
11674    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
11675    /// let actual = parser.parse_identifiers().unwrap();
11676    /// assert_eq!(&actual, &expected);
11677    ///
11678    /// // expected to strictly fail due to + separator
11679    /// let sql = "one + two";
11680    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
11681    /// let actual = parser.parse_multipart_identifier().unwrap_err();
11682    /// assert_eq!(
11683    ///     actual.to_string(),
11684    ///     "sql parser error: Unexpected token in identifier: +"
11685    /// );
11686    /// ```
11687    ///
11688    /// [parse_identifiers]: Parser::parse_identifiers
11689    pub fn parse_multipart_identifier(&mut self) -> Result<Vec<Ident>, ParserError> {
11690        let mut idents = vec![];
11691
11692        // expecting at least one word for identifier
11693        let next_token = self.next_token();
11694        match next_token.token {
11695            Token::Word(w) => idents.push(w.into_ident(next_token.span)),
11696            Token::EOF => {
11697                return Err(ParserError::ParserError(
11698                    "Empty input when parsing identifier".to_string(),
11699                ))?
11700            }
11701            token => {
11702                return Err(ParserError::ParserError(format!(
11703                    "Unexpected token in identifier: {token}"
11704                )))?
11705            }
11706        };
11707
11708        // parse optional next parts if exist
11709        loop {
11710            match self.next_token().token {
11711                // ensure that optional period is succeeded by another identifier
11712                Token::Period => {
11713                    let next_token = self.next_token();
11714                    match next_token.token {
11715                        Token::Word(w) => idents.push(w.into_ident(next_token.span)),
11716                        Token::EOF => {
11717                            return Err(ParserError::ParserError(
11718                                "Trailing period in identifier".to_string(),
11719                            ))?
11720                        }
11721                        token => {
11722                            return Err(ParserError::ParserError(format!(
11723                                "Unexpected token following period in identifier: {token}"
11724                            )))?
11725                        }
11726                    }
11727                }
11728                Token::EOF => break,
11729                token => {
11730                    return Err(ParserError::ParserError(format!(
11731                        "Unexpected token in identifier: {token}"
11732                    )))?
11733                }
11734            }
11735        }
11736
11737        Ok(idents)
11738    }
11739
11740    /// Parse a simple one-word identifier (possibly quoted, possibly a keyword)
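    ///
    /// A minimal sketch (illustrative input, using the `GenericDialect`):
    ///
    /// ```rust
    /// use sqlparser::ast::Ident;
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("foo").unwrap();
    /// assert_eq!(parser.parse_identifier().unwrap(), Ident::new("foo"));
    /// ```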
11741    pub fn parse_identifier(&mut self) -> Result<Ident, ParserError> {
11742        let next_token = self.next_token();
11743        match next_token.token {
11744            Token::Word(w) => Ok(w.into_ident(next_token.span)),
11745            Token::SingleQuotedString(s) => Ok(Ident::with_quote('\'', s)),
11746            Token::DoubleQuotedString(s) => Ok(Ident::with_quote('\"', s)),
11747            _ => self.expected("identifier", next_token),
11748        }
11749    }
11750
11751    /// On BigQuery, hyphens are permitted in unquoted identifiers inside a FROM or
11752    /// TABLE clause.
11753    ///
11754    /// The first segment must be an ordinary unquoted identifier, e.g. it must not start
11755    /// with a digit. Each subsequent segment must be either a valid identifier or an
11756    /// integer, e.g. `foo-123` is allowed, but `foo-123a` is not.
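    ///
    /// For example, BigQuery accepts a hyphenated project id in a table reference
    /// (names are illustrative):
    ///
    /// ```sql
    /// SELECT * FROM my-project.my_dataset.my_table
    /// ```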
11757    ///
11758    /// [BigQuery-lexical](https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical)
11759    ///
11760    /// Returns a tuple of the identifier and a boolean indicating whether it ends with a period.
11761    fn parse_unquoted_hyphenated_identifier(&mut self) -> Result<(Ident, bool), ParserError> {
11762        match self.peek_token().token {
11763            Token::Word(w) => {
11764                let quote_style_is_none = w.quote_style.is_none();
11765                let mut requires_whitespace = false;
11766                let mut ident = w.into_ident(self.next_token().span);
11767                if quote_style_is_none {
11768                    while matches!(self.peek_token_no_skip().token, Token::Minus) {
11769                        self.next_token();
11770                        ident.value.push('-');
11771
11772                        let token = self
11773                            .next_token_no_skip()
11774                            .cloned()
11775                            .unwrap_or(TokenWithSpan::wrap(Token::EOF));
11776                        requires_whitespace = match token.token {
11777                            Token::Word(next_word) if next_word.quote_style.is_none() => {
11778                                ident.value.push_str(&next_word.value);
11779                                false
11780                            }
11781                            Token::Number(s, false) => {
11782                                // A number token can represent a decimal value ending with a period, e.g., `Number('123.')`.
11783                                // However, for an [ObjectName], it is part of a hyphenated identifier, e.g., `foo-123.bar`.
11784                                //
11785                                // If a number token is followed by a period, it is part of an [ObjectName].
11786                                // Return the identifier with `true` if the number token is followed by a period, indicating that
11787                                // parsing should continue for the next part of the hyphenated identifier.
11788                                if s.ends_with('.') {
11789                                    let Some(s) = s.split('.').next().filter(|s| {
11790                                        !s.is_empty() && s.chars().all(|c| c.is_ascii_digit())
11791                                    }) else {
11792                                        return self.expected(
11793                                            "continuation of hyphenated identifier",
11794                                            TokenWithSpan::new(Token::Number(s, false), token.span),
11795                                        );
11796                                    };
11797                                    ident.value.push_str(s);
11798                                    return Ok((ident, true));
11799                                } else {
11800                                    ident.value.push_str(&s);
11801                                }
11802                                // If the next token is a period, it is part of an ObjectName and we don't expect whitespace
11803                                // after the number.
11804                                !matches!(self.peek_token().token, Token::Period)
11805                            }
11806                            _ => {
11807                                return self
11808                                    .expected("continuation of hyphenated identifier", token);
11809                            }
11810                        }
11811                    }
11812
11813                    // If the last segment was a number, we must check that it's followed by whitespace,
11814                    // otherwise foo-123a will be parsed as `foo-123` with the alias `a`.
11815                    if requires_whitespace {
11816                        let token = self.next_token();
11817                        if !matches!(token.token, Token::EOF | Token::Whitespace(_)) {
11818                            return self
11819                                .expected("whitespace following hyphenated identifier", token);
11820                        }
11821                    }
11822                }
11823                Ok((ident, false))
11824            }
11825            _ => Ok((self.parse_identifier()?, false)),
11826        }
11827    }
11828
11829    /// Parses a parenthesized, comma-separated list of column definitions within a view.
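    ///
    /// For example, the `(id, name)` list in the following (illustrative) statement:
    ///
    /// ```sql
    /// CREATE VIEW v (id, name) AS SELECT id, name FROM t
    /// ```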
11830    fn parse_view_columns(&mut self) -> Result<Vec<ViewColumnDef>, ParserError> {
11831        if self.consume_token(&Token::LParen) {
11832            if self.peek_token().token == Token::RParen {
11833                self.next_token();
11834                Ok(vec![])
11835            } else {
11836                let cols = self.parse_comma_separated_with_trailing_commas(
11837                    Parser::parse_view_column,
11838                    self.dialect.supports_column_definition_trailing_commas(),
11839                    Self::is_reserved_for_column_alias,
11840                )?;
11841                self.expect_token(&Token::RParen)?;
11842                Ok(cols)
11843            }
11844        } else {
11845            Ok(vec![])
11846        }
11847    }
11848
11849    /// Parses a column definition within a view.
11850    fn parse_view_column(&mut self) -> Result<ViewColumnDef, ParserError> {
11851        let name = self.parse_identifier()?;
11852        let options = self.parse_view_column_options()?;
11853        let data_type = if dialect_of!(self is ClickHouseDialect) {
11854            Some(self.parse_data_type()?)
11855        } else {
11856            None
11857        };
11858        Ok(ViewColumnDef {
11859            name,
11860            data_type,
11861            options,
11862        })
11863    }
11864
11865    fn parse_view_column_options(&mut self) -> Result<Option<ColumnOptions>, ParserError> {
11866        let mut options = Vec::new();
11867        loop {
11868            let option = self.parse_optional_column_option()?;
11869            if let Some(option) = option {
11870                options.push(option);
11871            } else {
11872                break;
11873            }
11874        }
11875        if options.is_empty() {
11876            Ok(None)
11877        } else if self.dialect.supports_space_separated_column_options() {
11878            Ok(Some(ColumnOptions::SpaceSeparated(options)))
11879        } else {
11880            Ok(Some(ColumnOptions::CommaSeparated(options)))
11881        }
11882    }
11883
11884    /// Parses a parenthesized comma-separated list of unqualified, possibly quoted identifiers.
11885    /// For example: `(col1, "col 2", ...)`
11886    pub fn parse_parenthesized_column_list(
11887        &mut self,
11888        optional: IsOptional,
11889        allow_empty: bool,
11890    ) -> Result<Vec<Ident>, ParserError> {
11891        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| p.parse_identifier())
11892    }
11893
11894    pub fn parse_parenthesized_compound_identifier_list(
11895        &mut self,
11896        optional: IsOptional,
11897        allow_empty: bool,
11898    ) -> Result<Vec<Expr>, ParserError> {
11899        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
11900            Ok(Expr::CompoundIdentifier(
11901                p.parse_period_separated(|p| p.parse_identifier())?,
11902            ))
11903        })
11904    }
11905
11906    /// Parses a parenthesized comma-separated list of index columns, which can be arbitrary
11907    /// expressions with ordering information (and an opclass in some dialects).
11908    fn parse_parenthesized_index_column_list(&mut self) -> Result<Vec<IndexColumn>, ParserError> {
11909        self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
11910            p.parse_create_index_expr()
11911        })
11912    }
11913
11914    /// Parses a parenthesized comma-separated list of qualified, possibly quoted identifiers.
11915    /// For example: `(db1.sc1.tbl1.col1, db1.sc1.tbl1."col 2", ...)`
11916    pub fn parse_parenthesized_qualified_column_list(
11917        &mut self,
11918        optional: IsOptional,
11919        allow_empty: bool,
11920    ) -> Result<Vec<ObjectName>, ParserError> {
11921        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
11922            p.parse_object_name(true)
11923        })
11924    }
11925
11926    /// Parses a parenthesized comma-separated list of columns using
11927    /// the provided function to parse each element.
11928    fn parse_parenthesized_column_list_inner<F, T>(
11929        &mut self,
11930        optional: IsOptional,
11931        allow_empty: bool,
11932        mut f: F,
11933    ) -> Result<Vec<T>, ParserError>
11934    where
11935        F: FnMut(&mut Parser) -> Result<T, ParserError>,
11936    {
11937        if self.consume_token(&Token::LParen) {
11938            if allow_empty && self.peek_token().token == Token::RParen {
11939                self.next_token();
11940                Ok(vec![])
11941            } else {
11942                let cols = self.parse_comma_separated(|p| f(p))?;
11943                self.expect_token(&Token::RParen)?;
11944                Ok(cols)
11945            }
11946        } else if optional == Optional {
11947            Ok(vec![])
11948        } else {
11949            self.expected("a list of columns in parentheses", self.peek_token())
11950        }
11951    }
11952
11953    /// Parses a parenthesized comma-separated list of table alias column definitions.
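    ///
    /// For example, the `(a INT, b)` part of an alias such as `AS t (a INT, b)`;
    /// the data type of each column is optional.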
11954    fn parse_table_alias_column_defs(&mut self) -> Result<Vec<TableAliasColumnDef>, ParserError> {
11955        if self.consume_token(&Token::LParen) {
11956            let cols = self.parse_comma_separated(|p| {
11957                let name = p.parse_identifier()?;
11958                let data_type = p.maybe_parse(|p| p.parse_data_type())?;
11959                Ok(TableAliasColumnDef { name, data_type })
11960            })?;
11961            self.expect_token(&Token::RParen)?;
11962            Ok(cols)
11963        } else {
11964            Ok(vec![])
11965        }
11966    }
11967
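    /// Parses a required parenthesized unsigned integer precision, e.g. `(6)`.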
11968    pub fn parse_precision(&mut self) -> Result<u64, ParserError> {
11969        self.expect_token(&Token::LParen)?;
11970        let n = self.parse_literal_uint()?;
11971        self.expect_token(&Token::RParen)?;
11972        Ok(n)
11973    }
11974
11975    pub fn parse_optional_precision(&mut self) -> Result<Option<u64>, ParserError> {
11976        if self.consume_token(&Token::LParen) {
11977            let n = self.parse_literal_uint()?;
11978            self.expect_token(&Token::RParen)?;
11979            Ok(Some(n))
11980        } else {
11981            Ok(None)
11982        }
11983    }
11984
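    /// Parses the optional field qualifier of an `INTERVAL` data type, if present.
    ///
    /// For example (illustrative):
    ///
    /// ```sql
    /// INTERVAL YEAR TO MONTH
    /// INTERVAL DAY TO SECOND
    /// INTERVAL MINUTE
    /// ```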
11985    fn maybe_parse_optional_interval_fields(
11986        &mut self,
11987    ) -> Result<Option<IntervalFields>, ParserError> {
11988        match self.parse_one_of_keywords(&[
11989            // Can be followed by `TO` option
11990            Keyword::YEAR,
11991            Keyword::DAY,
11992            Keyword::HOUR,
11993            Keyword::MINUTE,
11994            // No `TO` option
11995            Keyword::MONTH,
11996            Keyword::SECOND,
11997        ]) {
11998            Some(Keyword::YEAR) => {
11999                if self.peek_keyword(Keyword::TO) {
12000                    self.expect_keyword(Keyword::TO)?;
12001                    self.expect_keyword(Keyword::MONTH)?;
12002                    Ok(Some(IntervalFields::YearToMonth))
12003                } else {
12004                    Ok(Some(IntervalFields::Year))
12005                }
12006            }
12007            Some(Keyword::DAY) => {
12008                if self.peek_keyword(Keyword::TO) {
12009                    self.expect_keyword(Keyword::TO)?;
12010                    match self.expect_one_of_keywords(&[
12011                        Keyword::HOUR,
12012                        Keyword::MINUTE,
12013                        Keyword::SECOND,
12014                    ])? {
12015                        Keyword::HOUR => Ok(Some(IntervalFields::DayToHour)),
12016                        Keyword::MINUTE => Ok(Some(IntervalFields::DayToMinute)),
12017                        Keyword::SECOND => Ok(Some(IntervalFields::DayToSecond)),
12018                        _ => {
12019                            self.prev_token();
12020                            self.expected("HOUR, MINUTE, or SECOND", self.peek_token())
12021                        }
12022                    }
12023                } else {
12024                    Ok(Some(IntervalFields::Day))
12025                }
12026            }
12027            Some(Keyword::HOUR) => {
12028                if self.peek_keyword(Keyword::TO) {
12029                    self.expect_keyword(Keyword::TO)?;
12030                    match self.expect_one_of_keywords(&[Keyword::MINUTE, Keyword::SECOND])? {
12031                        Keyword::MINUTE => Ok(Some(IntervalFields::HourToMinute)),
12032                        Keyword::SECOND => Ok(Some(IntervalFields::HourToSecond)),
12033                        _ => {
12034                            self.prev_token();
12035                            self.expected("MINUTE or SECOND", self.peek_token())
12036                        }
12037                    }
12038                } else {
12039                    Ok(Some(IntervalFields::Hour))
12040                }
12041            }
12042            Some(Keyword::MINUTE) => {
12043                if self.peek_keyword(Keyword::TO) {
12044                    self.expect_keyword(Keyword::TO)?;
12045                    self.expect_keyword(Keyword::SECOND)?;
12046                    Ok(Some(IntervalFields::MinuteToSecond))
12047                } else {
12048                    Ok(Some(IntervalFields::Minute))
12049                }
12050            }
12051            Some(Keyword::MONTH) => Ok(Some(IntervalFields::Month)),
12052            Some(Keyword::SECOND) => Ok(Some(IntervalFields::Second)),
12053            Some(_) => {
12054                self.prev_token();
12055                self.expected(
12056                    "YEAR, MONTH, DAY, HOUR, MINUTE, or SECOND",
12057                    self.peek_token(),
12058                )
12059            }
12060            None => Ok(None),
12061        }
12062    }
12063
12064    /// Parse datetime64 [1]
12065    /// Syntax
12066    /// ```sql
12067    /// DateTime64(precision[, timezone])
12068    /// ```
12069    ///
12070    /// [1]: https://clickhouse.com/docs/en/sql-reference/data-types/datetime64
12071    pub fn parse_datetime_64(&mut self) -> Result<(u64, Option<String>), ParserError> {
12072        self.expect_keyword_is(Keyword::DATETIME64)?;
12073        self.expect_token(&Token::LParen)?;
12074        let precision = self.parse_literal_uint()?;
12075        let time_zone = if self.consume_token(&Token::Comma) {
12076            Some(self.parse_literal_string()?)
12077        } else {
12078            None
12079        };
12080        self.expect_token(&Token::RParen)?;
12081        Ok((precision, time_zone))
12082    }
12083
12084    pub fn parse_optional_character_length(
12085        &mut self,
12086    ) -> Result<Option<CharacterLength>, ParserError> {
12087        if self.consume_token(&Token::LParen) {
12088            let character_length = self.parse_character_length()?;
12089            self.expect_token(&Token::RParen)?;
12090            Ok(Some(character_length))
12091        } else {
12092            Ok(None)
12093        }
12094    }
12095
12096    pub fn parse_optional_binary_length(&mut self) -> Result<Option<BinaryLength>, ParserError> {
12097        if self.consume_token(&Token::LParen) {
12098            let binary_length = self.parse_binary_length()?;
12099            self.expect_token(&Token::RParen)?;
12100            Ok(Some(binary_length))
12101        } else {
12102            Ok(None)
12103        }
12104    }
12105
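    /// Parses a character length specifier: `MAX`, or an unsigned integer optionally
    /// followed by `CHARACTERS` or `OCTETS`, e.g. `10 CHARACTERS`.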
12106    pub fn parse_character_length(&mut self) -> Result<CharacterLength, ParserError> {
12107        if self.parse_keyword(Keyword::MAX) {
12108            return Ok(CharacterLength::Max);
12109        }
12110        let length = self.parse_literal_uint()?;
12111        let unit = if self.parse_keyword(Keyword::CHARACTERS) {
12112            Some(CharLengthUnits::Characters)
12113        } else if self.parse_keyword(Keyword::OCTETS) {
12114            Some(CharLengthUnits::Octets)
12115        } else {
12116            None
12117        };
12118        Ok(CharacterLength::IntegerLength { length, unit })
12119    }
12120
12121    pub fn parse_binary_length(&mut self) -> Result<BinaryLength, ParserError> {
12122        if self.parse_keyword(Keyword::MAX) {
12123            return Ok(BinaryLength::Max);
12124        }
12125        let length = self.parse_literal_uint()?;
12126        Ok(BinaryLength::IntegerLength { length })
12127    }
12128
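    /// Parses an optional parenthesized `(precision [, scale])`, e.g. `(10, 2)`;
    /// returns `(None, None)` if no parenthesized list follows.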
12129    pub fn parse_optional_precision_scale(
12130        &mut self,
12131    ) -> Result<(Option<u64>, Option<u64>), ParserError> {
12132        if self.consume_token(&Token::LParen) {
12133            let n = self.parse_literal_uint()?;
12134            let scale = if self.consume_token(&Token::Comma) {
12135                Some(self.parse_literal_uint()?)
12136            } else {
12137                None
12138            };
12139            self.expect_token(&Token::RParen)?;
12140            Ok((Some(n), scale))
12141        } else {
12142            Ok((None, None))
12143        }
12144    }
12145
12146    pub fn parse_exact_number_optional_precision_scale(
12147        &mut self,
12148    ) -> Result<ExactNumberInfo, ParserError> {
12149        if self.consume_token(&Token::LParen) {
12150            let precision = self.parse_literal_uint()?;
12151            let scale = if self.consume_token(&Token::Comma) {
12152                Some(self.parse_signed_integer()?)
12153            } else {
12154                None
12155            };
12156
12157            self.expect_token(&Token::RParen)?;
12158
12159            match scale {
12160                None => Ok(ExactNumberInfo::Precision(precision)),
12161                Some(scale) => Ok(ExactNumberInfo::PrecisionAndScale(precision, scale)),
12162            }
12163        } else {
12164            Ok(ExactNumberInfo::None)
12165        }
12166    }
12167
12168    /// Parse an optionally signed integer literal.
12169    fn parse_signed_integer(&mut self) -> Result<i64, ParserError> {
12170        let is_negative = self.consume_token(&Token::Minus);
12171
12172        if !is_negative {
12173            let _ = self.consume_token(&Token::Plus);
12174        }
12175
12176        let current_token = self.peek_token_ref();
12177        match &current_token.token {
12178            Token::Number(s, _) => {
12179                let s = s.clone();
12180                let span_start = current_token.span.start;
12181                self.advance_token();
12182                let value = Self::parse::<i64>(s, span_start)?;
12183                Ok(if is_negative { -value } else { value })
12184            }
12185            _ => self.expected_ref("number", current_token),
12186        }
12187    }
12188
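    /// Parses an optional parenthesized, comma-separated list of type modifiers.
    /// Each modifier may be a word, a number, or a single-quoted string,
    /// e.g. `(16, 'utf8')` (illustrative values).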
12189    pub fn parse_optional_type_modifiers(&mut self) -> Result<Option<Vec<String>>, ParserError> {
12190        if self.consume_token(&Token::LParen) {
12191            let mut modifiers = Vec::new();
12192            loop {
12193                let next_token = self.next_token();
12194                match next_token.token {
12195                    Token::Word(w) => modifiers.push(w.to_string()),
12196                    Token::Number(n, _) => modifiers.push(n),
12197                    Token::SingleQuotedString(s) => modifiers.push(s),
12198
12199                    Token::Comma => {
12200                        continue;
12201                    }
12202                    Token::RParen => {
12203                        break;
12204                    }
12205                    _ => self.expected("type modifiers", next_token)?,
12206                }
12207            }
12208
12209            Ok(Some(modifiers))
12210        } else {
12211            Ok(None)
12212        }
12213    }
12214
12215    /// Parse a parenthesized sub data type
12216    fn parse_sub_type<F>(&mut self, parent_type: F) -> Result<DataType, ParserError>
12217    where
12218        F: FnOnce(Box<DataType>) -> DataType,
12219    {
12220        self.expect_token(&Token::LParen)?;
12221        let inside_type = self.parse_data_type()?;
12222        self.expect_token(&Token::RParen)?;
12223        Ok(parent_type(inside_type.into()))
12224    }
12225
12226    /// Parse a DELETE statement, returning a `Box`ed SetExpr
12227    ///
12228    /// This is used to reduce the size of the stack frames in debug builds
12229    fn parse_delete_setexpr_boxed(
12230        &mut self,
12231        delete_token: TokenWithSpan,
12232    ) -> Result<Box<SetExpr>, ParserError> {
12233        Ok(Box::new(SetExpr::Delete(self.parse_delete(delete_token)?)))
12234    }
12235
12236    /// Parse a MERGE statement, returning a `Box`ed SetExpr
12237    ///
12238    /// This is used to reduce the size of the stack frames in debug builds
12239    fn parse_merge_setexpr_boxed(
12240        &mut self,
12241        merge_token: TokenWithSpan,
12242    ) -> Result<Box<SetExpr>, ParserError> {
12243        Ok(Box::new(SetExpr::Merge(self.parse_merge(merge_token)?)))
12244    }
12245
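    /// Parses the remainder of a `DELETE` statement; the `DELETE` keyword has already
    /// been consumed and its token is passed in as `delete_token`.
    ///
    /// For example (table and column names are illustrative):
    ///
    /// ```sql
    /// DELETE FROM t USING u WHERE t.id = u.id RETURNING t.id
    /// ```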
12246    pub fn parse_delete(&mut self, delete_token: TokenWithSpan) -> Result<Statement, ParserError> {
12247        let (tables, with_from_keyword) = if !self.parse_keyword(Keyword::FROM) {
12248            // `FROM` keyword is optional in BigQuery SQL.
12249            // https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax#delete_statement
12250            if dialect_of!(self is BigQueryDialect | OracleDialect | GenericDialect) {
12251                (vec![], false)
12252            } else {
12253                let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
12254                self.expect_keyword_is(Keyword::FROM)?;
12255                (tables, true)
12256            }
12257        } else {
12258            (vec![], true)
12259        };
12260
12261        let from = self.parse_comma_separated(Parser::parse_table_and_joins)?;
12262        let using = if self.parse_keyword(Keyword::USING) {
12263            Some(self.parse_comma_separated(Parser::parse_table_and_joins)?)
12264        } else {
12265            None
12266        };
12267        let selection = if self.parse_keyword(Keyword::WHERE) {
12268            Some(self.parse_expr()?)
12269        } else {
12270            None
12271        };
12272        let returning = if self.parse_keyword(Keyword::RETURNING) {
12273            Some(self.parse_comma_separated(Parser::parse_select_item)?)
12274        } else {
12275            None
12276        };
12277        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
12278            self.parse_comma_separated(Parser::parse_order_by_expr)?
12279        } else {
12280            vec![]
12281        };
12282        let limit = if self.parse_keyword(Keyword::LIMIT) {
12283            self.parse_limit()?
12284        } else {
12285            None
12286        };
12287
12288        Ok(Statement::Delete(Delete {
12289            delete_token: delete_token.into(),
12290            tables,
12291            from: if with_from_keyword {
12292                FromTable::WithFromKeyword(from)
12293            } else {
12294                FromTable::WithoutKeyword(from)
12295            },
12296            using,
12297            selection,
12298            returning,
12299            order_by,
12300            limit,
12301        }))
12302    }
12303
12304    // KILL [CONNECTION | QUERY | MUTATION] processlist_id
12305    pub fn parse_kill(&mut self) -> Result<Statement, ParserError> {
12306        let modifier_keyword =
12307            self.parse_one_of_keywords(&[Keyword::CONNECTION, Keyword::QUERY, Keyword::MUTATION]);
12308
12309        let id = self.parse_literal_uint()?;
12310
12311        let modifier = match modifier_keyword {
12312            Some(Keyword::CONNECTION) => Some(KillType::Connection),
12313            Some(Keyword::QUERY) => Some(KillType::Query),
12314            Some(Keyword::MUTATION) => {
12315                if dialect_of!(self is ClickHouseDialect | GenericDialect) {
12316                    Some(KillType::Mutation)
12317                } else {
12318                    self.expected(
12319                        "Unsupported type for KILL, allowed: CONNECTION | QUERY",
12320                        self.peek_token(),
12321                    )?
12322                }
12323            }
12324            _ => None,
12325        };
12326
12327        Ok(Statement::Kill { modifier, id })
12328    }
12329
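    /// Parses the remainder of an `EXPLAIN`/`DESC`/`DESCRIBE` statement; the initial
    /// keyword (represented by `describe_alias`) has already been consumed.
    ///
    /// For example (illustrative):
    ///
    /// ```sql
    /// EXPLAIN ANALYZE SELECT * FROM t
    /// ```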
12330    pub fn parse_explain(
12331        &mut self,
12332        describe_alias: DescribeAlias,
12333    ) -> Result<Statement, ParserError> {
12334        let mut analyze = false;
12335        let mut verbose = false;
12336        let mut query_plan = false;
12337        let mut estimate = false;
12338        let mut format = None;
12339        let mut options = None;
12340
12341        // Note: DuckDB is compatible with PostgreSQL syntax for this statement,
12342        // although not all features may be implemented.
12343        if describe_alias == DescribeAlias::Explain
12344            && self.dialect.supports_explain_with_utility_options()
12345            && self.peek_token().token == Token::LParen
12346        {
12347            options = Some(self.parse_utility_options()?)
12348        } else if self.parse_keywords(&[Keyword::QUERY, Keyword::PLAN]) {
12349            query_plan = true;
12350        } else if self.parse_keyword(Keyword::ESTIMATE) {
12351            estimate = true;
12352        } else {
12353            analyze = self.parse_keyword(Keyword::ANALYZE);
12354            verbose = self.parse_keyword(Keyword::VERBOSE);
12355            if self.parse_keyword(Keyword::FORMAT) {
12356                format = Some(self.parse_analyze_format_kind()?);
12357            }
12358        }
12359
12360        match self.maybe_parse(|parser| parser.parse_statement())? {
12361            Some(Statement::Explain { .. }) | Some(Statement::ExplainTable { .. }) => Err(
12362                ParserError::ParserError("Explain must be root of the plan".to_string()),
12363            ),
12364            Some(statement) => Ok(Statement::Explain {
12365                describe_alias,
12366                analyze,
12367                verbose,
12368                query_plan,
12369                estimate,
12370                statement: Box::new(statement),
12371                format,
12372                options,
12373            }),
12374            _ => {
12375                let hive_format =
12376                    match self.parse_one_of_keywords(&[Keyword::EXTENDED, Keyword::FORMATTED]) {
12377                        Some(Keyword::EXTENDED) => Some(HiveDescribeFormat::Extended),
12378                        Some(Keyword::FORMATTED) => Some(HiveDescribeFormat::Formatted),
12379                        _ => None,
12380                    };
12381
12382                let has_table_keyword = if self.dialect.describe_requires_table_keyword() {
12383                    // only allow the TABLE keyword in DESC|DESCRIBE statements
12384                    self.parse_keyword(Keyword::TABLE)
12385                } else {
12386                    false
12387                };
12388
12389                let table_name = self.parse_object_name(false)?;
12390                Ok(Statement::ExplainTable {
12391                    describe_alias,
12392                    hive_format,
12393                    has_table_keyword,
12394                    table_name,
12395                })
12396            }
12397        }
12398    }
12399
12400    /// Parse a query expression, i.e. a `SELECT` statement optionally
12401    /// preceded by some `WITH` CTE declarations and optionally followed
12402    /// by `ORDER BY`. Unlike some other parse_... methods, this one does not
12403    /// expect the initial keyword to have been consumed already.
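    ///
    /// For example (illustrative):
    ///
    /// ```sql
    /// WITH cte AS (SELECT 1 AS x) SELECT x FROM cte ORDER BY x LIMIT 10
    /// ```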
12404    pub fn parse_query(&mut self) -> Result<Box<Query>, ParserError> {
12405        let _guard = self.recursion_counter.try_decrease()?;
12406        let with = if self.parse_keyword(Keyword::WITH) {
12407            let with_token = self.get_current_token();
12408            Some(With {
12409                with_token: with_token.clone().into(),
12410                recursive: self.parse_keyword(Keyword::RECURSIVE),
12411                cte_tables: self.parse_comma_separated(Parser::parse_cte)?,
12412            })
12413        } else {
12414            None
12415        };
12416        if self.parse_keyword(Keyword::INSERT) {
12417            Ok(Query {
12418                with,
12419                body: self.parse_insert_setexpr_boxed(self.get_current_token().clone())?,
12420                order_by: None,
12421                limit_clause: None,
12422                fetch: None,
12423                locks: vec![],
12424                for_clause: None,
12425                settings: None,
12426                format_clause: None,
12427                pipe_operators: vec![],
12428            }
12429            .into())
12430        } else if self.parse_keyword(Keyword::UPDATE) {
12431            Ok(Query {
12432                with,
12433                body: self.parse_update_setexpr_boxed(self.get_current_token().clone())?,
12434                order_by: None,
12435                limit_clause: None,
12436                fetch: None,
12437                locks: vec![],
12438                for_clause: None,
12439                settings: None,
12440                format_clause: None,
12441                pipe_operators: vec![],
12442            }
12443            .into())
12444        } else if self.parse_keyword(Keyword::DELETE) {
12445            Ok(Query {
12446                with,
12447                body: self.parse_delete_setexpr_boxed(self.get_current_token().clone())?,
12448                limit_clause: None,
12449                order_by: None,
12450                fetch: None,
12451                locks: vec![],
12452                for_clause: None,
12453                settings: None,
12454                format_clause: None,
12455                pipe_operators: vec![],
12456            }
12457            .into())
12458        } else if self.parse_keyword(Keyword::MERGE) {
12459            Ok(Query {
12460                with,
12461                body: self.parse_merge_setexpr_boxed(self.get_current_token().clone())?,
12462                limit_clause: None,
12463                order_by: None,
12464                fetch: None,
12465                locks: vec![],
12466                for_clause: None,
12467                settings: None,
12468                format_clause: None,
12469                pipe_operators: vec![],
12470            }
12471            .into())
12472        } else {
12473            let body = self.parse_query_body(self.dialect.prec_unknown())?;
12474
12475            let order_by = self.parse_optional_order_by()?;
12476
12477            let limit_clause = self.parse_optional_limit_clause()?;
12478
12479            let settings = self.parse_settings()?;
12480
12481            let fetch = if self.parse_keyword(Keyword::FETCH) {
12482                Some(self.parse_fetch()?)
12483            } else {
12484                None
12485            };
12486
12487            let mut for_clause = None;
12488            let mut locks = Vec::new();
12489            while self.parse_keyword(Keyword::FOR) {
12490                if let Some(parsed_for_clause) = self.parse_for_clause()? {
12491                    for_clause = Some(parsed_for_clause);
12492                    break;
12493                } else {
12494                    locks.push(self.parse_lock()?);
12495                }
12496            }
12497            let format_clause = if dialect_of!(self is ClickHouseDialect | GenericDialect)
12498                && self.parse_keyword(Keyword::FORMAT)
12499            {
12500                if self.parse_keyword(Keyword::NULL) {
12501                    Some(FormatClause::Null)
12502                } else {
12503                    let ident = self.parse_identifier()?;
12504                    Some(FormatClause::Identifier(ident))
12505                }
12506            } else {
12507                None
12508            };
12509
12510            let pipe_operators = if self.dialect.supports_pipe_operator() {
12511                self.parse_pipe_operators()?
12512            } else {
12513                Vec::new()
12514            };
12515
12516            Ok(Query {
12517                with,
12518                body,
12519                order_by,
12520                limit_clause,
12521                fetch,
12522                locks,
12523                for_clause,
12524                settings,
12525                format_clause,
12526                pipe_operators,
12527            }
12528            .into())
12529        }
12530    }
12531
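    /// Parses a chain of `|>` pipe operators, as in BigQuery pipe syntax.
    ///
    /// For example (table and column names are illustrative):
    ///
    /// ```sql
    /// FROM orders
    /// |> WHERE amount > 10
    /// |> AGGREGATE COUNT(*) AS c GROUP BY customer_id
    /// ```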
12532    fn parse_pipe_operators(&mut self) -> Result<Vec<PipeOperator>, ParserError> {
12533        let mut pipe_operators = Vec::new();
12534
12535        while self.consume_token(&Token::VerticalBarRightAngleBracket) {
12536            let kw = self.expect_one_of_keywords(&[
12537                Keyword::SELECT,
12538                Keyword::EXTEND,
12539                Keyword::SET,
12540                Keyword::DROP,
12541                Keyword::AS,
12542                Keyword::WHERE,
12543                Keyword::LIMIT,
12544                Keyword::AGGREGATE,
12545                Keyword::ORDER,
12546                Keyword::TABLESAMPLE,
12547                Keyword::RENAME,
12548                Keyword::UNION,
12549                Keyword::INTERSECT,
12550                Keyword::EXCEPT,
12551                Keyword::CALL,
12552                Keyword::PIVOT,
12553                Keyword::UNPIVOT,
12554                Keyword::JOIN,
12555                Keyword::INNER,
12556                Keyword::LEFT,
12557                Keyword::RIGHT,
12558                Keyword::FULL,
12559                Keyword::CROSS,
12560            ])?;
12561            match kw {
12562                Keyword::SELECT => {
12563                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
12564                    pipe_operators.push(PipeOperator::Select { exprs })
12565                }
12566                Keyword::EXTEND => {
12567                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
12568                    pipe_operators.push(PipeOperator::Extend { exprs })
12569                }
12570                Keyword::SET => {
12571                    let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
12572                    pipe_operators.push(PipeOperator::Set { assignments })
12573                }
12574                Keyword::DROP => {
12575                    let columns = self.parse_identifiers()?;
12576                    pipe_operators.push(PipeOperator::Drop { columns })
12577                }
12578                Keyword::AS => {
12579                    let alias = self.parse_identifier()?;
12580                    pipe_operators.push(PipeOperator::As { alias })
12581                }
12582                Keyword::WHERE => {
12583                    let expr = self.parse_expr()?;
12584                    pipe_operators.push(PipeOperator::Where { expr })
12585                }
12586                Keyword::LIMIT => {
12587                    let expr = self.parse_expr()?;
12588                    let offset = if self.parse_keyword(Keyword::OFFSET) {
12589                        Some(self.parse_expr()?)
12590                    } else {
12591                        None
12592                    };
12593                    pipe_operators.push(PipeOperator::Limit { expr, offset })
12594                }
12595                Keyword::AGGREGATE => {
12596                    let full_table_exprs = if self.peek_keyword(Keyword::GROUP) {
12597                        vec![]
12598                    } else {
12599                        self.parse_comma_separated(|parser| {
12600                            parser.parse_expr_with_alias_and_order_by()
12601                        })?
12602                    };
12603
12604                    let group_by_expr = if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
12605                        self.parse_comma_separated(|parser| {
12606                            parser.parse_expr_with_alias_and_order_by()
12607                        })?
12608                    } else {
12609                        vec![]
12610                    };
12611
12612                    pipe_operators.push(PipeOperator::Aggregate {
12613                        full_table_exprs,
12614                        group_by_expr,
12615                    })
12616                }
12617                Keyword::ORDER => {
12618                    self.expect_one_of_keywords(&[Keyword::BY])?;
12619                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
12620                    pipe_operators.push(PipeOperator::OrderBy { exprs })
12621                }
12622                Keyword::TABLESAMPLE => {
12623                    let sample = self.parse_table_sample(TableSampleModifier::TableSample)?;
12624                    pipe_operators.push(PipeOperator::TableSample { sample });
12625                }
12626                Keyword::RENAME => {
12627                    let mappings =
12628                        self.parse_comma_separated(Parser::parse_identifier_with_optional_alias)?;
12629                    pipe_operators.push(PipeOperator::Rename { mappings });
12630                }
12631                Keyword::UNION => {
12632                    let set_quantifier = self.parse_set_quantifier(&Some(SetOperator::Union));
12633                    let queries = self.parse_pipe_operator_queries()?;
12634                    pipe_operators.push(PipeOperator::Union {
12635                        set_quantifier,
12636                        queries,
12637                    });
12638                }
12639                Keyword::INTERSECT => {
12640                    let set_quantifier =
12641                        self.parse_distinct_required_set_quantifier("INTERSECT")?;
12642                    let queries = self.parse_pipe_operator_queries()?;
12643                    pipe_operators.push(PipeOperator::Intersect {
12644                        set_quantifier,
12645                        queries,
12646                    });
12647                }
12648                Keyword::EXCEPT => {
12649                    let set_quantifier = self.parse_distinct_required_set_quantifier("EXCEPT")?;
12650                    let queries = self.parse_pipe_operator_queries()?;
12651                    pipe_operators.push(PipeOperator::Except {
12652                        set_quantifier,
12653                        queries,
12654                    });
12655                }
12656                Keyword::CALL => {
12657                    let function_name = self.parse_object_name(false)?;
12658                    let function_expr = self.parse_function(function_name)?;
12659                    if let Expr::Function(function) = function_expr {
12660                        let alias = self.parse_identifier_optional_alias()?;
12661                        pipe_operators.push(PipeOperator::Call { function, alias });
12662                    } else {
12663                        return Err(ParserError::ParserError(
12664                            "Expected function call after CALL".to_string(),
12665                        ));
12666                    }
12667                }
12668                Keyword::PIVOT => {
12669                    self.expect_token(&Token::LParen)?;
12670                    let aggregate_functions =
12671                        self.parse_comma_separated(Self::parse_aliased_function_call)?;
12672                    self.expect_keyword_is(Keyword::FOR)?;
12673                    let value_column = self.parse_period_separated(|p| p.parse_identifier())?;
12674                    self.expect_keyword_is(Keyword::IN)?;
12675
12676                    self.expect_token(&Token::LParen)?;
12677                    let value_source = if self.parse_keyword(Keyword::ANY) {
12678                        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
12679                            self.parse_comma_separated(Parser::parse_order_by_expr)?
12680                        } else {
12681                            vec![]
12682                        };
12683                        PivotValueSource::Any(order_by)
12684                    } else if self.peek_sub_query() {
12685                        PivotValueSource::Subquery(self.parse_query()?)
12686                    } else {
12687                        PivotValueSource::List(
12688                            self.parse_comma_separated(Self::parse_expr_with_alias)?,
12689                        )
12690                    };
12691                    self.expect_token(&Token::RParen)?;
12692                    self.expect_token(&Token::RParen)?;
12693
12694                    let alias = self.parse_identifier_optional_alias()?;
12695
12696                    pipe_operators.push(PipeOperator::Pivot {
12697                        aggregate_functions,
12698                        value_column,
12699                        value_source,
12700                        alias,
12701                    });
12702                }
12703                Keyword::UNPIVOT => {
12704                    self.expect_token(&Token::LParen)?;
12705                    let value_column = self.parse_identifier()?;
12706                    self.expect_keyword(Keyword::FOR)?;
12707                    let name_column = self.parse_identifier()?;
12708                    self.expect_keyword(Keyword::IN)?;
12709
12710                    self.expect_token(&Token::LParen)?;
12711                    let unpivot_columns = self.parse_comma_separated(Parser::parse_identifier)?;
12712                    self.expect_token(&Token::RParen)?;
12713
12714                    self.expect_token(&Token::RParen)?;
12715
12716                    let alias = self.parse_identifier_optional_alias()?;
12717
12718                    pipe_operators.push(PipeOperator::Unpivot {
12719                        value_column,
12720                        name_column,
12721                        unpivot_columns,
12722                        alias,
12723                    });
12724                }
12725                Keyword::JOIN
12726                | Keyword::INNER
12727                | Keyword::LEFT
12728                | Keyword::RIGHT
12729                | Keyword::FULL
12730                | Keyword::CROSS => {
12731                    self.prev_token();
12732                    let mut joins = self.parse_joins()?;
12733                    if joins.len() != 1 {
12734                        return Err(ParserError::ParserError(
12735                            "Join pipe operator must have a single join".to_string(),
12736                        ));
12737                    }
12738                    let join = joins.swap_remove(0);
12739                    pipe_operators.push(PipeOperator::Join(join))
12740                }
12741                unhandled => {
12742                    return Err(ParserError::ParserError(format!(
12743                    "`expect_one_of_keywords` further up allowed unhandled keyword: {unhandled:?}"
12744                )))
12745                }
12746            }
12747        }
12748        Ok(pipe_operators)
12749    }
12750
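    /// Parses an optional ClickHouse-style `SETTINGS key = value, ...` clause,
    /// e.g. `SETTINGS max_threads = 4` (the setting name is illustrative).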
12751    fn parse_settings(&mut self) -> Result<Option<Vec<Setting>>, ParserError> {
12752        let settings = if dialect_of!(self is ClickHouseDialect|GenericDialect)
12753            && self.parse_keyword(Keyword::SETTINGS)
12754        {
12755            let key_values = self.parse_comma_separated(|p| {
12756                let key = p.parse_identifier()?;
12757                p.expect_token(&Token::Eq)?;
12758                let value = p.parse_expr()?;
12759                Ok(Setting { key, value })
12760            })?;
12761            Some(key_values)
12762        } else {
12763            None
12764        };
12765        Ok(settings)
12766    }
12767
12768    /// Parse a mssql `FOR [XML | JSON | BROWSE]` clause
12769    pub fn parse_for_clause(&mut self) -> Result<Option<ForClause>, ParserError> {
12770        if self.parse_keyword(Keyword::XML) {
12771            Ok(Some(self.parse_for_xml()?))
12772        } else if self.parse_keyword(Keyword::JSON) {
12773            Ok(Some(self.parse_for_json()?))
12774        } else if self.parse_keyword(Keyword::BROWSE) {
12775            Ok(Some(ForClause::Browse))
12776        } else {
12777            Ok(None)
12778        }
12779    }
12780
12781    /// Parse a mssql `FOR XML` clause
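    ///
    /// For example (illustrative):
    ///
    /// ```sql
    /// SELECT id, name FROM t FOR XML PATH('row'), ELEMENTS, ROOT('rows')
    /// ```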
12782    pub fn parse_for_xml(&mut self) -> Result<ForClause, ParserError> {
12783        let for_xml = if self.parse_keyword(Keyword::RAW) {
12784            let mut element_name = None;
12785            if self.peek_token().token == Token::LParen {
12786                self.expect_token(&Token::LParen)?;
12787                element_name = Some(self.parse_literal_string()?);
12788                self.expect_token(&Token::RParen)?;
12789            }
12790            ForXml::Raw(element_name)
12791        } else if self.parse_keyword(Keyword::AUTO) {
12792            ForXml::Auto
12793        } else if self.parse_keyword(Keyword::EXPLICIT) {
12794            ForXml::Explicit
12795        } else if self.parse_keyword(Keyword::PATH) {
12796            let mut element_name = None;
12797            if self.peek_token().token == Token::LParen {
12798                self.expect_token(&Token::LParen)?;
12799                element_name = Some(self.parse_literal_string()?);
12800                self.expect_token(&Token::RParen)?;
12801            }
12802            ForXml::Path(element_name)
12803        } else {
12804            return Err(ParserError::ParserError(
12805                "Expected FOR XML [RAW | AUTO | EXPLICIT | PATH ]".to_string(),
12806            ));
12807        };
12808        let mut elements = false;
12809        let mut binary_base64 = false;
12810        let mut root = None;
12811        let mut r#type = false;
12812        while self.peek_token().token == Token::Comma {
12813            self.next_token();
12814            if self.parse_keyword(Keyword::ELEMENTS) {
12815                elements = true;
12816            } else if self.parse_keyword(Keyword::BINARY) {
12817                self.expect_keyword_is(Keyword::BASE64)?;
12818                binary_base64 = true;
12819            } else if self.parse_keyword(Keyword::ROOT) {
12820                self.expect_token(&Token::LParen)?;
12821                root = Some(self.parse_literal_string()?);
12822                self.expect_token(&Token::RParen)?;
12823            } else if self.parse_keyword(Keyword::TYPE) {
12824                r#type = true;
12825            }
12826        }
12827        Ok(ForClause::Xml {
12828            for_xml,
12829            elements,
12830            binary_base64,
12831            root,
12832            r#type,
12833        })
12834    }
12835
12836    /// Parse a mssql `FOR JSON` clause
12837    pub fn parse_for_json(&mut self) -> Result<ForClause, ParserError> {
12838        let for_json = if self.parse_keyword(Keyword::AUTO) {
12839            ForJson::Auto
12840        } else if self.parse_keyword(Keyword::PATH) {
12841            ForJson::Path
12842        } else {
12843            return Err(ParserError::ParserError(
12844                "Expected FOR JSON [AUTO | PATH ]".to_string(),
12845            ));
12846        };
12847        let mut root = None;
12848        let mut include_null_values = false;
12849        let mut without_array_wrapper = false;
12850        while self.peek_token().token == Token::Comma {
12851            self.next_token();
12852            if self.parse_keyword(Keyword::ROOT) {
12853                self.expect_token(&Token::LParen)?;
12854                root = Some(self.parse_literal_string()?);
12855                self.expect_token(&Token::RParen)?;
12856            } else if self.parse_keyword(Keyword::INCLUDE_NULL_VALUES) {
12857                include_null_values = true;
12858            } else if self.parse_keyword(Keyword::WITHOUT_ARRAY_WRAPPER) {
12859                without_array_wrapper = true;
12860            }
12861        }
12862        Ok(ForClause::Json {
12863            for_json,
12864            root,
12865            include_null_values,
12866            without_array_wrapper,
12867        })
12868    }
12869
12870    /// Parse a CTE (`alias [( col1, col2, ... )] AS (subquery)`)
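    ///
    /// For example, the `totals (customer_id, total) AS ( ... )` part of the following
    /// (illustrative) query:
    ///
    /// ```sql
    /// WITH totals (customer_id, total) AS (
    ///     SELECT customer_id, SUM(amount) FROM orders GROUP BY customer_id
    /// )
    /// SELECT * FROM totals
    /// ```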
12871    pub fn parse_cte(&mut self) -> Result<Cte, ParserError> {
12872        let name = self.parse_identifier()?;
12873
12874        let mut cte = if self.parse_keyword(Keyword::AS) {
12875            let mut is_materialized = None;
12876            if dialect_of!(self is PostgreSqlDialect) {
12877                if self.parse_keyword(Keyword::MATERIALIZED) {
12878                    is_materialized = Some(CteAsMaterialized::Materialized);
12879                } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
12880                    is_materialized = Some(CteAsMaterialized::NotMaterialized);
12881                }
12882            }
12883            self.expect_token(&Token::LParen)?;
12884
12885            let query = self.parse_query()?;
12886            let closing_paren_token = self.expect_token(&Token::RParen)?;
12887
12888            let alias = TableAlias {
12889                explicit: false,
12890                name,
12891                columns: vec![],
12892            };
12893            Cte {
12894                alias,
12895                query,
12896                from: None,
12897                materialized: is_materialized,
12898                closing_paren_token: closing_paren_token.into(),
12899            }
12900        } else {
12901            let columns = self.parse_table_alias_column_defs()?;
12902            self.expect_keyword_is(Keyword::AS)?;
12903            let mut is_materialized = None;
12904            if dialect_of!(self is PostgreSqlDialect) {
12905                if self.parse_keyword(Keyword::MATERIALIZED) {
12906                    is_materialized = Some(CteAsMaterialized::Materialized);
12907                } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
12908                    is_materialized = Some(CteAsMaterialized::NotMaterialized);
12909                }
12910            }
12911            self.expect_token(&Token::LParen)?;
12912
12913            let query = self.parse_query()?;
12914            let closing_paren_token = self.expect_token(&Token::RParen)?;
12915
12916            let alias = TableAlias {
12917                explicit: false,
12918                name,
12919                columns,
12920            };
12921            Cte {
12922                alias,
12923                query,
12924                from: None,
12925                materialized: is_materialized,
12926                closing_paren_token: closing_paren_token.into(),
12927            }
12928        };
12929        if self.parse_keyword(Keyword::FROM) {
12930            cte.from = Some(self.parse_identifier()?);
12931        }
12932        Ok(cte)
12933    }
12934
12935    /// Parse a "query body", which is an expression with roughly the
12936    /// following grammar:
12937    /// ```sql
12938    ///   query_body ::= restricted_select | '(' subquery ')' | set_operation
12939    ///   restricted_select ::= 'SELECT' [expr_list] [ from ] [ where ] [ groupby_having ]
12940    ///   subquery ::= query_body [ order_by_limit ]
12941    ///   set_operation ::= query_body { 'UNION' | 'EXCEPT' | 'INTERSECT' } [ 'ALL' ] query_body
12942    /// ```
12943    pub fn parse_query_body(&mut self, precedence: u8) -> Result<Box<SetExpr>, ParserError> {
12944        // We parse the expression using a Pratt parser, as in `parse_expr()`.
12945        // Start by parsing a restricted SELECT or a `(subquery)`:
12946        let expr = if self.peek_keyword(Keyword::SELECT)
12947            || (self.peek_keyword(Keyword::FROM) && self.dialect.supports_from_first_select())
12948        {
12949            SetExpr::Select(self.parse_select().map(Box::new)?)
12950        } else if self.consume_token(&Token::LParen) {
12951            // CTEs are not allowed here, but the parser currently accepts them
12952            let subquery = self.parse_query()?;
12953            self.expect_token(&Token::RParen)?;
12954            SetExpr::Query(subquery)
12955        } else if self.parse_keyword(Keyword::VALUES) {
12956            let is_mysql = dialect_of!(self is MySqlDialect);
12957            SetExpr::Values(self.parse_values(is_mysql, false)?)
12958        } else if self.parse_keyword(Keyword::VALUE) {
12959            let is_mysql = dialect_of!(self is MySqlDialect);
12960            SetExpr::Values(self.parse_values(is_mysql, true)?)
12961        } else if self.parse_keyword(Keyword::TABLE) {
12962            SetExpr::Table(Box::new(self.parse_as_table()?))
12963        } else {
12964            return self.expected(
12965                "SELECT, VALUES, or a subquery in the query body",
12966                self.peek_token(),
12967            );
12968        };
12969
12970        self.parse_remaining_set_exprs(expr, precedence)
12971    }
12972
12973    /// Parse any extra set expressions that may be present in a query body
12974    ///
12975    /// (this is its own function to reduce required stack size in debug builds)
12976    fn parse_remaining_set_exprs(
12977        &mut self,
12978        mut expr: SetExpr,
12979        precedence: u8,
12980    ) -> Result<Box<SetExpr>, ParserError> {
12981        loop {
12982            // The query can be optionally followed by a set operator:
12983            let op = self.parse_set_operator(&self.peek_token().token);
12984            let next_precedence = match op {
12985                // UNION and EXCEPT have the same binding power and evaluate left-to-right
12986                Some(SetOperator::Union) | Some(SetOperator::Except) | Some(SetOperator::Minus) => {
12987                    10
12988                }
12989                // INTERSECT has higher precedence than UNION/EXCEPT
12990                Some(SetOperator::Intersect) => 20,
12991                // Unexpected token or EOF => stop parsing the query body
12992                None => break,
12993            };
12994            if precedence >= next_precedence {
12995                break;
12996            }
12997            self.next_token(); // skip past the set operator
12998            let set_quantifier = self.parse_set_quantifier(&op);
12999            expr = SetExpr::SetOperation {
13000                left: Box::new(expr),
13001                op: op.unwrap(),
13002                set_quantifier,
13003                right: self.parse_query_body(next_precedence)?,
13004            };
13005        }
13006
13007        Ok(expr.into())
13008    }
13009
13010    pub fn parse_set_operator(&mut self, token: &Token) -> Option<SetOperator> {
13011        match token {
13012            Token::Word(w) if w.keyword == Keyword::UNION => Some(SetOperator::Union),
13013            Token::Word(w) if w.keyword == Keyword::EXCEPT => Some(SetOperator::Except),
13014            Token::Word(w) if w.keyword == Keyword::INTERSECT => Some(SetOperator::Intersect),
13015            Token::Word(w) if w.keyword == Keyword::MINUS => Some(SetOperator::Minus),
13016            _ => None,
13017        }
13018    }
13019
13020    pub fn parse_set_quantifier(&mut self, op: &Option<SetOperator>) -> SetQuantifier {
13021        match op {
13022            Some(
13023                SetOperator::Except
13024                | SetOperator::Intersect
13025                | SetOperator::Union
13026                | SetOperator::Minus,
13027            ) => {
13028                if self.parse_keywords(&[Keyword::DISTINCT, Keyword::BY, Keyword::NAME]) {
13029                    SetQuantifier::DistinctByName
13030                } else if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
13031                    SetQuantifier::ByName
13032                } else if self.parse_keyword(Keyword::ALL) {
13033                    if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
13034                        SetQuantifier::AllByName
13035                    } else {
13036                        SetQuantifier::All
13037                    }
13038                } else if self.parse_keyword(Keyword::DISTINCT) {
13039                    SetQuantifier::Distinct
13040                } else {
13041                    SetQuantifier::None
13042                }
13043            }
13044            _ => SetQuantifier::None,
13045        }
13046    }
13047
13048    /// Parse a restricted `SELECT` statement (no CTEs / `UNION` / `ORDER BY`)
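    ///
    /// A minimal illustrative sketch that drives this method directly through the
    /// public `Parser` API:
    ///
    /// ```
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect)
    ///     .try_with_sql("SELECT a, b FROM t WHERE a > 1")
    ///     .unwrap();
    /// let select = parser.parse_select().unwrap();
    /// assert_eq!(select.projection.len(), 2);
    /// ```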
13049    pub fn parse_select(&mut self) -> Result<Select, ParserError> {
13050        let mut from_first = None;
13051
13052        if self.dialect.supports_from_first_select() && self.peek_keyword(Keyword::FROM) {
13053            let from_token = self.expect_keyword(Keyword::FROM)?;
13054            let from = self.parse_table_with_joins()?;
13055            if !self.peek_keyword(Keyword::SELECT) {
13056                return Ok(Select {
13057                    select_token: AttachedToken(from_token),
13058                    distinct: None,
13059                    top: None,
13060                    top_before_distinct: false,
13061                    projection: vec![],
13062                    exclude: None,
13063                    into: None,
13064                    from,
13065                    lateral_views: vec![],
13066                    prewhere: None,
13067                    selection: None,
13068                    group_by: GroupByExpr::Expressions(vec![], vec![]),
13069                    cluster_by: vec![],
13070                    distribute_by: vec![],
13071                    sort_by: vec![],
13072                    having: None,
13073                    named_window: vec![],
13074                    window_before_qualify: false,
13075                    qualify: None,
13076                    value_table_mode: None,
13077                    connect_by: None,
13078                    flavor: SelectFlavor::FromFirstNoSelect,
13079                });
13080            }
13081            from_first = Some(from);
13082        }
13083
13084        let select_token = self.expect_keyword(Keyword::SELECT)?;
13085        let value_table_mode = self.parse_value_table_mode()?;
13086
13087        let mut top_before_distinct = false;
13088        let mut top = None;
13089        if self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
13090            top = Some(self.parse_top()?);
13091            top_before_distinct = true;
13092        }
13093        let distinct = self.parse_all_or_distinct()?;
13094        if !self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
13095            top = Some(self.parse_top()?);
13096        }
13097
13098        let projection =
13099            if self.dialect.supports_empty_projections() && self.peek_keyword(Keyword::FROM) {
13100                vec![]
13101            } else {
13102                self.parse_projection()?
13103            };
13104
13105        let exclude = if self.dialect.supports_select_exclude() {
13106            self.parse_optional_select_item_exclude()?
13107        } else {
13108            None
13109        };
13110
13111        let into = if self.parse_keyword(Keyword::INTO) {
13112            Some(self.parse_select_into()?)
13113        } else {
13114            None
13115        };
13116
13117        // Note that for keywords to be properly handled here, they need to be
13118        // added to `RESERVED_FOR_COLUMN_ALIAS` / `RESERVED_FOR_TABLE_ALIAS`,
13119        // otherwise they may be parsed as an alias as part of the `projection`
13120        // or `from`.
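        // For example, `SELECT x FROM t` only works because `FROM` is listed in
        // `RESERVED_FOR_COLUMN_ALIAS`; otherwise `FROM` would be taken as an alias for `x`.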
13121
13122        let (from, from_first) = if let Some(from) = from_first.take() {
13123            (from, true)
13124        } else if self.parse_keyword(Keyword::FROM) {
13125            (self.parse_table_with_joins()?, false)
13126        } else {
13127            (vec![], false)
13128        };
13129
13130        let mut lateral_views = vec![];
13131        loop {
13132            if self.parse_keywords(&[Keyword::LATERAL, Keyword::VIEW]) {
13133                let outer = self.parse_keyword(Keyword::OUTER);
13134                let lateral_view = self.parse_expr()?;
13135                let lateral_view_name = self.parse_object_name(false)?;
13136                let lateral_col_alias = self
13137                    .parse_comma_separated(|parser| {
13138                        parser.parse_optional_alias(&[
13139                            Keyword::WHERE,
13140                            Keyword::GROUP,
13141                            Keyword::CLUSTER,
13142                            Keyword::HAVING,
13143                            Keyword::LATERAL,
13144                        ]) // Stop at keywords that can follow the LATERAL VIEW column aliases
13145                    })?
13146                    .into_iter()
13147                    .flatten()
13148                    .collect();
13149
13150                lateral_views.push(LateralView {
13151                    lateral_view,
13152                    lateral_view_name,
13153                    lateral_col_alias,
13154                    outer,
13155                });
13156            } else {
13157                break;
13158            }
13159        }
13160
13161        let prewhere = if dialect_of!(self is ClickHouseDialect|GenericDialect)
13162            && self.parse_keyword(Keyword::PREWHERE)
13163        {
13164            Some(self.parse_expr()?)
13165        } else {
13166            None
13167        };
13168
13169        let selection = if self.parse_keyword(Keyword::WHERE) {
13170            Some(self.parse_expr()?)
13171        } else {
13172            None
13173        };
13174
13175        let group_by = self
13176            .parse_optional_group_by()?
13177            .unwrap_or_else(|| GroupByExpr::Expressions(vec![], vec![]));
13178
13179        let cluster_by = if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
13180            self.parse_comma_separated(Parser::parse_expr)?
13181        } else {
13182            vec![]
13183        };
13184
13185        let distribute_by = if self.parse_keywords(&[Keyword::DISTRIBUTE, Keyword::BY]) {
13186            self.parse_comma_separated(Parser::parse_expr)?
13187        } else {
13188            vec![]
13189        };
13190
13191        let sort_by = if self.parse_keywords(&[Keyword::SORT, Keyword::BY]) {
13192            self.parse_comma_separated(Parser::parse_order_by_expr)?
13193        } else {
13194            vec![]
13195        };
13196
13197        let having = if self.parse_keyword(Keyword::HAVING) {
13198            Some(self.parse_expr()?)
13199        } else {
13200            None
13201        };
13202
13203        // Accept QUALIFY and WINDOW in any order and flag accordingly.
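        // e.g. both `... WINDOW w AS (...) QUALIFY x = 1` and `... QUALIFY x = 1 WINDOW w AS (...)`
        // are accepted; `window_before_qualify` records which order was used.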
13204        let (named_windows, qualify, window_before_qualify) = if self.parse_keyword(Keyword::WINDOW)
13205        {
13206            let named_windows = self.parse_comma_separated(Parser::parse_named_window)?;
13207            if self.parse_keyword(Keyword::QUALIFY) {
13208                (named_windows, Some(self.parse_expr()?), true)
13209            } else {
13210                (named_windows, None, true)
13211            }
13212        } else if self.parse_keyword(Keyword::QUALIFY) {
13213            let qualify = Some(self.parse_expr()?);
13214            if self.parse_keyword(Keyword::WINDOW) {
13215                (
13216                    self.parse_comma_separated(Parser::parse_named_window)?,
13217                    qualify,
13218                    false,
13219                )
13220            } else {
13221                (Default::default(), qualify, false)
13222            }
13223        } else {
13224            Default::default()
13225        };
13226
13227        let connect_by = if self.dialect.supports_connect_by()
13228            && self
13229                .parse_one_of_keywords(&[Keyword::START, Keyword::CONNECT])
13230                .is_some()
13231        {
13232            self.prev_token();
13233            Some(self.parse_connect_by()?)
13234        } else {
13235            None
13236        };
13237
13238        Ok(Select {
13239            select_token: AttachedToken(select_token),
13240            distinct,
13241            top,
13242            top_before_distinct,
13243            projection,
13244            exclude,
13245            into,
13246            from,
13247            lateral_views,
13248            prewhere,
13249            selection,
13250            group_by,
13251            cluster_by,
13252            distribute_by,
13253            sort_by,
13254            having,
13255            named_window: named_windows,
13256            window_before_qualify,
13257            qualify,
13258            value_table_mode,
13259            connect_by,
13260            flavor: if from_first {
13261                SelectFlavor::FromFirst
13262            } else {
13263                SelectFlavor::Standard
13264            },
13265        })
13266    }
13267
13268    fn parse_value_table_mode(&mut self) -> Result<Option<ValueTableMode>, ParserError> {
13269        if !dialect_of!(self is BigQueryDialect) {
13270            return Ok(None);
13271        }
13272
13273        let mode = if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::VALUE]) {
13274            Some(ValueTableMode::DistinctAsValue)
13275        } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::STRUCT]) {
13276            Some(ValueTableMode::DistinctAsStruct)
13277        } else if self.parse_keywords(&[Keyword::AS, Keyword::VALUE])
13278            || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::VALUE])
13279        {
13280            Some(ValueTableMode::AsValue)
13281        } else if self.parse_keywords(&[Keyword::AS, Keyword::STRUCT])
13282            || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::STRUCT])
13283        {
13284            Some(ValueTableMode::AsStruct)
13285        } else if self.parse_keyword(Keyword::AS) {
13286            self.expected("VALUE or STRUCT", self.peek_token())?
13287        } else {
13288            None
13289        };
13290
13291        Ok(mode)
13292    }
13293
13294    /// Invoke `f` after first setting the parser's `ParserState` to `state`.
13295    ///
13296    /// Upon return, restores the parser's state to what it started at.
13297    fn with_state<T, F>(&mut self, state: ParserState, mut f: F) -> Result<T, ParserError>
13298    where
13299        F: FnMut(&mut Parser) -> Result<T, ParserError>,
13300    {
13301        let current_state = self.state;
13302        self.state = state;
13303        let res = f(self);
13304        self.state = current_state;
13305        res
13306    }
13307
13308    pub fn parse_connect_by(&mut self) -> Result<ConnectBy, ParserError> {
13309        let (condition, relationships) = if self.parse_keywords(&[Keyword::CONNECT, Keyword::BY]) {
13310            let relationships = self.with_state(ParserState::ConnectBy, |parser| {
13311                parser.parse_comma_separated(Parser::parse_expr)
13312            })?;
13313            self.expect_keywords(&[Keyword::START, Keyword::WITH])?;
13314            let condition = self.parse_expr()?;
13315            (condition, relationships)
13316        } else {
13317            self.expect_keywords(&[Keyword::START, Keyword::WITH])?;
13318            let condition = self.parse_expr()?;
13319            self.expect_keywords(&[Keyword::CONNECT, Keyword::BY])?;
13320            let relationships = self.with_state(ParserState::ConnectBy, |parser| {
13321                parser.parse_comma_separated(Parser::parse_expr)
13322            })?;
13323            (condition, relationships)
13324        };
13325        Ok(ConnectBy {
13326            condition,
13327            relationships,
13328        })
13329    }
13330
13331    /// Parse `CREATE TABLE x AS TABLE y`
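    ///
    /// A small illustrative sketch, assuming the generic dialect accepts the
    /// `CREATE TABLE ... AS TABLE ...` form that exercises this code path:
    ///
    /// ```
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let statements =
    ///     Parser::parse_sql(&GenericDialect {}, "CREATE TABLE new_tbl AS TABLE old_tbl").unwrap();
    /// assert_eq!(statements.len(), 1);
    /// ```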
13332    pub fn parse_as_table(&mut self) -> Result<Table, ParserError> {
13333        let token1 = self.next_token();
13334        let token2 = self.next_token();
13335        let token3 = self.next_token();
13336
13337        let table_name;
13338        let schema_name;
13339        if token2 == Token::Period {
13340            match token1.token {
13341                Token::Word(w) => {
13342                    schema_name = w.value;
13343                }
13344                _ => {
13345                    return self.expected("Schema name", token1);
13346                }
13347            }
13348            match token3.token {
13349                Token::Word(w) => {
13350                    table_name = w.value;
13351                }
13352                _ => {
13353                    return self.expected("Table name", token3);
13354                }
13355            }
13356            Ok(Table {
13357                table_name: Some(table_name),
13358                schema_name: Some(schema_name),
13359            })
13360        } else {
13361            match token1.token {
13362                Token::Word(w) => {
13363                    table_name = w.value;
13364                }
13365                _ => {
13366                    return self.expected("Table name", token1);
13367                }
13368            }
13369            Ok(Table {
13370                table_name: Some(table_name),
13371                schema_name: None,
13372            })
13373        }
13374    }
13375
13376    /// Parse a `SET ROLE` statement. Expects SET to be consumed already.
13377    fn parse_set_role(
13378        &mut self,
13379        modifier: Option<ContextModifier>,
13380    ) -> Result<Statement, ParserError> {
13381        self.expect_keyword_is(Keyword::ROLE)?;
13382
13383        let role_name = if self.parse_keyword(Keyword::NONE) {
13384            None
13385        } else {
13386            Some(self.parse_identifier()?)
13387        };
13388        Ok(Statement::Set(Set::SetRole {
13389            context_modifier: modifier,
13390            role_name,
13391        }))
13392    }
13393
13394    fn parse_set_values(
13395        &mut self,
13396        parenthesized_assignment: bool,
13397    ) -> Result<Vec<Expr>, ParserError> {
13398        let mut values = vec![];
13399
13400        if parenthesized_assignment {
13401            self.expect_token(&Token::LParen)?;
13402        }
13403
13404        loop {
13405            let value = if let Some(expr) = self.try_parse_expr_sub_query()? {
13406                expr
13407            } else if let Ok(expr) = self.parse_expr() {
13408                expr
13409            } else {
13410                self.expected("variable value", self.peek_token())?
13411            };
13412
13413            values.push(value);
13414            if self.consume_token(&Token::Comma) {
13415                continue;
13416            }
13417
13418            if parenthesized_assignment {
13419                self.expect_token(&Token::RParen)?;
13420            }
13421            return Ok(values);
13422        }
13423    }
13424
13425    fn parse_context_modifier(&mut self) -> Option<ContextModifier> {
13426        let modifier =
13427            self.parse_one_of_keywords(&[Keyword::SESSION, Keyword::LOCAL, Keyword::GLOBAL])?;
13428
13429        Self::keyword_to_modifier(modifier)
13430    }
13431
13432    /// Parse a single SET statement assignment `var = expr`.
13433    fn parse_set_assignment(&mut self) -> Result<SetAssignment, ParserError> {
13434        let scope = self.parse_context_modifier();
13435
13436        let name = if self.dialect.supports_parenthesized_set_variables()
13437            && self.consume_token(&Token::LParen)
13438        {
13439            // Parenthesized assignments are handled in `parse_set`, after first trying to
13440            // parse a list of assignments using this function.
13441            // If the dialect supports both and we see a `(`, we bail out of this function early.
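            // e.g. `SET (a, b) = (1, 2)` ends up as `Set::ParenthesizedAssignments`
            // rather than a list of `SetAssignment`s.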
13442            self.expected("Unparenthesized assignment", self.peek_token())?
13443        } else {
13444            self.parse_object_name(false)?
13445        };
13446
13447        if !(self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO)) {
13448            return self.expected("assignment operator", self.peek_token());
13449        }
13450
13451        let value = self.parse_expr()?;
13452
13453        Ok(SetAssignment { scope, name, value })
13454    }
13455
13456    fn parse_set(&mut self) -> Result<Statement, ParserError> {
13457        let hivevar = self.parse_keyword(Keyword::HIVEVAR);
13458
13459        // Modifier is either HIVEVAR: or a ContextModifier (LOCAL, SESSION, etc), not both
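        // e.g. `SET HIVEVAR:name = 'value'` vs. `SET SESSION some_var = 'value'`.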
13460        let scope = if !hivevar {
13461            self.parse_context_modifier()
13462        } else {
13463            None
13464        };
13465
13466        if hivevar {
13467            self.expect_token(&Token::Colon)?;
13468        }
13469
13470        if let Some(set_role_stmt) = self.maybe_parse(|parser| parser.parse_set_role(scope))? {
13471            return Ok(set_role_stmt);
13472        }
13473
13474        // Handle special cases first
13475        if self.parse_keywords(&[Keyword::TIME, Keyword::ZONE])
13476            || self.parse_keyword(Keyword::TIMEZONE)
13477        {
13478            if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
13479                return Ok(Set::SingleAssignment {
13480                    scope,
13481                    hivevar,
13482                    variable: ObjectName::from(vec!["TIMEZONE".into()]),
13483                    values: self.parse_set_values(false)?,
13484                }
13485                .into());
13486            } else {
13487                // A shorthand form of SET TIME ZONE that doesn't require
13488                // the assignment operator. It's originally PostgreSQL-specific,
13489                // but we allow it for all dialects.
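                // e.g. `SET TIME ZONE 'UTC'` as opposed to `SET TIME ZONE = 'UTC'`.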
13490                return Ok(Set::SetTimeZone {
13491                    local: scope == Some(ContextModifier::Local),
13492                    value: self.parse_expr()?,
13493                }
13494                .into());
13495            }
13496        } else if self.dialect.supports_set_names() && self.parse_keyword(Keyword::NAMES) {
13497            if self.parse_keyword(Keyword::DEFAULT) {
13498                return Ok(Set::SetNamesDefault {}.into());
13499            }
13500            let charset_name = self.parse_identifier()?;
13501            let collation_name = if self.parse_one_of_keywords(&[Keyword::COLLATE]).is_some() {
13502                Some(self.parse_literal_string()?)
13503            } else {
13504                None
13505            };
13506
13507            return Ok(Set::SetNames {
13508                charset_name,
13509                collation_name,
13510            }
13511            .into());
13512        } else if self.parse_keyword(Keyword::CHARACTERISTICS) {
13513            self.expect_keywords(&[Keyword::AS, Keyword::TRANSACTION])?;
13514            return Ok(Set::SetTransaction {
13515                modes: self.parse_transaction_modes()?,
13516                snapshot: None,
13517                session: true,
13518            }
13519            .into());
13520        } else if self.parse_keyword(Keyword::TRANSACTION) {
13521            if self.parse_keyword(Keyword::SNAPSHOT) {
13522                let snapshot_id = self.parse_value()?.value;
13523                return Ok(Set::SetTransaction {
13524                    modes: vec![],
13525                    snapshot: Some(snapshot_id),
13526                    session: false,
13527                }
13528                .into());
13529            }
13530            return Ok(Set::SetTransaction {
13531                modes: self.parse_transaction_modes()?,
13532                snapshot: None,
13533                session: false,
13534            }
13535            .into());
13536        } else if scope.is_some() && self.parse_keyword(Keyword::AUTHORIZATION) {
13537            let auth_value = if self.parse_keyword(Keyword::DEFAULT) {
13538                SetSessionAuthorizationParamKind::Default
13539            } else {
13540                let value = self.parse_identifier()?;
13541                SetSessionAuthorizationParamKind::User(value)
13542            };
13543            return Ok(Set::SetSessionAuthorization(SetSessionAuthorizationParam {
13544                scope: scope.expect("SET ... AUTHORIZATION must have a scope"),
13545                kind: auth_value,
13546            })
13547            .into());
13548        }
13549
13550        if self.dialect.supports_comma_separated_set_assignments() {
13551            if scope.is_some() {
13552                self.prev_token();
13553            }
13554
13555            if let Some(assignments) = self
13556                .maybe_parse(|parser| parser.parse_comma_separated(Parser::parse_set_assignment))?
13557            {
13558                return if assignments.len() > 1 {
13559                    Ok(Set::MultipleAssignments { assignments }.into())
13560                } else {
13561                    let SetAssignment { scope, name, value } =
13562                        assignments.into_iter().next().ok_or_else(|| {
13563                            ParserError::ParserError("Expected at least one assignment".to_string())
13564                        })?;
13565
13566                    Ok(Set::SingleAssignment {
13567                        scope,
13568                        hivevar,
13569                        variable: name,
13570                        values: vec![value],
13571                    }
13572                    .into())
13573                };
13574            }
13575        }
13576
13577        let variables = if self.dialect.supports_parenthesized_set_variables()
13578            && self.consume_token(&Token::LParen)
13579        {
13580            let vars = OneOrManyWithParens::Many(
13581                self.parse_comma_separated(|parser: &mut Parser<'a>| parser.parse_identifier())?
13582                    .into_iter()
13583                    .map(|ident| ObjectName::from(vec![ident]))
13584                    .collect(),
13585            );
13586            self.expect_token(&Token::RParen)?;
13587            vars
13588        } else {
13589            OneOrManyWithParens::One(self.parse_object_name(false)?)
13590        };
13591
13592        if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
13593            let stmt = match variables {
13594                OneOrManyWithParens::One(var) => Set::SingleAssignment {
13595                    scope,
13596                    hivevar,
13597                    variable: var,
13598                    values: self.parse_set_values(false)?,
13599                },
13600                OneOrManyWithParens::Many(vars) => Set::ParenthesizedAssignments {
13601                    variables: vars,
13602                    values: self.parse_set_values(true)?,
13603                },
13604            };
13605
13606            return Ok(stmt.into());
13607        }
13608
13609        if self.dialect.supports_set_stmt_without_operator() {
13610            self.prev_token();
13611            return self.parse_set_session_params();
13612        };
13613
13614        self.expected("equals sign or TO", self.peek_token())
13615    }
13616
13617    pub fn parse_set_session_params(&mut self) -> Result<Statement, ParserError> {
13618        if self.parse_keyword(Keyword::STATISTICS) {
13619            let topic = match self.parse_one_of_keywords(&[
13620                Keyword::IO,
13621                Keyword::PROFILE,
13622                Keyword::TIME,
13623                Keyword::XML,
13624            ]) {
13625                Some(Keyword::IO) => SessionParamStatsTopic::IO,
13626                Some(Keyword::PROFILE) => SessionParamStatsTopic::Profile,
13627                Some(Keyword::TIME) => SessionParamStatsTopic::Time,
13628                Some(Keyword::XML) => SessionParamStatsTopic::Xml,
13629                _ => return self.expected("IO, PROFILE, TIME or XML", self.peek_token()),
13630            };
13631            let value = self.parse_session_param_value()?;
13632            Ok(
13633                Set::SetSessionParam(SetSessionParamKind::Statistics(SetSessionParamStatistics {
13634                    topic,
13635                    value,
13636                }))
13637                .into(),
13638            )
13639        } else if self.parse_keyword(Keyword::IDENTITY_INSERT) {
13640            let obj = self.parse_object_name(false)?;
13641            let value = self.parse_session_param_value()?;
13642            Ok(Set::SetSessionParam(SetSessionParamKind::IdentityInsert(
13643                SetSessionParamIdentityInsert { obj, value },
13644            ))
13645            .into())
13646        } else if self.parse_keyword(Keyword::OFFSETS) {
13647            let keywords = self.parse_comma_separated(|parser| {
13648                let next_token = parser.next_token();
13649                match &next_token.token {
13650                    Token::Word(w) => Ok(w.to_string()),
13651                    _ => parser.expected("SQL keyword", next_token),
13652                }
13653            })?;
13654            let value = self.parse_session_param_value()?;
13655            Ok(
13656                Set::SetSessionParam(SetSessionParamKind::Offsets(SetSessionParamOffsets {
13657                    keywords,
13658                    value,
13659                }))
13660                .into(),
13661            )
13662        } else {
13663            let names = self.parse_comma_separated(|parser| {
13664                let next_token = parser.next_token();
13665                match next_token.token {
13666                    Token::Word(w) => Ok(w.to_string()),
13667                    _ => parser.expected("Session param name", next_token),
13668                }
13669            })?;
13670            let value = self.parse_expr()?.to_string();
13671            Ok(
13672                Set::SetSessionParam(SetSessionParamKind::Generic(SetSessionParamGeneric {
13673                    names,
13674                    value,
13675                }))
13676                .into(),
13677            )
13678        }
13679    }
13680
13681    fn parse_session_param_value(&mut self) -> Result<SessionParamValue, ParserError> {
13682        if self.parse_keyword(Keyword::ON) {
13683            Ok(SessionParamValue::On)
13684        } else if self.parse_keyword(Keyword::OFF) {
13685            Ok(SessionParamValue::Off)
13686        } else {
13687            self.expected("ON or OFF", self.peek_token())
13688        }
13689    }
13690
13691    pub fn parse_show(&mut self) -> Result<Statement, ParserError> {
13692        let terse = self.parse_keyword(Keyword::TERSE);
13693        let extended = self.parse_keyword(Keyword::EXTENDED);
13694        let full = self.parse_keyword(Keyword::FULL);
13695        let session = self.parse_keyword(Keyword::SESSION);
13696        let global = self.parse_keyword(Keyword::GLOBAL);
13697        let external = self.parse_keyword(Keyword::EXTERNAL);
13698        if self
13699            .parse_one_of_keywords(&[Keyword::COLUMNS, Keyword::FIELDS])
13700            .is_some()
13701        {
13702            Ok(self.parse_show_columns(extended, full)?)
13703        } else if self.parse_keyword(Keyword::TABLES) {
13704            Ok(self.parse_show_tables(terse, extended, full, external)?)
13705        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEWS]) {
13706            Ok(self.parse_show_views(terse, true)?)
13707        } else if self.parse_keyword(Keyword::VIEWS) {
13708            Ok(self.parse_show_views(terse, false)?)
13709        } else if self.parse_keyword(Keyword::FUNCTIONS) {
13710            Ok(self.parse_show_functions()?)
13711        } else if extended || full {
13712            Err(ParserError::ParserError(
13713                "EXTENDED/FULL are not supported with this type of SHOW query".to_string(),
13714            ))
13715        } else if self.parse_one_of_keywords(&[Keyword::CREATE]).is_some() {
13716            Ok(self.parse_show_create()?)
13717        } else if self.parse_keyword(Keyword::COLLATION) {
13718            Ok(self.parse_show_collation()?)
13719        } else if self.parse_keyword(Keyword::VARIABLES)
13720            && dialect_of!(self is MySqlDialect | GenericDialect)
13721        {
13722            Ok(Statement::ShowVariables {
13723                filter: self.parse_show_statement_filter()?,
13724                session,
13725                global,
13726            })
13727        } else if self.parse_keyword(Keyword::STATUS)
13728            && dialect_of!(self is MySqlDialect | GenericDialect)
13729        {
13730            Ok(Statement::ShowStatus {
13731                filter: self.parse_show_statement_filter()?,
13732                session,
13733                global,
13734            })
13735        } else if self.parse_keyword(Keyword::DATABASES) {
13736            self.parse_show_databases(terse)
13737        } else if self.parse_keyword(Keyword::SCHEMAS) {
13738            self.parse_show_schemas(terse)
13739        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
13740            self.parse_show_charset(false)
13741        } else if self.parse_keyword(Keyword::CHARSET) {
13742            self.parse_show_charset(true)
13743        } else {
13744            Ok(Statement::ShowVariable {
13745                variable: self.parse_identifiers()?,
13746            })
13747        }
13748    }
13749
13750    fn parse_show_charset(&mut self, is_shorthand: bool) -> Result<Statement, ParserError> {
13751        // Parse the optional LIKE / ILIKE / WHERE filter that may follow CHARACTER SET / CHARSET
13752        Ok(Statement::ShowCharset(ShowCharset {
13753            is_shorthand,
13754            filter: self.parse_show_statement_filter()?,
13755        }))
13756    }
13757
13758    fn parse_show_databases(&mut self, terse: bool) -> Result<Statement, ParserError> {
13759        let history = self.parse_keyword(Keyword::HISTORY);
13760        let show_options = self.parse_show_stmt_options()?;
13761        Ok(Statement::ShowDatabases {
13762            terse,
13763            history,
13764            show_options,
13765        })
13766    }
13767
13768    fn parse_show_schemas(&mut self, terse: bool) -> Result<Statement, ParserError> {
13769        let history = self.parse_keyword(Keyword::HISTORY);
13770        let show_options = self.parse_show_stmt_options()?;
13771        Ok(Statement::ShowSchemas {
13772            terse,
13773            history,
13774            show_options,
13775        })
13776    }
13777
13778    pub fn parse_show_create(&mut self) -> Result<Statement, ParserError> {
13779        let obj_type = match self.expect_one_of_keywords(&[
13780            Keyword::TABLE,
13781            Keyword::TRIGGER,
13782            Keyword::FUNCTION,
13783            Keyword::PROCEDURE,
13784            Keyword::EVENT,
13785            Keyword::VIEW,
13786        ])? {
13787            Keyword::TABLE => Ok(ShowCreateObject::Table),
13788            Keyword::TRIGGER => Ok(ShowCreateObject::Trigger),
13789            Keyword::FUNCTION => Ok(ShowCreateObject::Function),
13790            Keyword::PROCEDURE => Ok(ShowCreateObject::Procedure),
13791            Keyword::EVENT => Ok(ShowCreateObject::Event),
13792            Keyword::VIEW => Ok(ShowCreateObject::View),
13793            keyword => Err(ParserError::ParserError(format!(
13794                "Unable to map keyword to ShowCreateObject: {keyword:?}"
13795            ))),
13796        }?;
13797
13798        let obj_name = self.parse_object_name(false)?;
13799
13800        Ok(Statement::ShowCreate { obj_type, obj_name })
13801    }
13802
13803    pub fn parse_show_columns(
13804        &mut self,
13805        extended: bool,
13806        full: bool,
13807    ) -> Result<Statement, ParserError> {
13808        let show_options = self.parse_show_stmt_options()?;
13809        Ok(Statement::ShowColumns {
13810            extended,
13811            full,
13812            show_options,
13813        })
13814    }
13815
13816    fn parse_show_tables(
13817        &mut self,
13818        terse: bool,
13819        extended: bool,
13820        full: bool,
13821        external: bool,
13822    ) -> Result<Statement, ParserError> {
13823        let history = !external && self.parse_keyword(Keyword::HISTORY);
13824        let show_options = self.parse_show_stmt_options()?;
13825        Ok(Statement::ShowTables {
13826            terse,
13827            history,
13828            extended,
13829            full,
13830            external,
13831            show_options,
13832        })
13833    }
13834
13835    fn parse_show_views(
13836        &mut self,
13837        terse: bool,
13838        materialized: bool,
13839    ) -> Result<Statement, ParserError> {
13840        let show_options = self.parse_show_stmt_options()?;
13841        Ok(Statement::ShowViews {
13842            materialized,
13843            terse,
13844            show_options,
13845        })
13846    }
13847
13848    pub fn parse_show_functions(&mut self) -> Result<Statement, ParserError> {
13849        let filter = self.parse_show_statement_filter()?;
13850        Ok(Statement::ShowFunctions { filter })
13851    }
13852
13853    pub fn parse_show_collation(&mut self) -> Result<Statement, ParserError> {
13854        let filter = self.parse_show_statement_filter()?;
13855        Ok(Statement::ShowCollation { filter })
13856    }
13857
13858    pub fn parse_show_statement_filter(
13859        &mut self,
13860    ) -> Result<Option<ShowStatementFilter>, ParserError> {
13861        if self.parse_keyword(Keyword::LIKE) {
13862            Ok(Some(ShowStatementFilter::Like(
13863                self.parse_literal_string()?,
13864            )))
13865        } else if self.parse_keyword(Keyword::ILIKE) {
13866            Ok(Some(ShowStatementFilter::ILike(
13867                self.parse_literal_string()?,
13868            )))
13869        } else if self.parse_keyword(Keyword::WHERE) {
13870            Ok(Some(ShowStatementFilter::Where(self.parse_expr()?)))
13871        } else {
13872            self.maybe_parse(|parser| -> Result<String, ParserError> {
13873                parser.parse_literal_string()
13874            })?
13875            .map_or(Ok(None), |filter| {
13876                Ok(Some(ShowStatementFilter::NoKeyword(filter)))
13877            })
13878        }
13879    }
13880
13881    pub fn parse_use(&mut self) -> Result<Statement, ParserError> {
13882        // Determine which keywords are recognized by the current dialect
13883        let parsed_keyword = if dialect_of!(self is HiveDialect) {
13884            // HiveDialect accepts `USE DEFAULT;` without any database specified
13885            if self.parse_keyword(Keyword::DEFAULT) {
13886                return Ok(Statement::Use(Use::Default));
13887            }
13888            None // HiveDialect doesn't expect any other specific keyword after `USE`
13889        } else if dialect_of!(self is DatabricksDialect) {
13890            self.parse_one_of_keywords(&[Keyword::CATALOG, Keyword::DATABASE, Keyword::SCHEMA])
13891        } else if dialect_of!(self is SnowflakeDialect) {
13892            self.parse_one_of_keywords(&[
13893                Keyword::DATABASE,
13894                Keyword::SCHEMA,
13895                Keyword::WAREHOUSE,
13896                Keyword::ROLE,
13897                Keyword::SECONDARY,
13898            ])
13899        } else {
13900            None // No specific keywords for other dialects, including GenericDialect
13901        };
13902
13903        let result = if matches!(parsed_keyword, Some(Keyword::SECONDARY)) {
13904            self.parse_secondary_roles()?
13905        } else {
13906            let obj_name = self.parse_object_name(false)?;
13907            match parsed_keyword {
13908                Some(Keyword::CATALOG) => Use::Catalog(obj_name),
13909                Some(Keyword::DATABASE) => Use::Database(obj_name),
13910                Some(Keyword::SCHEMA) => Use::Schema(obj_name),
13911                Some(Keyword::WAREHOUSE) => Use::Warehouse(obj_name),
13912                Some(Keyword::ROLE) => Use::Role(obj_name),
13913                _ => Use::Object(obj_name),
13914            }
13915        };
13916
13917        Ok(Statement::Use(result))
13918    }
13919
13920    fn parse_secondary_roles(&mut self) -> Result<Use, ParserError> {
13921        self.expect_one_of_keywords(&[Keyword::ROLES, Keyword::ROLE])?;
13922        if self.parse_keyword(Keyword::NONE) {
13923            Ok(Use::SecondaryRoles(SecondaryRoles::None))
13924        } else if self.parse_keyword(Keyword::ALL) {
13925            Ok(Use::SecondaryRoles(SecondaryRoles::All))
13926        } else {
13927            let roles = self.parse_comma_separated(|parser| parser.parse_identifier())?;
13928            Ok(Use::SecondaryRoles(SecondaryRoles::List(roles)))
13929        }
13930    }
13931
13932    pub fn parse_table_and_joins(&mut self) -> Result<TableWithJoins, ParserError> {
13933        let relation = self.parse_table_factor()?;
13934        // Note that for keywords to be properly handled here, they need to be
13935        // added to `RESERVED_FOR_TABLE_ALIAS`, otherwise they may be parsed as
13936        // a table alias.
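        // For example, `FROM t1 JOIN t2` only works because `JOIN` is listed in
        // `RESERVED_FOR_TABLE_ALIAS`; otherwise `JOIN` would be taken as an alias for `t1`.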
13937        let joins = self.parse_joins()?;
13938        Ok(TableWithJoins { relation, joins })
13939    }
13940
13941    fn parse_joins(&mut self) -> Result<Vec<Join>, ParserError> {
13942        let mut joins = vec![];
13943        loop {
13944            let global = self.parse_keyword(Keyword::GLOBAL);
13945            let join = if self.parse_keyword(Keyword::CROSS) {
13946                let join_operator = if self.parse_keyword(Keyword::JOIN) {
13947                    JoinOperator::CrossJoin(JoinConstraint::None)
13948                } else if self.parse_keyword(Keyword::APPLY) {
13949                    // MSSQL extension, similar to CROSS JOIN LATERAL
13950                    JoinOperator::CrossApply
13951                } else {
13952                    return self.expected("JOIN or APPLY after CROSS", self.peek_token());
13953                };
13954                let relation = self.parse_table_factor()?;
13955                let join_operator = if matches!(join_operator, JoinOperator::CrossJoin(_))
13956                    && self.dialect.supports_cross_join_constraint()
13957                {
13958                    let constraint = self.parse_join_constraint(false)?;
13959                    JoinOperator::CrossJoin(constraint)
13960                } else {
13961                    join_operator
13962                };
13963                Join {
13964                    relation,
13965                    global,
13966                    join_operator,
13967                }
13968            } else if self.parse_keyword(Keyword::OUTER) {
13969                // MSSQL extension, similar to LEFT JOIN LATERAL .. ON 1=1
13970                self.expect_keyword_is(Keyword::APPLY)?;
13971                Join {
13972                    relation: self.parse_table_factor()?,
13973                    global,
13974                    join_operator: JoinOperator::OuterApply,
13975                }
13976            } else if self.parse_keyword(Keyword::ASOF) {
13977                self.expect_keyword_is(Keyword::JOIN)?;
13978                let relation = self.parse_table_factor()?;
13979                self.expect_keyword_is(Keyword::MATCH_CONDITION)?;
13980                let match_condition = self.parse_parenthesized(Self::parse_expr)?;
13981                Join {
13982                    relation,
13983                    global,
13984                    join_operator: JoinOperator::AsOf {
13985                        match_condition,
13986                        constraint: self.parse_join_constraint(false)?,
13987                    },
13988                }
13989            } else {
13990                let natural = self.parse_keyword(Keyword::NATURAL);
13991                let peek_keyword = if let Token::Word(w) = self.peek_token().token {
13992                    w.keyword
13993                } else {
13994                    Keyword::NoKeyword
13995                };
13996
13997                let join_operator_type = match peek_keyword {
13998                    Keyword::INNER | Keyword::JOIN => {
13999                        let inner = self.parse_keyword(Keyword::INNER); // [ INNER ]
14000                        self.expect_keyword_is(Keyword::JOIN)?;
14001                        if inner {
14002                            JoinOperator::Inner
14003                        } else {
14004                            JoinOperator::Join
14005                        }
14006                    }
14007                    kw @ Keyword::LEFT | kw @ Keyword::RIGHT => {
14008                        let _ = self.next_token(); // consume LEFT/RIGHT
14009                        let is_left = kw == Keyword::LEFT;
14010                        let join_type = self.parse_one_of_keywords(&[
14011                            Keyword::OUTER,
14012                            Keyword::SEMI,
14013                            Keyword::ANTI,
14014                            Keyword::JOIN,
14015                        ]);
14016                        match join_type {
14017                            Some(Keyword::OUTER) => {
14018                                self.expect_keyword_is(Keyword::JOIN)?;
14019                                if is_left {
14020                                    JoinOperator::LeftOuter
14021                                } else {
14022                                    JoinOperator::RightOuter
14023                                }
14024                            }
14025                            Some(Keyword::SEMI) => {
14026                                self.expect_keyword_is(Keyword::JOIN)?;
14027                                if is_left {
14028                                    JoinOperator::LeftSemi
14029                                } else {
14030                                    JoinOperator::RightSemi
14031                                }
14032                            }
14033                            Some(Keyword::ANTI) => {
14034                                self.expect_keyword_is(Keyword::JOIN)?;
14035                                if is_left {
14036                                    JoinOperator::LeftAnti
14037                                } else {
14038                                    JoinOperator::RightAnti
14039                                }
14040                            }
14041                            Some(Keyword::JOIN) => {
14042                                if is_left {
14043                                    JoinOperator::Left
14044                                } else {
14045                                    JoinOperator::Right
14046                                }
14047                            }
14048                            _ => {
14049                                return Err(ParserError::ParserError(format!(
14050                                    "expected OUTER, SEMI, ANTI or JOIN after {kw:?}"
14051                                )))
14052                            }
14053                        }
14054                    }
14055                    Keyword::ANTI => {
14056                        let _ = self.next_token(); // consume ANTI
14057                        self.expect_keyword_is(Keyword::JOIN)?;
14058                        JoinOperator::Anti
14059                    }
14060                    Keyword::SEMI => {
14061                        let _ = self.next_token(); // consume SEMI
14062                        self.expect_keyword_is(Keyword::JOIN)?;
14063                        JoinOperator::Semi
14064                    }
14065                    Keyword::FULL => {
14066                        let _ = self.next_token(); // consume FULL
14067                        let _ = self.parse_keyword(Keyword::OUTER); // [ OUTER ]
14068                        self.expect_keyword_is(Keyword::JOIN)?;
14069                        JoinOperator::FullOuter
14070                    }
14071                    Keyword::OUTER => {
14072                        return self.expected("LEFT, RIGHT, or FULL", self.peek_token());
14073                    }
14074                    Keyword::STRAIGHT_JOIN => {
14075                        let _ = self.next_token(); // consume STRAIGHT_JOIN
14076                        JoinOperator::StraightJoin
14077                    }
14078                    _ if natural => {
14079                        return self.expected("a join type after NATURAL", self.peek_token());
14080                    }
14081                    _ => break,
14082                };
14083                let mut relation = self.parse_table_factor()?;
14084
14085                if !self
14086                    .dialect
14087                    .supports_left_associative_joins_without_parens()
14088                    && self.peek_parens_less_nested_join()
14089                {
14090                    let joins = self.parse_joins()?;
14091                    relation = TableFactor::NestedJoin {
14092                        table_with_joins: Box::new(TableWithJoins { relation, joins }),
14093                        alias: None,
14094                    };
14095                }
14096
14097                let join_constraint = self.parse_join_constraint(natural)?;
14098                Join {
14099                    relation,
14100                    global,
14101                    join_operator: join_operator_type(join_constraint),
14102                }
14103            };
14104            joins.push(join);
14105        }
14106        Ok(joins)
14107    }
14108
14109    fn peek_parens_less_nested_join(&self) -> bool {
14110        matches!(
14111            self.peek_token_ref().token,
14112            Token::Word(Word {
14113                keyword: Keyword::JOIN
14114                    | Keyword::INNER
14115                    | Keyword::LEFT
14116                    | Keyword::RIGHT
14117                    | Keyword::FULL,
14118                ..
14119            })
14120        )
14121    }
14122
14123    /// A table name or a parenthesized subquery, followed by optional `[AS] alias`
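    ///
    /// An illustrative sketch that parses a derived table directly with this method:
    ///
    /// ```
    /// # use sqlparser::ast::TableFactor;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("(SELECT 1) AS t").unwrap();
    /// let factor = parser.parse_table_factor().unwrap();
    /// assert!(matches!(factor, TableFactor::Derived { .. }));
    /// ```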
14124    pub fn parse_table_factor(&mut self) -> Result<TableFactor, ParserError> {
14125        if self.parse_keyword(Keyword::LATERAL) {
14126            // LATERAL must always be followed by a subquery or table function.
14127            if self.consume_token(&Token::LParen) {
14128                self.parse_derived_table_factor(Lateral)
14129            } else {
14130                let name = self.parse_object_name(false)?;
14131                self.expect_token(&Token::LParen)?;
14132                let args = self.parse_optional_args()?;
14133                let alias = self.maybe_parse_table_alias()?;
14134                Ok(TableFactor::Function {
14135                    lateral: true,
14136                    name,
14137                    args,
14138                    alias,
14139                })
14140            }
14141        } else if self.parse_keyword(Keyword::TABLE) {
14142            // parse table function (SELECT * FROM TABLE (<expr>) [ AS <alias> ])
14143            self.expect_token(&Token::LParen)?;
14144            let expr = self.parse_expr()?;
14145            self.expect_token(&Token::RParen)?;
14146            let alias = self.maybe_parse_table_alias()?;
14147            Ok(TableFactor::TableFunction { expr, alias })
14148        } else if self.consume_token(&Token::LParen) {
14149            // A left paren introduces either a derived table (i.e., a subquery)
14150            // or a nested join. It's nearly impossible to determine ahead of
14151            // time which it is... so we just try to parse both.
14152            //
14153            // Here's an example that demonstrates the complexity:
14154            //                     /-------------------------------------------------------\
14155            //                     | /-----------------------------------\                 |
14156            //     SELECT * FROM ( ( ( (SELECT 1) UNION (SELECT 2) ) AS t1 NATURAL JOIN t2 ) )
14157            //                   ^ ^ ^ ^
14158            //                   | | | |
14159            //                   | | | |
14160            //                   | | | (4) belongs to a SetExpr::Query inside the subquery
14161            //                   | | (3) starts a derived table (subquery)
14162            //                   | (2) starts a nested join
14163            //                   (1) an additional set of parens around a nested join
14164            //
14165
14166            // If the recently consumed '(' starts a derived table, the call to
14167            // `parse_derived_table_factor` below will return success after parsing the
14168            // subquery, followed by the closing ')', and the alias of the derived table.
14169            // In the example above this is case (3).
14170            if let Some(mut table) =
14171                self.maybe_parse(|parser| parser.parse_derived_table_factor(NotLateral))?
14172            {
14173                while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT])
14174                {
14175                    table = match kw {
14176                        Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
14177                        Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
14178                        _ => unreachable!(),
14179                    }
14180                }
14181                return Ok(table);
14182            }
14183
14184            // A parsing error from `parse_derived_table_factor` indicates that the '(' we've
14185            // recently consumed does not start a derived table (cases 1, 2, or 4).
14186            // `maybe_parse` will ignore such an error and rewind to be after the opening '('.
14187
14188            // Inside the parentheses we expect to find an (A) table factor
14189            // followed by some joins or (B) another level of nesting.
14190            let mut table_and_joins = self.parse_table_and_joins()?;
14191
14192            #[allow(clippy::if_same_then_else)]
14193            if !table_and_joins.joins.is_empty() {
14194                self.expect_token(&Token::RParen)?;
14195                let alias = self.maybe_parse_table_alias()?;
14196                Ok(TableFactor::NestedJoin {
14197                    table_with_joins: Box::new(table_and_joins),
14198                    alias,
14199                }) // (A)
14200            } else if let TableFactor::NestedJoin {
14201                table_with_joins: _,
14202                alias: _,
14203            } = &table_and_joins.relation
14204            {
14205                // (B): `table_and_joins` (what we found inside the parentheses)
14206                // is a nested join `(foo JOIN bar)`, not followed by other joins.
14207                self.expect_token(&Token::RParen)?;
14208                let alias = self.maybe_parse_table_alias()?;
14209                Ok(TableFactor::NestedJoin {
14210                    table_with_joins: Box::new(table_and_joins),
14211                    alias,
14212                })
14213            } else if dialect_of!(self is SnowflakeDialect | GenericDialect) {
14214                // Dialect-specific behavior: Snowflake diverges from the
14215                // standard and from most of the other implementations by
14216                // allowing extra parentheses not only around a join (B), but
14217                // around lone table names (e.g. `FROM (mytable [AS alias])`)
14218                // and around derived tables (e.g. `FROM ((SELECT ...)
14219                // [AS alias])`) as well.
14220                self.expect_token(&Token::RParen)?;
14221
14222                if let Some(outer_alias) = self.maybe_parse_table_alias()? {
14223                    // Snowflake also allows specifying an alias *after* parens
14224                    // e.g. `FROM (mytable) AS alias`
14225                    match &mut table_and_joins.relation {
14226                        TableFactor::Derived { alias, .. }
14227                        | TableFactor::Table { alias, .. }
14228                        | TableFactor::Function { alias, .. }
14229                        | TableFactor::UNNEST { alias, .. }
14230                        | TableFactor::JsonTable { alias, .. }
14231                        | TableFactor::XmlTable { alias, .. }
14232                        | TableFactor::OpenJsonTable { alias, .. }
14233                        | TableFactor::TableFunction { alias, .. }
14234                        | TableFactor::Pivot { alias, .. }
14235                        | TableFactor::Unpivot { alias, .. }
14236                        | TableFactor::MatchRecognize { alias, .. }
14237                        | TableFactor::SemanticView { alias, .. }
14238                        | TableFactor::NestedJoin { alias, .. } => {
14239                            // but not `FROM (mytable AS alias1) AS alias2`.
14240                            if let Some(inner_alias) = alias {
14241                                return Err(ParserError::ParserError(format!(
14242                                    "duplicate alias {inner_alias}"
14243                                )));
14244                            }
14245                            // Act as if the alias was specified normally next
14246                            // to the table name: `(mytable) AS alias` ->
14247                            // `(mytable AS alias)`
14248                            alias.replace(outer_alias);
14249                        }
14250                    };
14251                }
14252                // Do not store the extra set of parens in the AST
14253                Ok(table_and_joins.relation)
14254            } else {
14255                // The SQL spec prohibits derived tables and bare tables from
14256                // appearing alone in parentheses (e.g. `FROM (mytable)`)
14257                self.expected("joined table", self.peek_token())
14258            }
14259        } else if dialect_of!(self is SnowflakeDialect | DatabricksDialect | GenericDialect)
14260            && matches!(
14261                self.peek_tokens(),
14262                [
14263                    Token::Word(Word {
14264                        keyword: Keyword::VALUES,
14265                        ..
14266                    }),
14267                    Token::LParen
14268                ]
14269            )
14270        {
14271            self.expect_keyword_is(Keyword::VALUES)?;
14272
14273            // Snowflake and Databricks allow syntax like below:
14274            // SELECT * FROM VALUES (1, 'a'), (2, 'b') AS t (col1, col2)
14275            // where there are no parentheses around the VALUES clause.
14276            let values = SetExpr::Values(self.parse_values(false, false)?);
14277            let alias = self.maybe_parse_table_alias()?;
14278            Ok(TableFactor::Derived {
14279                lateral: false,
14280                subquery: Box::new(Query {
14281                    with: None,
14282                    body: Box::new(values),
14283                    order_by: None,
14284                    limit_clause: None,
14285                    fetch: None,
14286                    locks: vec![],
14287                    for_clause: None,
14288                    settings: None,
14289                    format_clause: None,
14290                    pipe_operators: vec![],
14291                }),
14292                alias,
14293            })
14294        } else if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
14295            && self.parse_keyword(Keyword::UNNEST)
14296        {
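            // Illustrative examples accepted by this branch (identifiers and
            // literals are hypothetical; the Postgres-style ARRAY literal is one
            // of several array expressions `parse_expr` can produce here):
            //   SELECT * FROM UNNEST(ARRAY[1, 2, 3]) WITH ORDINALITY AS t (elem, idx)
            //   SELECT * FROM UNNEST(arr_col) AS t WITH OFFSET AS off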
14297            self.expect_token(&Token::LParen)?;
14298            let array_exprs = self.parse_comma_separated(Parser::parse_expr)?;
14299            self.expect_token(&Token::RParen)?;
14300
14301            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
14302            let alias = self.maybe_parse_table_alias()?;
14303
14304            let with_offset = self
14305                .expect_keywords(&[Keyword::WITH, Keyword::OFFSET])
14306                .is_ok();
14307
14308            let with_offset_alias = if with_offset {
14309                self.parse_optional_alias(keywords::RESERVED_FOR_COLUMN_ALIAS)?
14310            } else {
14311                None
14312            };
14322
14323            Ok(TableFactor::UNNEST {
14324                alias,
14325                array_exprs,
14326                with_offset,
14327                with_offset_alias,
14328                with_ordinality,
14329            })
14330        } else if self.parse_keyword_with_tokens(Keyword::JSON_TABLE, &[Token::LParen]) {
14331            let json_expr = self.parse_expr()?;
14332            self.expect_token(&Token::Comma)?;
14333            let json_path = self.parse_value()?.value;
14334            self.expect_keyword_is(Keyword::COLUMNS)?;
14335            self.expect_token(&Token::LParen)?;
14336            let columns = self.parse_comma_separated(Parser::parse_json_table_column_def)?;
14337            self.expect_token(&Token::RParen)?;
14338            self.expect_token(&Token::RParen)?;
14339            let alias = self.maybe_parse_table_alias()?;
14340            Ok(TableFactor::JsonTable {
14341                json_expr,
14342                json_path,
14343                columns,
14344                alias,
14345            })
14346        } else if self.parse_keyword_with_tokens(Keyword::OPENJSON, &[Token::LParen]) {
14347            self.prev_token();
14348            self.parse_open_json_table_factor()
14349        } else if self.parse_keyword_with_tokens(Keyword::XMLTABLE, &[Token::LParen]) {
14350            self.prev_token();
14351            self.parse_xml_table_factor()
14352        } else if self.dialect.supports_semantic_view_table_factor()
14353            && self.peek_keyword_with_tokens(Keyword::SEMANTIC_VIEW, &[Token::LParen])
14354        {
14355            self.parse_semantic_view_table_factor()
14356        } else {
14357            let name = self.parse_object_name(true)?;
14358
14359            let json_path = match self.peek_token().token {
14360                Token::LBracket if self.dialect.supports_partiql() => Some(self.parse_json_path()?),
14361                _ => None,
14362            };
14363
14364            let partitions: Vec<Ident> = if dialect_of!(self is MySqlDialect | GenericDialect)
14365                && self.parse_keyword(Keyword::PARTITION)
14366            {
14367                self.parse_parenthesized_identifiers()?
14368            } else {
14369                vec![]
14370            };
14371
14372            // Parse potential version qualifier
14373            let version = self.maybe_parse_table_version()?;
14374
14375            // Postgres, MSSQL, ClickHouse: table-valued functions:
14376            let args = if self.consume_token(&Token::LParen) {
14377                Some(self.parse_table_function_args()?)
14378            } else {
14379                None
14380            };
14381
14382            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
14383
14384            let mut sample = None;
14385            if self.dialect.supports_table_sample_before_alias() {
14386                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
14387                    sample = Some(TableSampleKind::BeforeTableAlias(parsed_sample));
14388                }
14389            }
14390
14391            let alias = self.maybe_parse_table_alias()?;
14392
14393            // MySQL-specific table hints:
14394            let index_hints = if self.dialect.supports_table_hints() {
14395                self.maybe_parse(|p| p.parse_table_index_hints())?
14396                    .unwrap_or(vec![])
14397            } else {
14398                vec![]
14399            };
14400
14401            // MSSQL-specific table hints:
14402            let mut with_hints = vec![];
14403            if self.parse_keyword(Keyword::WITH) {
14404                if self.consume_token(&Token::LParen) {
14405                    with_hints = self.parse_comma_separated(Parser::parse_expr)?;
14406                    self.expect_token(&Token::RParen)?;
14407                } else {
14408                    // rewind, as WITH may belong to the next statement's CTE
14409                    self.prev_token();
14410                }
14411            };
14412
14413            if !self.dialect.supports_table_sample_before_alias() {
14414                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
14415                    sample = Some(TableSampleKind::AfterTableAlias(parsed_sample));
14416                }
14417            }
14418
14419            let mut table = TableFactor::Table {
14420                name,
14421                alias,
14422                args,
14423                with_hints,
14424                version,
14425                partitions,
14426                with_ordinality,
14427                json_path,
14428                sample,
14429                index_hints,
14430            };
14431
14432            while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT]) {
14433                table = match kw {
14434                    Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
14435                    Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
14436                    _ => unreachable!(),
14437                }
14438            }
14439
14440            if self.dialect.supports_match_recognize()
14441                && self.parse_keyword(Keyword::MATCH_RECOGNIZE)
14442            {
14443                table = self.parse_match_recognize(table)?;
14444            }
14445
14446            Ok(table)
14447        }
14448    }
14449
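    /// Parses a trailing `TABLESAMPLE` / `SAMPLE` clause if one is present,
    /// returning `None` otherwise. An illustrative example (table name and
    /// numbers are hypothetical):
    ///
    /// ```sql
    /// SELECT * FROM orders TABLESAMPLE BERNOULLI (10 PERCENT) REPEATABLE (42)
    /// ```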
14450    fn maybe_parse_table_sample(&mut self) -> Result<Option<Box<TableSample>>, ParserError> {
14451        let modifier = if self.parse_keyword(Keyword::TABLESAMPLE) {
14452            TableSampleModifier::TableSample
14453        } else if self.parse_keyword(Keyword::SAMPLE) {
14454            TableSampleModifier::Sample
14455        } else {
14456            return Ok(None);
14457        };
14458        self.parse_table_sample(modifier).map(Some)
14459    }
14460
14461    fn parse_table_sample(
14462        &mut self,
14463        modifier: TableSampleModifier,
14464    ) -> Result<Box<TableSample>, ParserError> {
14465        let name = match self.parse_one_of_keywords(&[
14466            Keyword::BERNOULLI,
14467            Keyword::ROW,
14468            Keyword::SYSTEM,
14469            Keyword::BLOCK,
14470        ]) {
14471            Some(Keyword::BERNOULLI) => Some(TableSampleMethod::Bernoulli),
14472            Some(Keyword::ROW) => Some(TableSampleMethod::Row),
14473            Some(Keyword::SYSTEM) => Some(TableSampleMethod::System),
14474            Some(Keyword::BLOCK) => Some(TableSampleMethod::Block),
14475            _ => None,
14476        };
14477
14478        let parenthesized = self.consume_token(&Token::LParen);
14479
14480        let (quantity, bucket) = if parenthesized && self.parse_keyword(Keyword::BUCKET) {
14481            let selected_bucket = self.parse_number_value()?.value;
14482            self.expect_keywords(&[Keyword::OUT, Keyword::OF])?;
14483            let total = self.parse_number_value()?.value;
14484            let on = if self.parse_keyword(Keyword::ON) {
14485                Some(self.parse_expr()?)
14486            } else {
14487                None
14488            };
14489            (
14490                None,
14491                Some(TableSampleBucket {
14492                    bucket: selected_bucket,
14493                    total,
14494                    on,
14495                }),
14496            )
14497        } else {
14498            let value = match self.maybe_parse(|p| p.parse_expr())? {
14499                Some(num) => num,
14500                None => {
14501                    let next_token = self.next_token();
14502                    if let Token::Word(w) = next_token.token {
14503                        Expr::Value(Value::Placeholder(w.value).with_span(next_token.span))
14504                    } else {
14505                        return parser_err!(
14506                            "Expecting number or byte length e.g. 100M",
14507                            self.peek_token().span.start
14508                        );
14509                    }
14510                }
14511            };
14512            let unit = if self.parse_keyword(Keyword::ROWS) {
14513                Some(TableSampleUnit::Rows)
14514            } else if self.parse_keyword(Keyword::PERCENT) {
14515                Some(TableSampleUnit::Percent)
14516            } else {
14517                None
14518            };
14519            (
14520                Some(TableSampleQuantity {
14521                    parenthesized,
14522                    value,
14523                    unit,
14524                }),
14525                None,
14526            )
14527        };
14528        if parenthesized {
14529            self.expect_token(&Token::RParen)?;
14530        }
14531
14532        let seed = if self.parse_keyword(Keyword::REPEATABLE) {
14533            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Repeatable)?)
14534        } else if self.parse_keyword(Keyword::SEED) {
14535            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Seed)?)
14536        } else {
14537            None
14538        };
14539
14540        let offset = if self.parse_keyword(Keyword::OFFSET) {
14541            Some(self.parse_expr()?)
14542        } else {
14543            None
14544        };
14545
14546        Ok(Box::new(TableSample {
14547            modifier,
14548            name,
14549            quantity,
14550            seed,
14551            bucket,
14552            offset,
14553        }))
14554    }
14555
14556    fn parse_table_sample_seed(
14557        &mut self,
14558        modifier: TableSampleSeedModifier,
14559    ) -> Result<TableSampleSeed, ParserError> {
14560        self.expect_token(&Token::LParen)?;
14561        let value = self.parse_number_value()?.value;
14562        self.expect_token(&Token::RParen)?;
14563        Ok(TableSampleSeed { modifier, value })
14564    }
14565
14566    /// Parses `OPENJSON( jsonExpression [ , path ] )  [ <with_clause> ]` clause,
14567    /// assuming the `OPENJSON` keyword was already consumed.
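    ///
    /// An illustrative MSSQL-style example (identifiers and paths are
    /// hypothetical):
    ///
    /// ```sql
    /// SELECT * FROM OPENJSON(json_col, '$.items')
    ///     WITH (id INT '$.id', name NVARCHAR(50) '$.name') AS j
    /// ```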
14568    fn parse_open_json_table_factor(&mut self) -> Result<TableFactor, ParserError> {
14569        self.expect_token(&Token::LParen)?;
14570        let json_expr = self.parse_expr()?;
14571        let json_path = if self.consume_token(&Token::Comma) {
14572            Some(self.parse_value()?.value)
14573        } else {
14574            None
14575        };
14576        self.expect_token(&Token::RParen)?;
14577        let columns = if self.parse_keyword(Keyword::WITH) {
14578            self.expect_token(&Token::LParen)?;
14579            let columns = self.parse_comma_separated(Parser::parse_openjson_table_column_def)?;
14580            self.expect_token(&Token::RParen)?;
14581            columns
14582        } else {
14583            Vec::new()
14584        };
14585        let alias = self.maybe_parse_table_alias()?;
14586        Ok(TableFactor::OpenJsonTable {
14587            json_expr,
14588            json_path,
14589            columns,
14590            alias,
14591        })
14592    }
14593
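    /// Parses an `XMLTABLE( ... )` table factor, assuming the `XMLTABLE`
    /// keyword was already consumed. An illustrative example (identifiers are
    /// hypothetical):
    ///
    /// ```sql
    /// SELECT xt.* FROM xmldata,
    ///     XMLTABLE('//row' PASSING data
    ///              COLUMNS id INT PATH '@id', name TEXT PATH 'name') AS xt
    /// ```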
14594    fn parse_xml_table_factor(&mut self) -> Result<TableFactor, ParserError> {
14595        self.expect_token(&Token::LParen)?;
14596        let namespaces = if self.parse_keyword(Keyword::XMLNAMESPACES) {
14597            self.expect_token(&Token::LParen)?;
14598            let namespaces = self.parse_comma_separated(Parser::parse_xml_namespace_definition)?;
14599            self.expect_token(&Token::RParen)?;
14600            self.expect_token(&Token::Comma)?;
14601            namespaces
14602        } else {
14603            vec![]
14604        };
14605        let row_expression = self.parse_expr()?;
14606        let passing = self.parse_xml_passing_clause()?;
14607        self.expect_keyword_is(Keyword::COLUMNS)?;
14608        let columns = self.parse_comma_separated(Parser::parse_xml_table_column)?;
14609        self.expect_token(&Token::RParen)?;
14610        let alias = self.maybe_parse_table_alias()?;
14611        Ok(TableFactor::XmlTable {
14612            namespaces,
14613            row_expression,
14614            passing,
14615            columns,
14616            alias,
14617        })
14618    }
14619
14620    fn parse_xml_namespace_definition(&mut self) -> Result<XmlNamespaceDefinition, ParserError> {
14621        let uri = self.parse_expr()?;
14622        self.expect_keyword_is(Keyword::AS)?;
14623        let name = self.parse_identifier()?;
14624        Ok(XmlNamespaceDefinition { uri, name })
14625    }
14626
14627    fn parse_xml_table_column(&mut self) -> Result<XmlTableColumn, ParserError> {
14628        let name = self.parse_identifier()?;
14629
14630        let option = if self.parse_keyword(Keyword::FOR) {
14631            self.expect_keyword(Keyword::ORDINALITY)?;
14632            XmlTableColumnOption::ForOrdinality
14633        } else {
14634            let r#type = self.parse_data_type()?;
14635            let mut path = None;
14636            let mut default = None;
14637
14638            if self.parse_keyword(Keyword::PATH) {
14639                path = Some(self.parse_expr()?);
14640            }
14641
14642            if self.parse_keyword(Keyword::DEFAULT) {
14643                default = Some(self.parse_expr()?);
14644            }
14645
14646            let not_null = self.parse_keywords(&[Keyword::NOT, Keyword::NULL]);
14647            if !not_null {
14648                // NULL is the default but can be specified explicitly
14649                let _ = self.parse_keyword(Keyword::NULL);
14650            }
14651
14652            XmlTableColumnOption::NamedInfo {
14653                r#type,
14654                path,
14655                default,
14656                nullable: !not_null,
14657            }
14658        };
14659        Ok(XmlTableColumn { name, option })
14660    }
14661
14662    fn parse_xml_passing_clause(&mut self) -> Result<XmlPassingClause, ParserError> {
14663        let mut arguments = vec![];
14664        if self.parse_keyword(Keyword::PASSING) {
14665            loop {
14666                let by_value =
14667                    self.parse_keyword(Keyword::BY) && self.expect_keyword(Keyword::VALUE).is_ok();
14668                let expr = self.parse_expr()?;
14669                let alias = if self.parse_keyword(Keyword::AS) {
14670                    Some(self.parse_identifier()?)
14671                } else {
14672                    None
14673                };
14674                arguments.push(XmlPassingArgument {
14675                    expr,
14676                    alias,
14677                    by_value,
14678                });
14679                if !self.consume_token(&Token::Comma) {
14680                    break;
14681                }
14682            }
14683        }
14684        Ok(XmlPassingClause { arguments })
14685    }
14686
14687    /// Parse a [TableFactor::SemanticView]
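    ///
    /// An illustrative Snowflake-style example (view, dimension and metric
    /// names are hypothetical):
    ///
    /// ```sql
    /// SELECT * FROM SEMANTIC_VIEW(
    ///     tpch_sv
    ///     DIMENSIONS customer.region
    ///     METRICS orders.total_revenue
    /// ) AS sv
    /// ```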
14688    fn parse_semantic_view_table_factor(&mut self) -> Result<TableFactor, ParserError> {
14689        self.expect_keyword(Keyword::SEMANTIC_VIEW)?;
14690        self.expect_token(&Token::LParen)?;
14691
14692        let name = self.parse_object_name(true)?;
14693
14694        // Parse DIMENSIONS, METRICS, FACTS and WHERE clauses in flexible order
14695        let mut dimensions = Vec::new();
14696        let mut metrics = Vec::new();
14697        let mut facts = Vec::new();
14698        let mut where_clause = None;
14699
14700        while self.peek_token().token != Token::RParen {
14701            if self.parse_keyword(Keyword::DIMENSIONS) {
14702                if !dimensions.is_empty() {
14703                    return Err(ParserError::ParserError(
14704                        "DIMENSIONS clause can only be specified once".to_string(),
14705                    ));
14706                }
14707                dimensions = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
14708            } else if self.parse_keyword(Keyword::METRICS) {
14709                if !metrics.is_empty() {
14710                    return Err(ParserError::ParserError(
14711                        "METRICS clause can only be specified once".to_string(),
14712                    ));
14713                }
14714                metrics = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
14715            } else if self.parse_keyword(Keyword::FACTS) {
14716                if !facts.is_empty() {
14717                    return Err(ParserError::ParserError(
14718                        "FACTS clause can only be specified once".to_string(),
14719                    ));
14720                }
14721                facts = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
14722            } else if self.parse_keyword(Keyword::WHERE) {
14723                if where_clause.is_some() {
14724                    return Err(ParserError::ParserError(
14725                        "WHERE clause can only be specified once".to_string(),
14726                    ));
14727                }
14728                where_clause = Some(self.parse_expr()?);
14729            } else {
14730                return parser_err!(
14731                    format!(
14732                        "Expected one of DIMENSIONS, METRICS, FACTS or WHERE, got {}",
14733                        self.peek_token().token
14734                    ),
14735                    self.peek_token().span.start
14736                )?;
14737            }
14738        }
14739
14740        self.expect_token(&Token::RParen)?;
14741
14742        let alias = self.maybe_parse_table_alias()?;
14743
14744        Ok(TableFactor::SemanticView {
14745            name,
14746            dimensions,
14747            metrics,
14748            facts,
14749            where_clause,
14750            alias,
14751        })
14752    }
14753
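    /// Parses the body of a `MATCH_RECOGNIZE( ... )` clause applied to `table`,
    /// assuming the `MATCH_RECOGNIZE` keyword was already consumed. An
    /// illustrative example (identifiers are hypothetical):
    ///
    /// ```sql
    /// SELECT * FROM ticks MATCH_RECOGNIZE(
    ///     PARTITION BY symbol
    ///     ORDER BY ts
    ///     MEASURES MAX(price) AS peak_price
    ///     ONE ROW PER MATCH
    ///     PATTERN (A B+)
    ///     DEFINE B AS price > PREV(price)
    /// ) AS mr
    /// ```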
14754    fn parse_match_recognize(&mut self, table: TableFactor) -> Result<TableFactor, ParserError> {
14755        self.expect_token(&Token::LParen)?;
14756
14757        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
14758            self.parse_comma_separated(Parser::parse_expr)?
14759        } else {
14760            vec![]
14761        };
14762
14763        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
14764            self.parse_comma_separated(Parser::parse_order_by_expr)?
14765        } else {
14766            vec![]
14767        };
14768
14769        let measures = if self.parse_keyword(Keyword::MEASURES) {
14770            self.parse_comma_separated(|p| {
14771                let expr = p.parse_expr()?;
14772                let _ = p.parse_keyword(Keyword::AS);
14773                let alias = p.parse_identifier()?;
14774                Ok(Measure { expr, alias })
14775            })?
14776        } else {
14777            vec![]
14778        };
14779
14780        let rows_per_match =
14781            if self.parse_keywords(&[Keyword::ONE, Keyword::ROW, Keyword::PER, Keyword::MATCH]) {
14782                Some(RowsPerMatch::OneRow)
14783            } else if self.parse_keywords(&[
14784                Keyword::ALL,
14785                Keyword::ROWS,
14786                Keyword::PER,
14787                Keyword::MATCH,
14788            ]) {
14789                Some(RowsPerMatch::AllRows(
14790                    if self.parse_keywords(&[Keyword::SHOW, Keyword::EMPTY, Keyword::MATCHES]) {
14791                        Some(EmptyMatchesMode::Show)
14792                    } else if self.parse_keywords(&[
14793                        Keyword::OMIT,
14794                        Keyword::EMPTY,
14795                        Keyword::MATCHES,
14796                    ]) {
14797                        Some(EmptyMatchesMode::Omit)
14798                    } else if self.parse_keywords(&[
14799                        Keyword::WITH,
14800                        Keyword::UNMATCHED,
14801                        Keyword::ROWS,
14802                    ]) {
14803                        Some(EmptyMatchesMode::WithUnmatched)
14804                    } else {
14805                        None
14806                    },
14807                ))
14808            } else {
14809                None
14810            };
14811
14812        let after_match_skip =
14813            if self.parse_keywords(&[Keyword::AFTER, Keyword::MATCH, Keyword::SKIP]) {
14814                if self.parse_keywords(&[Keyword::PAST, Keyword::LAST, Keyword::ROW]) {
14815                    Some(AfterMatchSkip::PastLastRow)
14816                } else if self.parse_keywords(&[Keyword::TO, Keyword::NEXT, Keyword::ROW]) {
14817                    Some(AfterMatchSkip::ToNextRow)
14818                } else if self.parse_keywords(&[Keyword::TO, Keyword::FIRST]) {
14819                    Some(AfterMatchSkip::ToFirst(self.parse_identifier()?))
14820                } else if self.parse_keywords(&[Keyword::TO, Keyword::LAST]) {
14821                    Some(AfterMatchSkip::ToLast(self.parse_identifier()?))
14822                } else {
14823                    let found = self.next_token();
14824                    return self.expected("after match skip option", found);
14825                }
14826            } else {
14827                None
14828            };
14829
14830        self.expect_keyword_is(Keyword::PATTERN)?;
14831        let pattern = self.parse_parenthesized(Self::parse_pattern)?;
14832
14833        self.expect_keyword_is(Keyword::DEFINE)?;
14834
14835        let symbols = self.parse_comma_separated(|p| {
14836            let symbol = p.parse_identifier()?;
14837            p.expect_keyword_is(Keyword::AS)?;
14838            let definition = p.parse_expr()?;
14839            Ok(SymbolDefinition { symbol, definition })
14840        })?;
14841
14842        self.expect_token(&Token::RParen)?;
14843
14844        let alias = self.maybe_parse_table_alias()?;
14845
14846        Ok(TableFactor::MatchRecognize {
14847            table: Box::new(table),
14848            partition_by,
14849            order_by,
14850            measures,
14851            rows_per_match,
14852            after_match_skip,
14853            pattern,
14854            symbols,
14855            alias,
14856        })
14857    }
14858
14859    fn parse_base_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
14860        match self.next_token().token {
14861            Token::Caret => Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::Start)),
14862            Token::Placeholder(s) if s == "$" => {
14863                Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::End))
14864            }
14865            Token::LBrace => {
14866                self.expect_token(&Token::Minus)?;
14867                let symbol = self.parse_identifier().map(MatchRecognizeSymbol::Named)?;
14868                self.expect_token(&Token::Minus)?;
14869                self.expect_token(&Token::RBrace)?;
14870                Ok(MatchRecognizePattern::Exclude(symbol))
14871            }
14872            Token::Word(Word {
14873                value,
14874                quote_style: None,
14875                ..
14876            }) if value == "PERMUTE" => {
14877                self.expect_token(&Token::LParen)?;
14878                let symbols = self.parse_comma_separated(|p| {
14879                    p.parse_identifier().map(MatchRecognizeSymbol::Named)
14880                })?;
14881                self.expect_token(&Token::RParen)?;
14882                Ok(MatchRecognizePattern::Permute(symbols))
14883            }
14884            Token::LParen => {
14885                let pattern = self.parse_pattern()?;
14886                self.expect_token(&Token::RParen)?;
14887                Ok(MatchRecognizePattern::Group(Box::new(pattern)))
14888            }
14889            _ => {
14890                self.prev_token();
14891                self.parse_identifier()
14892                    .map(MatchRecognizeSymbol::Named)
14893                    .map(MatchRecognizePattern::Symbol)
14894            }
14895        }
14896    }
14897
14898    fn parse_repetition_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
14899        let mut pattern = self.parse_base_pattern()?;
14900        loop {
14901            let token = self.next_token();
14902            let quantifier = match token.token {
14903                Token::Mul => RepetitionQuantifier::ZeroOrMore,
14904                Token::Plus => RepetitionQuantifier::OneOrMore,
14905                Token::Placeholder(s) if s == "?" => RepetitionQuantifier::AtMostOne,
14906                Token::LBrace => {
14907                    // quantifier is a range like {n} or {n,} or {,m} or {n,m}
14908                    let token = self.next_token();
14909                    match token.token {
14910                        Token::Comma => {
14911                            let next_token = self.next_token();
14912                            let Token::Number(n, _) = next_token.token else {
14913                                return self.expected("literal number", next_token);
14914                            };
14915                            self.expect_token(&Token::RBrace)?;
14916                            RepetitionQuantifier::AtMost(Self::parse(n, token.span.start)?)
14917                        }
14918                        Token::Number(n, _) if self.consume_token(&Token::Comma) => {
14919                            let next_token = self.next_token();
14920                            match next_token.token {
14921                                Token::Number(m, _) => {
14922                                    self.expect_token(&Token::RBrace)?;
14923                                    RepetitionQuantifier::Range(
14924                                        Self::parse(n, token.span.start)?,
14925                                        Self::parse(m, token.span.start)?,
14926                                    )
14927                                }
14928                                Token::RBrace => {
14929                                    RepetitionQuantifier::AtLeast(Self::parse(n, token.span.start)?)
14930                                }
14931                                _ => {
14932                                    return self.expected("} or upper bound", next_token);
14933                                }
14934                            }
14935                        }
14936                        Token::Number(n, _) => {
14937                            self.expect_token(&Token::RBrace)?;
14938                            RepetitionQuantifier::Exactly(Self::parse(n, token.span.start)?)
14939                        }
14940                        _ => return self.expected("quantifier range", token),
14941                    }
14942                }
14943                _ => {
14944                    self.prev_token();
14945                    break;
14946                }
14947            };
14948            pattern = MatchRecognizePattern::Repetition(Box::new(pattern), quantifier);
14949        }
14950        Ok(pattern)
14951    }
14952
14953    fn parse_concat_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
14954        let mut patterns = vec![self.parse_repetition_pattern()?];
14955        while !matches!(self.peek_token().token, Token::RParen | Token::Pipe) {
14956            patterns.push(self.parse_repetition_pattern()?);
14957        }
14958        match <[MatchRecognizePattern; 1]>::try_from(patterns) {
14959            Ok([pattern]) => Ok(pattern),
14960            Err(patterns) => Ok(MatchRecognizePattern::Concat(patterns)),
14961        }
14962    }
14963
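    /// Parses a complete `MATCH_RECOGNIZE` row pattern, including `|`
    /// alternation. An illustrative pattern (symbol names are hypothetical):
    ///
    /// ```text
    /// A (B | C)+ {- D -} E{2,4}
    /// ```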
14964    fn parse_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
14965        let pattern = self.parse_concat_pattern()?;
14966        if self.consume_token(&Token::Pipe) {
14967            match self.parse_pattern()? {
14968                // flatten nested alternations
14969                MatchRecognizePattern::Alternation(mut patterns) => {
14970                    patterns.insert(0, pattern);
14971                    Ok(MatchRecognizePattern::Alternation(patterns))
14972                }
14973                next => Ok(MatchRecognizePattern::Alternation(vec![pattern, next])),
14974            }
14975        } else {
14976            Ok(pattern)
14977        }
14978    }
14979
14980    /// Parses the timestamp version specifier (i.e. to query historical data)
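    ///
    /// An illustrative example (table name and timestamp are hypothetical):
    ///
    /// ```sql
    /// SELECT * FROM orders FOR SYSTEM_TIME AS OF '2024-01-01'
    /// ```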
14981    pub fn maybe_parse_table_version(&mut self) -> Result<Option<TableVersion>, ParserError> {
14982        if self.dialect.supports_timestamp_versioning() {
14983            if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
14984            {
14985                let expr = self.parse_expr()?;
14986                return Ok(Some(TableVersion::ForSystemTimeAsOf(expr)));
14987            } else if self.peek_keyword(Keyword::AT) || self.peek_keyword(Keyword::BEFORE) {
14988                let func_name = self.parse_object_name(true)?;
14989                let func = self.parse_function(func_name)?;
14990                return Ok(Some(TableVersion::Function(func)));
14991            }
14992        }
14993        Ok(None)
14994    }
14995
14996    /// Parses MySQL's JSON_TABLE column definition.
14997    /// For example: `id INT EXISTS PATH '$' DEFAULT '0' ON EMPTY ERROR ON ERROR`
14998    pub fn parse_json_table_column_def(&mut self) -> Result<JsonTableColumn, ParserError> {
14999        if self.parse_keyword(Keyword::NESTED) {
15000            let _has_path_keyword = self.parse_keyword(Keyword::PATH);
15001            let path = self.parse_value()?.value;
15002            self.expect_keyword_is(Keyword::COLUMNS)?;
15003            let columns = self.parse_parenthesized(|p| {
15004                p.parse_comma_separated(Self::parse_json_table_column_def)
15005            })?;
15006            return Ok(JsonTableColumn::Nested(JsonTableNestedColumn {
15007                path,
15008                columns,
15009            }));
15010        }
15011        let name = self.parse_identifier()?;
15012        if self.parse_keyword(Keyword::FOR) {
15013            self.expect_keyword_is(Keyword::ORDINALITY)?;
15014            return Ok(JsonTableColumn::ForOrdinality(name));
15015        }
15016        let r#type = self.parse_data_type()?;
15017        let exists = self.parse_keyword(Keyword::EXISTS);
15018        self.expect_keyword_is(Keyword::PATH)?;
15019        let path = self.parse_value()?.value;
15020        let mut on_empty = None;
15021        let mut on_error = None;
15022        while let Some(error_handling) = self.parse_json_table_column_error_handling()? {
15023            if self.parse_keyword(Keyword::EMPTY) {
15024                on_empty = Some(error_handling);
15025            } else {
15026                self.expect_keyword_is(Keyword::ERROR)?;
15027                on_error = Some(error_handling);
15028            }
15029        }
15030        Ok(JsonTableColumn::Named(JsonTableNamedColumn {
15031            name,
15032            r#type,
15033            path,
15034            exists,
15035            on_empty,
15036            on_error,
15037        }))
15038    }
15039
15040    /// Parses MSSQL's `OPENJSON WITH` column definition.
15041    ///
15042    /// ```sql
15043    /// colName type [ column_path ] [ AS JSON ]
15044    /// ```
15045    ///
15046    /// Reference: <https://learn.microsoft.com/en-us/sql/t-sql/functions/openjson-transact-sql?view=sql-server-ver16#syntax>
15047    pub fn parse_openjson_table_column_def(&mut self) -> Result<OpenJsonTableColumn, ParserError> {
15048        let name = self.parse_identifier()?;
15049        let r#type = self.parse_data_type()?;
15050        let path = if let Token::SingleQuotedString(path) = self.peek_token().token {
15051            self.next_token();
15052            Some(path)
15053        } else {
15054            None
15055        };
15056        let as_json = self.parse_keyword(Keyword::AS);
15057        if as_json {
15058            self.expect_keyword_is(Keyword::JSON)?;
15059        }
15060        Ok(OpenJsonTableColumn {
15061            name,
15062            r#type,
15063            path,
15064            as_json,
15065        })
15066    }
15067
15068    fn parse_json_table_column_error_handling(
15069        &mut self,
15070    ) -> Result<Option<JsonTableColumnErrorHandling>, ParserError> {
15071        let res = if self.parse_keyword(Keyword::NULL) {
15072            JsonTableColumnErrorHandling::Null
15073        } else if self.parse_keyword(Keyword::ERROR) {
15074            JsonTableColumnErrorHandling::Error
15075        } else if self.parse_keyword(Keyword::DEFAULT) {
15076            JsonTableColumnErrorHandling::Default(self.parse_value()?.value)
15077        } else {
15078            return Ok(None);
15079        };
15080        self.expect_keyword_is(Keyword::ON)?;
15081        Ok(Some(res))
15082    }
15083
15084    pub fn parse_derived_table_factor(
15085        &mut self,
15086        lateral: IsLateral,
15087    ) -> Result<TableFactor, ParserError> {
15088        let subquery = self.parse_query()?;
15089        self.expect_token(&Token::RParen)?;
15090        let alias = self.maybe_parse_table_alias()?;
15091        Ok(TableFactor::Derived {
15092            lateral: match lateral {
15093                Lateral => true,
15094                NotLateral => false,
15095            },
15096            subquery,
15097            alias,
15098        })
15099    }
15100
15101    fn parse_aliased_function_call(&mut self) -> Result<ExprWithAlias, ParserError> {
15102        let function_name = match self.next_token().token {
15103            Token::Word(w) => Ok(w.value),
15104            _ => self.expected("a function identifier", self.peek_token()),
15105        }?;
15106        let expr = self.parse_function(ObjectName::from(vec![Ident::new(function_name)]))?;
15107        let alias = if self.parse_keyword(Keyword::AS) {
15108            Some(self.parse_identifier()?)
15109        } else {
15110            None
15111        };
15112
15113        Ok(ExprWithAlias { expr, alias })
15114    }

15115    /// Parses an expression with an optional alias
15116    ///
15117    /// Examples:
15118    ///
15119    /// ```sql
15120    /// SUM(price) AS total_price
15121    /// ```
15122    /// ```sql
15123    /// SUM(price)
15124    /// ```
15125    ///
15126    /// Example
15127    /// ```
15128    /// # use sqlparser::parser::{Parser, ParserError};
15129    /// # use sqlparser::dialect::GenericDialect;
15130    /// # fn main() ->Result<(), ParserError> {
15131    /// let sql = r#"SUM("a") as "b""#;
15132    /// let mut parser = Parser::new(&GenericDialect).try_with_sql(sql)?;
15133    /// let expr_with_alias = parser.parse_expr_with_alias()?;
15134    /// assert_eq!(Some("b".to_string()), expr_with_alias.alias.map(|x|x.value));
15135    /// # Ok(())
15136    /// # }
    /// ```
15137    pub fn parse_expr_with_alias(&mut self) -> Result<ExprWithAlias, ParserError> {
15138        let expr = self.parse_expr()?;
15139        let alias = if self.parse_keyword(Keyword::AS) {
15140            Some(self.parse_identifier()?)
15141        } else {
15142            None
15143        };
15144
15145        Ok(ExprWithAlias { expr, alias })
15146    }
15147
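    /// Parses a `PIVOT( ... )` clause applied to `table`, assuming the `PIVOT`
    /// keyword was already consumed. An illustrative example (identifiers are
    /// hypothetical):
    ///
    /// ```sql
    /// SELECT * FROM sales PIVOT(SUM(amount) FOR month IN ('JAN', 'FEB')) AS p
    /// ```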
15148    pub fn parse_pivot_table_factor(
15149        &mut self,
15150        table: TableFactor,
15151    ) -> Result<TableFactor, ParserError> {
15152        self.expect_token(&Token::LParen)?;
15153        let aggregate_functions = self.parse_comma_separated(Self::parse_aliased_function_call)?;
15154        self.expect_keyword_is(Keyword::FOR)?;
15155        let value_column = if self.peek_token_ref().token == Token::LParen {
15156            self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
15157                p.parse_subexpr(self.dialect.prec_value(Precedence::Between))
15158            })?
15159        } else {
15160            vec![self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?]
15161        };
15162        self.expect_keyword_is(Keyword::IN)?;
15163
15164        self.expect_token(&Token::LParen)?;
15165        let value_source = if self.parse_keyword(Keyword::ANY) {
15166            let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
15167                self.parse_comma_separated(Parser::parse_order_by_expr)?
15168            } else {
15169                vec![]
15170            };
15171            PivotValueSource::Any(order_by)
15172        } else if self.peek_sub_query() {
15173            PivotValueSource::Subquery(self.parse_query()?)
15174        } else {
15175            PivotValueSource::List(self.parse_comma_separated(Self::parse_expr_with_alias)?)
15176        };
15177        self.expect_token(&Token::RParen)?;
15178
15179        let default_on_null =
15180            if self.parse_keywords(&[Keyword::DEFAULT, Keyword::ON, Keyword::NULL]) {
15181                self.expect_token(&Token::LParen)?;
15182                let expr = self.parse_expr()?;
15183                self.expect_token(&Token::RParen)?;
15184                Some(expr)
15185            } else {
15186                None
15187            };
15188
15189        self.expect_token(&Token::RParen)?;
15190        let alias = self.maybe_parse_table_alias()?;
15191        Ok(TableFactor::Pivot {
15192            table: Box::new(table),
15193            aggregate_functions,
15194            value_column,
15195            value_source,
15196            default_on_null,
15197            alias,
15198        })
15199    }
15200
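    /// Parses an `UNPIVOT( ... )` clause applied to `table`, assuming the
    /// `UNPIVOT` keyword was already consumed. An illustrative example
    /// (identifiers are hypothetical):
    ///
    /// ```sql
    /// SELECT * FROM sales UNPIVOT(amount FOR month IN (jan, feb)) AS u
    /// ```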
15201    pub fn parse_unpivot_table_factor(
15202        &mut self,
15203        table: TableFactor,
15204    ) -> Result<TableFactor, ParserError> {
15205        let null_inclusion = if self.parse_keyword(Keyword::INCLUDE) {
15206            self.expect_keyword_is(Keyword::NULLS)?;
15207            Some(NullInclusion::IncludeNulls)
15208        } else if self.parse_keyword(Keyword::EXCLUDE) {
15209            self.expect_keyword_is(Keyword::NULLS)?;
15210            Some(NullInclusion::ExcludeNulls)
15211        } else {
15212            None
15213        };
15214        self.expect_token(&Token::LParen)?;
15215        let value = self.parse_expr()?;
15216        self.expect_keyword_is(Keyword::FOR)?;
15217        let name = self.parse_identifier()?;
15218        self.expect_keyword_is(Keyword::IN)?;
15219        let columns = self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
15220            p.parse_expr_with_alias()
15221        })?;
15222        self.expect_token(&Token::RParen)?;
15223        let alias = self.maybe_parse_table_alias()?;
15224        Ok(TableFactor::Unpivot {
15225            table: Box::new(table),
15226            value,
15227            null_inclusion,
15228            name,
15229            columns,
15230            alias,
15231        })
15232    }
15233
15234    pub fn parse_join_constraint(&mut self, natural: bool) -> Result<JoinConstraint, ParserError> {
15235        if natural {
15236            Ok(JoinConstraint::Natural)
15237        } else if self.parse_keyword(Keyword::ON) {
15238            let constraint = self.parse_expr()?;
15239            Ok(JoinConstraint::On(constraint))
15240        } else if self.parse_keyword(Keyword::USING) {
15241            let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
15242            Ok(JoinConstraint::Using(columns))
15243        } else {
15244            Ok(JoinConstraint::None)
15245            //self.expected("ON, or USING after JOIN", self.peek_token())
15246        }
15247    }
15248
15249    /// Parse a GRANT statement.
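    ///
    /// An illustrative example (object and role names are hypothetical):
    ///
    /// ```sql
    /// GRANT SELECT, INSERT ON customers TO ROLE analyst WITH GRANT OPTION
    /// ```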
15250    pub fn parse_grant(&mut self) -> Result<Statement, ParserError> {
15251        let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
15252
15253        self.expect_keyword_is(Keyword::TO)?;
15254        let grantees = self.parse_grantees()?;
15255
15256        let with_grant_option =
15257            self.parse_keywords(&[Keyword::WITH, Keyword::GRANT, Keyword::OPTION]);
15258
15259        let current_grants =
15260            if self.parse_keywords(&[Keyword::COPY, Keyword::CURRENT, Keyword::GRANTS]) {
15261                Some(CurrentGrantsKind::CopyCurrentGrants)
15262            } else if self.parse_keywords(&[Keyword::REVOKE, Keyword::CURRENT, Keyword::GRANTS]) {
15263                Some(CurrentGrantsKind::RevokeCurrentGrants)
15264            } else {
15265                None
15266            };
15267
15268        let as_grantor = if self.parse_keywords(&[Keyword::AS]) {
15269            Some(self.parse_identifier()?)
15270        } else {
15271            None
15272        };
15273
15274        let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
15275            Some(self.parse_identifier()?)
15276        } else {
15277            None
15278        };
15279
15280        Ok(Statement::Grant {
15281            privileges,
15282            objects,
15283            grantees,
15284            with_grant_option,
15285            as_grantor,
15286            granted_by,
15287            current_grants,
15288        })
15289    }
15290
15291    fn parse_grantees(&mut self) -> Result<Vec<Grantee>, ParserError> {
15292        let mut values = vec![];
15293        let mut grantee_type = GranteesType::None;
15294        loop {
15295            let new_grantee_type = if self.parse_keyword(Keyword::ROLE) {
15296                GranteesType::Role
15297            } else if self.parse_keyword(Keyword::USER) {
15298                GranteesType::User
15299            } else if self.parse_keyword(Keyword::SHARE) {
15300                GranteesType::Share
15301            } else if self.parse_keyword(Keyword::GROUP) {
15302                GranteesType::Group
15303            } else if self.parse_keyword(Keyword::PUBLIC) {
15304                GranteesType::Public
15305            } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
15306                GranteesType::DatabaseRole
15307            } else if self.parse_keywords(&[Keyword::APPLICATION, Keyword::ROLE]) {
15308                GranteesType::ApplicationRole
15309            } else if self.parse_keyword(Keyword::APPLICATION) {
15310                GranteesType::Application
15311            } else {
15312                grantee_type.clone() // keep from previous iteration, if not specified
15313            };
15314
15315            if self
15316                .dialect
15317                .get_reserved_grantees_types()
15318                .contains(&new_grantee_type)
15319            {
15320                self.prev_token();
15321            } else {
15322                grantee_type = new_grantee_type;
15323            }
15324
15325            let grantee = if grantee_type == GranteesType::Public {
15326                Grantee {
15327                    grantee_type: grantee_type.clone(),
15328                    name: None,
15329                }
15330            } else {
15331                let mut name = self.parse_grantee_name()?;
15332                if self.consume_token(&Token::Colon) {
15333                    // Redshift supports namespace prefix for external users and groups:
15334                    // <Namespace>:<GroupName> or <Namespace>:<UserName>
15335                    // https://docs.aws.amazon.com/redshift/latest/mgmt/redshift-iam-access-control-native-idp.html
15336                    let ident = self.parse_identifier()?;
15337                    if let GranteeName::ObjectName(namespace) = name {
15338                        name = GranteeName::ObjectName(ObjectName::from(vec![Ident::new(
15339                            format!("{namespace}:{ident}"),
15340                        )]));
15341                    };
15342                }
15343                Grantee {
15344                    grantee_type: grantee_type.clone(),
15345                    name: Some(name),
15346                }
15347            };
15348
15349            values.push(grantee);
15350
15351            if !self.consume_token(&Token::Comma) {
15352                break;
15353            }
15354        }
15355
15356        Ok(values)
15357    }
15358
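    /// Parses the privilege list and optional `ON <objects>` clause shared by
    /// `GRANT`, `DENY` and `REVOKE`. An illustrative fragment (schema name is
    /// hypothetical):
    ///
    /// ```sql
    /// ALL PRIVILEGES ON ALL TABLES IN SCHEMA my_schema
    /// ```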
15359    pub fn parse_grant_deny_revoke_privileges_objects(
15360        &mut self,
15361    ) -> Result<(Privileges, Option<GrantObjects>), ParserError> {
15362        let privileges = if self.parse_keyword(Keyword::ALL) {
15363            Privileges::All {
15364                with_privileges_keyword: self.parse_keyword(Keyword::PRIVILEGES),
15365            }
15366        } else {
15367            let actions = self.parse_actions_list()?;
15368            Privileges::Actions(actions)
15369        };
15370
15371        let objects = if self.parse_keyword(Keyword::ON) {
15372            if self.parse_keywords(&[Keyword::ALL, Keyword::TABLES, Keyword::IN, Keyword::SCHEMA]) {
15373                Some(GrantObjects::AllTablesInSchema {
15374                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15375                })
15376            } else if self.parse_keywords(&[
15377                Keyword::ALL,
15378                Keyword::EXTERNAL,
15379                Keyword::TABLES,
15380                Keyword::IN,
15381                Keyword::SCHEMA,
15382            ]) {
15383                Some(GrantObjects::AllExternalTablesInSchema {
15384                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15385                })
15386            } else if self.parse_keywords(&[
15387                Keyword::ALL,
15388                Keyword::VIEWS,
15389                Keyword::IN,
15390                Keyword::SCHEMA,
15391            ]) {
15392                Some(GrantObjects::AllViewsInSchema {
15393                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15394                })
15395            } else if self.parse_keywords(&[
15396                Keyword::ALL,
15397                Keyword::MATERIALIZED,
15398                Keyword::VIEWS,
15399                Keyword::IN,
15400                Keyword::SCHEMA,
15401            ]) {
15402                Some(GrantObjects::AllMaterializedViewsInSchema {
15403                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15404                })
15405            } else if self.parse_keywords(&[
15406                Keyword::ALL,
15407                Keyword::FUNCTIONS,
15408                Keyword::IN,
15409                Keyword::SCHEMA,
15410            ]) {
15411                Some(GrantObjects::AllFunctionsInSchema {
15412                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15413                })
15414            } else if self.parse_keywords(&[
15415                Keyword::FUTURE,
15416                Keyword::SCHEMAS,
15417                Keyword::IN,
15418                Keyword::DATABASE,
15419            ]) {
15420                Some(GrantObjects::FutureSchemasInDatabase {
15421                    databases: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15422                })
15423            } else if self.parse_keywords(&[
15424                Keyword::FUTURE,
15425                Keyword::TABLES,
15426                Keyword::IN,
15427                Keyword::SCHEMA,
15428            ]) {
15429                Some(GrantObjects::FutureTablesInSchema {
15430                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15431                })
15432            } else if self.parse_keywords(&[
15433                Keyword::FUTURE,
15434                Keyword::EXTERNAL,
15435                Keyword::TABLES,
15436                Keyword::IN,
15437                Keyword::SCHEMA,
15438            ]) {
15439                Some(GrantObjects::FutureExternalTablesInSchema {
15440                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15441                })
15442            } else if self.parse_keywords(&[
15443                Keyword::FUTURE,
15444                Keyword::VIEWS,
15445                Keyword::IN,
15446                Keyword::SCHEMA,
15447            ]) {
15448                Some(GrantObjects::FutureViewsInSchema {
15449                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15450                })
15451            } else if self.parse_keywords(&[
15452                Keyword::FUTURE,
15453                Keyword::MATERIALIZED,
15454                Keyword::VIEWS,
15455                Keyword::IN,
15456                Keyword::SCHEMA,
15457            ]) {
15458                Some(GrantObjects::FutureMaterializedViewsInSchema {
15459                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15460                })
15461            } else if self.parse_keywords(&[
15462                Keyword::ALL,
15463                Keyword::SEQUENCES,
15464                Keyword::IN,
15465                Keyword::SCHEMA,
15466            ]) {
15467                Some(GrantObjects::AllSequencesInSchema {
15468                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15469                })
15470            } else if self.parse_keywords(&[
15471                Keyword::FUTURE,
15472                Keyword::SEQUENCES,
15473                Keyword::IN,
15474                Keyword::SCHEMA,
15475            ]) {
15476                Some(GrantObjects::FutureSequencesInSchema {
15477                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15478                })
15479            } else if self.parse_keywords(&[Keyword::RESOURCE, Keyword::MONITOR]) {
15480                Some(GrantObjects::ResourceMonitors(
15481                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
15482                ))
15483            } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
15484                Some(GrantObjects::ComputePools(
15485                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
15486                ))
15487            } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
15488                Some(GrantObjects::FailoverGroup(
15489                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
15490                ))
15491            } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
15492                Some(GrantObjects::ReplicationGroup(
15493                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
15494                ))
15495            } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
15496                Some(GrantObjects::ExternalVolumes(
15497                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
15498                ))
15499            } else {
15500                let object_type = self.parse_one_of_keywords(&[
15501                    Keyword::SEQUENCE,
15502                    Keyword::DATABASE,
15503                    Keyword::SCHEMA,
15504                    Keyword::TABLE,
15505                    Keyword::VIEW,
15506                    Keyword::WAREHOUSE,
15507                    Keyword::INTEGRATION,
15511                    Keyword::USER,
15512                    Keyword::CONNECTION,
15513                    Keyword::PROCEDURE,
15514                    Keyword::FUNCTION,
15515                ]);
15516                let objects =
15517                    self.parse_comma_separated(|p| p.parse_object_name_inner(false, true));
15518                match object_type {
15519                    Some(Keyword::DATABASE) => Some(GrantObjects::Databases(objects?)),
15520                    Some(Keyword::SCHEMA) => Some(GrantObjects::Schemas(objects?)),
15521                    Some(Keyword::SEQUENCE) => Some(GrantObjects::Sequences(objects?)),
15522                    Some(Keyword::WAREHOUSE) => Some(GrantObjects::Warehouses(objects?)),
15523                    Some(Keyword::INTEGRATION) => Some(GrantObjects::Integrations(objects?)),
15524                    Some(Keyword::VIEW) => Some(GrantObjects::Views(objects?)),
15525                    Some(Keyword::USER) => Some(GrantObjects::Users(objects?)),
15526                    Some(Keyword::CONNECTION) => Some(GrantObjects::Connections(objects?)),
15527                    kw @ (Some(Keyword::PROCEDURE) | Some(Keyword::FUNCTION)) => {
15528                        if let Some(name) = objects?.first() {
15529                            self.parse_grant_procedure_or_function(name, &kw)?
15530                        } else {
15531                            self.expected("procedure or function name", self.peek_token())?
15532                        }
15533                    }
15534                    Some(Keyword::TABLE) | None => Some(GrantObjects::Tables(objects?)),
15535                    _ => unreachable!(),
15536                }
15537            }
15538        } else {
15539            None
15540        };
15541
15542        Ok((privileges, objects))
15543    }
15544
15545    fn parse_grant_procedure_or_function(
15546        &mut self,
15547        name: &ObjectName,
15548        kw: &Option<Keyword>,
15549    ) -> Result<Option<GrantObjects>, ParserError> {
15550        let arg_types = if self.consume_token(&Token::LParen) {
15551            let list = self.parse_comma_separated0(Self::parse_data_type, Token::RParen)?;
15552            self.expect_token(&Token::RParen)?;
15553            list
15554        } else {
15555            vec![]
15556        };
15557        match kw {
15558            Some(Keyword::PROCEDURE) => Ok(Some(GrantObjects::Procedure {
15559                name: name.clone(),
15560                arg_types,
15561            })),
15562            Some(Keyword::FUNCTION) => Ok(Some(GrantObjects::Function {
15563                name: name.clone(),
15564                arg_types,
15565            })),
15566            _ => self.expected("procedure or function keywords", self.peek_token())?,
15567        }
15568    }
15569
15570    pub fn parse_grant_permission(&mut self) -> Result<Action, ParserError> {
15571        fn parse_columns(parser: &mut Parser) -> Result<Option<Vec<Ident>>, ParserError> {
15572            let columns = parser.parse_parenthesized_column_list(Optional, false)?;
15573            if columns.is_empty() {
15574                Ok(None)
15575            } else {
15576                Ok(Some(columns))
15577            }
15578        }
15579
15580        // Multi-word privileges
15581        if self.parse_keywords(&[Keyword::IMPORTED, Keyword::PRIVILEGES]) {
15582            Ok(Action::ImportedPrivileges)
15583        } else if self.parse_keywords(&[Keyword::ADD, Keyword::SEARCH, Keyword::OPTIMIZATION]) {
15584            Ok(Action::AddSearchOptimization)
15585        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::LISTING]) {
15586            Ok(Action::AttachListing)
15587        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::POLICY]) {
15588            Ok(Action::AttachPolicy)
15589        } else if self.parse_keywords(&[Keyword::BIND, Keyword::SERVICE, Keyword::ENDPOINT]) {
15590            Ok(Action::BindServiceEndpoint)
15591        } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
15592            let role = self.parse_object_name(false)?;
15593            Ok(Action::DatabaseRole { role })
15594        } else if self.parse_keywords(&[Keyword::EVOLVE, Keyword::SCHEMA]) {
15595            Ok(Action::EvolveSchema)
15596        } else if self.parse_keywords(&[Keyword::IMPORT, Keyword::SHARE]) {
15597            Ok(Action::ImportShare)
15598        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::VERSIONS]) {
15599            Ok(Action::ManageVersions)
15600        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::RELEASES]) {
15601            Ok(Action::ManageReleases)
15602        } else if self.parse_keywords(&[Keyword::OVERRIDE, Keyword::SHARE, Keyword::RESTRICTIONS]) {
15603            Ok(Action::OverrideShareRestrictions)
15604        } else if self.parse_keywords(&[
15605            Keyword::PURCHASE,
15606            Keyword::DATA,
15607            Keyword::EXCHANGE,
15608            Keyword::LISTING,
15609        ]) {
15610            Ok(Action::PurchaseDataExchangeListing)
15611        } else if self.parse_keywords(&[Keyword::RESOLVE, Keyword::ALL]) {
15612            Ok(Action::ResolveAll)
15613        } else if self.parse_keywords(&[Keyword::READ, Keyword::SESSION]) {
15614            Ok(Action::ReadSession)
15615
15616        // Single-word privileges
15617        } else if self.parse_keyword(Keyword::APPLY) {
15618            let apply_type = self.parse_action_apply_type()?;
15619            Ok(Action::Apply { apply_type })
15620        } else if self.parse_keyword(Keyword::APPLYBUDGET) {
15621            Ok(Action::ApplyBudget)
15622        } else if self.parse_keyword(Keyword::AUDIT) {
15623            Ok(Action::Audit)
15624        } else if self.parse_keyword(Keyword::CONNECT) {
15625            Ok(Action::Connect)
15626        } else if self.parse_keyword(Keyword::CREATE) {
15627            let obj_type = self.maybe_parse_action_create_object_type();
15628            Ok(Action::Create { obj_type })
15629        } else if self.parse_keyword(Keyword::DELETE) {
15630            Ok(Action::Delete)
15631        } else if self.parse_keyword(Keyword::EXEC) {
15632            let obj_type = self.maybe_parse_action_execute_obj_type();
15633            Ok(Action::Exec { obj_type })
15634        } else if self.parse_keyword(Keyword::EXECUTE) {
15635            let obj_type = self.maybe_parse_action_execute_obj_type();
15636            Ok(Action::Execute { obj_type })
15637        } else if self.parse_keyword(Keyword::FAILOVER) {
15638            Ok(Action::Failover)
15639        } else if self.parse_keyword(Keyword::INSERT) {
15640            Ok(Action::Insert {
15641                columns: parse_columns(self)?,
15642            })
15643        } else if self.parse_keyword(Keyword::MANAGE) {
15644            let manage_type = self.parse_action_manage_type()?;
15645            Ok(Action::Manage { manage_type })
15646        } else if self.parse_keyword(Keyword::MODIFY) {
15647            let modify_type = self.parse_action_modify_type();
15648            Ok(Action::Modify { modify_type })
15649        } else if self.parse_keyword(Keyword::MONITOR) {
15650            let monitor_type = self.parse_action_monitor_type();
15651            Ok(Action::Monitor { monitor_type })
15652        } else if self.parse_keyword(Keyword::OPERATE) {
15653            Ok(Action::Operate)
15654        } else if self.parse_keyword(Keyword::REFERENCES) {
15655            Ok(Action::References {
15656                columns: parse_columns(self)?,
15657            })
15658        } else if self.parse_keyword(Keyword::READ) {
15659            Ok(Action::Read)
15660        } else if self.parse_keyword(Keyword::REPLICATE) {
15661            Ok(Action::Replicate)
15662        } else if self.parse_keyword(Keyword::ROLE) {
15663            let role = self.parse_object_name(false)?;
15664            Ok(Action::Role { role })
15665        } else if self.parse_keyword(Keyword::SELECT) {
15666            Ok(Action::Select {
15667                columns: parse_columns(self)?,
15668            })
15669        } else if self.parse_keyword(Keyword::TEMPORARY) {
15670            Ok(Action::Temporary)
15671        } else if self.parse_keyword(Keyword::TRIGGER) {
15672            Ok(Action::Trigger)
15673        } else if self.parse_keyword(Keyword::TRUNCATE) {
15674            Ok(Action::Truncate)
15675        } else if self.parse_keyword(Keyword::UPDATE) {
15676            Ok(Action::Update {
15677                columns: parse_columns(self)?,
15678            })
15679        } else if self.parse_keyword(Keyword::USAGE) {
15680            Ok(Action::Usage)
15681        } else if self.parse_keyword(Keyword::OWNERSHIP) {
15682            Ok(Action::Ownership)
15683        } else if self.parse_keyword(Keyword::DROP) {
15684            Ok(Action::Drop)
15685        } else {
15686            self.expected("a privilege keyword", self.peek_token())?
15687        }
15688    }
15689
15690    fn maybe_parse_action_create_object_type(&mut self) -> Option<ActionCreateObjectType> {
15691        // Multi-word object types
15692        if self.parse_keywords(&[Keyword::APPLICATION, Keyword::PACKAGE]) {
15693            Some(ActionCreateObjectType::ApplicationPackage)
15694        } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
15695            Some(ActionCreateObjectType::ComputePool)
15696        } else if self.parse_keywords(&[Keyword::DATA, Keyword::EXCHANGE, Keyword::LISTING]) {
15697            Some(ActionCreateObjectType::DataExchangeListing)
15698        } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
15699            Some(ActionCreateObjectType::ExternalVolume)
15700        } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
15701            Some(ActionCreateObjectType::FailoverGroup)
15702        } else if self.parse_keywords(&[Keyword::NETWORK, Keyword::POLICY]) {
15703            Some(ActionCreateObjectType::NetworkPolicy)
15704        } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::LISTING]) {
15705            Some(ActionCreateObjectType::OrganiationListing)
15706        } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
15707            Some(ActionCreateObjectType::ReplicationGroup)
15708        }
15709        // Single-word object types
15710        else if self.parse_keyword(Keyword::ACCOUNT) {
15711            Some(ActionCreateObjectType::Account)
15712        } else if self.parse_keyword(Keyword::APPLICATION) {
15713            Some(ActionCreateObjectType::Application)
15714        } else if self.parse_keyword(Keyword::DATABASE) {
15715            Some(ActionCreateObjectType::Database)
15716        } else if self.parse_keyword(Keyword::INTEGRATION) {
15717            Some(ActionCreateObjectType::Integration)
15718        } else if self.parse_keyword(Keyword::ROLE) {
15719            Some(ActionCreateObjectType::Role)
15720        } else if self.parse_keyword(Keyword::SCHEMA) {
15721            Some(ActionCreateObjectType::Schema)
15722        } else if self.parse_keyword(Keyword::SHARE) {
15723            Some(ActionCreateObjectType::Share)
15724        } else if self.parse_keyword(Keyword::USER) {
15725            Some(ActionCreateObjectType::User)
15726        } else if self.parse_keyword(Keyword::WAREHOUSE) {
15727            Some(ActionCreateObjectType::Warehouse)
15728        } else {
15729            None
15730        }
15731    }
15732
15733    fn parse_action_apply_type(&mut self) -> Result<ActionApplyType, ParserError> {
15734        if self.parse_keywords(&[Keyword::AGGREGATION, Keyword::POLICY]) {
15735            Ok(ActionApplyType::AggregationPolicy)
15736        } else if self.parse_keywords(&[Keyword::AUTHENTICATION, Keyword::POLICY]) {
15737            Ok(ActionApplyType::AuthenticationPolicy)
15738        } else if self.parse_keywords(&[Keyword::JOIN, Keyword::POLICY]) {
15739            Ok(ActionApplyType::JoinPolicy)
15740        } else if self.parse_keywords(&[Keyword::MASKING, Keyword::POLICY]) {
15741            Ok(ActionApplyType::MaskingPolicy)
15742        } else if self.parse_keywords(&[Keyword::PACKAGES, Keyword::POLICY]) {
15743            Ok(ActionApplyType::PackagesPolicy)
15744        } else if self.parse_keywords(&[Keyword::PASSWORD, Keyword::POLICY]) {
15745            Ok(ActionApplyType::PasswordPolicy)
15746        } else if self.parse_keywords(&[Keyword::PROJECTION, Keyword::POLICY]) {
15747            Ok(ActionApplyType::ProjectionPolicy)
15748        } else if self.parse_keywords(&[Keyword::ROW, Keyword::ACCESS, Keyword::POLICY]) {
15749            Ok(ActionApplyType::RowAccessPolicy)
15750        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::POLICY]) {
15751            Ok(ActionApplyType::SessionPolicy)
15752        } else if self.parse_keyword(Keyword::TAG) {
15753            Ok(ActionApplyType::Tag)
15754        } else {
15755            self.expected("GRANT APPLY type", self.peek_token())
15756        }
15757    }
15758
15759    fn maybe_parse_action_execute_obj_type(&mut self) -> Option<ActionExecuteObjectType> {
15760        if self.parse_keywords(&[Keyword::DATA, Keyword::METRIC, Keyword::FUNCTION]) {
15761            Some(ActionExecuteObjectType::DataMetricFunction)
15762        } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::ALERT]) {
15763            Some(ActionExecuteObjectType::ManagedAlert)
15764        } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::TASK]) {
15765            Some(ActionExecuteObjectType::ManagedTask)
15766        } else if self.parse_keyword(Keyword::ALERT) {
15767            Some(ActionExecuteObjectType::Alert)
15768        } else if self.parse_keyword(Keyword::TASK) {
15769            Some(ActionExecuteObjectType::Task)
15770        } else {
15771            None
15772        }
15773    }
15774
15775    fn parse_action_manage_type(&mut self) -> Result<ActionManageType, ParserError> {
15776        if self.parse_keywords(&[Keyword::ACCOUNT, Keyword::SUPPORT, Keyword::CASES]) {
15777            Ok(ActionManageType::AccountSupportCases)
15778        } else if self.parse_keywords(&[Keyword::EVENT, Keyword::SHARING]) {
15779            Ok(ActionManageType::EventSharing)
15780        } else if self.parse_keywords(&[Keyword::LISTING, Keyword::AUTO, Keyword::FULFILLMENT]) {
15781            Ok(ActionManageType::ListingAutoFulfillment)
15782        } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::SUPPORT, Keyword::CASES]) {
15783            Ok(ActionManageType::OrganizationSupportCases)
15784        } else if self.parse_keywords(&[Keyword::USER, Keyword::SUPPORT, Keyword::CASES]) {
15785            Ok(ActionManageType::UserSupportCases)
15786        } else if self.parse_keyword(Keyword::GRANTS) {
15787            Ok(ActionManageType::Grants)
15788        } else if self.parse_keyword(Keyword::WAREHOUSES) {
15789            Ok(ActionManageType::Warehouses)
15790        } else {
15791            self.expected("GRANT MANAGE type", self.peek_token())
15792        }
15793    }
15794
15795    fn parse_action_modify_type(&mut self) -> Option<ActionModifyType> {
15796        if self.parse_keywords(&[Keyword::LOG, Keyword::LEVEL]) {
15797            Some(ActionModifyType::LogLevel)
15798        } else if self.parse_keywords(&[Keyword::TRACE, Keyword::LEVEL]) {
15799            Some(ActionModifyType::TraceLevel)
15800        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::LOG, Keyword::LEVEL]) {
15801            Some(ActionModifyType::SessionLogLevel)
15802        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::TRACE, Keyword::LEVEL]) {
15803            Some(ActionModifyType::SessionTraceLevel)
15804        } else {
15805            None
15806        }
15807    }
15808
15809    fn parse_action_monitor_type(&mut self) -> Option<ActionMonitorType> {
15810        if self.parse_keyword(Keyword::EXECUTION) {
15811            Some(ActionMonitorType::Execution)
15812        } else if self.parse_keyword(Keyword::SECURITY) {
15813            Some(ActionMonitorType::Security)
15814        } else if self.parse_keyword(Keyword::USAGE) {
15815            Some(ActionMonitorType::Usage)
15816        } else {
15817            None
15818        }
15819    }
15820
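    /// Parses a grantee name. This is normally an object name, but dialects
    /// that support it (e.g. MySQL) may use the `user@host` form. A sketch with
    /// illustrative identifiers:
    ///
    /// ```sql
    /// GRANT SELECT ON db1.users TO app_user@localhost;
    /// ```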
15821    pub fn parse_grantee_name(&mut self) -> Result<GranteeName, ParserError> {
15822        let mut name = self.parse_object_name(false)?;
15823        if self.dialect.supports_user_host_grantee()
15824            && name.0.len() == 1
15825            && name.0[0].as_ident().is_some()
15826            && self.consume_token(&Token::AtSign)
15827        {
15828            let user = name.0.pop().unwrap().as_ident().unwrap().clone();
15829            let host = self.parse_identifier()?;
15830            Ok(GranteeName::UserHost { user, host })
15831        } else {
15832            Ok(GranteeName::ObjectName(name))
15833        }
15834    }
15835
15836    /// Parse [`Statement::Deny`]
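    ///
    /// A sketch of the shape this accepts (T-SQL-style; identifiers are
    /// illustrative and dialect support varies):
    ///
    /// ```sql
    /// DENY SELECT, INSERT ON customers TO app_role CASCADE AS dbo;
    /// ```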
15837    pub fn parse_deny(&mut self) -> Result<Statement, ParserError> {
15838        self.expect_keyword(Keyword::DENY)?;
15839
15840        let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
15841        let objects = match objects {
15842            Some(o) => o,
15843            None => {
15844                return parser_err!(
15845                    "DENY statements must specify an object",
15846                    self.peek_token().span.start
15847                )
15848            }
15849        };
15850
15851        self.expect_keyword_is(Keyword::TO)?;
15852        let grantees = self.parse_grantees()?;
15853        let cascade = self.parse_cascade_option();
15854        let granted_by = if self.parse_keywords(&[Keyword::AS]) {
15855            Some(self.parse_identifier()?)
15856        } else {
15857            None
15858        };
15859
15860        Ok(Statement::Deny(DenyStatement {
15861            privileges,
15862            objects,
15863            grantees,
15864            cascade,
15865            granted_by,
15866        }))
15867    }
15868
15869    /// Parse a REVOKE statement
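    ///
    /// A sketch of the shape this accepts (the optional clauses vary by dialect):
    ///
    /// ```sql
    /// REVOKE SELECT, INSERT ON employees FROM analyst GRANTED BY admin CASCADE;
    /// ```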
15870    pub fn parse_revoke(&mut self) -> Result<Statement, ParserError> {
15871        let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
15872
15873        self.expect_keyword_is(Keyword::FROM)?;
15874        let grantees = self.parse_grantees()?;
15875
15876        let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
15877            Some(self.parse_identifier()?)
15878        } else {
15879            None
15880        };
15881
15882        let cascade = self.parse_cascade_option();
15883
15884        Ok(Statement::Revoke {
15885            privileges,
15886            objects,
15887            grantees,
15888            granted_by,
15889            cascade,
15890        })
15891    }
15892
15893    /// Parse a REPLACE statement
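    ///
    /// REPLACE is parsed as an INSERT with `replace_into` set and is only
    /// accepted for MySQL and the generic dialect. A sketch:
    ///
    /// ```sql
    /// REPLACE INTO users (id, name) VALUES (1, 'alice');
    /// ```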
15894    pub fn parse_replace(
15895        &mut self,
15896        replace_token: TokenWithSpan,
15897    ) -> Result<Statement, ParserError> {
15898        if !dialect_of!(self is MySqlDialect | GenericDialect) {
15899            return parser_err!(
15900                "Unsupported statement REPLACE",
15901                self.peek_token().span.start
15902            );
15903        }
15904
15905        let mut insert = self.parse_insert(replace_token)?;
15906        if let Statement::Insert(Insert { replace_into, .. }) = &mut insert {
15907            *replace_into = true;
15908        }
15909
15910        Ok(insert)
15911    }
15912
15913    /// Parse an INSERT statement, returning a `Box`ed SetExpr
15914    ///
15915    /// This is used to reduce the size of the stack frames in debug builds
15916    fn parse_insert_setexpr_boxed(
15917        &mut self,
15918        insert_token: TokenWithSpan,
15919    ) -> Result<Box<SetExpr>, ParserError> {
15920        Ok(Box::new(SetExpr::Insert(self.parse_insert(insert_token)?)))
15921    }
15922
15923    /// Parse an INSERT statement
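    ///
    /// Covers the plain form as well as dialect extensions such as
    /// `ON CONFLICT`, `ON DUPLICATE KEY UPDATE`, `RETURNING`, and Hive's
    /// `INSERT OVERWRITE [LOCAL] DIRECTORY`. A sketch (dialect support varies):
    ///
    /// ```sql
    /// INSERT INTO t (a, b) VALUES (1, 2)
    /// ON CONFLICT (a) DO UPDATE SET b = excluded.b
    /// RETURNING a;
    /// ```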
15924    pub fn parse_insert(&mut self, insert_token: TokenWithSpan) -> Result<Statement, ParserError> {
15925        let or = self.parse_conflict_clause();
15926        let priority = if !dialect_of!(self is MySqlDialect | GenericDialect) {
15927            None
15928        } else if self.parse_keyword(Keyword::LOW_PRIORITY) {
15929            Some(MysqlInsertPriority::LowPriority)
15930        } else if self.parse_keyword(Keyword::DELAYED) {
15931            Some(MysqlInsertPriority::Delayed)
15932        } else if self.parse_keyword(Keyword::HIGH_PRIORITY) {
15933            Some(MysqlInsertPriority::HighPriority)
15934        } else {
15935            None
15936        };
15937
15938        let ignore = dialect_of!(self is MySqlDialect | GenericDialect)
15939            && self.parse_keyword(Keyword::IGNORE);
15940
15941        let replace_into = false;
15942
15943        let overwrite = self.parse_keyword(Keyword::OVERWRITE);
15944        let into = self.parse_keyword(Keyword::INTO);
15945
15946        let local = self.parse_keyword(Keyword::LOCAL);
15947
15948        if self.parse_keyword(Keyword::DIRECTORY) {
15949            let path = self.parse_literal_string()?;
15950            let file_format = if self.parse_keywords(&[Keyword::STORED, Keyword::AS]) {
15951                Some(self.parse_file_format()?)
15952            } else {
15953                None
15954            };
15955            let source = self.parse_query()?;
15956            Ok(Statement::Directory {
15957                local,
15958                path,
15959                overwrite,
15960                file_format,
15961                source,
15962            })
15963        } else {
15964            // Hive allows an optional TABLE keyword here; accept it regardless of dialect
15965            let table = self.parse_keyword(Keyword::TABLE);
15966            let table_object = self.parse_table_object()?;
15967
15968            let table_alias =
15969                if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::AS) {
15970                    Some(self.parse_identifier()?)
15971                } else {
15972                    None
15973                };
15974
15975            let is_mysql = dialect_of!(self is MySqlDialect);
15976
15977            let (columns, partitioned, after_columns, source, assignments) = if self
15978                .parse_keywords(&[Keyword::DEFAULT, Keyword::VALUES])
15979            {
15980                (vec![], None, vec![], None, vec![])
15981            } else {
15982                let (columns, partitioned, after_columns) = if !self.peek_subquery_start() {
15983                    let columns = self.parse_parenthesized_column_list(Optional, is_mysql)?;
15984
15985                    let partitioned = self.parse_insert_partition()?;
15986                    // Hive also allows specifying columns after the partition clause.
15987                    let after_columns = if dialect_of!(self is HiveDialect) {
15988                        self.parse_parenthesized_column_list(Optional, false)?
15989                    } else {
15990                        vec![]
15991                    };
15992                    (columns, partitioned, after_columns)
15993                } else {
15994                    Default::default()
15995                };
15996
15997                let (source, assignments) = if self.peek_keyword(Keyword::FORMAT)
15998                    || self.peek_keyword(Keyword::SETTINGS)
15999                {
16000                    (None, vec![])
16001                } else if self.dialect.supports_insert_set() && self.parse_keyword(Keyword::SET) {
16002                    (None, self.parse_comma_separated(Parser::parse_assignment)?)
16003                } else {
16004                    (Some(self.parse_query()?), vec![])
16005                };
16006
16007                (columns, partitioned, after_columns, source, assignments)
16008            };
16009
16010            let (format_clause, settings) = if self.dialect.supports_insert_format() {
16011                // In ClickHouse, the SETTINGS clause always comes before `FORMAT`:
16012                // <https://clickhouse.com/docs/en/sql-reference/statements/insert-into>
16013                let settings = self.parse_settings()?;
16014
16015                let format = if self.parse_keyword(Keyword::FORMAT) {
16016                    Some(self.parse_input_format_clause()?)
16017                } else {
16018                    None
16019                };
16020
16021                (format, settings)
16022            } else {
16023                Default::default()
16024            };
16025
16026            let insert_alias = if dialect_of!(self is MySqlDialect | GenericDialect)
16027                && self.parse_keyword(Keyword::AS)
16028            {
16029                let row_alias = self.parse_object_name(false)?;
16030                let col_aliases = Some(self.parse_parenthesized_column_list(Optional, false)?);
16031                Some(InsertAliases {
16032                    row_alias,
16033                    col_aliases,
16034                })
16035            } else {
16036                None
16037            };
16038
16039            let on = if self.parse_keyword(Keyword::ON) {
16040                if self.parse_keyword(Keyword::CONFLICT) {
16041                    let conflict_target =
16042                        if self.parse_keywords(&[Keyword::ON, Keyword::CONSTRAINT]) {
16043                            Some(ConflictTarget::OnConstraint(self.parse_object_name(false)?))
16044                        } else if self.peek_token() == Token::LParen {
16045                            Some(ConflictTarget::Columns(
16046                                self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
16047                            ))
16048                        } else {
16049                            None
16050                        };
16051
16052                    self.expect_keyword_is(Keyword::DO)?;
16053                    let action = if self.parse_keyword(Keyword::NOTHING) {
16054                        OnConflictAction::DoNothing
16055                    } else {
16056                        self.expect_keyword_is(Keyword::UPDATE)?;
16057                        self.expect_keyword_is(Keyword::SET)?;
16058                        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
16059                        let selection = if self.parse_keyword(Keyword::WHERE) {
16060                            Some(self.parse_expr()?)
16061                        } else {
16062                            None
16063                        };
16064                        OnConflictAction::DoUpdate(DoUpdate {
16065                            assignments,
16066                            selection,
16067                        })
16068                    };
16069
16070                    Some(OnInsert::OnConflict(OnConflict {
16071                        conflict_target,
16072                        action,
16073                    }))
16074                } else {
16075                    self.expect_keyword_is(Keyword::DUPLICATE)?;
16076                    self.expect_keyword_is(Keyword::KEY)?;
16077                    self.expect_keyword_is(Keyword::UPDATE)?;
16078                    let l = self.parse_comma_separated(Parser::parse_assignment)?;
16079
16080                    Some(OnInsert::DuplicateKeyUpdate(l))
16081                }
16082            } else {
16083                None
16084            };
16085
16086            let returning = if self.parse_keyword(Keyword::RETURNING) {
16087                Some(self.parse_comma_separated(Parser::parse_select_item)?)
16088            } else {
16089                None
16090            };
16091
16092            Ok(Statement::Insert(Insert {
16093                insert_token: insert_token.into(),
16094                or,
16095                table: table_object,
16096                table_alias,
16097                ignore,
16098                into,
16099                overwrite,
16100                partitioned,
16101                columns,
16102                after_columns,
16103                source,
16104                assignments,
16105                has_table_keyword: table,
16106                on,
16107                returning,
16108                replace_into,
16109                priority,
16110                insert_alias,
16111                settings,
16112                format_clause,
16113            }))
16114        }
16115    }
16116
16117    // Parses the input format clause used by [ClickHouse].
16118    //
16119    // <https://clickhouse.com/docs/en/interfaces/formats>
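    //
    // A sketch of the form this handles, where the format name is parsed as an
    // identifier and may be followed by inline values (illustrative example):
    //
    // ```sql
    // INSERT INTO t FORMAT JSONEachRow
    // ```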
16120    pub fn parse_input_format_clause(&mut self) -> Result<InputFormatClause, ParserError> {
16121        let ident = self.parse_identifier()?;
16122        let values = self
16123            .maybe_parse(|p| p.parse_comma_separated(|p| p.parse_expr()))?
16124            .unwrap_or_default();
16125
16126        Ok(InputFormatClause { ident, values })
16127    }
16128
16129    /// Returns true if the upcoming tokens look like the
16130    /// beginning of a subquery, i.e. `(SELECT ...`.
16131    fn peek_subquery_start(&mut self) -> bool {
16132        let [maybe_lparen, maybe_select] = self.peek_tokens();
16133        Token::LParen == maybe_lparen
16134            && matches!(maybe_select, Token::Word(w) if w.keyword == Keyword::SELECT)
16135    }
16136
16137    fn parse_conflict_clause(&mut self) -> Option<SqliteOnConflict> {
16138        if self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]) {
16139            Some(SqliteOnConflict::Replace)
16140        } else if self.parse_keywords(&[Keyword::OR, Keyword::ROLLBACK]) {
16141            Some(SqliteOnConflict::Rollback)
16142        } else if self.parse_keywords(&[Keyword::OR, Keyword::ABORT]) {
16143            Some(SqliteOnConflict::Abort)
16144        } else if self.parse_keywords(&[Keyword::OR, Keyword::FAIL]) {
16145            Some(SqliteOnConflict::Fail)
16146        } else if self.parse_keywords(&[Keyword::OR, Keyword::IGNORE]) {
16147            Some(SqliteOnConflict::Ignore)
16148        } else if self.parse_keyword(Keyword::REPLACE) {
16149            Some(SqliteOnConflict::Replace)
16150        } else {
16151            None
16152        }
16153    }
16154
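    /// Parses the optional Hive-style `PARTITION (...)` clause of an INSERT.
    /// A sketch with illustrative identifiers:
    ///
    /// ```sql
    /// INSERT INTO TABLE page_views PARTITION (dt = '2024-01-01', country) SELECT * FROM staging;
    /// ```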
16155    pub fn parse_insert_partition(&mut self) -> Result<Option<Vec<Expr>>, ParserError> {
16156        if self.parse_keyword(Keyword::PARTITION) {
16157            self.expect_token(&Token::LParen)?;
16158            let partition_cols = Some(self.parse_comma_separated(Parser::parse_expr)?);
16159            self.expect_token(&Token::RParen)?;
16160            Ok(partition_cols)
16161        } else {
16162            Ok(None)
16163        }
16164    }
16165
16166    pub fn parse_load_data_table_format(
16167        &mut self,
16168    ) -> Result<Option<HiveLoadDataFormat>, ParserError> {
16169        if self.parse_keyword(Keyword::INPUTFORMAT) {
16170            let input_format = self.parse_expr()?;
16171            self.expect_keyword_is(Keyword::SERDE)?;
16172            let serde = self.parse_expr()?;
16173            Ok(Some(HiveLoadDataFormat {
16174                input_format,
16175                serde,
16176            }))
16177        } else {
16178            Ok(None)
16179        }
16180    }
16181
16182    /// Parse an UPDATE statement, returning a `Box`ed SetExpr
16183    ///
16184    /// This is used to reduce the size of the stack frames in debug builds
16185    fn parse_update_setexpr_boxed(
16186        &mut self,
16187        update_token: TokenWithSpan,
16188    ) -> Result<Box<SetExpr>, ParserError> {
16189        Ok(Box::new(SetExpr::Update(self.parse_update(update_token)?)))
16190    }
16191
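    /// Parses an UPDATE statement, including the optional FROM clause (before
    /// or after SET, depending on dialect), WHERE, RETURNING, and LIMIT.
    /// A sketch with illustrative identifiers:
    ///
    /// ```sql
    /// UPDATE t SET a = 1, b = b + 1 FROM u WHERE t.id = u.id RETURNING t.id;
    /// ```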
16192    pub fn parse_update(&mut self, update_token: TokenWithSpan) -> Result<Statement, ParserError> {
16193        let or = self.parse_conflict_clause();
16194        let table = self.parse_table_and_joins()?;
16195        let from_before_set = if self.parse_keyword(Keyword::FROM) {
16196            Some(UpdateTableFromKind::BeforeSet(
16197                self.parse_table_with_joins()?,
16198            ))
16199        } else {
16200            None
16201        };
16202        self.expect_keyword(Keyword::SET)?;
16203        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
16204        let from = if from_before_set.is_none() && self.parse_keyword(Keyword::FROM) {
16205            Some(UpdateTableFromKind::AfterSet(
16206                self.parse_table_with_joins()?,
16207            ))
16208        } else {
16209            from_before_set
16210        };
16211        let selection = if self.parse_keyword(Keyword::WHERE) {
16212            Some(self.parse_expr()?)
16213        } else {
16214            None
16215        };
16216        let returning = if self.parse_keyword(Keyword::RETURNING) {
16217            Some(self.parse_comma_separated(Parser::parse_select_item)?)
16218        } else {
16219            None
16220        };
16221        let limit = if self.parse_keyword(Keyword::LIMIT) {
16222            Some(self.parse_expr()?)
16223        } else {
16224            None
16225        };
16226        Ok(Update {
16227            update_token: update_token.into(),
16228            table,
16229            assignments,
16230            from,
16231            selection,
16232            returning,
16233            or,
16234            limit,
16235        }
16236        .into())
16237    }
16238
16239    /// Parse a `var = expr` assignment, used in an UPDATE statement
16240    pub fn parse_assignment(&mut self) -> Result<Assignment, ParserError> {
16241        let target = self.parse_assignment_target()?;
16242        self.expect_token(&Token::Eq)?;
16243        let value = self.parse_expr()?;
16244        Ok(Assignment { target, value })
16245    }
16246
16247    /// Parse the left-hand side of an assignment, used in an UPDATE statement
16248    pub fn parse_assignment_target(&mut self) -> Result<AssignmentTarget, ParserError> {
16249        if self.consume_token(&Token::LParen) {
16250            let columns = self.parse_comma_separated(|p| p.parse_object_name(false))?;
16251            self.expect_token(&Token::RParen)?;
16252            Ok(AssignmentTarget::Tuple(columns))
16253        } else {
16254            let column = self.parse_object_name(false)?;
16255            Ok(AssignmentTarget::ColumnName(column))
16256        }
16257    }
16258
16259    pub fn parse_function_args(&mut self) -> Result<FunctionArg, ParserError> {
16260        let arg = if self.dialect.supports_named_fn_args_with_expr_name() {
16261            self.maybe_parse(|p| {
16262                let name = p.parse_expr()?;
16263                let operator = p.parse_function_named_arg_operator()?;
16264                let arg = p.parse_wildcard_expr()?.into();
16265                Ok(FunctionArg::ExprNamed {
16266                    name,
16267                    arg,
16268                    operator,
16269                })
16270            })?
16271        } else {
16272            self.maybe_parse(|p| {
16273                let name = p.parse_identifier()?;
16274                let operator = p.parse_function_named_arg_operator()?;
16275                let arg = p.parse_wildcard_expr()?.into();
16276                Ok(FunctionArg::Named {
16277                    name,
16278                    arg,
16279                    operator,
16280                })
16281            })?
16282        };
16283        if let Some(arg) = arg {
16284            return Ok(arg);
16285        }
16286        Ok(FunctionArg::Unnamed(self.parse_wildcard_expr()?.into()))
16287    }
16288
16289    fn parse_function_named_arg_operator(&mut self) -> Result<FunctionArgOperator, ParserError> {
16290        if self.parse_keyword(Keyword::VALUE) {
16291            return Ok(FunctionArgOperator::Value);
16292        }
16293        let tok = self.next_token();
16294        match tok.token {
16295            Token::RArrow if self.dialect.supports_named_fn_args_with_rarrow_operator() => {
16296                Ok(FunctionArgOperator::RightArrow)
16297            }
16298            Token::Eq if self.dialect.supports_named_fn_args_with_eq_operator() => {
16299                Ok(FunctionArgOperator::Equals)
16300            }
16301            Token::Assignment
16302                if self
16303                    .dialect
16304                    .supports_named_fn_args_with_assignment_operator() =>
16305            {
16306                Ok(FunctionArgOperator::Assignment)
16307            }
16308            Token::Colon if self.dialect.supports_named_fn_args_with_colon_operator() => {
16309                Ok(FunctionArgOperator::Colon)
16310            }
16311            _ => {
16312                self.prev_token();
16313                self.expected("argument operator", tok)
16314            }
16315        }
16316    }
16317
16318    pub fn parse_optional_args(&mut self) -> Result<Vec<FunctionArg>, ParserError> {
16319        if self.consume_token(&Token::RParen) {
16320            Ok(vec![])
16321        } else {
16322            let args = self.parse_comma_separated(Parser::parse_function_args)?;
16323            self.expect_token(&Token::RParen)?;
16324            Ok(args)
16325        }
16326    }
16327
16328    fn parse_table_function_args(&mut self) -> Result<TableFunctionArgs, ParserError> {
16329        if self.consume_token(&Token::RParen) {
16330            return Ok(TableFunctionArgs {
16331                args: vec![],
16332                settings: None,
16333            });
16334        }
16335        let mut args = vec![];
16336        let settings = loop {
16337            if let Some(settings) = self.parse_settings()? {
16338                break Some(settings);
16339            }
16340            args.push(self.parse_function_args()?);
16341            if self.is_parse_comma_separated_end() {
16342                break None;
16343            }
16344        };
16345        self.expect_token(&Token::RParen)?;
16346        Ok(TableFunctionArgs { args, settings })
16347    }
16348
16349    /// Parses a potentially empty list of arguments to a function
16350    /// (including the closing parenthesis).
16351    ///
16352    /// Examples:
16353    /// ```sql
16354    /// FIRST_VALUE(x ORDER BY 1,2,3);
16355    /// FIRST_VALUE(x IGNORE NULLS);
16356    /// ```
16357    fn parse_function_argument_list(&mut self) -> Result<FunctionArgumentList, ParserError> {
16358        let mut clauses = vec![];
16359
16360        // Handle clauses that may exist with an empty argument list
16361
16362        if let Some(null_clause) = self.parse_json_null_clause() {
16363            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
16364        }
16365
16366        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
16367            clauses.push(FunctionArgumentClause::JsonReturningClause(
16368                json_returning_clause,
16369            ));
16370        }
16371
16372        if self.consume_token(&Token::RParen) {
16373            return Ok(FunctionArgumentList {
16374                duplicate_treatment: None,
16375                args: vec![],
16376                clauses,
16377            });
16378        }
16379
16380        let duplicate_treatment = self.parse_duplicate_treatment()?;
16381        let args = self.parse_comma_separated(Parser::parse_function_args)?;
16382
16383        if self.dialect.supports_window_function_null_treatment_arg() {
16384            if let Some(null_treatment) = self.parse_null_treatment()? {
16385                clauses.push(FunctionArgumentClause::IgnoreOrRespectNulls(null_treatment));
16386            }
16387        }
16388
16389        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
16390            clauses.push(FunctionArgumentClause::OrderBy(
16391                self.parse_comma_separated(Parser::parse_order_by_expr)?,
16392            ));
16393        }
16394
16395        if self.parse_keyword(Keyword::LIMIT) {
16396            clauses.push(FunctionArgumentClause::Limit(self.parse_expr()?));
16397        }
16398
16399        if dialect_of!(self is GenericDialect | BigQueryDialect)
16400            && self.parse_keyword(Keyword::HAVING)
16401        {
16402            let kind = match self.expect_one_of_keywords(&[Keyword::MIN, Keyword::MAX])? {
16403                Keyword::MIN => HavingBoundKind::Min,
16404                Keyword::MAX => HavingBoundKind::Max,
16405                _ => unreachable!(),
16406            };
16407            clauses.push(FunctionArgumentClause::Having(HavingBound(
16408                kind,
16409                self.parse_expr()?,
16410            )))
16411        }
16412
16413        if dialect_of!(self is GenericDialect | MySqlDialect)
16414            && self.parse_keyword(Keyword::SEPARATOR)
16415        {
16416            clauses.push(FunctionArgumentClause::Separator(self.parse_value()?.value));
16417        }
16418
16419        if let Some(on_overflow) = self.parse_listagg_on_overflow()? {
16420            clauses.push(FunctionArgumentClause::OnOverflow(on_overflow));
16421        }
16422
16423        if let Some(null_clause) = self.parse_json_null_clause() {
16424            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
16425        }
16426
16427        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
16428            clauses.push(FunctionArgumentClause::JsonReturningClause(
16429                json_returning_clause,
16430            ));
16431        }
16432
16433        self.expect_token(&Token::RParen)?;
16434        Ok(FunctionArgumentList {
16435            duplicate_treatment,
16436            args,
16437            clauses,
16438        })
16439    }
16440
16441    fn parse_json_null_clause(&mut self) -> Option<JsonNullClause> {
16442        if self.parse_keywords(&[Keyword::ABSENT, Keyword::ON, Keyword::NULL]) {
16443            Some(JsonNullClause::AbsentOnNull)
16444        } else if self.parse_keywords(&[Keyword::NULL, Keyword::ON, Keyword::NULL]) {
16445            Some(JsonNullClause::NullOnNull)
16446        } else {
16447            None
16448        }
16449    }
16450
16451    fn maybe_parse_json_returning_clause(
16452        &mut self,
16453    ) -> Result<Option<JsonReturningClause>, ParserError> {
16454        if self.parse_keyword(Keyword::RETURNING) {
16455            let data_type = self.parse_data_type()?;
16456            Ok(Some(JsonReturningClause { data_type }))
16457        } else {
16458            Ok(None)
16459        }
16460    }
16461
16462    fn parse_duplicate_treatment(&mut self) -> Result<Option<DuplicateTreatment>, ParserError> {
16463        let loc = self.peek_token().span.start;
16464        match (
16465            self.parse_keyword(Keyword::ALL),
16466            self.parse_keyword(Keyword::DISTINCT),
16467        ) {
16468            (true, false) => Ok(Some(DuplicateTreatment::All)),
16469            (false, true) => Ok(Some(DuplicateTreatment::Distinct)),
16470            (false, false) => Ok(None),
16471            (true, true) => parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc),
16472        }
16473    }
16474
16475    /// Parse a comma-delimited list of projections after SELECT
16476    pub fn parse_select_item(&mut self) -> Result<SelectItem, ParserError> {
16477        let prefix = self
16478            .parse_one_of_keywords(
16479                self.dialect
16480                    .get_reserved_keywords_for_select_item_operator(),
16481            )
16482            .map(|keyword| Ident::new(format!("{keyword:?}")));
16483
16484        match self.parse_wildcard_expr()? {
16485            Expr::QualifiedWildcard(prefix, token) => Ok(SelectItem::QualifiedWildcard(
16486                SelectItemQualifiedWildcardKind::ObjectName(prefix),
16487                self.parse_wildcard_additional_options(token.0)?,
16488            )),
16489            Expr::Wildcard(token) => Ok(SelectItem::Wildcard(
16490                self.parse_wildcard_additional_options(token.0)?,
16491            )),
16492            Expr::Identifier(v) if v.value.to_lowercase() == "from" && v.quote_style.is_none() => {
16493                parser_err!(
16494                    format!("Expected an expression, found: {}", v),
16495                    self.peek_token().span.start
16496                )
16497            }
16498            Expr::BinaryOp {
16499                left,
16500                op: BinaryOperator::Eq,
16501                right,
16502            } if self.dialect.supports_eq_alias_assignment()
16503                && matches!(left.as_ref(), Expr::Identifier(_)) =>
16504            {
16505                let Expr::Identifier(alias) = *left else {
16506                    return parser_err!(
16507                        "BUG: expected identifier expression as alias",
16508                        self.peek_token().span.start
16509                    );
16510                };
16511                Ok(SelectItem::ExprWithAlias {
16512                    expr: *right,
16513                    alias,
16514                })
16515            }
16516            expr if self.dialect.supports_select_expr_star()
16517                && self.consume_tokens(&[Token::Period, Token::Mul]) =>
16518            {
16519                let wildcard_token = self.get_previous_token().clone();
16520                Ok(SelectItem::QualifiedWildcard(
16521                    SelectItemQualifiedWildcardKind::Expr(expr),
16522                    self.parse_wildcard_additional_options(wildcard_token)?,
16523                ))
16524            }
16525            expr => self
16526                .maybe_parse_select_item_alias()
16527                .map(|alias| match alias {
16528                    Some(alias) => SelectItem::ExprWithAlias {
16529                        expr: maybe_prefixed_expr(expr, prefix),
16530                        alias,
16531                    },
16532                    None => SelectItem::UnnamedExpr(maybe_prefixed_expr(expr, prefix)),
16533                }),
16534        }
16535    }
16536
16537    /// Parse the [`WildcardAdditionalOptions`] that may follow a wildcard select item.
16538    ///
16539    /// Options that are not present (or not supported by the dialect) are left as `None`.
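    ///
    /// A sketch of the modifiers this can pick up after a wildcard
    /// (Snowflake-style; each modifier is gated on the dialect):
    ///
    /// ```sql
    /// SELECT * EXCLUDE (password) REPLACE (amount / 100 AS amount) RENAME (uid AS user_id) FROM t;
    /// ```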
16540    pub fn parse_wildcard_additional_options(
16541        &mut self,
16542        wildcard_token: TokenWithSpan,
16543    ) -> Result<WildcardAdditionalOptions, ParserError> {
16544        let opt_ilike = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
16545            self.parse_optional_select_item_ilike()?
16546        } else {
16547            None
16548        };
16549        let opt_exclude = if opt_ilike.is_none() && self.dialect.supports_select_wildcard_exclude()
16550        {
16551            self.parse_optional_select_item_exclude()?
16552        } else {
16553            None
16554        };
16555        let opt_except = if self.dialect.supports_select_wildcard_except() {
16556            self.parse_optional_select_item_except()?
16557        } else {
16558            None
16559        };
16560        let opt_replace = if dialect_of!(self is GenericDialect | BigQueryDialect | ClickHouseDialect | DuckDbDialect | SnowflakeDialect)
16561        {
16562            self.parse_optional_select_item_replace()?
16563        } else {
16564            None
16565        };
16566        let opt_rename = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
16567            self.parse_optional_select_item_rename()?
16568        } else {
16569            None
16570        };
16571
16572        Ok(WildcardAdditionalOptions {
16573            wildcard_token: wildcard_token.into(),
16574            opt_ilike,
16575            opt_exclude,
16576            opt_except,
16577            opt_rename,
16578            opt_replace,
16579        })
16580    }
16581
16582    /// Parse an optional [`Ilike`](IlikeSelectItem) clause for wildcard select items.
16583    ///
16584    /// Returns `Ok(None)` if the clause is not present.
16585    pub fn parse_optional_select_item_ilike(
16586        &mut self,
16587    ) -> Result<Option<IlikeSelectItem>, ParserError> {
16588        let opt_ilike = if self.parse_keyword(Keyword::ILIKE) {
16589            let next_token = self.next_token();
16590            let pattern = match next_token.token {
16591                Token::SingleQuotedString(s) => s,
16592                _ => return self.expected("ilike pattern", next_token),
16593            };
16594            Some(IlikeSelectItem { pattern })
16595        } else {
16596            None
16597        };
16598        Ok(opt_ilike)
16599    }
16600
16601    /// Parse an optional [`Exclude`](ExcludeSelectItem) clause for wildcard select items.
16602    ///
16603    /// Returns `Ok(None)` if the clause is not present.
16604    pub fn parse_optional_select_item_exclude(
16605        &mut self,
16606    ) -> Result<Option<ExcludeSelectItem>, ParserError> {
16607        let opt_exclude = if self.parse_keyword(Keyword::EXCLUDE) {
16608            if self.consume_token(&Token::LParen) {
16609                let columns = self.parse_comma_separated(|parser| parser.parse_identifier())?;
16610                self.expect_token(&Token::RParen)?;
16611                Some(ExcludeSelectItem::Multiple(columns))
16612            } else {
16613                let column = self.parse_identifier()?;
16614                Some(ExcludeSelectItem::Single(column))
16615            }
16616        } else {
16617            None
16618        };
16619
16620        Ok(opt_exclude)
16621    }
16622
16623    /// Parse an optional [`Except`](ExceptSelectItem) clause for wildcard select items.
16624    ///
16625    /// Returns `Ok(None)` if the clause is not present.
16626    pub fn parse_optional_select_item_except(
16627        &mut self,
16628    ) -> Result<Option<ExceptSelectItem>, ParserError> {
16629        let opt_except = if self.parse_keyword(Keyword::EXCEPT) {
16630            if self.peek_token().token == Token::LParen {
16631                let idents = self.parse_parenthesized_column_list(Mandatory, false)?;
16632                match &idents[..] {
16633                    [] => {
16634                        return self.expected(
16635                            "at least one column should be parsed by the EXCEPT clause",
16636                            self.peek_token(),
16637                        )?;
16638                    }
16639                    [first, idents @ ..] => Some(ExceptSelectItem {
16640                        first_element: first.clone(),
16641                        additional_elements: idents.to_vec(),
16642                    }),
16643                }
16644            } else {
16645                // ClickHouse allows EXCEPT column_name
16646                let ident = self.parse_identifier()?;
16647                Some(ExceptSelectItem {
16648                    first_element: ident,
16649                    additional_elements: vec![],
16650                })
16651            }
16652        } else {
16653            None
16654        };
16655
16656        Ok(opt_except)
16657    }
16658
16659    /// Parse an optional [`Rename`](RenameSelectItem) clause for wildcard select items.
16660    pub fn parse_optional_select_item_rename(
16661        &mut self,
16662    ) -> Result<Option<RenameSelectItem>, ParserError> {
16663        let opt_rename = if self.parse_keyword(Keyword::RENAME) {
16664            if self.consume_token(&Token::LParen) {
16665                let idents =
16666                    self.parse_comma_separated(|parser| parser.parse_identifier_with_alias())?;
16667                self.expect_token(&Token::RParen)?;
16668                Some(RenameSelectItem::Multiple(idents))
16669            } else {
16670                let ident = self.parse_identifier_with_alias()?;
16671                Some(RenameSelectItem::Single(ident))
16672            }
16673        } else {
16674            None
16675        };
16676
16677        Ok(opt_rename)
16678    }
16679
16680    /// Parse an optional [`Replace`](ReplaceSelectItem) clause for wildcard select items.
16681    pub fn parse_optional_select_item_replace(
16682        &mut self,
16683    ) -> Result<Option<ReplaceSelectItem>, ParserError> {
16684        let opt_replace = if self.parse_keyword(Keyword::REPLACE) {
16685            if self.consume_token(&Token::LParen) {
16686                let items = self.parse_comma_separated(|parser| {
16687                    Ok(Box::new(parser.parse_replace_elements()?))
16688                })?;
16689                self.expect_token(&Token::RParen)?;
16690                Some(ReplaceSelectItem { items })
16691            } else {
16692                let tok = self.next_token();
16693                return self.expected("( after REPLACE", tok);
16694            }
16695        } else {
16696            None
16697        };
16698
16699        Ok(opt_replace)
16700    }
16701    pub fn parse_replace_elements(&mut self) -> Result<ReplaceSelectElement, ParserError> {
16702        let expr = self.parse_expr()?;
16703        let as_keyword = self.parse_keyword(Keyword::AS);
16704        let ident = self.parse_identifier()?;
16705        Ok(ReplaceSelectElement {
16706            expr,
16707            column_name: ident,
16708            as_keyword,
16709        })
16710    }
16711
16712    /// Parse ASC or DESC, returning `Some(true)` for ASC, `Some(false)` for DESC,
16713    /// or `None` if neither is present.
16714    pub fn parse_asc_desc(&mut self) -> Option<bool> {
16715        if self.parse_keyword(Keyword::ASC) {
16716            Some(true)
16717        } else if self.parse_keyword(Keyword::DESC) {
16718            Some(false)
16719        } else {
16720            None
16721        }
16722    }
16723
16724    /// Parse an [OrderByExpr] expression.
16725    pub fn parse_order_by_expr(&mut self) -> Result<OrderByExpr, ParserError> {
16726        self.parse_order_by_expr_inner(false)
16727            .map(|(order_by, _)| order_by)
16728    }
16729
16730    /// Parse an [IndexColumn].
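    ///
    /// Besides the usual ordering options, an index column may carry a trailing
    /// operator class identifier (PostgreSQL-style). A sketch with illustrative
    /// identifiers:
    ///
    /// ```sql
    /// CREATE INDEX idx_name ON t (col varchar_pattern_ops DESC NULLS LAST);
    /// ```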
16731    pub fn parse_create_index_expr(&mut self) -> Result<IndexColumn, ParserError> {
16732        self.parse_order_by_expr_inner(true)
16733            .map(|(column, operator_class)| IndexColumn {
16734                column,
16735                operator_class,
16736            })
16737    }
16738
16739    fn parse_order_by_expr_inner(
16740        &mut self,
16741        with_operator_class: bool,
16742    ) -> Result<(OrderByExpr, Option<Ident>), ParserError> {
16743        let expr = self.parse_expr()?;
16744
16745        let operator_class: Option<Ident> = if with_operator_class {
16746            // If none of the following keywords is present, parse an identifier
16747            // as the operator class.
16748            if self
16749                .peek_one_of_keywords(&[Keyword::ASC, Keyword::DESC, Keyword::NULLS, Keyword::WITH])
16750                .is_some()
16751            {
16752                None
16753            } else {
16754                self.maybe_parse(|parser| parser.parse_identifier())?
16755            }
16756        } else {
16757            None
16758        };
16759
16760        let options = self.parse_order_by_options()?;
16761
16762        let with_fill = if dialect_of!(self is ClickHouseDialect | GenericDialect)
16763            && self.parse_keywords(&[Keyword::WITH, Keyword::FILL])
16764        {
16765            Some(self.parse_with_fill()?)
16766        } else {
16767            None
16768        };
16769
16770        Ok((
16771            OrderByExpr {
16772                expr,
16773                options,
16774                with_fill,
16775            },
16776            operator_class,
16777        ))
16778    }
16779
16780    fn parse_order_by_options(&mut self) -> Result<OrderByOptions, ParserError> {
16781        let asc = self.parse_asc_desc();
16782
16783        let nulls_first = if self.parse_keywords(&[Keyword::NULLS, Keyword::FIRST]) {
16784            Some(true)
16785        } else if self.parse_keywords(&[Keyword::NULLS, Keyword::LAST]) {
16786            Some(false)
16787        } else {
16788            None
16789        };
16790
16791        Ok(OrderByOptions { asc, nulls_first })
16792    }
16793
16794    // Parse a WITH FILL clause (ClickHouse dialect)
16795    // that follows the WITH FILL keywords in an ORDER BY clause
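    //
    // A sketch of the clause (shown as part of an ORDER BY; all three bounds
    // are optional):
    //
    // ```sql
    // ORDER BY n WITH FILL FROM 1 TO 10 STEP 2
    // ```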
16796    pub fn parse_with_fill(&mut self) -> Result<WithFill, ParserError> {
16797        let from = if self.parse_keyword(Keyword::FROM) {
16798            Some(self.parse_expr()?)
16799        } else {
16800            None
16801        };
16802
16803        let to = if self.parse_keyword(Keyword::TO) {
16804            Some(self.parse_expr()?)
16805        } else {
16806            None
16807        };
16808
16809        let step = if self.parse_keyword(Keyword::STEP) {
16810            Some(self.parse_expr()?)
16811        } else {
16812            None
16813        };
16814
16815        Ok(WithFill { from, to, step })
16816    }
16817
16818    // Parse a set of comma-separated INTERPOLATE expressions (ClickHouse dialect)
16819    // that follow the INTERPOLATE keyword in an ORDER BY clause with the WITH FILL modifier
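    //
    // A sketch (shown as part of an ORDER BY ... WITH FILL):
    //
    // ```sql
    // ORDER BY n WITH FILL INTERPOLATE (value AS value + 1)
    // ```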
16820    pub fn parse_interpolations(&mut self) -> Result<Option<Interpolate>, ParserError> {
16821        if !self.parse_keyword(Keyword::INTERPOLATE) {
16822            return Ok(None);
16823        }
16824
16825        if self.consume_token(&Token::LParen) {
16826            let interpolations =
16827                self.parse_comma_separated0(|p| p.parse_interpolation(), Token::RParen)?;
16828            self.expect_token(&Token::RParen)?;
16829            // INTERPOLATE () and INTERPOLATE ( ... ) variants
16830            return Ok(Some(Interpolate {
16831                exprs: Some(interpolations),
16832            }));
16833        }
16834
16835        // INTERPOLATE
16836        Ok(Some(Interpolate { exprs: None }))
16837    }
16838
16839    // Parse an INTERPOLATE expression (ClickHouse dialect)
16840    pub fn parse_interpolation(&mut self) -> Result<InterpolateExpr, ParserError> {
16841        let column = self.parse_identifier()?;
16842        let expr = if self.parse_keyword(Keyword::AS) {
16843            Some(self.parse_expr()?)
16844        } else {
16845            None
16846        };
16847        Ok(InterpolateExpr { column, expr })
16848    }
16849
16850    /// Parse a TOP clause, the MSSQL equivalent of LIMIT,
16851    /// which follows `SELECT [DISTINCT]`.
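    ///
    /// A sketch of the forms this accepts (illustrative identifiers):
    ///
    /// ```sql
    /// SELECT TOP (10) PERCENT WITH TIES * FROM t ORDER BY score;
    /// ```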
16852    pub fn parse_top(&mut self) -> Result<Top, ParserError> {
16853        let quantity = if self.consume_token(&Token::LParen) {
16854            let quantity = self.parse_expr()?;
16855            self.expect_token(&Token::RParen)?;
16856            Some(TopQuantity::Expr(quantity))
16857        } else {
16858            let next_token = self.next_token();
16859            let quantity = match next_token.token {
16860                Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start)?,
16861                _ => self.expected("literal int", next_token)?,
16862            };
16863            Some(TopQuantity::Constant(quantity))
16864        };
16865
16866        let percent = self.parse_keyword(Keyword::PERCENT);
16867
16868        let with_ties = self.parse_keywords(&[Keyword::WITH, Keyword::TIES]);
16869
16870        Ok(Top {
16871            with_ties,
16872            percent,
16873            quantity,
16874        })
16875    }
16876
16877    /// Parse a LIMIT clause
16878    pub fn parse_limit(&mut self) -> Result<Option<Expr>, ParserError> {
16879        if self.parse_keyword(Keyword::ALL) {
16880            Ok(None)
16881        } else {
16882            Ok(Some(self.parse_expr()?))
16883        }
16884    }
16885
16886    /// Parse an OFFSET clause
16887    pub fn parse_offset(&mut self) -> Result<Offset, ParserError> {
16888        let value = self.parse_expr()?;
16889        let rows = if self.parse_keyword(Keyword::ROW) {
16890            OffsetRows::Row
16891        } else if self.parse_keyword(Keyword::ROWS) {
16892            OffsetRows::Rows
16893        } else {
16894            OffsetRows::None
16895        };
16896        Ok(Offset { value, rows })
16897    }
16898
16899    /// Parse a FETCH clause
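    ///
    /// For example (an illustrative sketch; names are arbitrary):
    ///
    /// ```sql
    /// SELECT * FROM t ORDER BY id FETCH FIRST 10 ROWS ONLY
    /// ```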
16900    pub fn parse_fetch(&mut self) -> Result<Fetch, ParserError> {
16901        let _ = self.parse_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT]);
16902
16903        let (quantity, percent) = if self
16904            .parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])
16905            .is_some()
16906        {
16907            (None, false)
16908        } else {
16909            let quantity = Expr::Value(self.parse_value()?);
16910            let percent = self.parse_keyword(Keyword::PERCENT);
16911            let _ = self.parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS]);
16912            (Some(quantity), percent)
16913        };
16914
16915        let with_ties = if self.parse_keyword(Keyword::ONLY) {
16916            false
16917        } else {
16918            self.parse_keywords(&[Keyword::WITH, Keyword::TIES])
16919        };
16920
16921        Ok(Fetch {
16922            with_ties,
16923            percent,
16924            quantity,
16925        })
16926    }
16927
16928    /// Parse a FOR UPDATE/FOR SHARE clause
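    ///
    /// For example (an illustrative sketch; names are arbitrary):
    ///
    /// ```sql
    /// SELECT * FROM t FOR UPDATE OF t SKIP LOCKED
    /// ```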
16929    pub fn parse_lock(&mut self) -> Result<LockClause, ParserError> {
16930        let lock_type = match self.expect_one_of_keywords(&[Keyword::UPDATE, Keyword::SHARE])? {
16931            Keyword::UPDATE => LockType::Update,
16932            Keyword::SHARE => LockType::Share,
16933            _ => unreachable!(),
16934        };
16935        let of = if self.parse_keyword(Keyword::OF) {
16936            Some(self.parse_object_name(false)?)
16937        } else {
16938            None
16939        };
16940        let nonblock = if self.parse_keyword(Keyword::NOWAIT) {
16941            Some(NonBlock::Nowait)
16942        } else if self.parse_keywords(&[Keyword::SKIP, Keyword::LOCKED]) {
16943            Some(NonBlock::SkipLocked)
16944        } else {
16945            None
16946        };
16947        Ok(LockClause {
16948            lock_type,
16949            of,
16950            nonblock,
16951        })
16952    }
16953
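    /// Parse a `VALUES` list, e.g. (an illustrative sketch; names and
    /// literals are arbitrary):
    ///
    /// ```sql
    /// VALUES (1, 'a'), (2, 'b')
    /// ```
    ///
    /// Rows prefixed with the `ROW` keyword (e.g. `VALUES ROW(1, 'a')`) are
    /// also accepted and flagged via `explicit_row`.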
16954    pub fn parse_values(
16955        &mut self,
16956        allow_empty: bool,
16957        value_keyword: bool,
16958    ) -> Result<Values, ParserError> {
16959        let mut explicit_row = false;
16960
16961        let rows = self.parse_comma_separated(|parser| {
16962            if parser.parse_keyword(Keyword::ROW) {
16963                explicit_row = true;
16964            }
16965
16966            parser.expect_token(&Token::LParen)?;
16967            if allow_empty && parser.peek_token().token == Token::RParen {
16968                parser.next_token();
16969                Ok(vec![])
16970            } else {
16971                let exprs = parser.parse_comma_separated(Parser::parse_expr)?;
16972                parser.expect_token(&Token::RParen)?;
16973                Ok(exprs)
16974            }
16975        })?;
16976        Ok(Values {
16977            explicit_row,
16978            rows,
16979            value_keyword,
16980        })
16981    }
16982
16983    pub fn parse_start_transaction(&mut self) -> Result<Statement, ParserError> {
16984        self.expect_keyword_is(Keyword::TRANSACTION)?;
16985        Ok(Statement::StartTransaction {
16986            modes: self.parse_transaction_modes()?,
16987            begin: false,
16988            transaction: Some(BeginTransactionKind::Transaction),
16989            modifier: None,
16990            statements: vec![],
16991            exception: None,
16992            has_end_keyword: false,
16993        })
16994    }
16995
16996    pub fn parse_begin(&mut self) -> Result<Statement, ParserError> {
16997        let modifier = if !self.dialect.supports_start_transaction_modifier() {
16998            None
16999        } else if self.parse_keyword(Keyword::DEFERRED) {
17000            Some(TransactionModifier::Deferred)
17001        } else if self.parse_keyword(Keyword::IMMEDIATE) {
17002            Some(TransactionModifier::Immediate)
17003        } else if self.parse_keyword(Keyword::EXCLUSIVE) {
17004            Some(TransactionModifier::Exclusive)
17005        } else if self.parse_keyword(Keyword::TRY) {
17006            Some(TransactionModifier::Try)
17007        } else if self.parse_keyword(Keyword::CATCH) {
17008            Some(TransactionModifier::Catch)
17009        } else {
17010            None
17011        };
17012        let transaction = match self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]) {
17013            Some(Keyword::TRANSACTION) => Some(BeginTransactionKind::Transaction),
17014            Some(Keyword::WORK) => Some(BeginTransactionKind::Work),
17015            _ => None,
17016        };
17017        Ok(Statement::StartTransaction {
17018            modes: self.parse_transaction_modes()?,
17019            begin: true,
17020            transaction,
17021            modifier,
17022            statements: vec![],
17023            exception: None,
17024            has_end_keyword: false,
17025        })
17026    }
17027
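    /// Parse a `BEGIN ... [EXCEPTION WHEN ... THEN ...] END` block, e.g.
    /// (an illustrative BigQuery-style sketch; the exception name is
    /// arbitrary):
    ///
    /// ```sql
    /// BEGIN
    ///     SELECT 1;
    /// EXCEPTION WHEN ERROR THEN
    ///     SELECT 2;
    /// END
    /// ```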
17028    pub fn parse_begin_exception_end(&mut self) -> Result<Statement, ParserError> {
17029        let statements = self.parse_statement_list(&[Keyword::EXCEPTION, Keyword::END])?;
17030
17031        let exception = if self.parse_keyword(Keyword::EXCEPTION) {
17032            let mut when = Vec::new();
17033
17034            // We can have multiple `WHEN` arms so we consume all cases until `END`
17035            while !self.peek_keyword(Keyword::END) {
17036                self.expect_keyword(Keyword::WHEN)?;
17037
17038                // Each `WHEN` case can have one or more conditions, e.g.
17039                // WHEN EXCEPTION_1 [OR EXCEPTION_2] THEN
17040                // So we parse identifiers until the `THEN` keyword.
17041                let mut idents = Vec::new();
17042
17043                while !self.parse_keyword(Keyword::THEN) {
17044                    let ident = self.parse_identifier()?;
17045                    idents.push(ident);
17046
17047                    self.maybe_parse(|p| p.expect_keyword(Keyword::OR))?;
17048                }
17049
17050                let statements = self.parse_statement_list(&[Keyword::WHEN, Keyword::END])?;
17051
17052                when.push(ExceptionWhen { idents, statements });
17053            }
17054
17055            Some(when)
17056        } else {
17057            None
17058        };
17059
17060        self.expect_keyword(Keyword::END)?;
17061
17062        Ok(Statement::StartTransaction {
17063            begin: true,
17064            statements,
17065            exception,
17066            has_end_keyword: true,
17067            transaction: None,
17068            modifier: None,
17069            modes: Default::default(),
17070        })
17071    }
17072
17073    pub fn parse_end(&mut self) -> Result<Statement, ParserError> {
17074        let modifier = if !self.dialect.supports_end_transaction_modifier() {
17075            None
17076        } else if self.parse_keyword(Keyword::TRY) {
17077            Some(TransactionModifier::Try)
17078        } else if self.parse_keyword(Keyword::CATCH) {
17079            Some(TransactionModifier::Catch)
17080        } else {
17081            None
17082        };
17083        Ok(Statement::Commit {
17084            chain: self.parse_commit_rollback_chain()?,
17085            end: true,
17086            modifier,
17087        })
17088    }
17089
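    /// Parse the optional transaction modes that may follow
    /// `START TRANSACTION` or `BEGIN`, e.g. (an illustrative sketch):
    ///
    /// ```sql
    /// START TRANSACTION ISOLATION LEVEL REPEATABLE READ, READ WRITE
    /// ```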
17090    pub fn parse_transaction_modes(&mut self) -> Result<Vec<TransactionMode>, ParserError> {
17091        let mut modes = vec![];
17092        let mut required = false;
17093        loop {
17094            let mode = if self.parse_keywords(&[Keyword::ISOLATION, Keyword::LEVEL]) {
17095                let iso_level = if self.parse_keywords(&[Keyword::READ, Keyword::UNCOMMITTED]) {
17096                    TransactionIsolationLevel::ReadUncommitted
17097                } else if self.parse_keywords(&[Keyword::READ, Keyword::COMMITTED]) {
17098                    TransactionIsolationLevel::ReadCommitted
17099                } else if self.parse_keywords(&[Keyword::REPEATABLE, Keyword::READ]) {
17100                    TransactionIsolationLevel::RepeatableRead
17101                } else if self.parse_keyword(Keyword::SERIALIZABLE) {
17102                    TransactionIsolationLevel::Serializable
17103                } else if self.parse_keyword(Keyword::SNAPSHOT) {
17104                    TransactionIsolationLevel::Snapshot
17105                } else {
17106                    self.expected("isolation level", self.peek_token())?
17107                };
17108                TransactionMode::IsolationLevel(iso_level)
17109            } else if self.parse_keywords(&[Keyword::READ, Keyword::ONLY]) {
17110                TransactionMode::AccessMode(TransactionAccessMode::ReadOnly)
17111            } else if self.parse_keywords(&[Keyword::READ, Keyword::WRITE]) {
17112                TransactionMode::AccessMode(TransactionAccessMode::ReadWrite)
17113            } else if required {
17114                self.expected("transaction mode", self.peek_token())?
17115            } else {
17116                break;
17117            };
17118            modes.push(mode);
17119            // ANSI requires a comma after each transaction mode, but
17120            // PostgreSQL, for historical reasons, does not. We follow
17121            // PostgreSQL in making the comma optional, since that is strictly
17122            // more general.
17123            required = self.consume_token(&Token::Comma);
17124        }
17125        Ok(modes)
17126    }
17127
17128    pub fn parse_commit(&mut self) -> Result<Statement, ParserError> {
17129        Ok(Statement::Commit {
17130            chain: self.parse_commit_rollback_chain()?,
17131            end: false,
17132            modifier: None,
17133        })
17134    }
17135
17136    pub fn parse_rollback(&mut self) -> Result<Statement, ParserError> {
17137        let chain = self.parse_commit_rollback_chain()?;
17138        let savepoint = self.parse_rollback_savepoint()?;
17139
17140        Ok(Statement::Rollback { chain, savepoint })
17141    }
17142
17143    pub fn parse_commit_rollback_chain(&mut self) -> Result<bool, ParserError> {
17144        let _ = self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]);
17145        if self.parse_keyword(Keyword::AND) {
17146            let chain = !self.parse_keyword(Keyword::NO);
17147            self.expect_keyword_is(Keyword::CHAIN)?;
17148            Ok(chain)
17149        } else {
17150            Ok(false)
17151        }
17152    }
17153
17154    pub fn parse_rollback_savepoint(&mut self) -> Result<Option<Ident>, ParserError> {
17155        if self.parse_keyword(Keyword::TO) {
17156            let _ = self.parse_keyword(Keyword::SAVEPOINT);
17157            let savepoint = self.parse_identifier()?;
17158
17159            Ok(Some(savepoint))
17160        } else {
17161            Ok(None)
17162        }
17163    }
17164
17165    /// Parse a `RAISERROR` statement
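    ///
    /// For example (an illustrative T-SQL sketch; the message, severity and
    /// state values are arbitrary):
    ///
    /// ```sql
    /// RAISERROR('Something failed: %s', 16, 1, 'details') WITH LOG
    /// ```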
17166    pub fn parse_raiserror(&mut self) -> Result<Statement, ParserError> {
17167        self.expect_token(&Token::LParen)?;
17168        let message = Box::new(self.parse_expr()?);
17169        self.expect_token(&Token::Comma)?;
17170        let severity = Box::new(self.parse_expr()?);
17171        self.expect_token(&Token::Comma)?;
17172        let state = Box::new(self.parse_expr()?);
17173        let arguments = if self.consume_token(&Token::Comma) {
17174            self.parse_comma_separated(Parser::parse_expr)?
17175        } else {
17176            vec![]
17177        };
17178        self.expect_token(&Token::RParen)?;
17179        let options = if self.parse_keyword(Keyword::WITH) {
17180            self.parse_comma_separated(Parser::parse_raiserror_option)?
17181        } else {
17182            vec![]
17183        };
17184        Ok(Statement::RaisError {
17185            message,
17186            severity,
17187            state,
17188            arguments,
17189            options,
17190        })
17191    }
17192
17193    pub fn parse_raiserror_option(&mut self) -> Result<RaisErrorOption, ParserError> {
17194        match self.expect_one_of_keywords(&[Keyword::LOG, Keyword::NOWAIT, Keyword::SETERROR])? {
17195            Keyword::LOG => Ok(RaisErrorOption::Log),
17196            Keyword::NOWAIT => Ok(RaisErrorOption::NoWait),
17197            Keyword::SETERROR => Ok(RaisErrorOption::SetError),
17198            _ => self.expected(
17199                "LOG, NOWAIT OR SETERROR raiserror option",
17200                self.peek_token(),
17201            ),
17202        }
17203    }
17204
17205    pub fn parse_deallocate(&mut self) -> Result<Statement, ParserError> {
17206        let prepare = self.parse_keyword(Keyword::PREPARE);
17207        let name = self.parse_identifier()?;
17208        Ok(Statement::Deallocate { name, prepare })
17209    }
17210
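    /// Parse an `EXECUTE` statement, e.g. (an illustrative sketch; the
    /// statement name and arguments are arbitrary):
    ///
    /// ```sql
    /// EXECUTE my_statement (1, 'a')
    /// ```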
17211    pub fn parse_execute(&mut self) -> Result<Statement, ParserError> {
17212        let name = if self.dialect.supports_execute_immediate()
17213            && self.parse_keyword(Keyword::IMMEDIATE)
17214        {
17215            None
17216        } else {
17217            let name = self.parse_object_name(false)?;
17218            Some(name)
17219        };
17220
17221        let has_parentheses = self.consume_token(&Token::LParen);
17222
17223        let end_kws = &[Keyword::USING, Keyword::OUTPUT, Keyword::DEFAULT];
17224        let end_token = match (has_parentheses, self.peek_token().token) {
17225            (true, _) => Token::RParen,
17226            (false, Token::EOF) => Token::EOF,
17227            (false, Token::Word(w)) if end_kws.contains(&w.keyword) => Token::Word(w),
17228            (false, _) => Token::SemiColon,
17229        };
17230
17231        let parameters = self.parse_comma_separated0(Parser::parse_expr, end_token)?;
17232
17233        if has_parentheses {
17234            self.expect_token(&Token::RParen)?;
17235        }
17236
17237        let into = if self.parse_keyword(Keyword::INTO) {
17238            self.parse_comma_separated(Self::parse_identifier)?
17239        } else {
17240            vec![]
17241        };
17242
17243        let using = if self.parse_keyword(Keyword::USING) {
17244            self.parse_comma_separated(Self::parse_expr_with_alias)?
17245        } else {
17246            vec![]
17247        };
17248
17249        let output = self.parse_keyword(Keyword::OUTPUT);
17250
17251        let default = self.parse_keyword(Keyword::DEFAULT);
17252
17253        Ok(Statement::Execute {
17254            immediate: name.is_none(),
17255            name,
17256            parameters,
17257            has_parentheses,
17258            into,
17259            using,
17260            output,
17261            default,
17262        })
17263    }
17264
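    /// Parse a `PREPARE` statement, e.g. (an illustrative PostgreSQL-style
    /// sketch; names and types are arbitrary):
    ///
    /// ```sql
    /// PREPARE stmt (INT, TEXT) AS SELECT * FROM t WHERE id = $1
    /// ```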
17265    pub fn parse_prepare(&mut self) -> Result<Statement, ParserError> {
17266        let name = self.parse_identifier()?;
17267
17268        let mut data_types = vec![];
17269        if self.consume_token(&Token::LParen) {
17270            data_types = self.parse_comma_separated(Parser::parse_data_type)?;
17271            self.expect_token(&Token::RParen)?;
17272        }
17273
17274        self.expect_keyword_is(Keyword::AS)?;
17275        let statement = Box::new(self.parse_statement()?);
17276        Ok(Statement::Prepare {
17277            name,
17278            data_types,
17279            statement,
17280        })
17281    }
17282
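    /// Parse an `UNLOAD` statement, e.g. (an illustrative Redshift-style
    /// sketch; the query and S3 path are arbitrary):
    ///
    /// ```sql
    /// UNLOAD ('SELECT * FROM venue') TO 's3://my-bucket/unload/'
    /// ```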
17283    pub fn parse_unload(&mut self) -> Result<Statement, ParserError> {
17284        self.expect_keyword(Keyword::UNLOAD)?;
17285        self.expect_token(&Token::LParen)?;
17286        let (query, query_text) = if matches!(self.peek_token().token, Token::SingleQuotedString(_))
17287        {
17288            (None, Some(self.parse_literal_string()?))
17289        } else {
17290            (Some(self.parse_query()?), None)
17291        };
17292        self.expect_token(&Token::RParen)?;
17293
17294        self.expect_keyword_is(Keyword::TO)?;
17295        let to = self.parse_identifier()?;
17296        let auth = if self.parse_keyword(Keyword::IAM_ROLE) {
17297            Some(self.parse_iam_role_kind()?)
17298        } else {
17299            None
17300        };
17301        let with = self.parse_options(Keyword::WITH)?;
17302        let mut options = vec![];
17303        while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
17304            options.push(opt);
17305        }
17306        Ok(Statement::Unload {
17307            query,
17308            query_text,
17309            to,
17310            auth,
17311            with,
17312            options,
17313        })
17314    }
17315
17316    pub fn parse_merge_clauses(&mut self) -> Result<Vec<MergeClause>, ParserError> {
17317        let mut clauses = vec![];
17318        loop {
17319            if !(self.parse_keyword(Keyword::WHEN)) {
17320                break;
17321            }
17322            let when_token = self.get_current_token().clone();
17323
17324            let mut clause_kind = MergeClauseKind::Matched;
17325            if self.parse_keyword(Keyword::NOT) {
17326                clause_kind = MergeClauseKind::NotMatched;
17327            }
17328            self.expect_keyword_is(Keyword::MATCHED)?;
17329
17330            if matches!(clause_kind, MergeClauseKind::NotMatched)
17331                && self.parse_keywords(&[Keyword::BY, Keyword::SOURCE])
17332            {
17333                clause_kind = MergeClauseKind::NotMatchedBySource;
17334            } else if matches!(clause_kind, MergeClauseKind::NotMatched)
17335                && self.parse_keywords(&[Keyword::BY, Keyword::TARGET])
17336            {
17337                clause_kind = MergeClauseKind::NotMatchedByTarget;
17338            }
17339
17340            let predicate = if self.parse_keyword(Keyword::AND) {
17341                Some(self.parse_expr()?)
17342            } else {
17343                None
17344            };
17345
17346            self.expect_keyword_is(Keyword::THEN)?;
17347
17348            let merge_clause = match self.parse_one_of_keywords(&[
17349                Keyword::UPDATE,
17350                Keyword::INSERT,
17351                Keyword::DELETE,
17352            ]) {
17353                Some(Keyword::UPDATE) => {
17354                    if matches!(
17355                        clause_kind,
17356                        MergeClauseKind::NotMatched | MergeClauseKind::NotMatchedByTarget
17357                    ) {
17358                        return parser_err!(
17359                            format_args!("UPDATE is not allowed in a {clause_kind} merge clause"),
17360                            self.get_current_token().span.start
17361                        );
17362                    }
17363
17364                    let update_token = self.get_current_token().clone();
17365                    self.expect_keyword_is(Keyword::SET)?;
17366                    MergeAction::Update {
17367                        update_token: update_token.into(),
17368                        assignments: self.parse_comma_separated(Parser::parse_assignment)?,
17369                    }
17370                }
17371                Some(Keyword::DELETE) => {
17372                    if matches!(
17373                        clause_kind,
17374                        MergeClauseKind::NotMatched | MergeClauseKind::NotMatchedByTarget
17375                    ) {
17376                        return parser_err!(
17377                            format_args!("DELETE is not allowed in a {clause_kind} merge clause"),
17378                            self.get_current_token().span.start
17379                        );
17380                    };
17381
17382                    let delete_token = self.get_current_token().clone();
17383                    MergeAction::Delete {
17384                        delete_token: delete_token.into(),
17385                    }
17386                }
17387                Some(Keyword::INSERT) => {
17388                    if !matches!(
17389                        clause_kind,
17390                        MergeClauseKind::NotMatched | MergeClauseKind::NotMatchedByTarget
17391                    ) {
17392                        return parser_err!(
17393                            format_args!("INSERT is not allowed in a {clause_kind} merge clause"),
17394                            self.get_current_token().span.start
17395                        );
17396                    };
17397
17398                    let insert_token = self.get_current_token().clone();
17399                    let is_mysql = dialect_of!(self is MySqlDialect);
17400
17401                    let columns = self.parse_parenthesized_column_list(Optional, is_mysql)?;
17402                    let (kind, kind_token) = if dialect_of!(self is BigQueryDialect | GenericDialect)
17403                        && self.parse_keyword(Keyword::ROW)
17404                    {
17405                        (MergeInsertKind::Row, self.get_current_token().clone())
17406                    } else {
17407                        self.expect_keyword_is(Keyword::VALUES)?;
17408                        let values_token = self.get_current_token().clone();
17409                        let values = self.parse_values(is_mysql, false)?;
17410                        (MergeInsertKind::Values(values), values_token)
17411                    };
17412                    MergeAction::Insert(MergeInsertExpr {
17413                        insert_token: insert_token.into(),
17414                        columns,
17415                        kind_token: kind_token.into(),
17416                        kind,
17417                    })
17418                }
17419                _ => {
17420                    return parser_err!(
17421                        "expected UPDATE, DELETE or INSERT in merge clause",
17422                        self.peek_token_ref().span.start
17423                    );
17424                }
17425            };
17426            clauses.push(MergeClause {
17427                when_token: when_token.into(),
17428                clause_kind,
17429                predicate,
17430                action: merge_clause,
17431            });
17432        }
17433        Ok(clauses)
17434    }
17435
17436    fn parse_output(
17437        &mut self,
17438        start_keyword: Keyword,
17439        start_token: TokenWithSpan,
17440    ) -> Result<OutputClause, ParserError> {
17441        let select_items = self.parse_projection()?;
17442        let into_table = if start_keyword == Keyword::OUTPUT && self.peek_keyword(Keyword::INTO) {
17443            self.expect_keyword_is(Keyword::INTO)?;
17444            Some(self.parse_select_into()?)
17445        } else {
17446            None
17447        };
17448
17449        Ok(if start_keyword == Keyword::OUTPUT {
17450            OutputClause::Output {
17451                output_token: start_token.into(),
17452                select_items,
17453                into_table,
17454            }
17455        } else {
17456            OutputClause::Returning {
17457                returning_token: start_token.into(),
17458                select_items,
17459            }
17460        })
17461    }
17462
17463    fn parse_select_into(&mut self) -> Result<SelectInto, ParserError> {
17464        let temporary = self
17465            .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
17466            .is_some();
17467        let unlogged = self.parse_keyword(Keyword::UNLOGGED);
17468        let table = self.parse_keyword(Keyword::TABLE);
17469        let name = self.parse_object_name(false)?;
17470
17471        Ok(SelectInto {
17472            temporary,
17473            unlogged,
17474            table,
17475            name,
17476        })
17477    }
17478
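    /// Parse a `MERGE` statement, e.g. (an illustrative sketch; table and
    /// column names are arbitrary):
    ///
    /// ```sql
    /// MERGE INTO target AS t
    /// USING source AS s ON t.id = s.id
    /// WHEN MATCHED THEN UPDATE SET t.value = s.value
    /// WHEN NOT MATCHED THEN INSERT (id, value) VALUES (s.id, s.value)
    /// ```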
17479    pub fn parse_merge(&mut self, merge_token: TokenWithSpan) -> Result<Statement, ParserError> {
17480        let into = self.parse_keyword(Keyword::INTO);
17481
17482        let table = self.parse_table_factor()?;
17483
17484        self.expect_keyword_is(Keyword::USING)?;
17485        let source = self.parse_table_factor()?;
17486        self.expect_keyword_is(Keyword::ON)?;
17487        let on = self.parse_expr()?;
17488        let clauses = self.parse_merge_clauses()?;
17489        let output = match self.parse_one_of_keywords(&[Keyword::OUTPUT, Keyword::RETURNING]) {
17490            Some(keyword) => Some(self.parse_output(keyword, self.get_current_token().clone())?),
17491            None => None,
17492        };
17493
17494        Ok(Statement::Merge {
17495            merge_token: merge_token.into(),
17496            into,
17497            table,
17498            source,
17499            on: Box::new(on),
17500            clauses,
17501            output,
17502        })
17503    }
17504
17505    fn parse_pragma_value(&mut self) -> Result<Value, ParserError> {
17506        match self.parse_value()?.value {
17507            v @ Value::SingleQuotedString(_) => Ok(v),
17508            v @ Value::DoubleQuotedString(_) => Ok(v),
17509            v @ Value::Number(_, _) => Ok(v),
17510            v @ Value::Placeholder(_) => Ok(v),
17511            _ => {
17512                self.prev_token();
17513                self.expected("number or string or ? placeholder", self.peek_token())
17514            }
17515        }
17516    }
17517
17518    // PRAGMA [schema-name '.'] pragma-name [('=' pragma-value) | '(' pragma-value ')']
17519    pub fn parse_pragma(&mut self) -> Result<Statement, ParserError> {
17520        let name = self.parse_object_name(false)?;
17521        if self.consume_token(&Token::LParen) {
17522            let value = self.parse_pragma_value()?;
17523            self.expect_token(&Token::RParen)?;
17524            Ok(Statement::Pragma {
17525                name,
17526                value: Some(value),
17527                is_eq: false,
17528            })
17529        } else if self.consume_token(&Token::Eq) {
17530            Ok(Statement::Pragma {
17531                name,
17532                value: Some(self.parse_pragma_value()?),
17533                is_eq: true,
17534            })
17535        } else {
17536            Ok(Statement::Pragma {
17537                name,
17538                value: None,
17539                is_eq: false,
17540            })
17541        }
17542    }
17543
17544    /// `INSTALL [extension_name]`
17545    pub fn parse_install(&mut self) -> Result<Statement, ParserError> {
17546        let extension_name = self.parse_identifier()?;
17547
17548        Ok(Statement::Install { extension_name })
17549    }
17550
17551    /// Parse a SQL LOAD statement
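    ///
    /// Depending on the dialect this is either `LOAD <extension_name>` or a
    /// Hive-style `LOAD DATA` statement, e.g. (an illustrative sketch; the
    /// path and table name are arbitrary):
    ///
    /// ```sql
    /// LOAD DATA LOCAL INPATH '/tmp/data.csv' OVERWRITE INTO TABLE t
    /// ```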
17552    pub fn parse_load(&mut self) -> Result<Statement, ParserError> {
17553        if self.dialect.supports_load_extension() {
17554            let extension_name = self.parse_identifier()?;
17555            Ok(Statement::Load { extension_name })
17556        } else if self.parse_keyword(Keyword::DATA) && self.dialect.supports_load_data() {
17557            let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
17558            self.expect_keyword_is(Keyword::INPATH)?;
17559            let inpath = self.parse_literal_string()?;
17560            let overwrite = self.parse_one_of_keywords(&[Keyword::OVERWRITE]).is_some();
17561            self.expect_keyword_is(Keyword::INTO)?;
17562            self.expect_keyword_is(Keyword::TABLE)?;
17563            let table_name = self.parse_object_name(false)?;
17564            let partitioned = self.parse_insert_partition()?;
17565            let table_format = self.parse_load_data_table_format()?;
17566            Ok(Statement::LoadData {
17567                local,
17568                inpath,
17569                overwrite,
17570                table_name,
17571                partitioned,
17572                table_format,
17573            })
17574        } else {
17575            self.expected(
17576                "`DATA` or an extension name after `LOAD`",
17577                self.peek_token(),
17578            )
17579        }
17580    }
17581
17582    /// ```sql
17583    /// OPTIMIZE TABLE [db.]name [ON CLUSTER cluster] [PARTITION partition | PARTITION ID 'partition_id'] [FINAL] [DEDUPLICATE [BY expression]]
17584    /// ```
17585    /// [ClickHouse](https://clickhouse.com/docs/en/sql-reference/statements/optimize)
17586    pub fn parse_optimize_table(&mut self) -> Result<Statement, ParserError> {
17587        self.expect_keyword_is(Keyword::TABLE)?;
17588        let name = self.parse_object_name(false)?;
17589        let on_cluster = self.parse_optional_on_cluster()?;
17590
17591        let partition = if self.parse_keyword(Keyword::PARTITION) {
17592            if self.parse_keyword(Keyword::ID) {
17593                Some(Partition::Identifier(self.parse_identifier()?))
17594            } else {
17595                Some(Partition::Expr(self.parse_expr()?))
17596            }
17597        } else {
17598            None
17599        };
17600
17601        let include_final = self.parse_keyword(Keyword::FINAL);
17602        let deduplicate = if self.parse_keyword(Keyword::DEDUPLICATE) {
17603            if self.parse_keyword(Keyword::BY) {
17604                Some(Deduplicate::ByExpression(self.parse_expr()?))
17605            } else {
17606                Some(Deduplicate::All)
17607            }
17608        } else {
17609            None
17610        };
17611
17612        Ok(Statement::OptimizeTable {
17613            name,
17614            on_cluster,
17615            partition,
17616            include_final,
17617            deduplicate,
17618        })
17619    }
17620
17621    /// ```sql
17622    /// CREATE [ { TEMPORARY | TEMP } ] SEQUENCE [ IF NOT EXISTS ] <sequence_name>
17623    /// ```
17624    ///
17625    /// See [Postgres docs](https://www.postgresql.org/docs/current/sql-createsequence.html) for more details.
17626    pub fn parse_create_sequence(&mut self, temporary: bool) -> Result<Statement, ParserError> {
17627        //[ IF NOT EXISTS ]
17628        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
17629        //name
17630        let name = self.parse_object_name(false)?;
17631        //[ AS data_type ]
17632        let mut data_type: Option<DataType> = None;
17633        if self.parse_keywords(&[Keyword::AS]) {
17634            data_type = Some(self.parse_data_type()?)
17635        }
17636        let sequence_options = self.parse_create_sequence_options()?;
17637        // [ OWNED BY { table_name.column_name | NONE } ]
17638        let owned_by = if self.parse_keywords(&[Keyword::OWNED, Keyword::BY]) {
17639            if self.parse_keywords(&[Keyword::NONE]) {
17640                Some(ObjectName::from(vec![Ident::new("NONE")]))
17641            } else {
17642                Some(self.parse_object_name(false)?)
17643            }
17644        } else {
17645            None
17646        };
17647        Ok(Statement::CreateSequence {
17648            temporary,
17649            if_not_exists,
17650            name,
17651            data_type,
17652            sequence_options,
17653            owned_by,
17654        })
17655    }
17656
17657    fn parse_create_sequence_options(&mut self) -> Result<Vec<SequenceOptions>, ParserError> {
17658        let mut sequence_options = vec![];
17659        //[ INCREMENT [ BY ] increment ]
17660        if self.parse_keywords(&[Keyword::INCREMENT]) {
17661            if self.parse_keywords(&[Keyword::BY]) {
17662                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, true));
17663            } else {
17664                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, false));
17665            }
17666        }
17667        //[ MINVALUE minvalue | NO MINVALUE ]
17668        if self.parse_keyword(Keyword::MINVALUE) {
17669            sequence_options.push(SequenceOptions::MinValue(Some(self.parse_number()?)));
17670        } else if self.parse_keywords(&[Keyword::NO, Keyword::MINVALUE]) {
17671            sequence_options.push(SequenceOptions::MinValue(None));
17672        }
17673        //[ MAXVALUE maxvalue | NO MAXVALUE ]
17674        if self.parse_keywords(&[Keyword::MAXVALUE]) {
17675            sequence_options.push(SequenceOptions::MaxValue(Some(self.parse_number()?)));
17676        } else if self.parse_keywords(&[Keyword::NO, Keyword::MAXVALUE]) {
17677            sequence_options.push(SequenceOptions::MaxValue(None));
17678        }
17679
17680        //[ START [ WITH ] start ]
17681        if self.parse_keywords(&[Keyword::START]) {
17682            if self.parse_keywords(&[Keyword::WITH]) {
17683                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, true));
17684            } else {
17685                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, false));
17686            }
17687        }
17688        //[ CACHE cache ]
17689        if self.parse_keywords(&[Keyword::CACHE]) {
17690            sequence_options.push(SequenceOptions::Cache(self.parse_number()?));
17691        }
17692        // [ [ NO ] CYCLE ]
17693        if self.parse_keywords(&[Keyword::NO, Keyword::CYCLE]) {
17694            sequence_options.push(SequenceOptions::Cycle(true));
17695        } else if self.parse_keywords(&[Keyword::CYCLE]) {
17696            sequence_options.push(SequenceOptions::Cycle(false));
17697        }
17698
17699        Ok(sequence_options)
17700    }
17701
17702    /// Parse a `CREATE SERVER` statement.
17703    ///
17704    /// See [Statement::CreateServer]
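    ///
    /// For example (an illustrative sketch; the server name, wrapper and
    /// options are arbitrary):
    ///
    /// ```sql
    /// CREATE SERVER myserver FOREIGN DATA WRAPPER postgres_fdw
    ///     OPTIONS (host 'localhost', port '5432')
    /// ```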
17705    pub fn parse_pg_create_server(&mut self) -> Result<Statement, ParserError> {
17706        let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
17707        let name = self.parse_object_name(false)?;
17708
17709        let server_type = if self.parse_keyword(Keyword::TYPE) {
17710            Some(self.parse_identifier()?)
17711        } else {
17712            None
17713        };
17714
17715        let version = if self.parse_keyword(Keyword::VERSION) {
17716            Some(self.parse_identifier()?)
17717        } else {
17718            None
17719        };
17720
17721        self.expect_keywords(&[Keyword::FOREIGN, Keyword::DATA, Keyword::WRAPPER])?;
17722        let foreign_data_wrapper = self.parse_object_name(false)?;
17723
17724        let mut options = None;
17725        if self.parse_keyword(Keyword::OPTIONS) {
17726            self.expect_token(&Token::LParen)?;
17727            options = Some(self.parse_comma_separated(|p| {
17728                let key = p.parse_identifier()?;
17729                let value = p.parse_identifier()?;
17730                Ok(CreateServerOption { key, value })
17731            })?);
17732            self.expect_token(&Token::RParen)?;
17733        }
17734
17735        Ok(Statement::CreateServer(CreateServerStatement {
17736            name,
17737            if_not_exists: ine,
17738            server_type,
17739            version,
17740            foreign_data_wrapper,
17741            options,
17742        }))
17743    }
17744
17745    /// The index of the first unprocessed token.
17746    pub fn index(&self) -> usize {
17747        self.index
17748    }
17749
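    /// Parse a single named window definition from a `WINDOW` clause, e.g.
    /// (an illustrative sketch; names are arbitrary):
    ///
    /// ```sql
    /// SELECT * FROM t WINDOW w AS (PARTITION BY a ORDER BY b)
    /// ```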
17750    pub fn parse_named_window(&mut self) -> Result<NamedWindowDefinition, ParserError> {
17751        let ident = self.parse_identifier()?;
17752        self.expect_keyword_is(Keyword::AS)?;
17753
17754        let window_expr = if self.consume_token(&Token::LParen) {
17755            NamedWindowExpr::WindowSpec(self.parse_window_spec()?)
17756        } else if self.dialect.supports_window_clause_named_window_reference() {
17757            NamedWindowExpr::NamedWindow(self.parse_identifier()?)
17758        } else {
17759            return self.expected("(", self.peek_token());
17760        };
17761
17762        Ok(NamedWindowDefinition(ident, window_expr))
17763    }
17764
17765    pub fn parse_create_procedure(&mut self, or_alter: bool) -> Result<Statement, ParserError> {
17766        let name = self.parse_object_name(false)?;
17767        let params = self.parse_optional_procedure_parameters()?;
17768
17769        let language = if self.parse_keyword(Keyword::LANGUAGE) {
17770            Some(self.parse_identifier()?)
17771        } else {
17772            None
17773        };
17774
17775        self.expect_keyword_is(Keyword::AS)?;
17776
17777        let body = self.parse_conditional_statements(&[Keyword::END])?;
17778
17779        Ok(Statement::CreateProcedure {
17780            name,
17781            or_alter,
17782            params,
17783            language,
17784            body,
17785        })
17786    }
17787
17788    pub fn parse_window_spec(&mut self) -> Result<WindowSpec, ParserError> {
17789        let window_name = match self.peek_token().token {
17790            Token::Word(word) if word.keyword == Keyword::NoKeyword => {
17791                self.parse_optional_ident()?
17792            }
17793            _ => None,
17794        };
17795
17796        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
17797            self.parse_comma_separated(Parser::parse_expr)?
17798        } else {
17799            vec![]
17800        };
17801        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
17802            self.parse_comma_separated(Parser::parse_order_by_expr)?
17803        } else {
17804            vec![]
17805        };
17806
17807        let window_frame = if !self.consume_token(&Token::RParen) {
17808            let window_frame = self.parse_window_frame()?;
17809            self.expect_token(&Token::RParen)?;
17810            Some(window_frame)
17811        } else {
17812            None
17813        };
17814        Ok(WindowSpec {
17815            window_name,
17816            partition_by,
17817            order_by,
17818            window_frame,
17819        })
17820    }
17821
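    /// Parse the remainder of a `CREATE TYPE` statement, e.g. (illustrative
    /// PostgreSQL-style sketches; type and attribute names are arbitrary):
    ///
    /// ```sql
    /// CREATE TYPE mood AS ENUM ('sad', 'ok', 'happy')
    /// ```
    ///
    /// ```sql
    /// CREATE TYPE complex AS (r DOUBLE PRECISION, i DOUBLE PRECISION)
    /// ```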
17822    pub fn parse_create_type(&mut self) -> Result<Statement, ParserError> {
17823        let name = self.parse_object_name(false)?;
17824
17825        // Check if we have AS keyword
17826        let has_as = self.parse_keyword(Keyword::AS);
17827
17828        if !has_as {
17829            // Two cases: CREATE TYPE name; or CREATE TYPE name (options);
17830            if self.consume_token(&Token::LParen) {
17831                // CREATE TYPE name (options) - SQL definition without AS
17832                let options = self.parse_create_type_sql_definition_options()?;
17833                self.expect_token(&Token::RParen)?;
17834                return Ok(Statement::CreateType {
17835                    name,
17836                    representation: Some(UserDefinedTypeRepresentation::SqlDefinition { options }),
17837                });
17838            }
17839
17840            // CREATE TYPE name; - no representation
17841            return Ok(Statement::CreateType {
17842                name,
17843                representation: None,
17844            });
17845        }
17846
17847        // We have AS keyword
17848        if self.parse_keyword(Keyword::ENUM) {
17849            // CREATE TYPE name AS ENUM (labels)
17850            self.parse_create_type_enum(name)
17851        } else if self.parse_keyword(Keyword::RANGE) {
17852            // CREATE TYPE name AS RANGE (options)
17853            self.parse_create_type_range(name)
17854        } else if self.consume_token(&Token::LParen) {
17855            // CREATE TYPE name AS (attributes) - Composite
17856            self.parse_create_type_composite(name)
17857        } else {
17858            self.expected("ENUM, RANGE, or '(' after AS", self.peek_token())
17859        }
17860    }
17861
17862    /// Parse remainder of `CREATE TYPE AS (attributes)` statement (composite type)
17863    ///
17864    /// See [PostgreSQL](https://www.postgresql.org/docs/current/sql-createtype.html)
17865    fn parse_create_type_composite(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
17866        if self.consume_token(&Token::RParen) {
17867            // Empty composite type
17868            return Ok(Statement::CreateType {
17869                name,
17870                representation: Some(UserDefinedTypeRepresentation::Composite {
17871                    attributes: vec![],
17872                }),
17873            });
17874        }
17875
17876        let mut attributes = vec![];
17877        loop {
17878            let attr_name = self.parse_identifier()?;
17879            let attr_data_type = self.parse_data_type()?;
17880            let attr_collation = if self.parse_keyword(Keyword::COLLATE) {
17881                Some(self.parse_object_name(false)?)
17882            } else {
17883                None
17884            };
17885            attributes.push(UserDefinedTypeCompositeAttributeDef {
17886                name: attr_name,
17887                data_type: attr_data_type,
17888                collation: attr_collation,
17889            });
17890
17891            if !self.consume_token(&Token::Comma) {
17892                break;
17893            }
17894        }
17895        self.expect_token(&Token::RParen)?;
17896
17897        Ok(Statement::CreateType {
17898            name,
17899            representation: Some(UserDefinedTypeRepresentation::Composite { attributes }),
17900        })
17901    }
17902
17903    /// Parse remainder of `CREATE TYPE AS ENUM` statement (see [Statement::CreateType] and [Self::parse_create_type])
17904    ///
17905    /// See [PostgreSQL](https://www.postgresql.org/docs/current/sql-createtype.html)
17906    pub fn parse_create_type_enum(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
17907        self.expect_token(&Token::LParen)?;
17908        let labels = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
17909        self.expect_token(&Token::RParen)?;
17910
17911        Ok(Statement::CreateType {
17912            name,
17913            representation: Some(UserDefinedTypeRepresentation::Enum { labels }),
17914        })
17915    }
17916
17917    /// Parse remainder of `CREATE TYPE AS RANGE` statement
17918    ///
17919    /// See [PostgreSQL](https://www.postgresql.org/docs/current/sql-createtype.html)
17920    fn parse_create_type_range(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
17921        self.expect_token(&Token::LParen)?;
17922        let options = self.parse_comma_separated0(|p| p.parse_range_option(), Token::RParen)?;
17923        self.expect_token(&Token::RParen)?;
17924
17925        Ok(Statement::CreateType {
17926            name,
17927            representation: Some(UserDefinedTypeRepresentation::Range { options }),
17928        })
17929    }
17930
17931    /// Parse a single range option for a `CREATE TYPE AS RANGE` statement
17932    fn parse_range_option(&mut self) -> Result<UserDefinedTypeRangeOption, ParserError> {
17933        let keyword = self.parse_one_of_keywords(&[
17934            Keyword::SUBTYPE,
17935            Keyword::SUBTYPE_OPCLASS,
17936            Keyword::COLLATION,
17937            Keyword::CANONICAL,
17938            Keyword::SUBTYPE_DIFF,
17939            Keyword::MULTIRANGE_TYPE_NAME,
17940        ]);
17941
17942        match keyword {
17943            Some(Keyword::SUBTYPE) => {
17944                self.expect_token(&Token::Eq)?;
17945                let data_type = self.parse_data_type()?;
17946                Ok(UserDefinedTypeRangeOption::Subtype(data_type))
17947            }
17948            Some(Keyword::SUBTYPE_OPCLASS) => {
17949                self.expect_token(&Token::Eq)?;
17950                let name = self.parse_object_name(false)?;
17951                Ok(UserDefinedTypeRangeOption::SubtypeOpClass(name))
17952            }
17953            Some(Keyword::COLLATION) => {
17954                self.expect_token(&Token::Eq)?;
17955                let name = self.parse_object_name(false)?;
17956                Ok(UserDefinedTypeRangeOption::Collation(name))
17957            }
17958            Some(Keyword::CANONICAL) => {
17959                self.expect_token(&Token::Eq)?;
17960                let name = self.parse_object_name(false)?;
17961                Ok(UserDefinedTypeRangeOption::Canonical(name))
17962            }
17963            Some(Keyword::SUBTYPE_DIFF) => {
17964                self.expect_token(&Token::Eq)?;
17965                let name = self.parse_object_name(false)?;
17966                Ok(UserDefinedTypeRangeOption::SubtypeDiff(name))
17967            }
17968            Some(Keyword::MULTIRANGE_TYPE_NAME) => {
17969                self.expect_token(&Token::Eq)?;
17970                let name = self.parse_object_name(false)?;
17971                Ok(UserDefinedTypeRangeOption::MultirangeTypeName(name))
17972            }
17973            _ => self.expected("range option keyword", self.peek_token()),
17974        }
17975    }
17976
17977    /// Parse SQL definition options for CREATE TYPE (options)
17978    fn parse_create_type_sql_definition_options(
17979        &mut self,
17980    ) -> Result<Vec<UserDefinedTypeSqlDefinitionOption>, ParserError> {
17981        self.parse_comma_separated0(|p| p.parse_sql_definition_option(), Token::RParen)
17982    }
17983
17984    /// Parse a single SQL definition option for CREATE TYPE (options)
17985    fn parse_sql_definition_option(
17986        &mut self,
17987    ) -> Result<UserDefinedTypeSqlDefinitionOption, ParserError> {
17988        let keyword = self.parse_one_of_keywords(&[
17989            Keyword::INPUT,
17990            Keyword::OUTPUT,
17991            Keyword::RECEIVE,
17992            Keyword::SEND,
17993            Keyword::TYPMOD_IN,
17994            Keyword::TYPMOD_OUT,
17995            Keyword::ANALYZE,
17996            Keyword::SUBSCRIPT,
17997            Keyword::INTERNALLENGTH,
17998            Keyword::PASSEDBYVALUE,
17999            Keyword::ALIGNMENT,
18000            Keyword::STORAGE,
18001            Keyword::LIKE,
18002            Keyword::CATEGORY,
18003            Keyword::PREFERRED,
18004            Keyword::DEFAULT,
18005            Keyword::ELEMENT,
18006            Keyword::DELIMITER,
18007            Keyword::COLLATABLE,
18008        ]);
18009
18010        match keyword {
18011            Some(Keyword::INPUT) => {
18012                self.expect_token(&Token::Eq)?;
18013                let name = self.parse_object_name(false)?;
18014                Ok(UserDefinedTypeSqlDefinitionOption::Input(name))
18015            }
18016            Some(Keyword::OUTPUT) => {
18017                self.expect_token(&Token::Eq)?;
18018                let name = self.parse_object_name(false)?;
18019                Ok(UserDefinedTypeSqlDefinitionOption::Output(name))
18020            }
18021            Some(Keyword::RECEIVE) => {
18022                self.expect_token(&Token::Eq)?;
18023                let name = self.parse_object_name(false)?;
18024                Ok(UserDefinedTypeSqlDefinitionOption::Receive(name))
18025            }
18026            Some(Keyword::SEND) => {
18027                self.expect_token(&Token::Eq)?;
18028                let name = self.parse_object_name(false)?;
18029                Ok(UserDefinedTypeSqlDefinitionOption::Send(name))
18030            }
18031            Some(Keyword::TYPMOD_IN) => {
18032                self.expect_token(&Token::Eq)?;
18033                let name = self.parse_object_name(false)?;
18034                Ok(UserDefinedTypeSqlDefinitionOption::TypmodIn(name))
18035            }
18036            Some(Keyword::TYPMOD_OUT) => {
18037                self.expect_token(&Token::Eq)?;
18038                let name = self.parse_object_name(false)?;
18039                Ok(UserDefinedTypeSqlDefinitionOption::TypmodOut(name))
18040            }
18041            Some(Keyword::ANALYZE) => {
18042                self.expect_token(&Token::Eq)?;
18043                let name = self.parse_object_name(false)?;
18044                Ok(UserDefinedTypeSqlDefinitionOption::Analyze(name))
18045            }
18046            Some(Keyword::SUBSCRIPT) => {
18047                self.expect_token(&Token::Eq)?;
18048                let name = self.parse_object_name(false)?;
18049                Ok(UserDefinedTypeSqlDefinitionOption::Subscript(name))
18050            }
18051            Some(Keyword::INTERNALLENGTH) => {
18052                self.expect_token(&Token::Eq)?;
18053                if self.parse_keyword(Keyword::VARIABLE) {
18054                    Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
18055                        UserDefinedTypeInternalLength::Variable,
18056                    ))
18057                } else {
18058                    let value = self.parse_literal_uint()?;
18059                    Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
18060                        UserDefinedTypeInternalLength::Fixed(value),
18061                    ))
18062                }
18063            }
18064            Some(Keyword::PASSEDBYVALUE) => Ok(UserDefinedTypeSqlDefinitionOption::PassedByValue),
18065            Some(Keyword::ALIGNMENT) => {
18066                self.expect_token(&Token::Eq)?;
18067                let align_keyword = self.parse_one_of_keywords(&[
18068                    Keyword::CHAR,
18069                    Keyword::INT2,
18070                    Keyword::INT4,
18071                    Keyword::DOUBLE,
18072                ]);
18073                match align_keyword {
18074                    Some(Keyword::CHAR) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
18075                        Alignment::Char,
18076                    )),
18077                    Some(Keyword::INT2) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
18078                        Alignment::Int2,
18079                    )),
18080                    Some(Keyword::INT4) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
18081                        Alignment::Int4,
18082                    )),
18083                    Some(Keyword::DOUBLE) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
18084                        Alignment::Double,
18085                    )),
18086                    _ => self.expected(
18087                        "alignment value (char, int2, int4, or double)",
18088                        self.peek_token(),
18089                    ),
18090                }
18091            }
18092            Some(Keyword::STORAGE) => {
18093                self.expect_token(&Token::Eq)?;
18094                let storage_keyword = self.parse_one_of_keywords(&[
18095                    Keyword::PLAIN,
18096                    Keyword::EXTERNAL,
18097                    Keyword::EXTENDED,
18098                    Keyword::MAIN,
18099                ]);
18100                match storage_keyword {
18101                    Some(Keyword::PLAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
18102                        UserDefinedTypeStorage::Plain,
18103                    )),
18104                    Some(Keyword::EXTERNAL) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
18105                        UserDefinedTypeStorage::External,
18106                    )),
18107                    Some(Keyword::EXTENDED) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
18108                        UserDefinedTypeStorage::Extended,
18109                    )),
18110                    Some(Keyword::MAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
18111                        UserDefinedTypeStorage::Main,
18112                    )),
18113                    _ => self.expected(
18114                        "storage value (plain, external, extended, or main)",
18115                        self.peek_token(),
18116                    ),
18117                }
18118            }
18119            Some(Keyword::LIKE) => {
18120                self.expect_token(&Token::Eq)?;
18121                let name = self.parse_object_name(false)?;
18122                Ok(UserDefinedTypeSqlDefinitionOption::Like(name))
18123            }
18124            Some(Keyword::CATEGORY) => {
18125                self.expect_token(&Token::Eq)?;
18126                let category_str = self.parse_literal_string()?;
18127                let category_char = category_str.chars().next().ok_or_else(|| {
18128                    ParserError::ParserError(
18129                        "CATEGORY value must be a single character".to_string(),
18130                    )
18131                })?;
18132                Ok(UserDefinedTypeSqlDefinitionOption::Category(category_char))
18133            }
18134            Some(Keyword::PREFERRED) => {
18135                self.expect_token(&Token::Eq)?;
18136                let value =
18137                    self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
18138                Ok(UserDefinedTypeSqlDefinitionOption::Preferred(value))
18139            }
18140            Some(Keyword::DEFAULT) => {
18141                self.expect_token(&Token::Eq)?;
18142                let expr = self.parse_expr()?;
18143                Ok(UserDefinedTypeSqlDefinitionOption::Default(expr))
18144            }
18145            Some(Keyword::ELEMENT) => {
18146                self.expect_token(&Token::Eq)?;
18147                let data_type = self.parse_data_type()?;
18148                Ok(UserDefinedTypeSqlDefinitionOption::Element(data_type))
18149            }
18150            Some(Keyword::DELIMITER) => {
18151                self.expect_token(&Token::Eq)?;
18152                let delimiter = self.parse_literal_string()?;
18153                Ok(UserDefinedTypeSqlDefinitionOption::Delimiter(delimiter))
18154            }
18155            Some(Keyword::COLLATABLE) => {
18156                self.expect_token(&Token::Eq)?;
18157                let value =
18158                    self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
18159                Ok(UserDefinedTypeSqlDefinitionOption::Collatable(value))
18160            }
18161            _ => self.expected("SQL definition option keyword", self.peek_token()),
18162        }
18163    }
18164
18165    fn parse_parenthesized_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
18166        self.expect_token(&Token::LParen)?;
18167        let idents = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
18168        self.expect_token(&Token::RParen)?;
18169        Ok(idents)
18170    }
18171
18172    fn parse_column_position(&mut self) -> Result<Option<MySQLColumnPosition>, ParserError> {
18173        if dialect_of!(self is MySqlDialect | GenericDialect) {
18174            if self.parse_keyword(Keyword::FIRST) {
18175                Ok(Some(MySQLColumnPosition::First))
18176            } else if self.parse_keyword(Keyword::AFTER) {
18177                let ident = self.parse_identifier()?;
18178                Ok(Some(MySQLColumnPosition::After(ident)))
18179            } else {
18180                Ok(None)
18181            }
18182        } else {
18183            Ok(None)
18184        }
18185    }
18186
18187    /// Parse [Statement::Print]
18188    fn parse_print(&mut self) -> Result<Statement, ParserError> {
18189        Ok(Statement::Print(PrintStatement {
18190            message: Box::new(self.parse_expr()?),
18191        }))
18192    }
18193
18194    /// Parse [Statement::Return]
18195    fn parse_return(&mut self) -> Result<Statement, ParserError> {
18196        match self.maybe_parse(|p| p.parse_expr())? {
18197            Some(expr) => Ok(Statement::Return(ReturnStatement {
18198                value: Some(ReturnStatementValue::Expr(expr)),
18199            })),
18200            None => Ok(Statement::Return(ReturnStatement { value: None })),
18201        }
18202    }
18203
18204    /// Parse an `EXPORT DATA` statement.
18205    ///
18206    /// See [Statement::ExportData]
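    ///
    /// For example (an illustrative BigQuery-style sketch; the URI, format
    /// and query are arbitrary):
    ///
    /// ```sql
    /// EXPORT DATA OPTIONS (uri = 'gs://bucket/out-*.csv', format = 'CSV')
    /// AS SELECT * FROM t
    /// ```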
18207    fn parse_export_data(&mut self) -> Result<Statement, ParserError> {
18208        self.expect_keywords(&[Keyword::EXPORT, Keyword::DATA])?;
18209
18210        let connection = if self.parse_keywords(&[Keyword::WITH, Keyword::CONNECTION]) {
18211            Some(self.parse_object_name(false)?)
18212        } else {
18213            None
18214        };
18215        self.expect_keyword(Keyword::OPTIONS)?;
18216        self.expect_token(&Token::LParen)?;
18217        let options = self.parse_comma_separated(|p| p.parse_sql_option())?;
18218        self.expect_token(&Token::RParen)?;
18219        self.expect_keyword(Keyword::AS)?;
18220        let query = self.parse_query()?;
18221        Ok(Statement::ExportData(ExportData {
18222            options,
18223            query,
18224            connection,
18225        }))
18226    }
18227
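    /// Parses a `VACUUM` statement.
    ///
    /// An illustrative example of the accepted shape, inferred from the parse
    /// logic below (every clause, including the table name, is optional):
    ///
    /// ```sql
    /// VACUUM FULL my_table TO 75 PERCENT BOOST
    /// ```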
18228    fn parse_vacuum(&mut self) -> Result<Statement, ParserError> {
18229        self.expect_keyword(Keyword::VACUUM)?;
18230        let full = self.parse_keyword(Keyword::FULL);
18231        let sort_only = self.parse_keywords(&[Keyword::SORT, Keyword::ONLY]);
18232        let delete_only = self.parse_keywords(&[Keyword::DELETE, Keyword::ONLY]);
18233        let reindex = self.parse_keyword(Keyword::REINDEX);
18234        let recluster = self.parse_keyword(Keyword::RECLUSTER);
18235        let (table_name, threshold, boost) =
18236            match self.maybe_parse(|p| p.parse_object_name(false))? {
18237                Some(table_name) => {
18238                    let threshold = if self.parse_keyword(Keyword::TO) {
18239                        let value = self.parse_value()?;
18240                        self.expect_keyword(Keyword::PERCENT)?;
18241                        Some(value.value)
18242                    } else {
18243                        None
18244                    };
18245                    let boost = self.parse_keyword(Keyword::BOOST);
18246                    (Some(table_name), threshold, boost)
18247                }
18248                _ => (None, None, false),
18249            };
18250        Ok(Statement::Vacuum(VacuumStatement {
18251            full,
18252            sort_only,
18253            delete_only,
18254            reindex,
18255            recluster,
18256            table_name,
18257            threshold,
18258            boost,
18259        }))
18260    }
18261
18262    /// Consume the parser and return its underlying token buffer
18263    pub fn into_tokens(self) -> Vec<TokenWithSpan> {
18264        self.tokens
18265    }
18266
    /// Returns true if the next keyword indicates a subquery, i.e. SELECT or WITH
18268    fn peek_sub_query(&mut self) -> bool {
18269        if self
18270            .parse_one_of_keywords(&[Keyword::SELECT, Keyword::WITH])
18271            .is_some()
18272        {
18273            self.prev_token();
18274            return true;
18275        }
18276        false
18277    }
18278
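    /// Parses the optional trailing clauses of a `SHOW` statement: a filter
    /// (placed before or after the `IN`/`FROM` scope depending on the
    /// dialect), the `IN`/`FROM` scope itself, `STARTS WITH`, `LIMIT`, and
    /// `LIMIT ... FROM`.
    ///
    /// An illustrative example, inferred from the parse logic below (names and
    /// values are placeholders):
    ///
    /// ```sql
    /// SHOW TABLES LIKE '%users%' IN DATABASE my_db STARTS WITH 'a' LIMIT 10
    /// ```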
18279    pub(crate) fn parse_show_stmt_options(&mut self) -> Result<ShowStatementOptions, ParserError> {
18280        let show_in;
18281        let mut filter_position = None;
18282        if self.dialect.supports_show_like_before_in() {
18283            if let Some(filter) = self.parse_show_statement_filter()? {
18284                filter_position = Some(ShowStatementFilterPosition::Infix(filter));
18285            }
18286            show_in = self.maybe_parse_show_stmt_in()?;
18287        } else {
18288            show_in = self.maybe_parse_show_stmt_in()?;
18289            if let Some(filter) = self.parse_show_statement_filter()? {
18290                filter_position = Some(ShowStatementFilterPosition::Suffix(filter));
18291            }
18292        }
18293        let starts_with = self.maybe_parse_show_stmt_starts_with()?;
18294        let limit = self.maybe_parse_show_stmt_limit()?;
18295        let from = self.maybe_parse_show_stmt_from()?;
18296        Ok(ShowStatementOptions {
18297            filter_position,
18298            show_in,
18299            starts_with,
18300            limit,
18301            limit_from: from,
18302        })
18303    }
18304
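    /// Parses the optional `IN`/`FROM` scope of a `SHOW` statement, e.g. the
    /// trailing part of `SHOW TABLES IN DATABASE my_db` or the MySQL-style
    /// `SHOW COLUMNS FROM tbl_name FROM db_name`. Returns `None` when neither
    /// keyword is present. (Examples are illustrative, inferred from the parse
    /// logic below.)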
18305    fn maybe_parse_show_stmt_in(&mut self) -> Result<Option<ShowStatementIn>, ParserError> {
18306        let clause = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) {
18307            Some(Keyword::FROM) => ShowStatementInClause::FROM,
18308            Some(Keyword::IN) => ShowStatementInClause::IN,
18309            None => return Ok(None),
18310            _ => return self.expected("FROM or IN", self.peek_token()),
18311        };
18312
18313        let (parent_type, parent_name) = match self.parse_one_of_keywords(&[
18314            Keyword::ACCOUNT,
18315            Keyword::DATABASE,
18316            Keyword::SCHEMA,
18317            Keyword::TABLE,
18318            Keyword::VIEW,
18319        ]) {
18320            // If we see these next keywords it means we don't have a parent name
18321            Some(Keyword::DATABASE)
18322                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
18323                    | self.peek_keyword(Keyword::LIMIT) =>
18324            {
18325                (Some(ShowStatementInParentType::Database), None)
18326            }
18327            Some(Keyword::SCHEMA)
18328                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
18329                    | self.peek_keyword(Keyword::LIMIT) =>
18330            {
18331                (Some(ShowStatementInParentType::Schema), None)
18332            }
18333            Some(parent_kw) => {
18334                // The parent name here is still optional, for example:
18335                // SHOW TABLES IN ACCOUNT, so parsing the object name
18336                // may fail because the statement ends.
18337                let parent_name = self.maybe_parse(|p| p.parse_object_name(false))?;
18338                match parent_kw {
18339                    Keyword::ACCOUNT => (Some(ShowStatementInParentType::Account), parent_name),
18340                    Keyword::DATABASE => (Some(ShowStatementInParentType::Database), parent_name),
18341                    Keyword::SCHEMA => (Some(ShowStatementInParentType::Schema), parent_name),
18342                    Keyword::TABLE => (Some(ShowStatementInParentType::Table), parent_name),
18343                    Keyword::VIEW => (Some(ShowStatementInParentType::View), parent_name),
18344                    _ => {
18345                        return self.expected(
18346                            "one of ACCOUNT, DATABASE, SCHEMA, TABLE or VIEW",
18347                            self.peek_token(),
18348                        )
18349                    }
18350                }
18351            }
18352            None => {
                // Parse the MySQL-style `FROM tbl_name FROM db_name`,
                // which is equivalent to `FROM db_name.tbl_name`
18355                let mut parent_name = self.parse_object_name(false)?;
18356                if self
18357                    .parse_one_of_keywords(&[Keyword::FROM, Keyword::IN])
18358                    .is_some()
18359                {
18360                    parent_name
18361                        .0
18362                        .insert(0, ObjectNamePart::Identifier(self.parse_identifier()?));
18363                }
18364                (None, Some(parent_name))
18365            }
18366        };
18367
18368        Ok(Some(ShowStatementIn {
18369            clause,
18370            parent_type,
18371            parent_name,
18372        }))
18373    }
18374
18375    fn maybe_parse_show_stmt_starts_with(&mut self) -> Result<Option<Value>, ParserError> {
18376        if self.parse_keywords(&[Keyword::STARTS, Keyword::WITH]) {
18377            Ok(Some(self.parse_value()?.value))
18378        } else {
18379            Ok(None)
18380        }
18381    }
18382
18383    fn maybe_parse_show_stmt_limit(&mut self) -> Result<Option<Expr>, ParserError> {
18384        if self.parse_keyword(Keyword::LIMIT) {
18385            Ok(self.parse_limit()?)
18386        } else {
18387            Ok(None)
18388        }
18389    }
18390
18391    fn maybe_parse_show_stmt_from(&mut self) -> Result<Option<Value>, ParserError> {
18392        if self.parse_keyword(Keyword::FROM) {
18393            Ok(Some(self.parse_value()?.value))
18394        } else {
18395            Ok(None)
18396        }
18397    }
18398
18399    pub(crate) fn in_column_definition_state(&self) -> bool {
18400        matches!(self.state, ColumnDefinition)
18401    }
18402
18403    /// Parses options provided in key-value format.
18404    ///
    /// * `parenthesized` - true if the options are enclosed in parentheses
    /// * `end_words` - a list of keywords, any of which marks the end of the options section
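    ///
    /// An illustrative example of input this accepts, inferred from the parse
    /// logic below (keys and values are placeholders); options may be
    /// separated by spaces or commas:
    ///
    /// ```sql
    /// (FORMAT_NAME = 'my_format', COMPRESSION = GZIP, SIZE_LIMIT = 5)
    /// ```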
18407    pub(crate) fn parse_key_value_options(
18408        &mut self,
18409        parenthesized: bool,
18410        end_words: &[Keyword],
18411    ) -> Result<KeyValueOptions, ParserError> {
18412        let mut options: Vec<KeyValueOption> = Vec::new();
18413        let mut delimiter = KeyValueOptionsDelimiter::Space;
18414        if parenthesized {
18415            self.expect_token(&Token::LParen)?;
18416        }
18417        loop {
18418            match self.next_token().token {
18419                Token::RParen => {
18420                    if parenthesized {
18421                        break;
18422                    } else {
                        return self.expected("another option or EOF", self.peek_token());
18424                    }
18425                }
18426                Token::EOF => break,
18427                Token::Comma => {
18428                    delimiter = KeyValueOptionsDelimiter::Comma;
18429                    continue;
18430                }
18431                Token::Word(w) if !end_words.contains(&w.keyword) => {
18432                    options.push(self.parse_key_value_option(&w)?)
18433                }
18434                Token::Word(w) if end_words.contains(&w.keyword) => {
18435                    self.prev_token();
18436                    break;
18437                }
18438                _ => return self.expected("another option, EOF, Comma or ')'", self.peek_token()),
18439            };
18440        }
18441
18442        Ok(KeyValueOptions { delimiter, options })
18443    }
18444
18445    /// Parses a `KEY = VALUE` construct based on the specified key
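    ///
    /// The value may be a quoted string, a number, `TRUE`/`FALSE`, a bare
    /// word, or a parenthesized list of values or nested key-value options.
    /// Illustrative examples (placeholders, inferred from the parse logic
    /// below):
    ///
    /// ```sql
    /// TYPE = 'CSV'
    /// PURGE = TRUE
    /// FILE_FORMAT = (FORMAT_NAME = 'my_format')
    /// ```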
18446    pub(crate) fn parse_key_value_option(
18447        &mut self,
18448        key: &Word,
18449    ) -> Result<KeyValueOption, ParserError> {
18450        self.expect_token(&Token::Eq)?;
18451        match self.peek_token().token {
18452            Token::SingleQuotedString(_) => Ok(KeyValueOption {
18453                option_name: key.value.clone(),
18454                option_value: KeyValueOptionKind::Single(self.parse_value()?.into()),
18455            }),
18456            Token::Word(word)
18457                if word.keyword == Keyword::TRUE || word.keyword == Keyword::FALSE =>
18458            {
18459                Ok(KeyValueOption {
18460                    option_name: key.value.clone(),
18461                    option_value: KeyValueOptionKind::Single(self.parse_value()?.into()),
18462                })
18463            }
18464            Token::Number(..) => Ok(KeyValueOption {
18465                option_name: key.value.clone(),
18466                option_value: KeyValueOptionKind::Single(self.parse_value()?.into()),
18467            }),
18468            Token::Word(word) => {
18469                self.next_token();
18470                Ok(KeyValueOption {
18471                    option_name: key.value.clone(),
18472                    option_value: KeyValueOptionKind::Single(Value::Placeholder(
18473                        word.value.clone(),
18474                    )),
18475                })
18476            }
18477            Token::LParen => {
                // Can be a list of values or a list of key-value properties.
                // Try to parse a list of values first; if that fails, try to
                // parse a list of key-value properties.
18481                match self.maybe_parse(|parser| {
18482                    parser.expect_token(&Token::LParen)?;
18483                    let values = parser.parse_comma_separated0(|p| p.parse_value(), Token::RParen);
18484                    parser.expect_token(&Token::RParen)?;
18485                    values
18486                })? {
18487                    Some(values) => {
18488                        let values = values.into_iter().map(|v| v.value).collect();
18489                        Ok(KeyValueOption {
18490                            option_name: key.value.clone(),
18491                            option_value: KeyValueOptionKind::Multi(values),
18492                        })
18493                    }
18494                    None => Ok(KeyValueOption {
18495                        option_name: key.value.clone(),
18496                        option_value: KeyValueOptionKind::KeyValueOptions(Box::new(
18497                            self.parse_key_value_options(true, &[])?,
18498                        )),
18499                    }),
18500                }
18501            }
            _ => self.expected("option value", self.peek_token()),
18503        }
18504    }
18505
18506    /// Parses a RESET statement
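    ///
    /// Accepts either `RESET ALL` or `RESET <configuration_parameter>`;
    /// illustrative examples, inferred from the parse logic below:
    ///
    /// ```sql
    /// RESET ALL
    /// RESET search_path
    /// ```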
18507    fn parse_reset(&mut self) -> Result<Statement, ParserError> {
18508        if self.parse_keyword(Keyword::ALL) {
18509            return Ok(Statement::Reset(ResetStatement { reset: Reset::ALL }));
18510        }
18511
18512        let obj = self.parse_object_name(false)?;
18513        Ok(Statement::Reset(ResetStatement {
18514            reset: Reset::ConfigurationParameter(obj),
18515        }))
18516    }
18517}
18518
18519fn maybe_prefixed_expr(expr: Expr, prefix: Option<Ident>) -> Expr {
18520    if let Some(prefix) = prefix {
18521        Expr::Prefixed {
18522            prefix,
18523            value: Box::new(expr),
18524        }
18525    } else {
18526        expr
18527    }
18528}
18529
18530impl Word {
18531    #[deprecated(since = "0.54.0", note = "please use `into_ident` instead")]
18532    pub fn to_ident(&self, span: Span) -> Ident {
18533        Ident {
18534            value: self.value.clone(),
18535            quote_style: self.quote_style,
18536            span,
18537        }
18538    }
18539
18540    /// Convert this word into an [`Ident`] identifier
18541    pub fn into_ident(self, span: Span) -> Ident {
18542        Ident {
18543            value: self.value,
18544            quote_style: self.quote_style,
18545            span,
18546        }
18547    }
18548}
18549
18550#[cfg(test)]
18551mod tests {
18552    use crate::test_utils::{all_dialects, TestedDialects};
18553
18554    use super::*;
18555
18556    #[test]
18557    fn test_prev_index() {
18558        let sql = "SELECT version";
18559        all_dialects().run_parser_method(sql, |parser| {
18560            assert_eq!(parser.peek_token(), Token::make_keyword("SELECT"));
18561            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
18562            parser.prev_token();
18563            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
18564            assert_eq!(parser.next_token(), Token::make_word("version", None));
18565            parser.prev_token();
18566            assert_eq!(parser.peek_token(), Token::make_word("version", None));
18567            assert_eq!(parser.next_token(), Token::make_word("version", None));
18568            assert_eq!(parser.peek_token(), Token::EOF);
18569            parser.prev_token();
18570            assert_eq!(parser.next_token(), Token::make_word("version", None));
18571            assert_eq!(parser.next_token(), Token::EOF);
18572            assert_eq!(parser.next_token(), Token::EOF);
18573            parser.prev_token();
18574        });
18575    }
18576
18577    #[test]
18578    fn test_peek_tokens() {
18579        all_dialects().run_parser_method("SELECT foo AS bar FROM baz", |parser| {
18580            assert!(matches!(
18581                parser.peek_tokens(),
18582                [Token::Word(Word {
18583                    keyword: Keyword::SELECT,
18584                    ..
18585                })]
18586            ));
18587
18588            assert!(matches!(
18589                parser.peek_tokens(),
18590                [
18591                    Token::Word(Word {
18592                        keyword: Keyword::SELECT,
18593                        ..
18594                    }),
18595                    Token::Word(_),
18596                    Token::Word(Word {
18597                        keyword: Keyword::AS,
18598                        ..
18599                    }),
18600                ]
18601            ));
18602
18603            for _ in 0..4 {
18604                parser.next_token();
18605            }
18606
18607            assert!(matches!(
18608                parser.peek_tokens(),
18609                [
18610                    Token::Word(Word {
18611                        keyword: Keyword::FROM,
18612                        ..
18613                    }),
18614                    Token::Word(_),
18615                    Token::EOF,
18616                    Token::EOF,
18617                ]
18618            ))
18619        })
18620    }
18621
18622    #[cfg(test)]
18623    mod test_parse_data_type {
18624        use crate::ast::{
18625            CharLengthUnits, CharacterLength, DataType, ExactNumberInfo, ObjectName, TimezoneInfo,
18626        };
18627        use crate::dialect::{AnsiDialect, GenericDialect, PostgreSqlDialect};
18628        use crate::test_utils::TestedDialects;
18629
18630        macro_rules! test_parse_data_type {
18631            ($dialect:expr, $input:expr, $expected_type:expr $(,)?) => {{
18632                $dialect.run_parser_method(&*$input, |parser| {
18633                    let data_type = parser.parse_data_type().unwrap();
18634                    assert_eq!($expected_type, data_type);
18635                    assert_eq!($input.to_string(), data_type.to_string());
18636                });
18637            }};
18638        }
18639
18640        #[test]
18641        fn test_ansii_character_string_types() {
18642            // Character string types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#character-string-type>
18643            let dialect =
18644                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
18645
18646            test_parse_data_type!(dialect, "CHARACTER", DataType::Character(None));
18647
18648            test_parse_data_type!(
18649                dialect,
18650                "CHARACTER(20)",
18651                DataType::Character(Some(CharacterLength::IntegerLength {
18652                    length: 20,
18653                    unit: None
18654                }))
18655            );
18656
18657            test_parse_data_type!(
18658                dialect,
18659                "CHARACTER(20 CHARACTERS)",
18660                DataType::Character(Some(CharacterLength::IntegerLength {
18661                    length: 20,
18662                    unit: Some(CharLengthUnits::Characters)
18663                }))
18664            );
18665
18666            test_parse_data_type!(
18667                dialect,
18668                "CHARACTER(20 OCTETS)",
18669                DataType::Character(Some(CharacterLength::IntegerLength {
18670                    length: 20,
18671                    unit: Some(CharLengthUnits::Octets)
18672                }))
18673            );
18674
18675            test_parse_data_type!(dialect, "CHAR", DataType::Char(None));
18676
18677            test_parse_data_type!(
18678                dialect,
18679                "CHAR(20)",
18680                DataType::Char(Some(CharacterLength::IntegerLength {
18681                    length: 20,
18682                    unit: None
18683                }))
18684            );
18685
18686            test_parse_data_type!(
18687                dialect,
18688                "CHAR(20 CHARACTERS)",
18689                DataType::Char(Some(CharacterLength::IntegerLength {
18690                    length: 20,
18691                    unit: Some(CharLengthUnits::Characters)
18692                }))
18693            );
18694
18695            test_parse_data_type!(
18696                dialect,
18697                "CHAR(20 OCTETS)",
18698                DataType::Char(Some(CharacterLength::IntegerLength {
18699                    length: 20,
18700                    unit: Some(CharLengthUnits::Octets)
18701                }))
18702            );
18703
18704            test_parse_data_type!(
18705                dialect,
18706                "CHARACTER VARYING(20)",
18707                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
18708                    length: 20,
18709                    unit: None
18710                }))
18711            );
18712
18713            test_parse_data_type!(
18714                dialect,
18715                "CHARACTER VARYING(20 CHARACTERS)",
18716                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
18717                    length: 20,
18718                    unit: Some(CharLengthUnits::Characters)
18719                }))
18720            );
18721
18722            test_parse_data_type!(
18723                dialect,
18724                "CHARACTER VARYING(20 OCTETS)",
18725                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
18726                    length: 20,
18727                    unit: Some(CharLengthUnits::Octets)
18728                }))
18729            );
18730
18731            test_parse_data_type!(
18732                dialect,
18733                "CHAR VARYING(20)",
18734                DataType::CharVarying(Some(CharacterLength::IntegerLength {
18735                    length: 20,
18736                    unit: None
18737                }))
18738            );
18739
18740            test_parse_data_type!(
18741                dialect,
18742                "CHAR VARYING(20 CHARACTERS)",
18743                DataType::CharVarying(Some(CharacterLength::IntegerLength {
18744                    length: 20,
18745                    unit: Some(CharLengthUnits::Characters)
18746                }))
18747            );
18748
18749            test_parse_data_type!(
18750                dialect,
18751                "CHAR VARYING(20 OCTETS)",
18752                DataType::CharVarying(Some(CharacterLength::IntegerLength {
18753                    length: 20,
18754                    unit: Some(CharLengthUnits::Octets)
18755                }))
18756            );
18757
18758            test_parse_data_type!(
18759                dialect,
18760                "VARCHAR(20)",
18761                DataType::Varchar(Some(CharacterLength::IntegerLength {
18762                    length: 20,
18763                    unit: None
18764                }))
18765            );
18766        }
18767
18768        #[test]
18769        fn test_ansii_character_large_object_types() {
18770            // Character large object types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#character-large-object-length>
18771            let dialect =
18772                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
18773
18774            test_parse_data_type!(
18775                dialect,
18776                "CHARACTER LARGE OBJECT",
18777                DataType::CharacterLargeObject(None)
18778            );
18779            test_parse_data_type!(
18780                dialect,
18781                "CHARACTER LARGE OBJECT(20)",
18782                DataType::CharacterLargeObject(Some(20))
18783            );
18784
18785            test_parse_data_type!(
18786                dialect,
18787                "CHAR LARGE OBJECT",
18788                DataType::CharLargeObject(None)
18789            );
18790            test_parse_data_type!(
18791                dialect,
18792                "CHAR LARGE OBJECT(20)",
18793                DataType::CharLargeObject(Some(20))
18794            );
18795
18796            test_parse_data_type!(dialect, "CLOB", DataType::Clob(None));
18797            test_parse_data_type!(dialect, "CLOB(20)", DataType::Clob(Some(20)));
18798        }
18799
18800        #[test]
18801        fn test_parse_custom_types() {
18802            let dialect =
18803                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
18804
18805            test_parse_data_type!(
18806                dialect,
18807                "GEOMETRY",
18808                DataType::Custom(ObjectName::from(vec!["GEOMETRY".into()]), vec![])
18809            );
18810
18811            test_parse_data_type!(
18812                dialect,
18813                "GEOMETRY(POINT)",
18814                DataType::Custom(
18815                    ObjectName::from(vec!["GEOMETRY".into()]),
18816                    vec!["POINT".to_string()]
18817                )
18818            );
18819
18820            test_parse_data_type!(
18821                dialect,
18822                "GEOMETRY(POINT, 4326)",
18823                DataType::Custom(
18824                    ObjectName::from(vec!["GEOMETRY".into()]),
18825                    vec!["POINT".to_string(), "4326".to_string()]
18826                )
18827            );
18828        }
18829
18830        #[test]
18831        fn test_ansii_exact_numeric_types() {
18832            // Exact numeric types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#exact-numeric-type>
18833            let dialect = TestedDialects::new(vec![
18834                Box::new(GenericDialect {}),
18835                Box::new(AnsiDialect {}),
18836                Box::new(PostgreSqlDialect {}),
18837            ]);
18838
18839            test_parse_data_type!(dialect, "NUMERIC", DataType::Numeric(ExactNumberInfo::None));
18840
18841            test_parse_data_type!(
18842                dialect,
18843                "NUMERIC(2)",
18844                DataType::Numeric(ExactNumberInfo::Precision(2))
18845            );
18846
18847            test_parse_data_type!(
18848                dialect,
18849                "NUMERIC(2,10)",
18850                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(2, 10))
18851            );
18852
18853            test_parse_data_type!(dialect, "DECIMAL", DataType::Decimal(ExactNumberInfo::None));
18854
18855            test_parse_data_type!(
18856                dialect,
18857                "DECIMAL(2)",
18858                DataType::Decimal(ExactNumberInfo::Precision(2))
18859            );
18860
18861            test_parse_data_type!(
18862                dialect,
18863                "DECIMAL(2,10)",
18864                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(2, 10))
18865            );
18866
18867            test_parse_data_type!(dialect, "DEC", DataType::Dec(ExactNumberInfo::None));
18868
18869            test_parse_data_type!(
18870                dialect,
18871                "DEC(2)",
18872                DataType::Dec(ExactNumberInfo::Precision(2))
18873            );
18874
18875            test_parse_data_type!(
18876                dialect,
18877                "DEC(2,10)",
18878                DataType::Dec(ExactNumberInfo::PrecisionAndScale(2, 10))
18879            );
18880
18881            // Test negative scale values.
18882            test_parse_data_type!(
18883                dialect,
18884                "NUMERIC(10,-2)",
18885                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -2))
18886            );
18887
18888            test_parse_data_type!(
18889                dialect,
18890                "DECIMAL(1000,-10)",
18891                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(1000, -10))
18892            );
18893
18894            test_parse_data_type!(
18895                dialect,
18896                "DEC(5,-1000)",
18897                DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -1000))
18898            );
18899
18900            test_parse_data_type!(
18901                dialect,
18902                "NUMERIC(10,-5)",
18903                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -5))
18904            );
18905
18906            test_parse_data_type!(
18907                dialect,
18908                "DECIMAL(20,-10)",
18909                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(20, -10))
18910            );
18911
18912            test_parse_data_type!(
18913                dialect,
18914                "DEC(5,-2)",
18915                DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -2))
18916            );
18917
18918            dialect.run_parser_method("NUMERIC(10,+5)", |parser| {
18919                let data_type = parser.parse_data_type().unwrap();
18920                assert_eq!(
18921                    DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, 5)),
18922                    data_type
18923                );
18924                // Note: Explicit '+' sign is not preserved in output, which is correct
18925                assert_eq!("NUMERIC(10,5)", data_type.to_string());
18926            });
18927        }
18928
18929        #[test]
18930        fn test_ansii_date_type() {
18931            // Datetime types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#datetime-type>
18932            let dialect =
18933                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
18934
18935            test_parse_data_type!(dialect, "DATE", DataType::Date);
18936
18937            test_parse_data_type!(dialect, "TIME", DataType::Time(None, TimezoneInfo::None));
18938
18939            test_parse_data_type!(
18940                dialect,
18941                "TIME(6)",
18942                DataType::Time(Some(6), TimezoneInfo::None)
18943            );
18944
18945            test_parse_data_type!(
18946                dialect,
18947                "TIME WITH TIME ZONE",
18948                DataType::Time(None, TimezoneInfo::WithTimeZone)
18949            );
18950
18951            test_parse_data_type!(
18952                dialect,
18953                "TIME(6) WITH TIME ZONE",
18954                DataType::Time(Some(6), TimezoneInfo::WithTimeZone)
18955            );
18956
18957            test_parse_data_type!(
18958                dialect,
18959                "TIME WITHOUT TIME ZONE",
18960                DataType::Time(None, TimezoneInfo::WithoutTimeZone)
18961            );
18962
18963            test_parse_data_type!(
18964                dialect,
18965                "TIME(6) WITHOUT TIME ZONE",
18966                DataType::Time(Some(6), TimezoneInfo::WithoutTimeZone)
18967            );
18968
18969            test_parse_data_type!(
18970                dialect,
18971                "TIMESTAMP",
18972                DataType::Timestamp(None, TimezoneInfo::None)
18973            );
18974
18975            test_parse_data_type!(
18976                dialect,
18977                "TIMESTAMP(22)",
18978                DataType::Timestamp(Some(22), TimezoneInfo::None)
18979            );
18980
18981            test_parse_data_type!(
18982                dialect,
18983                "TIMESTAMP(22) WITH TIME ZONE",
18984                DataType::Timestamp(Some(22), TimezoneInfo::WithTimeZone)
18985            );
18986
18987            test_parse_data_type!(
18988                dialect,
18989                "TIMESTAMP(33) WITHOUT TIME ZONE",
18990                DataType::Timestamp(Some(33), TimezoneInfo::WithoutTimeZone)
18991            );
18992        }
18993    }
18994
18995    #[test]
18996    fn test_parse_schema_name() {
        // The expected serialization should be identical to the input, so the
        // macro does not take a separate expected string.
18998        macro_rules! test_parse_schema_name {
18999            ($input:expr, $expected_name:expr $(,)?) => {{
19000                all_dialects().run_parser_method(&*$input, |parser| {
19001                    let schema_name = parser.parse_schema_name().unwrap();
19002                    // Validate that the structure is the same as expected
19003                    assert_eq!(schema_name, $expected_name);
19004                    // Validate that the input and the expected structure serialization are the same
19005                    assert_eq!(schema_name.to_string(), $input.to_string());
19006                });
19007            }};
19008        }
19009
19010        let dummy_name = ObjectName::from(vec![Ident::new("dummy_name")]);
19011        let dummy_authorization = Ident::new("dummy_authorization");
19012
19013        test_parse_schema_name!(
19014            format!("{dummy_name}"),
19015            SchemaName::Simple(dummy_name.clone())
19016        );
19017
19018        test_parse_schema_name!(
19019            format!("AUTHORIZATION {dummy_authorization}"),
19020            SchemaName::UnnamedAuthorization(dummy_authorization.clone()),
19021        );
19022        test_parse_schema_name!(
19023            format!("{dummy_name} AUTHORIZATION {dummy_authorization}"),
19024            SchemaName::NamedAuthorization(dummy_name.clone(), dummy_authorization.clone()),
19025        );
19026    }
19027
19028    #[test]
19029    fn mysql_parse_index_table_constraint() {
19030        macro_rules! test_parse_table_constraint {
19031            ($dialect:expr, $input:expr, $expected:expr $(,)?) => {{
19032                $dialect.run_parser_method(&*$input, |parser| {
19033                    let constraint = parser.parse_optional_table_constraint().unwrap().unwrap();
19034                    // Validate that the structure is the same as expected
19035                    assert_eq!(constraint, $expected);
19036                    // Validate that the input and the expected structure serialization are the same
19037                    assert_eq!(constraint.to_string(), $input.to_string());
19038                });
19039            }};
19040        }
19041
19042        fn mk_expected_col(name: &str) -> IndexColumn {
19043            IndexColumn {
19044                column: OrderByExpr {
19045                    expr: Expr::Identifier(name.into()),
19046                    options: OrderByOptions {
19047                        asc: None,
19048                        nulls_first: None,
19049                    },
19050                    with_fill: None,
19051                },
19052                operator_class: None,
19053            }
19054        }
19055
19056        let dialect =
19057            TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(MySqlDialect {})]);
19058
19059        test_parse_table_constraint!(
19060            dialect,
19061            "INDEX (c1)",
19062            IndexConstraint {
19063                display_as_key: false,
19064                name: None,
19065                index_type: None,
19066                columns: vec![mk_expected_col("c1")],
19067                index_options: vec![],
19068            }
19069            .into()
19070        );
19071
19072        test_parse_table_constraint!(
19073            dialect,
19074            "KEY (c1)",
19075            IndexConstraint {
19076                display_as_key: true,
19077                name: None,
19078                index_type: None,
19079                columns: vec![mk_expected_col("c1")],
19080                index_options: vec![],
19081            }
19082            .into()
19083        );
19084
19085        test_parse_table_constraint!(
19086            dialect,
19087            "INDEX 'index' (c1, c2)",
19088            TableConstraint::Index(IndexConstraint {
19089                display_as_key: false,
19090                name: Some(Ident::with_quote('\'', "index")),
19091                index_type: None,
19092                columns: vec![mk_expected_col("c1"), mk_expected_col("c2")],
19093                index_options: vec![],
19094            })
19095        );
19096
19097        test_parse_table_constraint!(
19098            dialect,
19099            "INDEX USING BTREE (c1)",
19100            IndexConstraint {
19101                display_as_key: false,
19102                name: None,
19103                index_type: Some(IndexType::BTree),
19104                columns: vec![mk_expected_col("c1")],
19105                index_options: vec![],
19106            }
19107            .into()
19108        );
19109
19110        test_parse_table_constraint!(
19111            dialect,
19112            "INDEX USING HASH (c1)",
19113            IndexConstraint {
19114                display_as_key: false,
19115                name: None,
19116                index_type: Some(IndexType::Hash),
19117                columns: vec![mk_expected_col("c1")],
19118                index_options: vec![],
19119            }
19120            .into()
19121        );
19122
19123        test_parse_table_constraint!(
19124            dialect,
19125            "INDEX idx_name USING BTREE (c1)",
19126            IndexConstraint {
19127                display_as_key: false,
19128                name: Some(Ident::new("idx_name")),
19129                index_type: Some(IndexType::BTree),
19130                columns: vec![mk_expected_col("c1")],
19131                index_options: vec![],
19132            }
19133            .into()
19134        );
19135
19136        test_parse_table_constraint!(
19137            dialect,
19138            "INDEX idx_name USING HASH (c1)",
19139            IndexConstraint {
19140                display_as_key: false,
19141                name: Some(Ident::new("idx_name")),
19142                index_type: Some(IndexType::Hash),
19143                columns: vec![mk_expected_col("c1")],
19144                index_options: vec![],
19145            }
19146            .into()
19147        );
19148    }
19149
19150    #[test]
19151    fn test_tokenizer_error_loc() {
19152        let sql = "foo '";
19153        let ast = Parser::parse_sql(&GenericDialect, sql);
19154        assert_eq!(
19155            ast,
19156            Err(ParserError::TokenizerError(
19157                "Unterminated string literal at Line: 1, Column: 5".to_string()
19158            ))
19159        );
19160    }
19161
19162    #[test]
19163    fn test_parser_error_loc() {
19164        let sql = "SELECT this is a syntax error";
19165        let ast = Parser::parse_sql(&GenericDialect, sql);
19166        assert_eq!(
19167            ast,
19168            Err(ParserError::ParserError(
19169                "Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS, found: a at Line: 1, Column: 16"
19170                    .to_string()
19171            ))
19172        );
19173    }
19174
19175    #[test]
19176    fn test_nested_explain_error() {
19177        let sql = "EXPLAIN EXPLAIN SELECT 1";
19178        let ast = Parser::parse_sql(&GenericDialect, sql);
19179        assert_eq!(
19180            ast,
19181            Err(ParserError::ParserError(
19182                "Explain must be root of the plan".to_string()
19183            ))
19184        );
19185    }
19186
19187    #[test]
19188    fn test_parse_multipart_identifier_positive() {
19189        let dialect = TestedDialects::new(vec![Box::new(GenericDialect {})]);
19190
19191        // parse multipart with quotes
19192        let expected = vec![
19193            Ident {
19194                value: "CATALOG".to_string(),
19195                quote_style: None,
19196                span: Span::empty(),
19197            },
19198            Ident {
19199                value: "F(o)o. \"bar".to_string(),
19200                quote_style: Some('"'),
19201                span: Span::empty(),
19202            },
19203            Ident {
19204                value: "table".to_string(),
19205                quote_style: None,
19206                span: Span::empty(),
19207            },
19208        ];
19209        dialect.run_parser_method(r#"CATALOG."F(o)o. ""bar".table"#, |parser| {
19210            let actual = parser.parse_multipart_identifier().unwrap();
19211            assert_eq!(expected, actual);
19212        });
19213
19214        // allow whitespace between ident parts
19215        let expected = vec![
19216            Ident {
19217                value: "CATALOG".to_string(),
19218                quote_style: None,
19219                span: Span::empty(),
19220            },
19221            Ident {
19222                value: "table".to_string(),
19223                quote_style: None,
19224                span: Span::empty(),
19225            },
19226        ];
19227        dialect.run_parser_method("CATALOG . table", |parser| {
19228            let actual = parser.parse_multipart_identifier().unwrap();
19229            assert_eq!(expected, actual);
19230        });
19231    }
19232
19233    #[test]
19234    fn test_parse_multipart_identifier_negative() {
19235        macro_rules! test_parse_multipart_identifier_error {
19236            ($input:expr, $expected_err:expr $(,)?) => {{
19237                all_dialects().run_parser_method(&*$input, |parser| {
19238                    let actual_err = parser.parse_multipart_identifier().unwrap_err();
19239                    assert_eq!(actual_err.to_string(), $expected_err);
19240                });
19241            }};
19242        }
19243
19244        test_parse_multipart_identifier_error!(
19245            "",
19246            "sql parser error: Empty input when parsing identifier",
19247        );
19248
19249        test_parse_multipart_identifier_error!(
19250            "*schema.table",
19251            "sql parser error: Unexpected token in identifier: *",
19252        );
19253
19254        test_parse_multipart_identifier_error!(
19255            "schema.table*",
19256            "sql parser error: Unexpected token in identifier: *",
19257        );
19258
19259        test_parse_multipart_identifier_error!(
19260            "schema.table.",
19261            "sql parser error: Trailing period in identifier",
19262        );
19263
19264        test_parse_multipart_identifier_error!(
19265            "schema.*",
19266            "sql parser error: Unexpected token following period in identifier: *",
19267        );
19268    }
19269
19270    #[test]
19271    fn test_mysql_partition_selection() {
19272        let sql = "SELECT * FROM employees PARTITION (p0, p2)";
19273        let expected = vec!["p0", "p2"];
19274
19275        let ast: Vec<Statement> = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
19276        assert_eq!(ast.len(), 1);
19277        if let Statement::Query(v) = &ast[0] {
19278            if let SetExpr::Select(select) = &*v.body {
19279                assert_eq!(select.from.len(), 1);
19280                let from: &TableWithJoins = &select.from[0];
19281                let table_factor = &from.relation;
19282                if let TableFactor::Table { partitions, .. } = table_factor {
19283                    let actual: Vec<&str> = partitions
19284                        .iter()
19285                        .map(|ident| ident.value.as_str())
19286                        .collect();
19287                    assert_eq!(expected, actual);
19288                }
19289            }
19290        } else {
19291            panic!("fail to parse mysql partition selection");
19292        }
19293    }
19294
19295    #[test]
19296    fn test_replace_into_placeholders() {
19297        let sql = "REPLACE INTO t (a) VALUES (&a)";
19298
19299        assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
19300    }
19301
19302    #[test]
19303    fn test_replace_into_set_placeholder() {
19304        let sql = "REPLACE INTO t SET ?";
19305
19306        assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
19307    }
19308
19309    #[test]
19310    fn test_replace_incomplete() {
19311        let sql = r#"REPLACE"#;
19312
19313        assert!(Parser::parse_sql(&MySqlDialect {}, sql).is_err());
19314    }
19315
19316    #[test]
19317    fn test_placeholder_invalid_whitespace() {
19318        for w in ["  ", "/*invalid*/"] {
19319            let sql = format!("\nSELECT\n  :{w}fooBar");
19320            assert!(Parser::parse_sql(&GenericDialect, &sql).is_err());
19321        }
19322    }
19323}