// sqlparser/parser/mod.rs

// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

//! SQL Parser

#[cfg(not(feature = "std"))]
use alloc::{
    boxed::Box,
    format,
    string::{String, ToString},
    vec,
    vec::Vec,
};
use core::{
    fmt::{self, Display},
    str::FromStr,
};
use helpers::attached_token::AttachedToken;

use log::debug;

use recursion::RecursionCounter;
use IsLateral::*;
use IsOptional::*;

use crate::ast::helpers::{
    key_value_options::{
        KeyValueOption, KeyValueOptionType, KeyValueOptions, KeyValueOptionsDelimiter,
    },
    stmt_create_table::{CreateTableBuilder, CreateTableConfiguration},
};
use crate::ast::Statement::CreatePolicy;
use crate::ast::*;
use crate::dialect::*;
use crate::keywords::{Keyword, ALL_KEYWORDS};
use crate::tokenizer::*;
use sqlparser::parser::ParserState::ColumnDefinition;

mod alter;

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ParserError {
    TokenizerError(String),
    ParserError(String),
    RecursionLimitExceeded,
}

// Use `Parser::expected` instead, if possible
macro_rules! parser_err {
    ($MSG:expr, $loc:expr) => {
        Err(ParserError::ParserError(format!("{}{}", $MSG, $loc)))
    };
}

#[cfg(feature = "std")]
/// Implementation of [`RecursionCounter`] when std is available
mod recursion {
    use std::cell::Cell;
    use std::rc::Rc;

    use super::ParserError;

    /// Tracks the remaining recursion depth. This value is decremented on
    /// each call to [`RecursionCounter::try_decrease()`]; when it reaches 0,
    /// an error is returned.
    ///
    /// Note: Uses an [`std::rc::Rc`] and [`std::cell::Cell`] in order to satisfy the Rust
    /// borrow checker, so that the automatically created [`DepthGuard`] can hold a
    /// reference to the counter and restore the depth when it is dropped.
    ///
    /// Note: when the "recursive-protection" feature is enabled, this crate uses additional stack overflow protection
    /// for some of its recursive methods. See [`recursive::recursive`] for more information.
    pub(crate) struct RecursionCounter {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl RecursionCounter {
        /// Creates a [`RecursionCounter`] with the specified maximum
        /// depth
        pub fn new(remaining_depth: usize) -> Self {
            Self {
                remaining_depth: Rc::new(remaining_depth.into()),
            }
        }

        /// Decreases the remaining depth by 1.
        ///
        /// Returns [`Err`] if the remaining depth has already reached 0.
        ///
        /// Otherwise returns a [`DepthGuard`] which adds 1 back to the
        /// remaining depth when it is dropped.
        pub fn try_decrease(&self) -> Result<DepthGuard, ParserError> {
            let old_value = self.remaining_depth.get();
            // ran out of space
            if old_value == 0 {
                Err(ParserError::RecursionLimitExceeded)
            } else {
                self.remaining_depth.set(old_value - 1);
                Ok(DepthGuard::new(Rc::clone(&self.remaining_depth)))
            }
        }
    }

    /// Guard that increases the remaining depth by 1 on drop
    pub struct DepthGuard {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl DepthGuard {
        fn new(remaining_depth: Rc<Cell<usize>>) -> Self {
            Self { remaining_depth }
        }
    }
    impl Drop for DepthGuard {
        fn drop(&mut self) {
            let old_value = self.remaining_depth.get();
            self.remaining_depth.set(old_value + 1);
        }
    }
}

#[cfg(not(feature = "std"))]
mod recursion {
    /// Implementation of [`RecursionCounter`] used when std is NOT available
    /// (and which does not guard against stack overflow).
    ///
    /// Has the same API as the std [`RecursionCounter`] implementation
    /// but does not actually limit stack depth.
    pub(crate) struct RecursionCounter {}

    impl RecursionCounter {
        pub fn new(_remaining_depth: usize) -> Self {
            Self {}
        }
        pub fn try_decrease(&self) -> Result<DepthGuard, super::ParserError> {
            Ok(DepthGuard {})
        }
    }

    pub struct DepthGuard {}
}

#[derive(PartialEq, Eq)]
pub enum IsOptional {
    Optional,
    Mandatory,
}

pub enum IsLateral {
    Lateral,
    NotLateral,
}

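/// An expression, a qualified wildcard such as `alias.*`, or an
/// unqualified wildcard (`*`).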
pub enum WildcardExpr {
    Expr(Expr),
    QualifiedWildcard(ObjectName),
    Wildcard,
}

impl From<TokenizerError> for ParserError {
    fn from(e: TokenizerError) -> Self {
        ParserError::TokenizerError(e.to_string())
    }
}

impl fmt::Display for ParserError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "sql parser error: {}",
            match self {
                ParserError::TokenizerError(s) => s,
                ParserError::ParserError(s) => s,
                ParserError::RecursionLimitExceeded => "recursion limit exceeded",
            }
        )
    }
}

#[cfg(feature = "std")]
impl std::error::Error for ParserError {}

// By default, allow expressions up to this depth before erroring
const DEFAULT_REMAINING_DEPTH: usize = 50;

// A constant EOF token that can be referenced.
const EOF_TOKEN: TokenWithSpan = TokenWithSpan {
    token: Token::EOF,
    span: Span {
        start: Location { line: 0, column: 0 },
        end: Location { line: 0, column: 0 },
    },
};

/// Composite type declarations using angle-bracket syntax can be arbitrarily
/// nested, so the following declaration is possible:
///      `ARRAY<ARRAY<INT>>`
/// But the tokenizer recognizes the `>>` as a ShiftRight token.
/// We work around that limitation when parsing a data type by accepting
/// either a `>` or `>>` token in such cases, remembering which variant we
/// matched.
/// In the latter case, having matched a `>>`, the parent type will not look
/// for its own closing `>`, since that has already been consumed while
/// parsing the child type.
///
/// See [Parser::parse_data_type] for details
struct MatchedTrailingBracket(bool);

impl From<bool> for MatchedTrailingBracket {
    fn from(value: bool) -> Self {
        Self(value)
    }
}

/// Options that control how the [`Parser`] parses SQL text
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ParserOptions {
    /// Controls whether trailing commas are allowed. See
    /// [`ParserOptions::with_trailing_commas`] for more details.
    pub trailing_commas: bool,
    /// Controls how literal values are unescaped. See
    /// [`Tokenizer::with_unescape`] for more details.
    pub unescape: bool,
    /// Controls if the parser expects a semi-colon token
    /// between statements. Default is `true`.
    pub require_semicolon_stmt_delimiter: bool,
}

impl Default for ParserOptions {
    fn default() -> Self {
        Self {
            trailing_commas: false,
            unescape: true,
            require_semicolon_stmt_delimiter: true,
        }
    }
}

impl ParserOptions {
    /// Create a new [`ParserOptions`]
    pub fn new() -> Self {
        Default::default()
    }

    /// Set if trailing commas are allowed.
    ///
    /// If this option is `false` (the default), the following SQL will
    /// not parse. If the option is `true`, the SQL will parse.
    ///
    /// ```sql
    ///  SELECT
    ///   foo,
    ///   bar,
    ///  FROM baz
    /// ```
    pub fn with_trailing_commas(mut self, trailing_commas: bool) -> Self {
        self.trailing_commas = trailing_commas;
        self
    }

    /// Set if literal values are unescaped. Defaults to true. See
    /// [`Tokenizer::with_unescape`] for more details.
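    ///
    /// A minimal sketch (assumes [`GenericDialect`]; only the statement count is
    /// asserted, since the effect of unescaping depends on the literal):
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError, ParserOptions}, dialect::GenericDialect};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let options = ParserOptions::new().with_unescape(false);
    /// let statements = Parser::new(&dialect)
    ///   .with_options(options)
    ///   .try_with_sql(r"SELECT 'tab:\t'")?
    ///   .parse_statements()?;
    /// assert_eq!(statements.len(), 1);
    /// # Ok(())
    /// # }
    /// ```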
    pub fn with_unescape(mut self, unescape: bool) -> Self {
        self.unescape = unescape;
        self
    }
}

#[derive(Copy, Clone)]
enum ParserState {
    /// The default state of the parser.
    Normal,
    /// The state when parsing a CONNECT BY expression. This allows parsing
    /// PRIOR expressions while still allowing prior as an identifier name
    /// in other contexts.
    ConnectBy,
    /// The state when parsing column definitions.  This state prohibits
    /// NOT NULL as an alias for IS NOT NULL.  For example:
    /// ```sql
    /// CREATE TABLE foo (abc BIGINT NOT NULL);
    /// ```
    ColumnDefinition,
}

/// A SQL Parser
///
/// This struct is the main entry point for parsing SQL queries.
///
/// # Functionality
/// * Parsing SQL: see examples on [`Parser::new`] and [`Parser::parse_sql`]
/// * Controlling recursion: See [`Parser::with_recursion_limit`]
/// * Controlling parser options: See [`Parser::with_options`]
/// * Providing your own tokens: See [`Parser::with_tokens`]
///
/// # Internals
///
/// The parser uses a [`Tokenizer`] to tokenize the input SQL string into a
/// `Vec` of [`TokenWithSpan`]s and maintains an `index` to the current token
/// being processed. The token vec may contain multiple SQL statements.
///
/// * The "current" token is the token at `index - 1`
/// * The "next" token is the token at `index`
/// * The "previous" token is the token at `index - 2`
///
/// If `index` is equal to the length of the token stream, the 'next' token is
/// [`Token::EOF`].
///
/// For example, the SQL string "SELECT * FROM foo" will be tokenized into the
/// following tokens:
/// ```text
///  [
///    "SELECT", // token index 0
///    " ",      // whitespace
///    "*",
///    " ",
///    "FROM",
///    " ",
///    "foo"
///   ]
/// ```
pub struct Parser<'a> {
    /// The tokens
    tokens: Vec<TokenWithSpan>,
    /// The index of the first unprocessed token in [`Parser::tokens`].
    index: usize,
    /// The current state of the parser.
    state: ParserState,
    /// The SQL dialect to use.
    dialect: &'a dyn Dialect,
    /// Additional options that allow you to mix & match behavior
    /// otherwise constrained to certain dialects (e.g. trailing
    /// commas) and/or how literals are parsed (e.g. unescaping).
    options: ParserOptions,
    /// Ensures the stack does not overflow by limiting recursion depth.
    recursion_counter: RecursionCounter,
}

impl<'a> Parser<'a> {
    /// Create a parser for a [`Dialect`]
    ///
    /// See also [`Parser::parse_sql`]
    ///
    /// Example:
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let statements = Parser::new(&dialect)
    ///   .try_with_sql("SELECT * FROM foo")?
    ///   .parse_statements()?;
    /// # Ok(())
    /// # }
    /// ```
    pub fn new(dialect: &'a dyn Dialect) -> Self {
        Self {
            tokens: vec![],
            index: 0,
            state: ParserState::Normal,
            dialect,
            recursion_counter: RecursionCounter::new(DEFAULT_REMAINING_DEPTH),
            options: ParserOptions::new().with_trailing_commas(dialect.supports_trailing_commas()),
        }
    }

    /// Specify the maximum recursion limit while parsing.
    ///
    /// [`Parser`] prevents stack overflows by returning
    /// [`ParserError::RecursionLimitExceeded`] if the parser exceeds
    /// this depth while processing the query.
    ///
    /// Example:
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let result = Parser::new(&dialect)
    ///   .with_recursion_limit(1)
    ///   .try_with_sql("SELECT * FROM foo WHERE (a OR (b OR (c OR d)))")?
    ///   .parse_statements();
    ///   assert_eq!(result, Err(ParserError::RecursionLimitExceeded));
    /// # Ok(())
    /// # }
    /// ```
    ///
    /// Note: when the "recursive-protection" feature is enabled, this crate uses additional stack overflow protection
    /// for some of its recursive methods. See [`recursive::recursive`] for more information.
    pub fn with_recursion_limit(mut self, recursion_limit: usize) -> Self {
        self.recursion_counter = RecursionCounter::new(recursion_limit);
        self
    }

    /// Specify additional parser options
    ///
    /// [`Parser`] supports additional options ([`ParserOptions`])
    /// that allow you to mix & match behavior otherwise constrained
    /// to certain dialects (e.g. trailing commas).
    ///
    /// Example:
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError, ParserOptions}, dialect::GenericDialect};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let options = ParserOptions::new()
    ///    .with_trailing_commas(true)
    ///    .with_unescape(false);
    /// let result = Parser::new(&dialect)
    ///   .with_options(options)
    ///   .try_with_sql("SELECT a, b, COUNT(*), FROM foo GROUP BY a, b,")?
    ///   .parse_statements();
    ///   assert!(matches!(result, Ok(_)));
    /// # Ok(())
    /// # }
    /// ```
    pub fn with_options(mut self, options: ParserOptions) -> Self {
        self.options = options;
        self
    }

    /// Reset this parser to parse the specified token stream
    pub fn with_tokens_with_locations(mut self, tokens: Vec<TokenWithSpan>) -> Self {
        self.tokens = tokens;
        self.index = 0;
        self
    }

    /// Reset this parser state to parse the specified tokens
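    ///
    /// A minimal sketch (the tokens here are produced by the bundled [`Tokenizer`],
    /// but any token stream can be supplied):
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect, tokenizer::Tokenizer};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let tokens = Tokenizer::new(&dialect, "SELECT * FROM foo").tokenize()?;
    /// let statements = Parser::new(&dialect)
    ///   .with_tokens(tokens)
    ///   .parse_statements()?;
    /// assert_eq!(statements.len(), 1);
    /// # Ok(())
    /// # }
    /// ```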
    pub fn with_tokens(self, tokens: Vec<Token>) -> Self {
        // Put in dummy locations
        let tokens_with_locations: Vec<TokenWithSpan> = tokens
            .into_iter()
            .map(|token| TokenWithSpan {
                token,
                span: Span::empty(),
            })
            .collect();
        self.with_tokens_with_locations(tokens_with_locations)
    }

    /// Tokenize the SQL string and set this [`Parser`]'s state to
    /// parse the resulting tokens.
    ///
    /// Returns an error if there was an error tokenizing the SQL string.
    ///
    /// See the example on [`Parser::new()`].
    pub fn try_with_sql(self, sql: &str) -> Result<Self, ParserError> {
        debug!("Parsing sql '{sql}'...");
        let tokens = Tokenizer::new(self.dialect, sql)
            .with_unescape(self.options.unescape)
            .tokenize_with_location()?;
        Ok(self.with_tokens_with_locations(tokens))
    }

    /// Parse potentially multiple statements
    ///
    /// Example
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let statements = Parser::new(&dialect)
    ///   // Parse a SQL string with 2 separate statements
    ///   .try_with_sql("SELECT * FROM foo; SELECT * FROM bar;")?
    ///   .parse_statements()?;
    /// assert_eq!(statements.len(), 2);
    /// # Ok(())
    /// # }
    /// ```
    pub fn parse_statements(&mut self) -> Result<Vec<Statement>, ParserError> {
        let mut stmts = Vec::new();
        let mut expecting_statement_delimiter = false;
        loop {
            // ignore empty statements (between successive statement delimiters)
            while self.consume_token(&Token::SemiColon) {
                expecting_statement_delimiter = false;
            }

            if !self.options.require_semicolon_stmt_delimiter {
                expecting_statement_delimiter = false;
            }

            match self.peek_token().token {
                Token::EOF => break,

                // end of statement
                Token::Word(word) => {
                    if expecting_statement_delimiter && word.keyword == Keyword::END {
                        break;
                    }
                }
                _ => {}
            }

            if expecting_statement_delimiter {
                return self.expected("end of statement", self.peek_token());
            }

            let statement = self.parse_statement()?;
            stmts.push(statement);
            expecting_statement_delimiter = true;
        }
        Ok(stmts)
    }

    /// Convenience method to parse a string with one or more SQL
    /// statements into an Abstract Syntax Tree (AST).
    ///
    /// Example
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let statements = Parser::parse_sql(
    ///   &dialect, "SELECT * FROM foo"
    /// )?;
    /// assert_eq!(statements.len(), 1);
    /// # Ok(())
    /// # }
    /// ```
    pub fn parse_sql(dialect: &dyn Dialect, sql: &str) -> Result<Vec<Statement>, ParserError> {
        Parser::new(dialect).try_with_sql(sql)?.parse_statements()
    }

    /// Parse a single top-level statement (such as SELECT, INSERT, CREATE, etc.),
    /// stopping before the statement separator, if any.
    pub fn parse_statement(&mut self) -> Result<Statement, ParserError> {
        let _guard = self.recursion_counter.try_decrease()?;

        // allow the dialect to override statement parsing
        if let Some(statement) = self.dialect.parse_statement(self) {
            return statement;
        }

        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::KILL => self.parse_kill(),
                Keyword::FLUSH => self.parse_flush(),
                Keyword::DESC => self.parse_explain(DescribeAlias::Desc),
                Keyword::DESCRIBE => self.parse_explain(DescribeAlias::Describe),
                Keyword::EXPLAIN => self.parse_explain(DescribeAlias::Explain),
                Keyword::ANALYZE => self.parse_analyze(),
                Keyword::CASE => {
                    self.prev_token();
                    self.parse_case_stmt()
                }
                Keyword::IF => {
                    self.prev_token();
                    self.parse_if_stmt()
                }
                Keyword::WHILE => {
                    self.prev_token();
                    self.parse_while()
                }
                Keyword::RAISE => {
                    self.prev_token();
                    self.parse_raise_stmt()
                }
                Keyword::SELECT | Keyword::WITH | Keyword::VALUES | Keyword::FROM => {
                    self.prev_token();
                    self.parse_query().map(Statement::Query)
                }
                Keyword::TRUNCATE => self.parse_truncate(),
                Keyword::ATTACH => {
                    if dialect_of!(self is DuckDbDialect) {
                        self.parse_attach_duckdb_database()
                    } else {
                        self.parse_attach_database()
                    }
                }
                Keyword::DETACH if dialect_of!(self is DuckDbDialect | GenericDialect) => {
                    self.parse_detach_duckdb_database()
                }
                Keyword::MSCK => self.parse_msck(),
                Keyword::CREATE => self.parse_create(),
                Keyword::CACHE => self.parse_cache_table(),
                Keyword::DROP => self.parse_drop(),
                Keyword::DISCARD => self.parse_discard(),
                Keyword::DECLARE => self.parse_declare(),
                Keyword::FETCH => self.parse_fetch_statement(),
                Keyword::DELETE => self.parse_delete(),
                Keyword::INSERT => self.parse_insert(),
                Keyword::REPLACE => self.parse_replace(),
                Keyword::UNCACHE => self.parse_uncache_table(),
                Keyword::UPDATE => self.parse_update(),
                Keyword::ALTER => self.parse_alter(),
                Keyword::CALL => self.parse_call(),
                Keyword::COPY => self.parse_copy(),
                Keyword::OPEN => {
                    self.prev_token();
                    self.parse_open()
                }
                Keyword::CLOSE => self.parse_close(),
                Keyword::SET => self.parse_set(),
                Keyword::SHOW => self.parse_show(),
                Keyword::USE => self.parse_use(),
                Keyword::GRANT => self.parse_grant(),
                Keyword::DENY => {
                    self.prev_token();
                    self.parse_deny()
                }
                Keyword::REVOKE => self.parse_revoke(),
                Keyword::START => self.parse_start_transaction(),
                Keyword::BEGIN => self.parse_begin(),
                Keyword::END => self.parse_end(),
                Keyword::SAVEPOINT => self.parse_savepoint(),
                Keyword::RELEASE => self.parse_release(),
                Keyword::COMMIT => self.parse_commit(),
                Keyword::RAISERROR => Ok(self.parse_raiserror()?),
                Keyword::ROLLBACK => self.parse_rollback(),
                Keyword::ASSERT => self.parse_assert(),
                // `PREPARE`, `EXECUTE` and `DEALLOCATE` are Postgres-specific
                // syntaxes. They are used for Postgres prepared statements.
                Keyword::DEALLOCATE => self.parse_deallocate(),
                Keyword::EXECUTE | Keyword::EXEC => self.parse_execute(),
                Keyword::PREPARE => self.parse_prepare(),
                Keyword::MERGE => self.parse_merge(),
                // `LISTEN`, `UNLISTEN` and `NOTIFY` are Postgres-specific
                // syntaxes used for asynchronous notification.
                Keyword::LISTEN if self.dialect.supports_listen_notify() => self.parse_listen(),
                Keyword::UNLISTEN if self.dialect.supports_listen_notify() => self.parse_unlisten(),
                Keyword::NOTIFY if self.dialect.supports_listen_notify() => self.parse_notify(),
                // `PRAGMA` is sqlite specific https://www.sqlite.org/pragma.html
                Keyword::PRAGMA => self.parse_pragma(),
                Keyword::UNLOAD => {
                    self.prev_token();
                    self.parse_unload()
                }
                Keyword::RENAME => self.parse_rename(),
                // `INSTALL` is duckdb specific https://duckdb.org/docs/extensions/overview
                Keyword::INSTALL if dialect_of!(self is DuckDbDialect | GenericDialect) => {
                    self.parse_install()
                }
                Keyword::LOAD => self.parse_load(),
                // `OPTIMIZE` is clickhouse specific https://clickhouse.tech/docs/en/sql-reference/statements/optimize/
                Keyword::OPTIMIZE if dialect_of!(self is ClickHouseDialect | GenericDialect) => {
                    self.parse_optimize_table()
                }
                // `COMMENT` is snowflake specific https://docs.snowflake.com/en/sql-reference/sql/comment
                Keyword::COMMENT if self.dialect.supports_comment_on() => self.parse_comment(),
                Keyword::PRINT => self.parse_print(),
                Keyword::RETURN => self.parse_return(),
                Keyword::EXPORT => {
                    self.prev_token();
                    self.parse_export_data()
                }
                Keyword::VACUUM => {
                    self.prev_token();
                    self.parse_vacuum()
                }
                _ => self.expected("an SQL statement", next_token),
            },
            Token::LParen => {
                self.prev_token();
                self.parse_query().map(Statement::Query)
            }
            _ => self.expected("an SQL statement", next_token),
        }
    }

    /// Parse a `CASE` statement.
    ///
    /// See [Statement::Case]
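    ///
    /// A typical form (illustrative; the exact variants accepted depend on the dialect):
    /// ```sql
    /// CASE
    ///   WHEN x = 1 THEN SELECT 1;
    ///   ELSE SELECT 2;
    /// END CASE;
    /// ```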
    pub fn parse_case_stmt(&mut self) -> Result<Statement, ParserError> {
        let case_token = self.expect_keyword(Keyword::CASE)?;

        let match_expr = if self.peek_keyword(Keyword::WHEN) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        self.expect_keyword_is(Keyword::WHEN)?;
        let when_blocks = self.parse_keyword_separated(Keyword::WHEN, |parser| {
            parser.parse_conditional_statement_block(&[Keyword::WHEN, Keyword::ELSE, Keyword::END])
        })?;

        let else_block = if self.parse_keyword(Keyword::ELSE) {
            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
        } else {
            None
        };

        let mut end_case_token = self.expect_keyword(Keyword::END)?;
        if self.peek_keyword(Keyword::CASE) {
            end_case_token = self.expect_keyword(Keyword::CASE)?;
        }

        Ok(Statement::Case(CaseStatement {
            case_token: AttachedToken(case_token),
            match_expr,
            when_blocks,
            else_block,
            end_case_token: AttachedToken(end_case_token),
        }))
    }

    /// Parse an `IF` statement.
    ///
    /// See [Statement::If]
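    ///
    /// A typical form (illustrative; dialects differ in which variants they accept):
    /// ```sql
    /// IF x = 1 THEN
    ///   SELECT 1;
    /// ELSEIF x = 2 THEN
    ///   SELECT 2;
    /// ELSE
    ///   SELECT 3;
    /// END IF;
    /// ```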
    pub fn parse_if_stmt(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword_is(Keyword::IF)?;
        let if_block = self.parse_conditional_statement_block(&[
            Keyword::ELSE,
            Keyword::ELSEIF,
            Keyword::END,
        ])?;

        let elseif_blocks = if self.parse_keyword(Keyword::ELSEIF) {
            self.parse_keyword_separated(Keyword::ELSEIF, |parser| {
                parser.parse_conditional_statement_block(&[
                    Keyword::ELSEIF,
                    Keyword::ELSE,
                    Keyword::END,
                ])
            })?
        } else {
            vec![]
        };

        let else_block = if self.parse_keyword(Keyword::ELSE) {
            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::END)?;
        let end_token = self.expect_keyword(Keyword::IF)?;

        Ok(Statement::If(IfStatement {
            if_block,
            elseif_blocks,
            else_block,
            end_token: Some(AttachedToken(end_token)),
        }))
    }

    /// Parse a `WHILE` statement.
    ///
    /// See [Statement::While]
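    ///
    /// A typical form (illustrative, T-SQL style):
    /// ```sql
    /// WHILE @counter < 10 BEGIN
    ///   SET @counter = @counter + 1;
    /// END
    /// ```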
    fn parse_while(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword_is(Keyword::WHILE)?;
        let while_block = self.parse_conditional_statement_block(&[Keyword::END])?;

        Ok(Statement::While(WhileStatement { while_block }))
    }

    /// Parses an expression and associated list of statements
    /// belonging to a conditional statement like `IF` or `WHEN` or `WHILE`.
    ///
    /// Example:
    /// ```sql
    /// IF condition THEN statement1; statement2;
    /// ```
    fn parse_conditional_statement_block(
        &mut self,
        terminal_keywords: &[Keyword],
    ) -> Result<ConditionalStatementBlock, ParserError> {
        let start_token = self.get_current_token().clone(); // self.expect_keyword(keyword)?;
        let mut then_token = None;

        let condition = match &start_token.token {
            Token::Word(w) if w.keyword == Keyword::ELSE => None,
            Token::Word(w) if w.keyword == Keyword::WHILE => {
                let expr = self.parse_expr()?;
                Some(expr)
            }
            _ => {
                let expr = self.parse_expr()?;
                then_token = Some(AttachedToken(self.expect_keyword(Keyword::THEN)?));
                Some(expr)
            }
        };

        let conditional_statements = self.parse_conditional_statements(terminal_keywords)?;

        Ok(ConditionalStatementBlock {
            start_token: AttachedToken(start_token),
            condition,
            then_token,
            conditional_statements,
        })
    }

    /// Parse a BEGIN/END block or a sequence of statements.
    /// This could be inside a conditional (IF, CASE, WHILE, etc.) or an object body defined by an optional BEGIN/END and one or more statements.
    pub(crate) fn parse_conditional_statements(
        &mut self,
        terminal_keywords: &[Keyword],
    ) -> Result<ConditionalStatements, ParserError> {
        let conditional_statements = if self.peek_keyword(Keyword::BEGIN) {
            let begin_token = self.expect_keyword(Keyword::BEGIN)?;
            let statements = self.parse_statement_list(terminal_keywords)?;
            let end_token = self.expect_keyword(Keyword::END)?;

            ConditionalStatements::BeginEnd(BeginEndStatements {
                begin_token: AttachedToken(begin_token),
                statements,
                end_token: AttachedToken(end_token),
            })
        } else {
            ConditionalStatements::Sequence {
                statements: self.parse_statement_list(terminal_keywords)?,
            }
        };
        Ok(conditional_statements)
    }

    /// Parse a `RAISE` statement.
    ///
    /// See [Statement::Raise]
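    ///
    /// A typical form (illustrative):
    /// ```sql
    /// RAISE USING MESSAGE = 'error message';
    /// ```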
    pub fn parse_raise_stmt(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword_is(Keyword::RAISE)?;

        let value = if self.parse_keywords(&[Keyword::USING, Keyword::MESSAGE]) {
            self.expect_token(&Token::Eq)?;
            Some(RaiseStatementValue::UsingMessage(self.parse_expr()?))
        } else {
            self.maybe_parse(|parser| parser.parse_expr().map(RaiseStatementValue::Expr))?
        };

        Ok(Statement::Raise(RaiseStatement { value }))
    }

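    /// Parse a `COMMENT ON ...` statement, e.g. `COMMENT ON TABLE foo IS 'a table'`.
    ///
    /// See [Statement::Comment]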
    pub fn parse_comment(&mut self) -> Result<Statement, ParserError> {
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);

        self.expect_keyword_is(Keyword::ON)?;
        let token = self.next_token();

        let (object_type, object_name) = match token.token {
            Token::Word(w) if w.keyword == Keyword::COLUMN => {
                (CommentObject::Column, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::TABLE => {
                (CommentObject::Table, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::EXTENSION => {
                (CommentObject::Extension, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::SCHEMA => {
                (CommentObject::Schema, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::DATABASE => {
                (CommentObject::Database, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::USER => {
                (CommentObject::User, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::ROLE => {
                (CommentObject::Role, self.parse_object_name(false)?)
            }
            _ => self.expected("comment object_type", token)?,
        };

        self.expect_keyword_is(Keyword::IS)?;
        let comment = if self.parse_keyword(Keyword::NULL) {
            None
        } else {
            Some(self.parse_literal_string()?)
        };
        Ok(Statement::Comment {
            object_type,
            object_name,
            comment,
            if_exists,
        })
    }

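    /// Parse a MySQL `FLUSH` statement, e.g. `FLUSH TABLES WITH READ LOCK`.
    ///
    /// See [Statement::Flush]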
    pub fn parse_flush(&mut self) -> Result<Statement, ParserError> {
        let mut channel = None;
        let mut tables: Vec<ObjectName> = vec![];
        let mut read_lock = false;
        let mut export = false;

        if !dialect_of!(self is MySqlDialect | GenericDialect) {
            return parser_err!("Unsupported statement FLUSH", self.peek_token().span.start);
        }

        let location = if self.parse_keyword(Keyword::NO_WRITE_TO_BINLOG) {
            Some(FlushLocation::NoWriteToBinlog)
        } else if self.parse_keyword(Keyword::LOCAL) {
            Some(FlushLocation::Local)
        } else {
            None
        };

        let object_type = if self.parse_keywords(&[Keyword::BINARY, Keyword::LOGS]) {
            FlushType::BinaryLogs
        } else if self.parse_keywords(&[Keyword::ENGINE, Keyword::LOGS]) {
            FlushType::EngineLogs
        } else if self.parse_keywords(&[Keyword::ERROR, Keyword::LOGS]) {
            FlushType::ErrorLogs
        } else if self.parse_keywords(&[Keyword::GENERAL, Keyword::LOGS]) {
            FlushType::GeneralLogs
        } else if self.parse_keywords(&[Keyword::HOSTS]) {
            FlushType::Hosts
        } else if self.parse_keyword(Keyword::PRIVILEGES) {
            FlushType::Privileges
        } else if self.parse_keyword(Keyword::OPTIMIZER_COSTS) {
            FlushType::OptimizerCosts
        } else if self.parse_keywords(&[Keyword::RELAY, Keyword::LOGS]) {
            if self.parse_keywords(&[Keyword::FOR, Keyword::CHANNEL]) {
                channel = Some(self.parse_object_name(false).unwrap().to_string());
            }
            FlushType::RelayLogs
        } else if self.parse_keywords(&[Keyword::SLOW, Keyword::LOGS]) {
            FlushType::SlowLogs
        } else if self.parse_keyword(Keyword::STATUS) {
            FlushType::Status
        } else if self.parse_keyword(Keyword::USER_RESOURCES) {
            FlushType::UserResources
        } else if self.parse_keywords(&[Keyword::LOGS]) {
            FlushType::Logs
        } else if self.parse_keywords(&[Keyword::TABLES]) {
            loop {
                let next_token = self.next_token();
                match &next_token.token {
                    Token::Word(w) => match w.keyword {
                        Keyword::WITH => {
                            read_lock = self.parse_keywords(&[Keyword::READ, Keyword::LOCK]);
                        }
                        Keyword::FOR => {
                            export = self.parse_keyword(Keyword::EXPORT);
                        }
                        Keyword::NoKeyword => {
                            self.prev_token();
                            tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
                        }
                        _ => {}
                    },
                    _ => {
                        break;
                    }
                }
            }

            FlushType::Tables
        } else {
            return self.expected(
                "BINARY LOGS, ENGINE LOGS, ERROR LOGS, GENERAL LOGS, HOSTS, LOGS, PRIVILEGES, OPTIMIZER_COSTS,\
                 RELAY LOGS [FOR CHANNEL channel], SLOW LOGS, STATUS, USER_RESOURCES",
                self.peek_token(),
            );
        };

        Ok(Statement::Flush {
            object_type,
            location,
            channel,
            read_lock,
            export,
            tables,
        })
    }

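    /// Parse a Hive `MSCK [REPAIR] TABLE` statement.
    ///
    /// See [Statement::Msck]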
    pub fn parse_msck(&mut self) -> Result<Statement, ParserError> {
        let repair = self.parse_keyword(Keyword::REPAIR);
        self.expect_keyword_is(Keyword::TABLE)?;
        let table_name = self.parse_object_name(false)?;
        let partition_action = self
            .maybe_parse(|parser| {
                let pa = match parser.parse_one_of_keywords(&[
                    Keyword::ADD,
                    Keyword::DROP,
                    Keyword::SYNC,
                ]) {
                    Some(Keyword::ADD) => Some(AddDropSync::ADD),
                    Some(Keyword::DROP) => Some(AddDropSync::DROP),
                    Some(Keyword::SYNC) => Some(AddDropSync::SYNC),
                    _ => None,
                };
                parser.expect_keyword_is(Keyword::PARTITIONS)?;
                Ok(pa)
            })?
            .unwrap_or_default();
        Ok(Statement::Msck {
            repair,
            table_name,
            partition_action,
        })
    }

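    /// Parse a `TRUNCATE [TABLE]` statement, e.g. `TRUNCATE TABLE foo`.
    ///
    /// See [Statement::Truncate]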
    pub fn parse_truncate(&mut self) -> Result<Statement, ParserError> {
        let table = self.parse_keyword(Keyword::TABLE);

        let table_names = self
            .parse_comma_separated(|p| {
                Ok((p.parse_keyword(Keyword::ONLY), p.parse_object_name(false)?))
            })?
            .into_iter()
            .map(|(only, name)| TruncateTableTarget { name, only })
            .collect();

        let mut partitions = None;
        if self.parse_keyword(Keyword::PARTITION) {
            self.expect_token(&Token::LParen)?;
            partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
            self.expect_token(&Token::RParen)?;
        }

        let mut identity = None;
        let mut cascade = None;

        if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
            identity = if self.parse_keywords(&[Keyword::RESTART, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Restart)
            } else if self.parse_keywords(&[Keyword::CONTINUE, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Continue)
            } else {
                None
            };

            cascade = self.parse_cascade_option();
        };

        let on_cluster = self.parse_optional_on_cluster()?;

        Ok(Statement::Truncate {
            table_names,
            partitions,
            table,
            identity,
            cascade,
            on_cluster,
        })
    }

    fn parse_cascade_option(&mut self) -> Option<CascadeOption> {
        if self.parse_keyword(Keyword::CASCADE) {
            Some(CascadeOption::Cascade)
        } else if self.parse_keyword(Keyword::RESTRICT) {
            Some(CascadeOption::Restrict)
        } else {
            None
        }
    }

    pub fn parse_attach_duckdb_database_options(
        &mut self,
    ) -> Result<Vec<AttachDuckDBDatabaseOption>, ParserError> {
        if !self.consume_token(&Token::LParen) {
            return Ok(vec![]);
        }

        let mut options = vec![];
        loop {
            if self.parse_keyword(Keyword::READ_ONLY) {
                let boolean = if self.parse_keyword(Keyword::TRUE) {
                    Some(true)
                } else if self.parse_keyword(Keyword::FALSE) {
                    Some(false)
                } else {
                    None
                };
                options.push(AttachDuckDBDatabaseOption::ReadOnly(boolean));
            } else if self.parse_keyword(Keyword::TYPE) {
                let ident = self.parse_identifier()?;
                options.push(AttachDuckDBDatabaseOption::Type(ident));
            } else {
                return self.expected("expected one of: ), READ_ONLY, TYPE", self.peek_token());
            };

            if self.consume_token(&Token::RParen) {
                return Ok(options);
            } else if self.consume_token(&Token::Comma) {
                continue;
            } else {
                return self.expected("expected one of: ')', ','", self.peek_token());
            }
        }
    }

    pub fn parse_attach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let database_path = self.parse_identifier()?;
        let database_alias = if self.parse_keyword(Keyword::AS) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        let attach_options = self.parse_attach_duckdb_database_options()?;
        Ok(Statement::AttachDuckDBDatabase {
            if_not_exists,
            database,
            database_path,
            database_alias,
            attach_options,
        })
    }

    pub fn parse_detach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let database_alias = self.parse_identifier()?;
        Ok(Statement::DetachDuckDBDatabase {
            if_exists,
            database,
            database_alias,
        })
    }

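    /// Parse a SQLite-style `ATTACH [DATABASE] <expr> AS <schema>` statement.
    ///
    /// See [Statement::AttachDatabase]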
    pub fn parse_attach_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let database_file_name = self.parse_expr()?;
        self.expect_keyword_is(Keyword::AS)?;
        let schema_name = self.parse_identifier()?;
        Ok(Statement::AttachDatabase {
            database,
            schema_name,
            database_file_name,
        })
    }

    pub fn parse_analyze(&mut self) -> Result<Statement, ParserError> {
        let has_table_keyword = self.parse_keyword(Keyword::TABLE);
        let table_name = self.parse_object_name(false)?;
        let mut for_columns = false;
        let mut cache_metadata = false;
        let mut noscan = false;
        let mut partitions = None;
        let mut compute_statistics = false;
        let mut columns = vec![];
        loop {
            match self.parse_one_of_keywords(&[
                Keyword::PARTITION,
                Keyword::FOR,
                Keyword::CACHE,
                Keyword::NOSCAN,
                Keyword::COMPUTE,
            ]) {
                Some(Keyword::PARTITION) => {
                    self.expect_token(&Token::LParen)?;
                    partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
                    self.expect_token(&Token::RParen)?;
                }
                Some(Keyword::NOSCAN) => noscan = true,
                Some(Keyword::FOR) => {
                    self.expect_keyword_is(Keyword::COLUMNS)?;

                    columns = self
                        .maybe_parse(|parser| {
                            parser.parse_comma_separated(|p| p.parse_identifier())
                        })?
                        .unwrap_or_default();
                    for_columns = true
                }
                Some(Keyword::CACHE) => {
                    self.expect_keyword_is(Keyword::METADATA)?;
                    cache_metadata = true
                }
                Some(Keyword::COMPUTE) => {
                    self.expect_keyword_is(Keyword::STATISTICS)?;
                    compute_statistics = true
                }
                _ => break,
            }
        }

        Ok(Statement::Analyze {
            has_table_keyword,
            table_name,
            for_columns,
            columns,
            partitions,
            cache_metadata,
            noscan,
            compute_statistics,
        })
    }

    /// Parse a new expression including wildcard & qualified wildcard.
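    /// For example, in a projection this accepts `*`, `alias.*`, or an
    /// ordinary expression such as `a + b`.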
    pub fn parse_wildcard_expr(&mut self) -> Result<Expr, ParserError> {
        let index = self.index;

        let next_token = self.next_token();
        match next_token.token {
            t @ (Token::Word(_) | Token::SingleQuotedString(_)) => {
                if self.peek_token().token == Token::Period {
                    let mut id_parts: Vec<Ident> = vec![match t {
                        Token::Word(w) => w.into_ident(next_token.span),
                        Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
                        _ => unreachable!(), // We matched above
                    }];

                    while self.consume_token(&Token::Period) {
                        let next_token = self.next_token();
                        match next_token.token {
                            Token::Word(w) => id_parts.push(w.into_ident(next_token.span)),
                            Token::SingleQuotedString(s) => {
                                // SQLite has single-quoted identifiers
                                id_parts.push(Ident::with_quote('\'', s))
                            }
                            Token::Mul => {
                                return Ok(Expr::QualifiedWildcard(
                                    ObjectName::from(id_parts),
                                    AttachedToken(next_token),
                                ));
                            }
                            _ => {
                                return self
                                    .expected("an identifier or a '*' after '.'", next_token);
                            }
                        }
                    }
                }
            }
            Token::Mul => {
                return Ok(Expr::Wildcard(AttachedToken(next_token)));
            }
            _ => (),
        };

        self.index = index;
        self.parse_expr()
    }

    /// Parse a new expression.
    pub fn parse_expr(&mut self) -> Result<Expr, ParserError> {
        self.parse_subexpr(self.dialect.prec_unknown())
    }

    pub fn parse_expr_with_alias_and_order_by(
        &mut self,
    ) -> Result<ExprWithAliasAndOrderBy, ParserError> {
        let expr = self.parse_expr()?;

        fn validator(explicit: bool, kw: &Keyword, _parser: &mut Parser) -> bool {
            explicit || !&[Keyword::ASC, Keyword::DESC, Keyword::GROUP].contains(kw)
        }
        let alias = self.parse_optional_alias_inner(None, validator)?;
        let order_by = OrderByOptions {
            asc: self.parse_asc_desc(),
            nulls_first: None,
        };
        Ok(ExprWithAliasAndOrderBy {
            expr: ExprWithAlias { expr, alias },
            order_by,
        })
    }

    /// Parse tokens until the precedence changes.
    pub fn parse_subexpr(&mut self, precedence: u8) -> Result<Expr, ParserError> {
        let _guard = self.recursion_counter.try_decrease()?;
        debug!("parsing expr");
        let mut expr = self.parse_prefix()?;

        expr = self.parse_compound_expr(expr, vec![])?;

        debug!("prefix: {expr:?}");
        loop {
            let next_precedence = self.get_next_precedence()?;
            debug!("next precedence: {next_precedence:?}");

            if precedence >= next_precedence {
                break;
            }

            // The period operator is handled exclusively by the
            // compound field access parsing.
            if Token::Period == self.peek_token_ref().token {
                break;
            }

            expr = self.parse_infix(expr, next_precedence)?;
        }
        Ok(expr)
    }

    pub fn parse_assert(&mut self) -> Result<Statement, ParserError> {
        let condition = self.parse_expr()?;
        let message = if self.parse_keyword(Keyword::AS) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        Ok(Statement::Assert { condition, message })
    }

    pub fn parse_savepoint(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_identifier()?;
        Ok(Statement::Savepoint { name })
    }

    pub fn parse_release(&mut self) -> Result<Statement, ParserError> {
        let _ = self.parse_keyword(Keyword::SAVEPOINT);
        let name = self.parse_identifier()?;

        Ok(Statement::ReleaseSavepoint { name })
    }

    pub fn parse_listen(&mut self) -> Result<Statement, ParserError> {
        let channel = self.parse_identifier()?;
        Ok(Statement::LISTEN { channel })
    }

    pub fn parse_unlisten(&mut self) -> Result<Statement, ParserError> {
        let channel = if self.consume_token(&Token::Mul) {
            Ident::new(Expr::Wildcard(AttachedToken::empty()).to_string())
        } else {
            match self.parse_identifier() {
                Ok(expr) => expr,
                _ => {
                    self.prev_token();
                    return self.expected("wildcard or identifier", self.peek_token());
                }
            }
        };
        Ok(Statement::UNLISTEN { channel })
    }

    pub fn parse_notify(&mut self) -> Result<Statement, ParserError> {
        let channel = self.parse_identifier()?;
        let payload = if self.consume_token(&Token::Comma) {
            Some(self.parse_literal_string()?)
        } else {
            None
        };
        Ok(Statement::NOTIFY { channel, payload })
    }

    /// Parses a `RENAME TABLE` statement. See [Statement::RenameTable]
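    ///
    /// A typical form (illustrative):
    /// ```sql
    /// RENAME TABLE old_name TO new_name;
    /// ```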
    pub fn parse_rename(&mut self) -> Result<Statement, ParserError> {
        if self.peek_keyword(Keyword::TABLE) {
            self.expect_keyword(Keyword::TABLE)?;
            let rename_tables = self.parse_comma_separated(|parser| {
                let old_name = parser.parse_object_name(false)?;
                parser.expect_keyword(Keyword::TO)?;
                let new_name = parser.parse_object_name(false)?;

                Ok(RenameTable { old_name, new_name })
            })?;
            Ok(Statement::RenameTable(rename_tables))
        } else {
            self.expected("KEYWORD `TABLE` after RENAME", self.peek_token())
        }
    }

    /// Tries to parse an expression by matching the specified word to known keywords that have a special meaning in the dialect.
    /// Returns `None` if no match is found.
    fn parse_expr_prefix_by_reserved_word(
        &mut self,
        w: &Word,
        w_span: Span,
    ) -> Result<Option<Expr>, ParserError> {
        match w.keyword {
            Keyword::TRUE | Keyword::FALSE if self.dialect.supports_boolean_literals() => {
                self.prev_token();
                Ok(Some(Expr::Value(self.parse_value()?)))
            }
            Keyword::NULL => {
                self.prev_token();
                Ok(Some(Expr::Value(self.parse_value()?)))
            }
            Keyword::CURRENT_CATALOG
            | Keyword::CURRENT_USER
            | Keyword::SESSION_USER
            | Keyword::USER
            if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
                {
                    Ok(Some(Expr::Function(Function {
                        name: ObjectName::from(vec![w.clone().into_ident(w_span)]),
                        uses_odbc_syntax: false,
                        parameters: FunctionArguments::None,
                        args: FunctionArguments::None,
                        null_treatment: None,
                        filter: None,
                        over: None,
                        within_group: vec![],
                    })))
                }
            Keyword::CURRENT_TIMESTAMP
            | Keyword::CURRENT_TIME
            | Keyword::CURRENT_DATE
            | Keyword::LOCALTIME
            | Keyword::LOCALTIMESTAMP => {
                Ok(Some(self.parse_time_functions(ObjectName::from(vec![w.clone().into_ident(w_span)]))?))
            }
            Keyword::CASE => Ok(Some(self.parse_case_expr()?)),
            Keyword::CONVERT => Ok(Some(self.parse_convert_expr(false)?)),
            Keyword::TRY_CONVERT if self.dialect.supports_try_convert() => Ok(Some(self.parse_convert_expr(true)?)),
            Keyword::CAST => Ok(Some(self.parse_cast_expr(CastKind::Cast)?)),
            Keyword::TRY_CAST => Ok(Some(self.parse_cast_expr(CastKind::TryCast)?)),
            Keyword::SAFE_CAST => Ok(Some(self.parse_cast_expr(CastKind::SafeCast)?)),
1396            Keyword::EXISTS
1397            // Support parsing Databricks has a function named `exists`.
1398            if !dialect_of!(self is DatabricksDialect)
1399                || matches!(
1400                        self.peek_nth_token_ref(1).token,
1401                        Token::Word(Word {
1402                            keyword: Keyword::SELECT | Keyword::WITH,
1403                            ..
1404                        })
1405                    ) =>
1406                {
1407                    Ok(Some(self.parse_exists_expr(false)?))
1408                }
1409            Keyword::EXTRACT => Ok(Some(self.parse_extract_expr()?)),
1410            Keyword::CEIL => Ok(Some(self.parse_ceil_floor_expr(true)?)),
1411            Keyword::FLOOR => Ok(Some(self.parse_ceil_floor_expr(false)?)),
1412            Keyword::POSITION if self.peek_token_ref().token == Token::LParen => {
1413                Ok(Some(self.parse_position_expr(w.clone().into_ident(w_span))?))
1414            }
1415            Keyword::SUBSTR | Keyword::SUBSTRING => {
1416                self.prev_token();
1417                Ok(Some(self.parse_substring()?))
1418            }
1419            Keyword::OVERLAY => Ok(Some(self.parse_overlay_expr()?)),
1420            Keyword::TRIM => Ok(Some(self.parse_trim_expr()?)),
1421            Keyword::INTERVAL => Ok(Some(self.parse_interval()?)),
1422            // Treat ARRAY[1,2,3] as an array [1,2,3], otherwise try as subquery or a function call
1423            Keyword::ARRAY if *self.peek_token_ref() == Token::LBracket => {
1424                self.expect_token(&Token::LBracket)?;
1425                Ok(Some(self.parse_array_expr(true)?))
1426            }
1427            Keyword::ARRAY
1428                if self.peek_token() == Token::LParen
1429                    && !dialect_of!(self is ClickHouseDialect | DatabricksDialect) =>
1430            {
1431                self.expect_token(&Token::LParen)?;
1432                let query = self.parse_query()?;
1433                self.expect_token(&Token::RParen)?;
1434                Ok(Some(Expr::Function(Function {
1435                    name: ObjectName::from(vec![w.clone().into_ident(w_span)]),
1436                    uses_odbc_syntax: false,
1437                    parameters: FunctionArguments::None,
1438                    args: FunctionArguments::Subquery(query),
1439                    filter: None,
1440                    null_treatment: None,
1441                    over: None,
1442                    within_group: vec![],
1443                })))
1444            }
1445            Keyword::NOT => Ok(Some(self.parse_not()?)),
1446            Keyword::MATCH if self.dialect.supports_match_against() => {
1447                Ok(Some(self.parse_match_against()?))
1448            }
1449            Keyword::STRUCT if self.dialect.supports_struct_literal() => {
1450                let struct_expr = self.parse_struct_literal()?;
1451                Ok(Some(struct_expr))
1452            }
1453            Keyword::PRIOR if matches!(self.state, ParserState::ConnectBy) => {
1454                let expr = self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?;
1455                Ok(Some(Expr::Prior(Box::new(expr))))
1456            }
1457            Keyword::MAP if *self.peek_token_ref() == Token::LBrace && self.dialect.support_map_literal_syntax() => {
1458                Ok(Some(self.parse_duckdb_map_literal()?))
1459            }
1460            _ if self.dialect.supports_geometric_types() => match w.keyword {
1461                Keyword::CIRCLE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Circle)?)),
1462                Keyword::BOX => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricBox)?)),
1463                Keyword::PATH => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricPath)?)),
1464                Keyword::LINE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Line)?)),
1465                Keyword::LSEG => Ok(Some(self.parse_geometric_type(GeometricTypeKind::LineSegment)?)),
1466                Keyword::POINT => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Point)?)),
1467                Keyword::POLYGON => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Polygon)?)),
1468                _ => Ok(None),
1469            },
1470            _ => Ok(None),
1471        }
1472    }
1473
1474    /// Tries to parse an expression by a word that is not known to have a special meaning in the dialect.
1475    fn parse_expr_prefix_by_unreserved_word(
1476        &mut self,
1477        w: &Word,
1478        w_span: Span,
1479    ) -> Result<Expr, ParserError> {
1480        match self.peek_token().token {
1481            Token::LParen if !self.peek_outer_join_operator() => {
1482                let id_parts = vec![w.clone().into_ident(w_span)];
1483                self.parse_function(ObjectName::from(id_parts))
1484            }
1485            // string introducer https://dev.mysql.com/doc/refman/8.0/en/charset-introducer.html
1486            Token::SingleQuotedString(_)
1487            | Token::DoubleQuotedString(_)
1488            | Token::HexStringLiteral(_)
1489                if w.value.starts_with('_') =>
1490            {
1491                Ok(Expr::Prefixed {
1492                    prefix: w.clone().into_ident(w_span),
1493                    value: self.parse_introduced_string_expr()?.into(),
1494                })
1495            }
1507            Token::Arrow if self.dialect.supports_lambda_functions() => {
1508                self.expect_token(&Token::Arrow)?;
1509                Ok(Expr::Lambda(LambdaFunction {
1510                    params: OneOrManyWithParens::One(w.clone().into_ident(w_span)),
1511                    body: Box::new(self.parse_expr()?),
1512                }))
1513            }
1514            _ => Ok(Expr::Identifier(w.clone().into_ident(w_span))),
1515        }
1516    }
1517
1518    /// Parse an expression prefix.
1519    pub fn parse_prefix(&mut self) -> Result<Expr, ParserError> {
1520        // allow the dialect to override prefix parsing
1521        if let Some(prefix) = self.dialect.parse_prefix(self) {
1522            return prefix;
1523        }
1524
1525        // PostgreSQL allows any string literal to be preceded by a type name, indicating that the
1526        // string literal represents a literal of that type. Some examples:
1527        //
1528        //      DATE '2020-05-20'
1529        //      TIMESTAMP WITH TIME ZONE '2020-05-20 7:43:54'
1530        //      BOOL 'true'
1531        //
1532        // The first two are standard SQL, while the latter is a PostgreSQL extension. Complicating
1533        // matters is the fact that INTERVAL string literals may optionally be followed by special
1534        // keywords, e.g.:
1535        //
1536        //      INTERVAL '7' DAY
1537        //
1538        // Note also that naively `SELECT date` looks like a syntax error because the `date` type
1539        // name is not followed by a string literal, but in fact in PostgreSQL it is a valid
1540        // expression that should parse as the column name "date".
1541        let loc = self.peek_token_ref().span.start;
1542        let opt_expr = self.maybe_parse(|parser| {
1543            match parser.parse_data_type()? {
1544                DataType::Interval { .. } => parser.parse_interval(),
1545                // PostgreSQL allows almost any identifier to be used as custom data type name,
1546                // and we support that in `parse_data_type()`. But unlike Postgres we don't
1547                // have a list of globally reserved keywords (since they vary across dialects),
1548                // so given `NOT 'a' LIKE 'b'`, we'd accept `NOT` as a possible custom data type
1549                // name, resulting in `NOT 'a'` being recognized as a `TypedString` instead of
1550                // a unary negation `NOT ('a' LIKE 'b')`. To solve this, we don't accept the
1551                // `type 'string'` syntax for the custom data types at all.
1552                DataType::Custom(..) => parser_err!("dummy", loc),
1553                data_type => Ok(Expr::TypedString(TypedString {
1554                    data_type,
1555                    value: parser.parse_value()?,
1556                    uses_odbc_syntax: false,
1557                })),
1558            }
1559        })?;
1560
1561        if let Some(expr) = opt_expr {
1562            return Ok(expr);
1563        }
1564
1565        // Cache some dialect properties to avoid lifetime issues with the
1566        // next_token reference.
1567
1568        let dialect = self.dialect;
1569
1570        self.advance_token();
1571        let next_token_index = self.get_current_index();
1572        let next_token = self.get_current_token();
1573        let span = next_token.span;
1574        let expr = match &next_token.token {
1575            Token::Word(w) => {
1576                // The word we consumed may fall into one of two cases: it has a special meaning, or not.
1577                // For example, in Snowflake, the word `interval` may have two meanings depending on the context:
1578                // `SELECT CURRENT_DATE() + INTERVAL '1 DAY', MAX(interval) FROM tbl;`
1579                //                          ^^^^^^^^^^^^^^^^      ^^^^^^^^
1580                //                         interval expression   identifier
1581                //
1582                // We first try to parse the word and following tokens as a special expression, and if that fails,
1583                // we rollback and try to parse it as an identifier.
1584                let w = w.clone();
1585                match self.try_parse(|parser| parser.parse_expr_prefix_by_reserved_word(&w, span)) {
1586                    // This word indicated an expression prefix and parsing was successful
1587                    Ok(Some(expr)) => Ok(expr),
1588
1589                    // No expression prefix associated with this word
1590                    Ok(None) => Ok(self.parse_expr_prefix_by_unreserved_word(&w, span)?),
1591
1592                    // If parsing of the word as a special expression failed, we are facing two options:
1593                    // 1. The statement is malformed, e.g. `SELECT INTERVAL '1 DAI` (`DAI` instead of `DAY`)
1594                    // 2. The word is used as an identifier, e.g. `SELECT MAX(interval) FROM tbl`
1595                    // We first try to parse the word as an identifier and if that fails
1596                    // we rollback and return the parsing error we got from trying to parse a
1597                    // special expression (to maintain backwards compatibility of parsing errors).
1598                    Err(e) => {
1599                        if !self.dialect.is_reserved_for_identifier(w.keyword) {
1600                            if let Ok(Some(expr)) = self.maybe_parse(|parser| {
1601                                parser.parse_expr_prefix_by_unreserved_word(&w, span)
1602                            }) {
1603                                return Ok(expr);
1604                            }
1605                        }
1606                        return Err(e);
1607                    }
1608                }
1609            } // End of Token::Word
1610            // array `[1, 2, 3]`
1611            Token::LBracket => self.parse_array_expr(false),
1612            tok @ Token::Minus | tok @ Token::Plus => {
1613                let op = if *tok == Token::Plus {
1614                    UnaryOperator::Plus
1615                } else {
1616                    UnaryOperator::Minus
1617                };
1618                Ok(Expr::UnaryOp {
1619                    op,
1620                    expr: Box::new(
1621                        self.parse_subexpr(self.dialect.prec_value(Precedence::MulDivModOp))?,
1622                    ),
1623                })
1624            }
1625            Token::ExclamationMark if dialect.supports_bang_not_operator() => Ok(Expr::UnaryOp {
1626                op: UnaryOperator::BangNot,
1627                expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
1628            }),
1629            tok @ Token::DoubleExclamationMark
1630            | tok @ Token::PGSquareRoot
1631            | tok @ Token::PGCubeRoot
1632            | tok @ Token::AtSign
1633            | tok @ Token::Tilde
1634                if dialect_is!(dialect is PostgreSqlDialect) =>
1635            {
1636                let op = match tok {
1637                    Token::DoubleExclamationMark => UnaryOperator::PGPrefixFactorial,
1638                    Token::PGSquareRoot => UnaryOperator::PGSquareRoot,
1639                    Token::PGCubeRoot => UnaryOperator::PGCubeRoot,
1640                    Token::AtSign => UnaryOperator::PGAbs,
1641                    Token::Tilde => UnaryOperator::PGBitwiseNot,
1642                    _ => unreachable!(),
1643                };
1644                Ok(Expr::UnaryOp {
1645                    op,
1646                    expr: Box::new(
1647                        self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1648                    ),
1649                })
1650            }
1651            tok @ Token::Sharp
1652            | tok @ Token::AtDashAt
1653            | tok @ Token::AtAt
1654            | tok @ Token::QuestionMarkDash
1655            | tok @ Token::QuestionPipe
1656                if self.dialect.supports_geometric_types() =>
1657            {
1658                let op = match tok {
1659                    Token::Sharp => UnaryOperator::Hash,
1660                    Token::AtDashAt => UnaryOperator::AtDashAt,
1661                    Token::AtAt => UnaryOperator::DoubleAt,
1662                    Token::QuestionMarkDash => UnaryOperator::QuestionDash,
1663                    Token::QuestionPipe => UnaryOperator::QuestionPipe,
1664                    _ => {
1665                        return Err(ParserError::ParserError(format!(
1666                            "Unexpected token in unary operator parsing: {tok:?}"
1667                        )))
1668                    }
1669                };
1670                Ok(Expr::UnaryOp {
1671                    op,
1672                    expr: Box::new(
1673                        self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1674                    ),
1675                })
1676            }
1677            Token::EscapedStringLiteral(_) if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) =>
1678            {
1679                self.prev_token();
1680                Ok(Expr::Value(self.parse_value()?))
1681            }
1682            Token::UnicodeStringLiteral(_) => {
1683                self.prev_token();
1684                Ok(Expr::Value(self.parse_value()?))
1685            }
1686            Token::Number(_, _)
1687            | Token::SingleQuotedString(_)
1688            | Token::DoubleQuotedString(_)
1689            | Token::TripleSingleQuotedString(_)
1690            | Token::TripleDoubleQuotedString(_)
1691            | Token::DollarQuotedString(_)
1692            | Token::SingleQuotedByteStringLiteral(_)
1693            | Token::DoubleQuotedByteStringLiteral(_)
1694            | Token::TripleSingleQuotedByteStringLiteral(_)
1695            | Token::TripleDoubleQuotedByteStringLiteral(_)
1696            | Token::SingleQuotedRawStringLiteral(_)
1697            | Token::DoubleQuotedRawStringLiteral(_)
1698            | Token::TripleSingleQuotedRawStringLiteral(_)
1699            | Token::TripleDoubleQuotedRawStringLiteral(_)
1700            | Token::NationalStringLiteral(_)
1701            | Token::HexStringLiteral(_) => {
1702                self.prev_token();
1703                Ok(Expr::Value(self.parse_value()?))
1704            }
1705            Token::LParen => {
1706                let expr = if let Some(expr) = self.try_parse_expr_sub_query()? {
1707                    expr
1708                } else if let Some(lambda) = self.try_parse_lambda()? {
1709                    return Ok(lambda);
1710                } else {
1711                    let exprs = self.parse_comma_separated(Parser::parse_expr)?;
1712                    match exprs.len() {
1713                        0 => unreachable!(), // parse_comma_separated ensures 1 or more
1714                        1 => Expr::Nested(Box::new(exprs.into_iter().next().unwrap())),
1715                        _ => Expr::Tuple(exprs),
1716                    }
1717                };
1718                self.expect_token(&Token::RParen)?;
1719                Ok(expr)
1720            }
1721            Token::Placeholder(_) | Token::Colon | Token::AtSign => {
1722                self.prev_token();
1723                Ok(Expr::Value(self.parse_value()?))
1724            }
1725            Token::LBrace => {
1726                self.prev_token();
1727                self.parse_lbrace_expr()
1728            }
1729            _ => self.expected_at("an expression", next_token_index),
1730        }?;
1731
1732        if !self.in_column_definition_state() && self.parse_keyword(Keyword::COLLATE) {
1733            Ok(Expr::Collate {
1734                expr: Box::new(expr),
1735                collation: self.parse_object_name(false)?,
1736            })
1737        } else {
1738            Ok(expr)
1739        }
1740    }
1741
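    /// Parses a PostgreSQL-style geometric literal once the type keyword has
    /// already been consumed; illustrative examples (assuming a dialect with
    /// geometric type support):
    ///
    /// ```sql
    /// POINT '(1, 2)'
    /// CIRCLE '((0, 0), 5)'
    /// ```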
1742    fn parse_geometric_type(&mut self, kind: GeometricTypeKind) -> Result<Expr, ParserError> {
1743        Ok(Expr::TypedString(TypedString {
1744            data_type: DataType::GeometricType(kind),
1745            value: self.parse_value()?,
1746            uses_odbc_syntax: false,
1747        }))
1748    }
1749
1750    /// Try to parse an [Expr::CompoundFieldAccess] like `a.b.c` or `a.b[1].c`.
1751    /// If all the fields are `Expr::Identifier`s, return an [Expr::CompoundIdentifier] instead.
1752    /// If only the root exists, return the root.
1753    /// Parses compound expressions which may be delimited by period
1754    /// or bracket notation.
1755    /// For example: `a.b.c`, `a.b[1]`.
1756    pub fn parse_compound_expr(
1757        &mut self,
1758        root: Expr,
1759        mut chain: Vec<AccessExpr>,
1760    ) -> Result<Expr, ParserError> {
1761        let mut ending_wildcard: Option<TokenWithSpan> = None;
1762        loop {
1763            if self.consume_token(&Token::Period) {
1764                let next_token = self.peek_token_ref();
1765                match &next_token.token {
1766                    Token::Mul => {
1767                        // Postgres explicitly allows funcnm(tablenm.*); for example,
1768                        // array_agg(tbl.*) takes this code path.
1769                        if dialect_of!(self is PostgreSqlDialect) {
1770                            ending_wildcard = Some(self.next_token());
1771                        } else {
1772                            // Put back the consumed `.` tokens before exiting.
1773                            // If this expression is being parsed in the
1774                            // context of a projection, then the `.*` could imply
1775                            // a wildcard expansion. For example:
1776                            // `SELECT STRUCT('foo').* FROM T`
1777                            self.prev_token(); // .
1778                        }
1779
1780                        break;
1781                    }
1782                    Token::SingleQuotedString(s) => {
1783                        let expr =
1784                            Expr::Identifier(Ident::with_quote_and_span('\'', next_token.span, s));
1785                        chain.push(AccessExpr::Dot(expr));
1786                        self.advance_token(); // The consumed string
1787                    }
1788                    // Fallback to parsing an arbitrary expression.
1789                    _ => match self.parse_subexpr(self.dialect.prec_value(Precedence::Period))? {
1790                        // If we get back a compound field access or identifier,
1791                        // we flatten the nested expression.
1792                        // For example if the current root is `foo`
1793                        // and we get back a compound identifier expression `bar.baz`
1794                        // The full expression should be `foo.bar.baz` (i.e.
1795                        // a root with an access chain with 2 entries) and not
1796                        // `foo.(bar.baz)` (i.e. a root with an access chain with
1797                        // 1 entry).
1798                        Expr::CompoundFieldAccess { root, access_chain } => {
1799                            chain.push(AccessExpr::Dot(*root));
1800                            chain.extend(access_chain);
1801                        }
1802                        Expr::CompoundIdentifier(parts) => chain
1803                            .extend(parts.into_iter().map(Expr::Identifier).map(AccessExpr::Dot)),
1804                        expr => {
1805                            chain.push(AccessExpr::Dot(expr));
1806                        }
1807                    },
1808                }
1809            } else if !self.dialect.supports_partiql()
1810                && self.peek_token_ref().token == Token::LBracket
1811            {
1812                self.parse_multi_dim_subscript(&mut chain)?;
1813            } else {
1814                break;
1815            }
1816        }
1817
1818        let tok_index = self.get_current_index();
1819        if let Some(wildcard_token) = ending_wildcard {
1820            if !Self::is_all_ident(&root, &chain) {
1821                return self.expected("an identifier or a '*' after '.'", self.peek_token());
1822            };
1823            Ok(Expr::QualifiedWildcard(
1824                ObjectName::from(Self::exprs_to_idents(root, chain)?),
1825                AttachedToken(wildcard_token),
1826            ))
1827        } else if self.maybe_parse_outer_join_operator() {
1828            if !Self::is_all_ident(&root, &chain) {
1829                return self.expected_at("column identifier before (+)", tok_index);
1830            };
1831            let expr = if chain.is_empty() {
1832                root
1833            } else {
1834                Expr::CompoundIdentifier(Self::exprs_to_idents(root, chain)?)
1835            };
1836            Ok(Expr::OuterJoin(expr.into()))
1837        } else {
1838            Self::build_compound_expr(root, chain)
1839        }
1840    }
1841
1842    /// Combines a root expression and access chain to form
1843    /// a compound expression, which may be an [Expr::CompoundFieldAccess]
1844    /// or another special-cased expression like [Expr::CompoundIdentifier]
1845    /// or [Expr::OuterJoin].
1846    fn build_compound_expr(
1847        root: Expr,
1848        mut access_chain: Vec<AccessExpr>,
1849    ) -> Result<Expr, ParserError> {
1850        if access_chain.is_empty() {
1851            return Ok(root);
1852        }
1853
1854        if Self::is_all_ident(&root, &access_chain) {
1855            return Ok(Expr::CompoundIdentifier(Self::exprs_to_idents(
1856                root,
1857                access_chain,
1858            )?));
1859        }
1860
1861        // Flatten qualified function calls.
1862        // For example, the expression `a.b.c.foo(1,2,3)` should
1863        // represent a function called `a.b.c.foo`, rather than
1864        // a composite expression.
1865        if matches!(root, Expr::Identifier(_))
1866            && matches!(
1867                access_chain.last(),
1868                Some(AccessExpr::Dot(Expr::Function(_)))
1869            )
1870            && access_chain
1871                .iter()
1872                .rev()
1873                .skip(1) // All except the Function
1874                .all(|access| matches!(access, AccessExpr::Dot(Expr::Identifier(_))))
1875        {
1876            let Some(AccessExpr::Dot(Expr::Function(mut func))) = access_chain.pop() else {
1877                return parser_err!("expected function expression", root.span().start);
1878            };
1879
1880            let compound_func_name = [root]
1881                .into_iter()
1882                .chain(access_chain.into_iter().flat_map(|access| match access {
1883                    AccessExpr::Dot(expr) => Some(expr),
1884                    _ => None,
1885                }))
1886                .flat_map(|expr| match expr {
1887                    Expr::Identifier(ident) => Some(ident),
1888                    _ => None,
1889                })
1890                .map(ObjectNamePart::Identifier)
1891                .chain(func.name.0)
1892                .collect::<Vec<_>>();
1893            func.name = ObjectName(compound_func_name);
1894
1895            return Ok(Expr::Function(func));
1896        }
1897
1898        // Flatten qualified outer join expressions.
1899        // For example, the expression `T.foo(+)` should
1900        // represent an outer join on the column name `T.foo`
1901        // rather than a composite expression.
1902        if access_chain.len() == 1
1903            && matches!(
1904                access_chain.last(),
1905                Some(AccessExpr::Dot(Expr::OuterJoin(_)))
1906            )
1907        {
1908            let Some(AccessExpr::Dot(Expr::OuterJoin(inner_expr))) = access_chain.pop() else {
1909                return parser_err!("expected (+) expression", root.span().start);
1910            };
1911
1912            if !Self::is_all_ident(&root, &[]) {
1913                return parser_err!("column identifier before (+)", root.span().start);
1914            };
1915
1916            let token_start = root.span().start;
1917            let mut idents = Self::exprs_to_idents(root, vec![])?;
1918            match *inner_expr {
1919                Expr::CompoundIdentifier(suffix) => idents.extend(suffix),
1920                Expr::Identifier(suffix) => idents.push(suffix),
1921                _ => {
1922                    return parser_err!("column identifier before (+)", token_start);
1923                }
1924            }
1925
1926            return Ok(Expr::OuterJoin(Expr::CompoundIdentifier(idents).into()));
1927        }
1928
1929        Ok(Expr::CompoundFieldAccess {
1930            root: Box::new(root),
1931            access_chain,
1932        })
1933    }
1934
1935    fn keyword_to_modifier(k: Keyword) -> Option<ContextModifier> {
1936        match k {
1937            Keyword::LOCAL => Some(ContextModifier::Local),
1938            Keyword::GLOBAL => Some(ContextModifier::Global),
1939            Keyword::SESSION => Some(ContextModifier::Session),
1940            _ => None,
1941        }
1942    }
1943
1944    /// Check if the root is an identifier and all fields are identifiers.
1945    fn is_all_ident(root: &Expr, fields: &[AccessExpr]) -> bool {
1946        if !matches!(root, Expr::Identifier(_)) {
1947            return false;
1948        }
1949        fields
1950            .iter()
1951            .all(|x| matches!(x, AccessExpr::Dot(Expr::Identifier(_))))
1952    }
1953
1954    /// Convert a root and a list of fields to a list of identifiers.
1955    fn exprs_to_idents(root: Expr, fields: Vec<AccessExpr>) -> Result<Vec<Ident>, ParserError> {
1956        let mut idents = vec![];
1957        if let Expr::Identifier(root) = root {
1958            idents.push(root);
1959            for x in fields {
1960                if let AccessExpr::Dot(Expr::Identifier(ident)) = x {
1961                    idents.push(ident);
1962                } else {
1963                    return parser_err!(
1964                        format!("Expected identifier, found: {}", x),
1965                        x.span().start
1966                    );
1967                }
1968            }
1969            Ok(idents)
1970        } else {
1971            parser_err!(
1972                format!("Expected identifier, found: {}", root),
1973                root.span().start
1974            )
1975        }
1976    }
1977
1978    /// Returns true if the next tokens indicate the outer join operator `(+)`.
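    ///
    /// For example, the `(+)` in the Oracle-style join condition below
    /// (illustrative, for dialects that support the operator):
    ///
    /// ```sql
    /// SELECT * FROM t1, t2 WHERE t1.id = t2.id (+)
    /// ```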
1979    fn peek_outer_join_operator(&mut self) -> bool {
1980        if !self.dialect.supports_outer_join_operator() {
1981            return false;
1982        }
1983
1984        let [maybe_lparen, maybe_plus, maybe_rparen] = self.peek_tokens_ref();
1985        Token::LParen == maybe_lparen.token
1986            && Token::Plus == maybe_plus.token
1987            && Token::RParen == maybe_rparen.token
1988    }
1989
1990    /// If the next tokens indicate the outer join operator `(+)`, consume
1991    /// the tokens and return true.
1992    fn maybe_parse_outer_join_operator(&mut self) -> bool {
1993        self.dialect.supports_outer_join_operator()
1994            && self.consume_tokens(&[Token::LParen, Token::Plus, Token::RParen])
1995    }
1996
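    /// Parses a parenthesized, comma-separated list of utility options, e.g. the
    /// option list accepted by PostgreSQL's `EXPLAIN`; an illustrative example
    /// (table and option names are placeholders):
    ///
    /// ```sql
    /// EXPLAIN (ANALYZE, VERBOSE, FORMAT JSON) SELECT * FROM t
    /// ```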
1997    pub fn parse_utility_options(&mut self) -> Result<Vec<UtilityOption>, ParserError> {
1998        self.expect_token(&Token::LParen)?;
1999        let options = self.parse_comma_separated(Self::parse_utility_option)?;
2000        self.expect_token(&Token::RParen)?;
2001
2002        Ok(options)
2003    }
2004
2005    fn parse_utility_option(&mut self) -> Result<UtilityOption, ParserError> {
2006        let name = self.parse_identifier()?;
2007
2008        let next_token = self.peek_token();
2009        if next_token == Token::Comma || next_token == Token::RParen {
2010            return Ok(UtilityOption { name, arg: None });
2011        }
2012        let arg = self.parse_expr()?;
2013
2014        Ok(UtilityOption {
2015            name,
2016            arg: Some(arg),
2017        })
2018    }
2019
2020    fn try_parse_expr_sub_query(&mut self) -> Result<Option<Expr>, ParserError> {
2021        if !self.peek_sub_query() {
2022            return Ok(None);
2023        }
2024
2025        Ok(Some(Expr::Subquery(self.parse_query()?)))
2026    }
2027
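    /// Tries to parse a parenthesized lambda parameter list followed by `->`,
    /// assuming the opening `(` has already been consumed; an illustrative
    /// example for dialects with lambda support (e.g. Databricks):
    ///
    /// ```sql
    /// map_filter(m, (k, v) -> v > 0)
    /// ```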
2028    fn try_parse_lambda(&mut self) -> Result<Option<Expr>, ParserError> {
2029        if !self.dialect.supports_lambda_functions() {
2030            return Ok(None);
2031        }
2032        self.maybe_parse(|p| {
2033            let params = p.parse_comma_separated(|p| p.parse_identifier())?;
2034            p.expect_token(&Token::RParen)?;
2035            p.expect_token(&Token::Arrow)?;
2036            let expr = p.parse_expr()?;
2037            Ok(Expr::Lambda(LambdaFunction {
2038                params: OneOrManyWithParens::Many(params),
2039                body: Box::new(expr),
2040            }))
2041        })
2042    }
2043
2044    /// Tries to parse the body of an [ODBC escaping sequence],
2045    /// i.e. without the enclosing braces. Currently implemented:
2046    /// - Scalar function calls
2047    /// - Date, time, and timestamp literals
2048    ///
2049    /// [ODBC escaping sequence]: https://learn.microsoft.com/en-us/sql/odbc/reference/develop-app/escape-sequences-in-odbc?view=sql-server-2017
2050    fn maybe_parse_odbc_body(&mut self) -> Result<Option<Expr>, ParserError> {
2051        // Attempt 1: Try to parse it as a function.
2052        if let Some(expr) = self.maybe_parse_odbc_fn_body()? {
2053            return Ok(Some(expr));
2054        }
2055        // Attempt 2: Try to parse it as a Date, Time or Timestamp Literal
2056        self.maybe_parse_odbc_body_datetime()
2057    }
2058
2059    /// Tries to parse the body of an [ODBC Date, Time, and Timestamp Literals] call.
2060    ///
2061    /// ```sql
2062    /// {d '2025-07-17'}
2063    /// {t '14:12:01'}
2064    /// {ts '2025-07-17 14:12:01'}
2065    /// ```
2066    ///
2067    /// [ODBC Date, Time, and Timestamp Literals]:
2068    /// https://learn.microsoft.com/en-us/sql/odbc/reference/develop-app/date-time-and-timestamp-literals?view=sql-server-2017
2069    fn maybe_parse_odbc_body_datetime(&mut self) -> Result<Option<Expr>, ParserError> {
2070        self.maybe_parse(|p| {
2071            let token = p.next_token().clone();
2072            let word_string = token.token.to_string();
2073            let data_type = match word_string.as_str() {
2074                "t" => DataType::Time(None, TimezoneInfo::None),
2075                "d" => DataType::Date,
2076                "ts" => DataType::Timestamp(None, TimezoneInfo::None),
2077                _ => return p.expected("ODBC datetime keyword (t, d, or ts)", token),
2078            };
2079            let value = p.parse_value()?;
2080            Ok(Expr::TypedString(TypedString {
2081                data_type,
2082                value,
2083                uses_odbc_syntax: true,
2084            }))
2085        })
2086    }
2087
2088    /// Tries to parse the body of an [ODBC function] call.
2089    /// i.e. without the enclosing braces
2090    ///
2091    /// ```sql
2092    /// fn myfunc(1,2,3)
2093    /// ```
2094    ///
2095    /// [ODBC function]: https://learn.microsoft.com/en-us/sql/odbc/reference/develop-app/scalar-function-calls?view=sql-server-2017
2096    fn maybe_parse_odbc_fn_body(&mut self) -> Result<Option<Expr>, ParserError> {
2097        self.maybe_parse(|p| {
2098            p.expect_keyword(Keyword::FN)?;
2099            let fn_name = p.parse_object_name(false)?;
2100            let mut fn_call = p.parse_function_call(fn_name)?;
2101            fn_call.uses_odbc_syntax = true;
2102            Ok(Expr::Function(fn_call))
2103        })
2104    }
2105
2106    pub fn parse_function(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2107        self.parse_function_call(name).map(Expr::Function)
2108    }
2109
2110    fn parse_function_call(&mut self, name: ObjectName) -> Result<Function, ParserError> {
2111        self.expect_token(&Token::LParen)?;
2112
2113        // Snowflake permits a subquery to be passed as an argument without
2114        // an enclosing set of parens if it's the only argument.
2115        if dialect_of!(self is SnowflakeDialect) && self.peek_sub_query() {
2116            let subquery = self.parse_query()?;
2117            self.expect_token(&Token::RParen)?;
2118            return Ok(Function {
2119                name,
2120                uses_odbc_syntax: false,
2121                parameters: FunctionArguments::None,
2122                args: FunctionArguments::Subquery(subquery),
2123                filter: None,
2124                null_treatment: None,
2125                over: None,
2126                within_group: vec![],
2127            });
2128        }
2129
2130        let mut args = self.parse_function_argument_list()?;
2131        let mut parameters = FunctionArguments::None;
2132        // ClickHouse aggregations support parametric functions like `HISTOGRAM(0.5, 0.6)(x, y)`,
2133        // in which `(0.5, 0.6)` are parameters to the function.
2134        if dialect_of!(self is ClickHouseDialect | GenericDialect)
2135            && self.consume_token(&Token::LParen)
2136        {
2137            parameters = FunctionArguments::List(args);
2138            args = self.parse_function_argument_list()?;
2139        }
2140
2141        let within_group = if self.parse_keywords(&[Keyword::WITHIN, Keyword::GROUP]) {
2142            self.expect_token(&Token::LParen)?;
2143            self.expect_keywords(&[Keyword::ORDER, Keyword::BY])?;
2144            let order_by = self.parse_comma_separated(Parser::parse_order_by_expr)?;
2145            self.expect_token(&Token::RParen)?;
2146            order_by
2147        } else {
2148            vec![]
2149        };
2150
2151        let filter = if self.dialect.supports_filter_during_aggregation()
2152            && self.parse_keyword(Keyword::FILTER)
2153            && self.consume_token(&Token::LParen)
2154            && self.parse_keyword(Keyword::WHERE)
2155        {
2156            let filter = Some(Box::new(self.parse_expr()?));
2157            self.expect_token(&Token::RParen)?;
2158            filter
2159        } else {
2160            None
2161        };
2162
2163        // Syntax for null treatment shows up either in the args list
2164        // or after the function call, but not both.
2165        let null_treatment = if args
2166            .clauses
2167            .iter()
2168            .all(|clause| !matches!(clause, FunctionArgumentClause::IgnoreOrRespectNulls(_)))
2169        {
2170            self.parse_null_treatment()?
2171        } else {
2172            None
2173        };
2174
2175        let over = if self.parse_keyword(Keyword::OVER) {
2176            if self.consume_token(&Token::LParen) {
2177                let window_spec = self.parse_window_spec()?;
2178                Some(WindowType::WindowSpec(window_spec))
2179            } else {
2180                Some(WindowType::NamedWindow(self.parse_identifier()?))
2181            }
2182        } else {
2183            None
2184        };
2185
2186        Ok(Function {
2187            name,
2188            uses_odbc_syntax: false,
2189            parameters,
2190            args: FunctionArguments::List(args),
2191            null_treatment,
2192            filter,
2193            over,
2194            within_group,
2195        })
2196    }
2197
2198    /// Optionally parses a null treatment clause.
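    ///
    /// An illustrative example of the clause in context (dialect support
    /// varies, e.g. Snowflake):
    ///
    /// ```sql
    /// FIRST_VALUE(x) IGNORE NULLS OVER (ORDER BY y)
    /// ```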
2199    fn parse_null_treatment(&mut self) -> Result<Option<NullTreatment>, ParserError> {
2200        match self.parse_one_of_keywords(&[Keyword::RESPECT, Keyword::IGNORE]) {
2201            Some(keyword) => {
2202                self.expect_keyword_is(Keyword::NULLS)?;
2203
2204                Ok(match keyword {
2205                    Keyword::RESPECT => Some(NullTreatment::RespectNulls),
2206                    Keyword::IGNORE => Some(NullTreatment::IgnoreNulls),
2207                    _ => None,
2208                })
2209            }
2210            None => Ok(None),
2211        }
2212    }
2213
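    /// Parses the `CURRENT_*`/`LOCALTIME*` family of functions, whose
    /// parentheses are optional; illustrative examples:
    ///
    /// ```sql
    /// CURRENT_TIMESTAMP
    /// CURRENT_TIMESTAMP(3)
    /// ```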
2214    pub fn parse_time_functions(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2215        let args = if self.consume_token(&Token::LParen) {
2216            FunctionArguments::List(self.parse_function_argument_list()?)
2217        } else {
2218            FunctionArguments::None
2219        };
2220        Ok(Expr::Function(Function {
2221            name,
2222            uses_odbc_syntax: false,
2223            parameters: FunctionArguments::None,
2224            args,
2225            filter: None,
2226            over: None,
2227            null_treatment: None,
2228            within_group: vec![],
2229        }))
2230    }
2231
2232    pub fn parse_window_frame_units(&mut self) -> Result<WindowFrameUnits, ParserError> {
2233        let next_token = self.next_token();
2234        match &next_token.token {
2235            Token::Word(w) => match w.keyword {
2236                Keyword::ROWS => Ok(WindowFrameUnits::Rows),
2237                Keyword::RANGE => Ok(WindowFrameUnits::Range),
2238                Keyword::GROUPS => Ok(WindowFrameUnits::Groups),
2239                _ => self.expected("ROWS, RANGE, GROUPS", next_token)?,
2240            },
2241            _ => self.expected("ROWS, RANGE, GROUPS", next_token),
2242        }
2243    }
2244
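    /// Parses a window frame clause; illustrative examples:
    ///
    /// ```sql
    /// ROWS BETWEEN 1 PRECEDING AND CURRENT ROW
    /// RANGE UNBOUNDED PRECEDING
    /// ```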
2245    pub fn parse_window_frame(&mut self) -> Result<WindowFrame, ParserError> {
2246        let units = self.parse_window_frame_units()?;
2247        let (start_bound, end_bound) = if self.parse_keyword(Keyword::BETWEEN) {
2248            let start_bound = self.parse_window_frame_bound()?;
2249            self.expect_keyword_is(Keyword::AND)?;
2250            let end_bound = Some(self.parse_window_frame_bound()?);
2251            (start_bound, end_bound)
2252        } else {
2253            (self.parse_window_frame_bound()?, None)
2254        };
2255        Ok(WindowFrame {
2256            units,
2257            start_bound,
2258            end_bound,
2259        })
2260    }
2261
2262    /// Parse `CURRENT ROW` or `{ <positive number> | UNBOUNDED } { PRECEDING | FOLLOWING }`
2263    pub fn parse_window_frame_bound(&mut self) -> Result<WindowFrameBound, ParserError> {
2264        if self.parse_keywords(&[Keyword::CURRENT, Keyword::ROW]) {
2265            Ok(WindowFrameBound::CurrentRow)
2266        } else {
2267            let rows = if self.parse_keyword(Keyword::UNBOUNDED) {
2268                None
2269            } else {
2270                Some(Box::new(match self.peek_token().token {
2271                    Token::SingleQuotedString(_) => self.parse_interval()?,
2272                    _ => self.parse_expr()?,
2273                }))
2274            };
2275            if self.parse_keyword(Keyword::PRECEDING) {
2276                Ok(WindowFrameBound::Preceding(rows))
2277            } else if self.parse_keyword(Keyword::FOLLOWING) {
2278                Ok(WindowFrameBound::Following(rows))
2279            } else {
2280                self.expected("PRECEDING or FOLLOWING", self.peek_token())
2281            }
2282        }
2283    }
2284
2285    /// Parse a GROUP BY expression, which can be GROUPING SETS, ROLLUP, CUBE, or a simple expression.
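    ///
    /// Illustrative examples (column names are placeholders):
    ///
    /// ```sql
    /// GROUP BY GROUPING SETS ((a), (b), ())
    /// GROUP BY ROLLUP (a, b)
    /// GROUP BY CUBE (a, b)
    /// GROUP BY (), name
    /// ```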
2286    fn parse_group_by_expr(&mut self) -> Result<Expr, ParserError> {
2287        if self.dialect.supports_group_by_expr() {
2288            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
2289                self.expect_token(&Token::LParen)?;
2290                let result = self.parse_comma_separated(|p| p.parse_tuple(false, true))?;
2291                self.expect_token(&Token::RParen)?;
2292                Ok(Expr::GroupingSets(result))
2293            } else if self.parse_keyword(Keyword::CUBE) {
2294                self.expect_token(&Token::LParen)?;
2295                let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2296                self.expect_token(&Token::RParen)?;
2297                Ok(Expr::Cube(result))
2298            } else if self.parse_keyword(Keyword::ROLLUP) {
2299                self.expect_token(&Token::LParen)?;
2300                let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2301                self.expect_token(&Token::RParen)?;
2302                Ok(Expr::Rollup(result))
2303            } else if self.consume_tokens(&[Token::LParen, Token::RParen]) {
2304                // PostgreSQL allows an empty tuple as a GROUP BY expression,
2305                // e.g. `GROUP BY (), name`. Please refer to GROUP BY Clause section in
2306                // [PostgreSQL](https://www.postgresql.org/docs/16/sql-select.html)
2307                Ok(Expr::Tuple(vec![]))
2308            } else {
2309                self.parse_expr()
2310            }
2311        } else {
2312            // TODO parse rollup for other dialects
2313            self.parse_expr()
2314        }
2315    }
2316
2317    /// Parse a tuple with `(` and `)`.
2318    /// If `lift_singleton` is true, a single expression without parentheses is accepted and lifted into a tuple of length 1; otherwise parentheses are required.
2319    /// If `allow_empty` is true, then an empty tuple is allowed.
2320    fn parse_tuple(
2321        &mut self,
2322        lift_singleton: bool,
2323        allow_empty: bool,
2324    ) -> Result<Vec<Expr>, ParserError> {
2325        if lift_singleton {
2326            if self.consume_token(&Token::LParen) {
2327                let result = if allow_empty && self.consume_token(&Token::RParen) {
2328                    vec![]
2329                } else {
2330                    let result = self.parse_comma_separated(Parser::parse_expr)?;
2331                    self.expect_token(&Token::RParen)?;
2332                    result
2333                };
2334                Ok(result)
2335            } else {
2336                Ok(vec![self.parse_expr()?])
2337            }
2338        } else {
2339            self.expect_token(&Token::LParen)?;
2340            let result = if allow_empty && self.consume_token(&Token::RParen) {
2341                vec![]
2342            } else {
2343                let result = self.parse_comma_separated(Parser::parse_expr)?;
2344                self.expect_token(&Token::RParen)?;
2345                result
2346            };
2347            Ok(result)
2348        }
2349    }
2350
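    /// Parses a `CASE` expression, with or without an operand; illustrative
    /// examples:
    ///
    /// ```sql
    /// CASE WHEN a > 0 THEN 'positive' ELSE 'non-positive' END
    /// CASE x WHEN 1 THEN 'one' WHEN 2 THEN 'two' END
    /// ```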
2351    pub fn parse_case_expr(&mut self) -> Result<Expr, ParserError> {
2352        let case_token = AttachedToken(self.get_current_token().clone());
2353        let mut operand = None;
2354        if !self.parse_keyword(Keyword::WHEN) {
2355            operand = Some(Box::new(self.parse_expr()?));
2356            self.expect_keyword_is(Keyword::WHEN)?;
2357        }
2358        let mut conditions = vec![];
2359        loop {
2360            let condition = self.parse_expr()?;
2361            self.expect_keyword_is(Keyword::THEN)?;
2362            let result = self.parse_expr()?;
2363            conditions.push(CaseWhen { condition, result });
2364            if !self.parse_keyword(Keyword::WHEN) {
2365                break;
2366            }
2367        }
2368        let else_result = if self.parse_keyword(Keyword::ELSE) {
2369            Some(Box::new(self.parse_expr()?))
2370        } else {
2371            None
2372        };
2373        let end_token = AttachedToken(self.expect_keyword(Keyword::END)?);
2374        Ok(Expr::Case {
2375            case_token,
2376            end_token,
2377            operand,
2378            conditions,
2379            else_result,
2380        })
2381    }
2382
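    /// Parses an optional `FORMAT` clause inside `CAST`, e.g. as supported by
    /// BigQuery; illustrative examples:
    ///
    /// ```sql
    /// CAST('2023-01-02' AS DATE FORMAT 'YYYY-MM-DD')
    /// CAST(ts AS STRING FORMAT 'YYYY-MM-DD HH24:MI:SS' AT TIME ZONE 'UTC')
    /// ```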
2383    pub fn parse_optional_cast_format(&mut self) -> Result<Option<CastFormat>, ParserError> {
2384        if self.parse_keyword(Keyword::FORMAT) {
2385            let value = self.parse_value()?.value;
2386            match self.parse_optional_time_zone()? {
2387                Some(tz) => Ok(Some(CastFormat::ValueAtTimeZone(value, tz))),
2388                None => Ok(Some(CastFormat::Value(value))),
2389            }
2390        } else {
2391            Ok(None)
2392        }
2393    }
2394
2395    pub fn parse_optional_time_zone(&mut self) -> Result<Option<Value>, ParserError> {
2396        if self.parse_keywords(&[Keyword::AT, Keyword::TIME, Keyword::ZONE]) {
2397            self.parse_value().map(|v| Some(v.value))
2398        } else {
2399            Ok(None)
2400        }
2401    }
2402
2403    /// Parse an MSSQL-style `CONVERT` function, in which the target type comes before the value.
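    ///
    /// Illustrative example (type first, with an optional style argument):
    ///
    /// ```sql
    /// CONVERT(VARCHAR(30), GETDATE(), 120)
    /// ```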
2404    fn parse_mssql_convert(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2405        self.expect_token(&Token::LParen)?;
2406        let data_type = self.parse_data_type()?;
2407        self.expect_token(&Token::Comma)?;
2408        let expr = self.parse_expr()?;
2409        let styles = if self.consume_token(&Token::Comma) {
2410            self.parse_comma_separated(Parser::parse_expr)?
2411        } else {
2412            Default::default()
2413        };
2414        self.expect_token(&Token::RParen)?;
2415        Ok(Expr::Convert {
2416            is_try,
2417            expr: Box::new(expr),
2418            data_type: Some(data_type),
2419            charset: None,
2420            target_before_value: true,
2421            styles,
2422        })
2423    }
2424
2425    /// Parse a SQL CONVERT function:
2426    ///  - `CONVERT('héhé' USING utf8mb4)` (MySQL)
2427    ///  - `CONVERT('héhé', CHAR CHARACTER SET utf8mb4)` (MySQL)
2428    ///  - `CONVERT(DECIMAL(10, 5), 42)` (MSSQL) - the type comes first
2429    pub fn parse_convert_expr(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2430        if self.dialect.convert_type_before_value() {
2431            return self.parse_mssql_convert(is_try);
2432        }
2433        self.expect_token(&Token::LParen)?;
2434        let expr = self.parse_expr()?;
2435        if self.parse_keyword(Keyword::USING) {
2436            let charset = self.parse_object_name(false)?;
2437            self.expect_token(&Token::RParen)?;
2438            return Ok(Expr::Convert {
2439                is_try,
2440                expr: Box::new(expr),
2441                data_type: None,
2442                charset: Some(charset),
2443                target_before_value: false,
2444                styles: vec![],
2445            });
2446        }
2447        self.expect_token(&Token::Comma)?;
2448        let data_type = self.parse_data_type()?;
2449        let charset = if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
2450            Some(self.parse_object_name(false)?)
2451        } else {
2452            None
2453        };
2454        self.expect_token(&Token::RParen)?;
2455        Ok(Expr::Convert {
2456            is_try,
2457            expr: Box::new(expr),
2458            data_type: Some(data_type),
2459            charset,
2460            target_before_value: false,
2461            styles: vec![],
2462        })
2463    }
2464
2465    /// Parse a SQL CAST function e.g. `CAST(expr AS FLOAT)`
2466    pub fn parse_cast_expr(&mut self, kind: CastKind) -> Result<Expr, ParserError> {
2467        self.expect_token(&Token::LParen)?;
2468        let expr = self.parse_expr()?;
2469        self.expect_keyword_is(Keyword::AS)?;
2470        let data_type = self.parse_data_type()?;
2471        let format = self.parse_optional_cast_format()?;
2472        self.expect_token(&Token::RParen)?;
2473        Ok(Expr::Cast {
2474            kind,
2475            expr: Box::new(expr),
2476            data_type,
2477            format,
2478        })
2479    }
2480
2481    /// Parse a SQL EXISTS expression e.g. `WHERE EXISTS(SELECT ...)`.
2482    pub fn parse_exists_expr(&mut self, negated: bool) -> Result<Expr, ParserError> {
2483        self.expect_token(&Token::LParen)?;
2484        let exists_node = Expr::Exists {
2485            negated,
2486            subquery: self.parse_query()?,
2487        };
2488        self.expect_token(&Token::RParen)?;
2489        Ok(exists_node)
2490    }
2491
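    /// Parses an `EXTRACT` expression; illustrative examples (the comma form is
    /// accepted by e.g. Snowflake, and `order_date` is a placeholder column):
    ///
    /// ```sql
    /// EXTRACT(YEAR FROM order_date)
    /// EXTRACT(YEAR, order_date)
    /// ```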
2492    pub fn parse_extract_expr(&mut self) -> Result<Expr, ParserError> {
2493        self.expect_token(&Token::LParen)?;
2494        let field = self.parse_date_time_field()?;
2495
2496        let syntax = if self.parse_keyword(Keyword::FROM) {
2497            ExtractSyntax::From
2498        } else if self.consume_token(&Token::Comma)
2499            && dialect_of!(self is SnowflakeDialect | GenericDialect)
2500        {
2501            ExtractSyntax::Comma
2502        } else {
2503            return Err(ParserError::ParserError(
2504                "Expected 'FROM' or ','".to_string(),
2505            ));
2506        };
2507
2508        let expr = self.parse_expr()?;
2509        self.expect_token(&Token::RParen)?;
2510        Ok(Expr::Extract {
2511            field,
2512            expr: Box::new(expr),
2513            syntax,
2514        })
2515    }
2516
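    /// Parses a `CEIL` or `FLOOR` expression; illustrative examples:
    ///
    /// ```sql
    /// CEIL(ts TO DAY)
    /// FLOOR(1.5)
    /// FLOOR(x, 2)
    /// ```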
2517    pub fn parse_ceil_floor_expr(&mut self, is_ceil: bool) -> Result<Expr, ParserError> {
2518        self.expect_token(&Token::LParen)?;
2519        let expr = self.parse_expr()?;
2520        // Parse `CEIL/FLOOR(expr)`
2521        let field = if self.parse_keyword(Keyword::TO) {
2522            // Parse `CEIL/FLOOR(expr TO DateTimeField)`
2523            CeilFloorKind::DateTimeField(self.parse_date_time_field()?)
2524        } else if self.consume_token(&Token::Comma) {
2525            // Parse `CEIL/FLOOR(expr, scale)`
2526            match self.parse_value()?.value {
2527                Value::Number(n, s) => CeilFloorKind::Scale(Value::Number(n, s)),
2528                _ => {
2529                    return Err(ParserError::ParserError(
2530                        "Scale field can only be of number type".to_string(),
2531                    ))
2532                }
2533            }
2534        } else {
2535            CeilFloorKind::DateTimeField(DateTimeField::NoDateTime)
2536        };
2537        self.expect_token(&Token::RParen)?;
2538        if is_ceil {
2539            Ok(Expr::Ceil {
2540                expr: Box::new(expr),
2541                field,
2542            })
2543        } else {
2544            Ok(Expr::Floor {
2545                expr: Box::new(expr),
2546                field,
2547            })
2548        }
2549    }
2550
2551    pub fn parse_position_expr(&mut self, ident: Ident) -> Result<Expr, ParserError> {
2552        let between_prec = self.dialect.prec_value(Precedence::Between);
2553        let position_expr = self.maybe_parse(|p| {
2554            // Parse e.g. `POSITION('@' IN field)`
2555            p.expect_token(&Token::LParen)?;
2556
2557            // Parse the subexpr till the IN keyword
2558            let expr = p.parse_subexpr(between_prec)?;
2559            p.expect_keyword_is(Keyword::IN)?;
2560            let from = p.parse_expr()?;
2561            p.expect_token(&Token::RParen)?;
2562            Ok(Expr::Position {
2563                expr: Box::new(expr),
2564                r#in: Box::new(from),
2565            })
2566        })?;
2567        match position_expr {
2568            Some(expr) => Ok(expr),
2569            // Snowflake supports `position` as an ordinary function call
2570            // without the special `IN` syntax.
2571            None => self.parse_function(ObjectName::from(vec![ident])),
2572        }
2573    }
2574
2575    // { SUBSTRING | SUBSTR } (<EXPR> [FROM 1] [FOR 3])
2576    pub fn parse_substring(&mut self) -> Result<Expr, ParserError> {
2577        let shorthand = match self.expect_one_of_keywords(&[Keyword::SUBSTR, Keyword::SUBSTRING])? {
2578            Keyword::SUBSTR => true,
2579            Keyword::SUBSTRING => false,
2580            _ => {
2581                self.prev_token();
2582                return self.expected("SUBSTR or SUBSTRING", self.peek_token());
2583            }
2584        };
2585        self.expect_token(&Token::LParen)?;
2586        let expr = self.parse_expr()?;
2587        let mut from_expr = None;
2588        let special = self.consume_token(&Token::Comma);
2589        if special || self.parse_keyword(Keyword::FROM) {
2590            from_expr = Some(self.parse_expr()?);
2591        }
2592
2593        let mut to_expr = None;
2594        if self.parse_keyword(Keyword::FOR) || self.consume_token(&Token::Comma) {
2595            to_expr = Some(self.parse_expr()?);
2596        }
2597        self.expect_token(&Token::RParen)?;
2598
2599        Ok(Expr::Substring {
2600            expr: Box::new(expr),
2601            substring_from: from_expr.map(Box::new),
2602            substring_for: to_expr.map(Box::new),
2603            special,
2604            shorthand,
2605        })
2606    }
2607
2608    pub fn parse_overlay_expr(&mut self) -> Result<Expr, ParserError> {
2609        // Parse `OVERLAY(<expr> PLACING <expr> FROM <expr> [FOR <expr>])`
2610        self.expect_token(&Token::LParen)?;
2611        let expr = self.parse_expr()?;
2612        self.expect_keyword_is(Keyword::PLACING)?;
2613        let what_expr = self.parse_expr()?;
2614        self.expect_keyword_is(Keyword::FROM)?;
2615        let from_expr = self.parse_expr()?;
2616        let mut for_expr = None;
2617        if self.parse_keyword(Keyword::FOR) {
2618            for_expr = Some(self.parse_expr()?);
2619        }
2620        self.expect_token(&Token::RParen)?;
2621
2622        Ok(Expr::Overlay {
2623            expr: Box::new(expr),
2624            overlay_what: Box::new(what_expr),
2625            overlay_from: Box::new(from_expr),
2626            overlay_for: for_expr.map(Box::new),
2627        })
2628    }
2629
2630    /// ```sql
2631    /// TRIM ([WHERE] ['text' FROM] 'text')
2632    /// TRIM ('text')
2633    /// TRIM(<expr> [, characters]) -- only Snowflake, BigQuery, or DuckDB
2634    /// ```
2635    pub fn parse_trim_expr(&mut self) -> Result<Expr, ParserError> {
2636        self.expect_token(&Token::LParen)?;
2637        let mut trim_where = None;
2638        if let Token::Word(word) = self.peek_token().token {
2639            if [Keyword::BOTH, Keyword::LEADING, Keyword::TRAILING].contains(&word.keyword) {
2640                trim_where = Some(self.parse_trim_where()?);
2641            }
2642        }
2643        let expr = self.parse_expr()?;
2644        if self.parse_keyword(Keyword::FROM) {
2645            let trim_what = Box::new(expr);
2646            let expr = self.parse_expr()?;
2647            self.expect_token(&Token::RParen)?;
2648            Ok(Expr::Trim {
2649                expr: Box::new(expr),
2650                trim_where,
2651                trim_what: Some(trim_what),
2652                trim_characters: None,
2653            })
2654        } else if self.consume_token(&Token::Comma)
2655            && dialect_of!(self is DuckDbDialect | SnowflakeDialect | BigQueryDialect | GenericDialect)
2656        {
2657            let characters = self.parse_comma_separated(Parser::parse_expr)?;
2658            self.expect_token(&Token::RParen)?;
2659            Ok(Expr::Trim {
2660                expr: Box::new(expr),
2661                trim_where: None,
2662                trim_what: None,
2663                trim_characters: Some(characters),
2664            })
2665        } else {
2666            self.expect_token(&Token::RParen)?;
2667            Ok(Expr::Trim {
2668                expr: Box::new(expr),
2669                trim_where,
2670                trim_what: None,
2671                trim_characters: None,
2672            })
2673        }
2674    }
2675
2676    pub fn parse_trim_where(&mut self) -> Result<TrimWhereField, ParserError> {
2677        let next_token = self.next_token();
2678        match &next_token.token {
2679            Token::Word(w) => match w.keyword {
2680                Keyword::BOTH => Ok(TrimWhereField::Both),
2681                Keyword::LEADING => Ok(TrimWhereField::Leading),
2682                Keyword::TRAILING => Ok(TrimWhereField::Trailing),
2683                _ => self.expected("trim_where field", next_token)?,
2684            },
2685            _ => self.expected("trim_where field", next_token),
2686        }
2687    }
2688
2689    /// Parses an array expression `[ex1, ex2, ..]`
2690    /// If `named` is `true`, the array came from an expression like `ARRAY[ex1, ex2]`.
2691    pub fn parse_array_expr(&mut self, named: bool) -> Result<Expr, ParserError> {
2692        let exprs = self.parse_comma_separated0(Parser::parse_expr, Token::RBracket)?;
2693        self.expect_token(&Token::RBracket)?;
2694        Ok(Expr::Array(Array { elem: exprs, named }))
2695    }
2696
2697    pub fn parse_listagg_on_overflow(&mut self) -> Result<Option<ListAggOnOverflow>, ParserError> {
2698        if self.parse_keywords(&[Keyword::ON, Keyword::OVERFLOW]) {
2699            if self.parse_keyword(Keyword::ERROR) {
2700                Ok(Some(ListAggOnOverflow::Error))
2701            } else {
2702                self.expect_keyword_is(Keyword::TRUNCATE)?;
2703                let filler = match self.peek_token().token {
2704                    Token::Word(w)
2705                        if w.keyword == Keyword::WITH || w.keyword == Keyword::WITHOUT =>
2706                    {
2707                        None
2708                    }
2709                    Token::SingleQuotedString(_)
2710                    | Token::EscapedStringLiteral(_)
2711                    | Token::UnicodeStringLiteral(_)
2712                    | Token::NationalStringLiteral(_)
2713                    | Token::HexStringLiteral(_) => Some(Box::new(self.parse_expr()?)),
2714                    _ => self.expected(
2715                        "either filler, WITH, or WITHOUT in LISTAGG",
2716                        self.peek_token(),
2717                    )?,
2718                };
2719                let with_count = self.parse_keyword(Keyword::WITH);
2720                if !with_count && !self.parse_keyword(Keyword::WITHOUT) {
2721                    self.expected("either WITH or WITHOUT in LISTAGG", self.peek_token())?;
2722                }
2723                self.expect_keyword_is(Keyword::COUNT)?;
2724                Ok(Some(ListAggOnOverflow::Truncate { filler, with_count }))
2725            }
2726        } else {
2727            Ok(None)
2728        }
2729    }
2730
2731    // This function parses date/time fields for the EXTRACT function-like
2732    // operator, interval qualifiers, and the ceil/floor operations.
2733    // EXTRACT supports a wider set of date/time fields than interval qualifiers,
2734    // so this function may need to be split in two.
2735    pub fn parse_date_time_field(&mut self) -> Result<DateTimeField, ParserError> {
2736        let next_token = self.next_token();
2737        match &next_token.token {
2738            Token::Word(w) => match w.keyword {
2739                Keyword::YEAR => Ok(DateTimeField::Year),
2740                Keyword::YEARS => Ok(DateTimeField::Years),
2741                Keyword::MONTH => Ok(DateTimeField::Month),
2742                Keyword::MONTHS => Ok(DateTimeField::Months),
2743                Keyword::WEEK => {
2744                    let week_day = if dialect_of!(self is BigQueryDialect | GenericDialect)
2745                        && self.consume_token(&Token::LParen)
2746                    {
2747                        let week_day = self.parse_identifier()?;
2748                        self.expect_token(&Token::RParen)?;
2749                        Some(week_day)
2750                    } else {
2751                        None
2752                    };
2753                    Ok(DateTimeField::Week(week_day))
2754                }
2755                Keyword::WEEKS => Ok(DateTimeField::Weeks),
2756                Keyword::DAY => Ok(DateTimeField::Day),
2757                Keyword::DAYOFWEEK => Ok(DateTimeField::DayOfWeek),
2758                Keyword::DAYOFYEAR => Ok(DateTimeField::DayOfYear),
2759                Keyword::DAYS => Ok(DateTimeField::Days),
2760                Keyword::DATE => Ok(DateTimeField::Date),
2761                Keyword::DATETIME => Ok(DateTimeField::Datetime),
2762                Keyword::HOUR => Ok(DateTimeField::Hour),
2763                Keyword::HOURS => Ok(DateTimeField::Hours),
2764                Keyword::MINUTE => Ok(DateTimeField::Minute),
2765                Keyword::MINUTES => Ok(DateTimeField::Minutes),
2766                Keyword::SECOND => Ok(DateTimeField::Second),
2767                Keyword::SECONDS => Ok(DateTimeField::Seconds),
2768                Keyword::CENTURY => Ok(DateTimeField::Century),
2769                Keyword::DECADE => Ok(DateTimeField::Decade),
2770                Keyword::DOY => Ok(DateTimeField::Doy),
2771                Keyword::DOW => Ok(DateTimeField::Dow),
2772                Keyword::EPOCH => Ok(DateTimeField::Epoch),
2773                Keyword::ISODOW => Ok(DateTimeField::Isodow),
2774                Keyword::ISOYEAR => Ok(DateTimeField::Isoyear),
2775                Keyword::ISOWEEK => Ok(DateTimeField::IsoWeek),
2776                Keyword::JULIAN => Ok(DateTimeField::Julian),
2777                Keyword::MICROSECOND => Ok(DateTimeField::Microsecond),
2778                Keyword::MICROSECONDS => Ok(DateTimeField::Microseconds),
2779                Keyword::MILLENIUM => Ok(DateTimeField::Millenium),
2780                Keyword::MILLENNIUM => Ok(DateTimeField::Millennium),
2781                Keyword::MILLISECOND => Ok(DateTimeField::Millisecond),
2782                Keyword::MILLISECONDS => Ok(DateTimeField::Milliseconds),
2783                Keyword::NANOSECOND => Ok(DateTimeField::Nanosecond),
2784                Keyword::NANOSECONDS => Ok(DateTimeField::Nanoseconds),
2785                Keyword::QUARTER => Ok(DateTimeField::Quarter),
2786                Keyword::TIME => Ok(DateTimeField::Time),
2787                Keyword::TIMEZONE => Ok(DateTimeField::Timezone),
2788                Keyword::TIMEZONE_ABBR => Ok(DateTimeField::TimezoneAbbr),
2789                Keyword::TIMEZONE_HOUR => Ok(DateTimeField::TimezoneHour),
2790                Keyword::TIMEZONE_MINUTE => Ok(DateTimeField::TimezoneMinute),
2791                Keyword::TIMEZONE_REGION => Ok(DateTimeField::TimezoneRegion),
2792                _ if self.dialect.allow_extract_custom() => {
2793                    self.prev_token();
2794                    let custom = self.parse_identifier()?;
2795                    Ok(DateTimeField::Custom(custom))
2796                }
2797                _ => self.expected("date/time field", next_token),
2798            },
2799            Token::SingleQuotedString(_) if self.dialect.allow_extract_single_quotes() => {
2800                self.prev_token();
2801                let custom = self.parse_identifier()?;
2802                Ok(DateTimeField::Custom(custom))
2803            }
2804            _ => self.expected("date/time field", next_token),
2805        }
2806    }
2807
2808    pub fn parse_not(&mut self) -> Result<Expr, ParserError> {
2809        match self.peek_token().token {
2810            Token::Word(w) => match w.keyword {
2811                Keyword::EXISTS => {
2812                    let negated = true;
2813                    let _ = self.parse_keyword(Keyword::EXISTS);
2814                    self.parse_exists_expr(negated)
2815                }
2816                _ => Ok(Expr::UnaryOp {
2817                    op: UnaryOperator::Not,
2818                    expr: Box::new(
2819                        self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?,
2820                    ),
2821                }),
2822            },
2823            _ => Ok(Expr::UnaryOp {
2824                op: UnaryOperator::Not,
2825                expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
2826            }),
2827        }
2828    }
2829
2830    /// Parse expression types that start with a left brace '{'.
2831    /// Examples:
2832    /// ```sql
2833    /// -- Dictionary expr.
2834    /// {'key1': 'value1', 'key2': 'value2'}
2835    ///
2836    /// -- Function call using the ODBC syntax.
2837    /// { fn CONCAT('foo', 'bar') }
2838    /// ```
2839    fn parse_lbrace_expr(&mut self) -> Result<Expr, ParserError> {
2840        let token = self.expect_token(&Token::LBrace)?;
2841
2842        if let Some(fn_expr) = self.maybe_parse_odbc_body()? {
2843            self.expect_token(&Token::RBrace)?;
2844            return Ok(fn_expr);
2845        }
2846
2847        if self.dialect.supports_dictionary_syntax() {
2848            self.prev_token(); // Put back the '{'
2849            return self.parse_dictionary();
2850        }
2851
2852        self.expected("an expression", token)
2853    }
2854
2855    /// Parses fulltext expressions [`sqlparser::ast::Expr::MatchAgainst`]
2856    ///
2857    /// # Errors
2858    /// This method will raise an error if the column list is empty or with invalid identifiers,
2859    /// This method will raise an error if the column list is empty or contains invalid identifiers,
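    ///
    /// A rough usage sketch (assuming the MySQL dialect, where this syntax is
    /// primarily used); the SQL string is an example only:
    ///
    /// ```rust,ignore
    /// # use sqlparser::dialect::MySqlDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = MySqlDialect {};
    /// let mut parser = Parser::new(&dialect)
    ///     .try_with_sql("MATCH (title, body) AGAINST ('database' IN BOOLEAN MODE)")
    ///     .unwrap();
    /// let expr = parser.parse_expr().unwrap();
    /// // Expected shape: `Expr::MatchAgainst` with
    /// // `opt_search_modifier: Some(SearchModifier::InBooleanMode)`.
    /// ```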
2860    pub fn parse_match_against(&mut self) -> Result<Expr, ParserError> {
2861        let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
2862
2863        self.expect_keyword_is(Keyword::AGAINST)?;
2864
2865        self.expect_token(&Token::LParen)?;
2866
2867        // MySQL is too permissive about the value; we can't validate it fully at the syntax level.
2868        let match_value = self.parse_value()?.value;
2869
2870        let in_natural_language_mode_keywords = &[
2871            Keyword::IN,
2872            Keyword::NATURAL,
2873            Keyword::LANGUAGE,
2874            Keyword::MODE,
2875        ];
2876
2877        let with_query_expansion_keywords = &[Keyword::WITH, Keyword::QUERY, Keyword::EXPANSION];
2878
2879        let in_boolean_mode_keywords = &[Keyword::IN, Keyword::BOOLEAN, Keyword::MODE];
2880
2881        let opt_search_modifier = if self.parse_keywords(in_natural_language_mode_keywords) {
2882            if self.parse_keywords(with_query_expansion_keywords) {
2883                Some(SearchModifier::InNaturalLanguageModeWithQueryExpansion)
2884            } else {
2885                Some(SearchModifier::InNaturalLanguageMode)
2886            }
2887        } else if self.parse_keywords(in_boolean_mode_keywords) {
2888            Some(SearchModifier::InBooleanMode)
2889        } else if self.parse_keywords(with_query_expansion_keywords) {
2890            Some(SearchModifier::WithQueryExpansion)
2891        } else {
2892            None
2893        };
2894
2895        self.expect_token(&Token::RParen)?;
2896
2897        Ok(Expr::MatchAgainst {
2898            columns,
2899            match_value,
2900            opt_search_modifier,
2901        })
2902    }
2903
2904    /// Parse an `INTERVAL` expression.
2905    ///
2906    /// Some syntactically valid intervals:
2907    ///
2908    /// ```sql
2909    ///   1. INTERVAL '1' DAY
2910    ///   2. INTERVAL '1-1' YEAR TO MONTH
2911    ///   3. INTERVAL '1' SECOND
2912    ///   4. INTERVAL '1:1:1.1' HOUR (5) TO SECOND (5)
2913    ///   5. INTERVAL '1.1' SECOND (2, 2)
2914    ///   6. INTERVAL '1:1' HOUR (5) TO MINUTE (5)
2915    ///   7. (MySQL & BigQuery only): INTERVAL 1 DAY
2916    /// ```
2917    ///
2918    /// Note that we do not currently attempt to parse the quoted value.
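    ///
    /// A minimal, illustrative sketch of form 2 above, driven through the
    /// public API with the generic dialect:
    ///
    /// ```rust,ignore
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect)
    ///     .try_with_sql("INTERVAL '1-1' YEAR TO MONTH")
    ///     .unwrap();
    /// let expr = parser.parse_expr().unwrap();
    /// // Expected shape: `Expr::Interval` with `leading_field: Some(DateTimeField::Year)`
    /// // and `last_field: Some(DateTimeField::Month)`; the `'1-1'` string is kept unparsed.
    /// ```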
2919    pub fn parse_interval(&mut self) -> Result<Expr, ParserError> {
2920        // The SQL standard allows an optional sign before the value string, but
2921        // it is not clear if any implementations support that syntax, so we
2922        // don't currently try to parse it. (The sign can instead be included
2923        // inside the value string.)
2924
2925        // to match the different flavours of INTERVAL syntax, we only allow expressions
2926        // if the dialect requires an interval qualifier,
2927        // see https://github.com/sqlparser-rs/sqlparser-rs/pull/1398 for more details
2928        let value = if self.dialect.require_interval_qualifier() {
2929            // parse a whole expression so `INTERVAL 1 + 1 DAY` is valid
2930            self.parse_expr()?
2931        } else {
2932            // parse a prefix expression so `INTERVAL 1 DAY` is valid, but `INTERVAL 1 + 1 DAY` is not
2933            // this also means that `INTERVAL '5 days' > INTERVAL '1 day'` is treated properly
2934            self.parse_prefix()?
2935        };
2936
2937        // Following the string literal is a qualifier which indicates the units
2938        // of the duration specified in the string literal.
2939        //
2940        // Note that PostgreSQL allows omitting the qualifier, so we provide
2941        // this more general implementation.
2942        let leading_field = if self.next_token_is_temporal_unit() {
2943            Some(self.parse_date_time_field()?)
2944        } else if self.dialect.require_interval_qualifier() {
2945            return parser_err!(
2946                "INTERVAL requires a unit after the literal value",
2947                self.peek_token().span.start
2948            );
2949        } else {
2950            None
2951        };
2952
2953        let (leading_precision, last_field, fsec_precision) =
2954            if leading_field == Some(DateTimeField::Second) {
2955                // SQL mandates special syntax for `SECOND TO SECOND` literals.
2956                // Instead of
2957                //     `SECOND [(<leading precision>)] TO SECOND[(<fractional seconds precision>)]`
2958                // one must use the special format:
2959                //     `SECOND [( <leading precision> [ , <fractional seconds precision>] )]`
2960                let last_field = None;
2961                let (leading_precision, fsec_precision) = self.parse_optional_precision_scale()?;
2962                (leading_precision, last_field, fsec_precision)
2963            } else {
2964                let leading_precision = self.parse_optional_precision()?;
2965                if self.parse_keyword(Keyword::TO) {
2966                    let last_field = Some(self.parse_date_time_field()?);
2967                    let fsec_precision = if last_field == Some(DateTimeField::Second) {
2968                        self.parse_optional_precision()?
2969                    } else {
2970                        None
2971                    };
2972                    (leading_precision, last_field, fsec_precision)
2973                } else {
2974                    (leading_precision, None, None)
2975                }
2976            };
2977
2978        Ok(Expr::Interval(Interval {
2979            value: Box::new(value),
2980            leading_field,
2981            leading_precision,
2982            last_field,
2983            fractional_seconds_precision: fsec_precision,
2984        }))
2985    }
2986
2987    /// Peek at the next token and determine if it is a temporal unit
2988    /// like `second`.
2989    pub fn next_token_is_temporal_unit(&mut self) -> bool {
2990        if let Token::Word(word) = self.peek_token().token {
2991            matches!(
2992                word.keyword,
2993                Keyword::YEAR
2994                    | Keyword::YEARS
2995                    | Keyword::MONTH
2996                    | Keyword::MONTHS
2997                    | Keyword::WEEK
2998                    | Keyword::WEEKS
2999                    | Keyword::DAY
3000                    | Keyword::DAYS
3001                    | Keyword::HOUR
3002                    | Keyword::HOURS
3003                    | Keyword::MINUTE
3004                    | Keyword::MINUTES
3005                    | Keyword::SECOND
3006                    | Keyword::SECONDS
3007                    | Keyword::CENTURY
3008                    | Keyword::DECADE
3009                    | Keyword::DOW
3010                    | Keyword::DOY
3011                    | Keyword::EPOCH
3012                    | Keyword::ISODOW
3013                    | Keyword::ISOYEAR
3014                    | Keyword::JULIAN
3015                    | Keyword::MICROSECOND
3016                    | Keyword::MICROSECONDS
3017                    | Keyword::MILLENIUM
3018                    | Keyword::MILLENNIUM
3019                    | Keyword::MILLISECOND
3020                    | Keyword::MILLISECONDS
3021                    | Keyword::NANOSECOND
3022                    | Keyword::NANOSECONDS
3023                    | Keyword::QUARTER
3024                    | Keyword::TIMEZONE
3025                    | Keyword::TIMEZONE_HOUR
3026                    | Keyword::TIMEZONE_MINUTE
3027            )
3028        } else {
3029            false
3030        }
3031    }
3032
3033    /// Syntax
3034    /// ```sql
3035    /// -- typed
3036    /// STRUCT<[field_name] field_type, ...>( expr1 [, ... ])
3037    /// -- typeless
3038    /// STRUCT( expr1 [AS field_name] [, ... ])
3039    /// ```
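    ///
    /// A rough sketch of the typed form, driven through the public API
    /// (assuming the BigQuery dialect, which accepts this syntax):
    ///
    /// ```rust,ignore
    /// # use sqlparser::dialect::BigQueryDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = BigQueryDialect {};
    /// let mut parser = Parser::new(&dialect)
    ///     .try_with_sql("STRUCT<x INT64, y STRING>(5, 'foo')")
    ///     .unwrap();
    /// let expr = parser.parse_expr().unwrap();
    /// // Expected shape: `Expr::Struct` with two `StructField` entries in `fields`
    /// // and the two value expressions in `values`.
    /// ```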
3040    fn parse_struct_literal(&mut self) -> Result<Expr, ParserError> {
3041        // Parse the field definitions if they exist: `<[field_name] field_type, ...>`
3042        self.prev_token();
3043        let (fields, trailing_bracket) =
3044            self.parse_struct_type_def(Self::parse_struct_field_def)?;
3045        if trailing_bracket.0 {
3046            return parser_err!(
3047                "unmatched > in STRUCT literal",
3048                self.peek_token().span.start
3049            );
3050        }
3051
3052        // Parse the struct values `(expr1 [, ... ])`
3053        self.expect_token(&Token::LParen)?;
3054        let values = self
3055            .parse_comma_separated(|parser| parser.parse_struct_field_expr(!fields.is_empty()))?;
3056        self.expect_token(&Token::RParen)?;
3057
3058        Ok(Expr::Struct { values, fields })
3059    }
3060
3061    /// Parse an expression value for a struct literal
3062    /// Syntax
3063    /// ```sql
3064    /// expr [AS name]
3065    /// ```
3066    ///
3067    /// For BigQuery [1], the `typed_syntax` parameter is set to `true` if the expression
3068    /// is to be parsed as a field expression declared using the typed
3069    /// struct syntax [2], and `false` if using the typeless struct syntax [3].
3070    ///
3071    /// [1]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#constructing_a_struct
3072    /// [2]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#typed_struct_syntax
3073    /// [3]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#typeless_struct_syntax
3074    fn parse_struct_field_expr(&mut self, typed_syntax: bool) -> Result<Expr, ParserError> {
3075        let expr = self.parse_expr()?;
3076        if self.parse_keyword(Keyword::AS) {
3077            if typed_syntax {
3078                return parser_err!("Typed syntax does not allow AS", {
3079                    self.prev_token();
3080                    self.peek_token().span.start
3081                });
3082            }
3083            let field_name = self.parse_identifier()?;
3084            Ok(Expr::Named {
3085                expr: expr.into(),
3086                name: field_name,
3087            })
3088        } else {
3089            Ok(expr)
3090        }
3091    }
3092
3093    /// Parse a Struct type definition as a sequence of field-value pairs.
3094    /// The syntax of the Struct elem differs by dialect so it is customised
3095    /// by the `elem_parser` argument.
3096    ///
3097    /// Syntax
3098    /// ```sql
3099    /// Hive:
3100    /// STRUCT<field_name: field_type>
3101    ///
3102    /// BigQuery:
3103    /// STRUCT<[field_name] field_type>
3104    /// ```
3105    fn parse_struct_type_def<F>(
3106        &mut self,
3107        mut elem_parser: F,
3108    ) -> Result<(Vec<StructField>, MatchedTrailingBracket), ParserError>
3109    where
3110        F: FnMut(&mut Parser<'a>) -> Result<(StructField, MatchedTrailingBracket), ParserError>,
3111    {
3112        self.expect_keyword_is(Keyword::STRUCT)?;
3113
3114        // Nothing to do if we have no type information.
3115        if Token::Lt != self.peek_token() {
3116            return Ok((Default::default(), false.into()));
3117        }
3118        self.next_token();
3119
3120        let mut field_defs = vec![];
3121        let trailing_bracket = loop {
3122            let (def, trailing_bracket) = elem_parser(self)?;
3123            field_defs.push(def);
3124            // The struct field definitions are finished when a `>>` or a comma is encountered.
3125            if trailing_bracket.0 || !self.consume_token(&Token::Comma) {
3126                break trailing_bracket;
3127            }
3128        };
3129
3130        Ok((
3131            field_defs,
3132            self.expect_closing_angle_bracket(trailing_bracket)?,
3133        ))
3134    }
3135
3136    /// DuckDB Struct Data Type <https://duckdb.org/docs/sql/data_types/struct.html#retrieving-from-structs>
3137    fn parse_duckdb_struct_type_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3138        self.expect_keyword_is(Keyword::STRUCT)?;
3139        self.expect_token(&Token::LParen)?;
3140        let struct_body = self.parse_comma_separated(|parser| {
3141            let field_name = parser.parse_identifier()?;
3142            let field_type = parser.parse_data_type()?;
3143
3144            Ok(StructField {
3145                field_name: Some(field_name),
3146                field_type,
3147                options: None,
3148            })
3149        });
3150        self.expect_token(&Token::RParen)?;
3151        struct_body
3152    }
3153
3154    /// Parse a field definition in a [struct] or [tuple].
3155    /// Syntax:
3156    ///
3157    /// ```sql
3158    /// [field_name] field_type
3159    /// ```
3160    ///
3161    /// [struct]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#declaring_a_struct_type
3162    /// [tuple]: https://clickhouse.com/docs/en/sql-reference/data-types/tuple
3163    fn parse_struct_field_def(
3164        &mut self,
3165    ) -> Result<(StructField, MatchedTrailingBracket), ParserError> {
3166        // Look beyond the next item to infer whether both field name
3167        // and type are specified.
3168        let is_anonymous_field = !matches!(
3169            (self.peek_nth_token(0).token, self.peek_nth_token(1).token),
3170            (Token::Word(_), Token::Word(_))
3171        );
3172
3173        let field_name = if is_anonymous_field {
3174            None
3175        } else {
3176            Some(self.parse_identifier()?)
3177        };
3178
3179        let (field_type, trailing_bracket) = self.parse_data_type_helper()?;
3180
3181        let options = self.maybe_parse_options(Keyword::OPTIONS)?;
3182        Ok((
3183            StructField {
3184                field_name,
3185                field_type,
3186                options,
3187            },
3188            trailing_bracket,
3189        ))
3190    }
3191
3192    /// DuckDB specific: Parse a Union type definition [1] as a sequence of field-value pairs.
3193    ///
3194    /// Syntax:
3195    ///
3196    /// ```sql
3197    /// UNION(field_name field_type[,...])
3198    /// ```
3199    ///
3200    /// [1]: https://duckdb.org/docs/sql/data_types/union.html
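    ///
    /// Since this is a data type, it typically appears in a column definition.
    /// A rough sketch (the table and field names are examples only):
    ///
    /// ```rust,ignore
    /// # use sqlparser::dialect::DuckDbDialect;
    /// # use sqlparser::parser::Parser;
    /// let sql = "CREATE TABLE tbl (u UNION(a INTEGER, b VARCHAR))";
    /// let statements = Parser::parse_sql(&DuckDbDialect {}, sql).unwrap();
    /// // The data type of column `u` should carry two `UnionField`s
    /// // (`a INTEGER` and `b VARCHAR`).
    /// ```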
3201    fn parse_union_type_def(&mut self) -> Result<Vec<UnionField>, ParserError> {
3202        self.expect_keyword_is(Keyword::UNION)?;
3203
3204        self.expect_token(&Token::LParen)?;
3205
3206        let fields = self.parse_comma_separated(|p| {
3207            Ok(UnionField {
3208                field_name: p.parse_identifier()?,
3209                field_type: p.parse_data_type()?,
3210            })
3211        })?;
3212
3213        self.expect_token(&Token::RParen)?;
3214
3215        Ok(fields)
3216    }
3217
3218    /// DuckDB and ClickHouse specific: Parse a duckdb [dictionary] or a clickhouse [map] setting
3219    ///
3220    /// Syntax:
3221    ///
3222    /// ```sql
3223    /// {'field_name': expr1[, ... ]}
3224    /// ```
3225    ///
3226    /// [dictionary]: https://duckdb.org/docs/sql/data_types/struct#creating-structs
3227    /// [map]: https://clickhouse.com/docs/operations/settings/settings#additional_table_filters
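    ///
    /// A minimal sketch through the public API (assuming the DuckDB dialect,
    /// which enables this syntax via `supports_dictionary_syntax`):
    ///
    /// ```rust,ignore
    /// # use sqlparser::dialect::DuckDbDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = DuckDbDialect {};
    /// let mut parser = Parser::new(&dialect)
    ///     .try_with_sql("{'a': 1, 'b': 2}")
    ///     .unwrap();
    /// let expr = parser.parse_expr().unwrap();
    /// // Expected shape: `Expr::Dictionary` with one `DictionaryField` per key.
    /// ```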
3228    fn parse_dictionary(&mut self) -> Result<Expr, ParserError> {
3229        self.expect_token(&Token::LBrace)?;
3230
3231        let fields = self.parse_comma_separated0(Self::parse_dictionary_field, Token::RBrace)?;
3232
3233        self.expect_token(&Token::RBrace)?;
3234
3235        Ok(Expr::Dictionary(fields))
3236    }
3237
3238    /// Parse a field for a duckdb [dictionary] or a clickhouse [map] setting
3239    ///
3240    /// Syntax
3241    ///
3242    /// ```sql
3243    /// 'name': expr
3244    /// ```
3245    ///
3246    /// [dictionary]: https://duckdb.org/docs/sql/data_types/struct#creating-structs
3247    /// [map]: https://clickhouse.com/docs/operations/settings/settings#additional_table_filters
3248    fn parse_dictionary_field(&mut self) -> Result<DictionaryField, ParserError> {
3249        let key = self.parse_identifier()?;
3250
3251        self.expect_token(&Token::Colon)?;
3252
3253        let expr = self.parse_expr()?;
3254
3255        Ok(DictionaryField {
3256            key,
3257            value: Box::new(expr),
3258        })
3259    }
3260
3261    /// DuckDB specific: Parse a duckdb [map]
3262    ///
3263    /// Syntax:
3264    ///
3265    /// ```sql
3266    /// Map {key1: value1[, ... ]}
3267    /// ```
3268    ///
3269    /// [map]: https://duckdb.org/docs/sql/data_types/map.html#creating-maps
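    ///
    /// A minimal sketch through the public API (DuckDB dialect assumed):
    ///
    /// ```rust,ignore
    /// # use sqlparser::dialect::DuckDbDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = DuckDbDialect {};
    /// let mut parser = Parser::new(&dialect)
    ///     .try_with_sql("MAP {'k1': 10, 'k2': 20}")
    ///     .unwrap();
    /// let expr = parser.parse_expr().unwrap();
    /// // Expected shape: `Expr::Map` with one `MapEntry` per key/value pair.
    /// ```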
3270    fn parse_duckdb_map_literal(&mut self) -> Result<Expr, ParserError> {
3271        self.expect_token(&Token::LBrace)?;
3272        let fields = self.parse_comma_separated0(Self::parse_duckdb_map_field, Token::RBrace)?;
3273        self.expect_token(&Token::RBrace)?;
3274        Ok(Expr::Map(Map { entries: fields }))
3275    }
3276
3277    /// Parse a field for a duckdb [map]
3278    ///
3279    /// Syntax
3280    ///
3281    /// ```sql
3282    /// key: value
3283    /// ```
3284    ///
3285    /// [map]: https://duckdb.org/docs/sql/data_types/map.html#creating-maps
3286    fn parse_duckdb_map_field(&mut self) -> Result<MapEntry, ParserError> {
3287        let key = self.parse_expr()?;
3288
3289        self.expect_token(&Token::Colon)?;
3290
3291        let value = self.parse_expr()?;
3292
3293        Ok(MapEntry {
3294            key: Box::new(key),
3295            value: Box::new(value),
3296        })
3297    }
3298
3299    /// Parse clickhouse [map]
3300    ///
3301    /// Syntax
3302    ///
3303    /// ```sql
3304    /// Map(key_data_type, value_data_type)
3305    /// ```
3306    ///
3307    /// [map]: https://clickhouse.com/docs/en/sql-reference/data-types/map
3308    fn parse_click_house_map_def(&mut self) -> Result<(DataType, DataType), ParserError> {
3309        self.expect_keyword_is(Keyword::MAP)?;
3310        self.expect_token(&Token::LParen)?;
3311        let key_data_type = self.parse_data_type()?;
3312        self.expect_token(&Token::Comma)?;
3313        let value_data_type = self.parse_data_type()?;
3314        self.expect_token(&Token::RParen)?;
3315
3316        Ok((key_data_type, value_data_type))
3317    }
3318
3319    /// Parse clickhouse [tuple]
3320    ///
3321    /// Syntax
3322    ///
3323    /// ```sql
3324    /// Tuple([field_name] field_type, ...)
3325    /// ```
3326    ///
3327    /// [tuple]: https://clickhouse.com/docs/en/sql-reference/data-types/tuple
3328    fn parse_click_house_tuple_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3329        self.expect_keyword_is(Keyword::TUPLE)?;
3330        self.expect_token(&Token::LParen)?;
3331        let mut field_defs = vec![];
3332        loop {
3333            let (def, _) = self.parse_struct_field_def()?;
3334            field_defs.push(def);
3335            if !self.consume_token(&Token::Comma) {
3336                break;
3337            }
3338        }
3339        self.expect_token(&Token::RParen)?;
3340
3341        Ok(field_defs)
3342    }
3343
3344    /// For nested types that use the angle bracket syntax, this matches either
3345    /// `>`, `>>`, or nothing depending on which variant is expected (specified by the
3346    /// previously matched `trailing_bracket` argument). It returns whether there is a
3347    /// trailing bracket left to be matched, i.e. whether `>>` was matched.
3348    fn expect_closing_angle_bracket(
3349        &mut self,
3350        trailing_bracket: MatchedTrailingBracket,
3351    ) -> Result<MatchedTrailingBracket, ParserError> {
3352        let trailing_bracket = if !trailing_bracket.0 {
3353            match self.peek_token().token {
3354                Token::Gt => {
3355                    self.next_token();
3356                    false.into()
3357                }
3358                Token::ShiftRight => {
3359                    self.next_token();
3360                    true.into()
3361                }
3362                _ => return self.expected(">", self.peek_token()),
3363            }
3364        } else {
3365            false.into()
3366        };
3367
3368        Ok(trailing_bracket)
3369    }
3370
3371    /// Parse an operator following an expression
3372    pub fn parse_infix(&mut self, expr: Expr, precedence: u8) -> Result<Expr, ParserError> {
3373        // allow the dialect to override infix parsing
3374        if let Some(infix) = self.dialect.parse_infix(self, &expr, precedence) {
3375            return infix;
3376        }
3377
3378        let dialect = self.dialect;
3379
3380        self.advance_token();
3381        let tok = self.get_current_token();
3382        debug!("infix: {tok:?}");
3383        let tok_index = self.get_current_index();
3384        let span = tok.span;
3385        let regular_binary_operator = match &tok.token {
3386            Token::Spaceship => Some(BinaryOperator::Spaceship),
3387            Token::DoubleEq => Some(BinaryOperator::Eq),
3388            Token::Assignment => Some(BinaryOperator::Assignment),
3389            Token::Eq => Some(BinaryOperator::Eq),
3390            Token::Neq => Some(BinaryOperator::NotEq),
3391            Token::Gt => Some(BinaryOperator::Gt),
3392            Token::GtEq => Some(BinaryOperator::GtEq),
3393            Token::Lt => Some(BinaryOperator::Lt),
3394            Token::LtEq => Some(BinaryOperator::LtEq),
3395            Token::Plus => Some(BinaryOperator::Plus),
3396            Token::Minus => Some(BinaryOperator::Minus),
3397            Token::Mul => Some(BinaryOperator::Multiply),
3398            Token::Mod => Some(BinaryOperator::Modulo),
3399            Token::StringConcat => Some(BinaryOperator::StringConcat),
3400            Token::Pipe => Some(BinaryOperator::BitwiseOr),
3401            Token::Caret => {
3402                // In PostgreSQL, ^ stands for the exponentiation operation,
3403                // and # stands for XOR. See https://www.postgresql.org/docs/current/functions-math.html
3404                if dialect_is!(dialect is PostgreSqlDialect) {
3405                    Some(BinaryOperator::PGExp)
3406                } else {
3407                    Some(BinaryOperator::BitwiseXor)
3408                }
3409            }
3410            Token::Ampersand => Some(BinaryOperator::BitwiseAnd),
3411            Token::Div => Some(BinaryOperator::Divide),
3412            Token::DuckIntDiv if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
3413                Some(BinaryOperator::DuckIntegerDivide)
3414            }
3415            Token::ShiftLeft if dialect_is!(dialect is PostgreSqlDialect | DuckDbDialect | GenericDialect | RedshiftSqlDialect) => {
3416                Some(BinaryOperator::PGBitwiseShiftLeft)
3417            }
3418            Token::ShiftRight if dialect_is!(dialect is PostgreSqlDialect | DuckDbDialect | GenericDialect | RedshiftSqlDialect) => {
3419                Some(BinaryOperator::PGBitwiseShiftRight)
3420            }
3421            Token::Sharp if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3422                Some(BinaryOperator::PGBitwiseXor)
3423            }
3424            Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3425                Some(BinaryOperator::PGOverlap)
3426            }
3427            Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3428                Some(BinaryOperator::PGOverlap)
3429            }
3430            Token::CaretAt if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3431                Some(BinaryOperator::PGStartsWith)
3432            }
3433            Token::Tilde => Some(BinaryOperator::PGRegexMatch),
3434            Token::TildeAsterisk => Some(BinaryOperator::PGRegexIMatch),
3435            Token::ExclamationMarkTilde => Some(BinaryOperator::PGRegexNotMatch),
3436            Token::ExclamationMarkTildeAsterisk => Some(BinaryOperator::PGRegexNotIMatch),
3437            Token::DoubleTilde => Some(BinaryOperator::PGLikeMatch),
3438            Token::DoubleTildeAsterisk => Some(BinaryOperator::PGILikeMatch),
3439            Token::ExclamationMarkDoubleTilde => Some(BinaryOperator::PGNotLikeMatch),
3440            Token::ExclamationMarkDoubleTildeAsterisk => Some(BinaryOperator::PGNotILikeMatch),
3441            Token::Arrow => Some(BinaryOperator::Arrow),
3442            Token::LongArrow => Some(BinaryOperator::LongArrow),
3443            Token::HashArrow => Some(BinaryOperator::HashArrow),
3444            Token::HashLongArrow => Some(BinaryOperator::HashLongArrow),
3445            Token::AtArrow => Some(BinaryOperator::AtArrow),
3446            Token::ArrowAt => Some(BinaryOperator::ArrowAt),
3447            Token::HashMinus => Some(BinaryOperator::HashMinus),
3448            Token::AtQuestion => Some(BinaryOperator::AtQuestion),
3449            Token::AtAt => Some(BinaryOperator::AtAt),
3450            Token::Question => Some(BinaryOperator::Question),
3451            Token::QuestionAnd => Some(BinaryOperator::QuestionAnd),
3452            Token::QuestionPipe => Some(BinaryOperator::QuestionPipe),
3453            Token::CustomBinaryOperator(s) => Some(BinaryOperator::Custom(s.clone())),
3454            Token::DoubleSharp if self.dialect.supports_geometric_types() => {
3455                Some(BinaryOperator::DoubleHash)
3456            }
3457
3458            Token::AmpersandLeftAngleBracket if self.dialect.supports_geometric_types() => {
3459                Some(BinaryOperator::AndLt)
3460            }
3461            Token::AmpersandRightAngleBracket if self.dialect.supports_geometric_types() => {
3462                Some(BinaryOperator::AndGt)
3463            }
3464            Token::QuestionMarkDash if self.dialect.supports_geometric_types() => {
3465                Some(BinaryOperator::QuestionDash)
3466            }
3467            Token::AmpersandLeftAngleBracketVerticalBar
3468                if self.dialect.supports_geometric_types() =>
3469            {
3470                Some(BinaryOperator::AndLtPipe)
3471            }
3472            Token::VerticalBarAmpersandRightAngleBracket
3473                if self.dialect.supports_geometric_types() =>
3474            {
3475                Some(BinaryOperator::PipeAndGt)
3476            }
3477            Token::TwoWayArrow if self.dialect.supports_geometric_types() => {
3478                Some(BinaryOperator::LtDashGt)
3479            }
3480            Token::LeftAngleBracketCaret if self.dialect.supports_geometric_types() => {
3481                Some(BinaryOperator::LtCaret)
3482            }
3483            Token::RightAngleBracketCaret if self.dialect.supports_geometric_types() => {
3484                Some(BinaryOperator::GtCaret)
3485            }
3486            Token::QuestionMarkSharp if self.dialect.supports_geometric_types() => {
3487                Some(BinaryOperator::QuestionHash)
3488            }
3489            Token::QuestionMarkDoubleVerticalBar if self.dialect.supports_geometric_types() => {
3490                Some(BinaryOperator::QuestionDoublePipe)
3491            }
3492            Token::QuestionMarkDashVerticalBar if self.dialect.supports_geometric_types() => {
3493                Some(BinaryOperator::QuestionDashPipe)
3494            }
3495            Token::TildeEqual if self.dialect.supports_geometric_types() => {
3496                Some(BinaryOperator::TildeEq)
3497            }
3498            Token::ShiftLeftVerticalBar if self.dialect.supports_geometric_types() => {
3499                Some(BinaryOperator::LtLtPipe)
3500            }
3501            Token::VerticalBarShiftRight if self.dialect.supports_geometric_types() => {
3502                Some(BinaryOperator::PipeGtGt)
3503            }
3504            Token::AtSign if self.dialect.supports_geometric_types() => Some(BinaryOperator::At),
3505
3506            Token::Word(w) => match w.keyword {
3507                Keyword::AND => Some(BinaryOperator::And),
3508                Keyword::OR => Some(BinaryOperator::Or),
3509                Keyword::XOR => Some(BinaryOperator::Xor),
3510                Keyword::OVERLAPS => Some(BinaryOperator::Overlaps),
3511                Keyword::OPERATOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3512                    self.expect_token(&Token::LParen)?;
3513                    // there are special rules for operator names in
3514                    // postgres so we can not use 'parse_object'
3515                    // or similar.
3516                    // See https://www.postgresql.org/docs/current/sql-createoperator.html
3517                    let mut idents = vec![];
3518                    loop {
3519                        self.advance_token();
3520                        idents.push(self.get_current_token().to_string());
3521                        if !self.consume_token(&Token::Period) {
3522                            break;
3523                        }
3524                    }
3525                    self.expect_token(&Token::RParen)?;
3526                    Some(BinaryOperator::PGCustomBinaryOperator(idents))
3527                }
3528                _ => None,
3529            },
3530            _ => None,
3531        };
3532
3533        let tok = self.token_at(tok_index);
3534        if let Some(op) = regular_binary_operator {
3535            if let Some(keyword) =
3536                self.parse_one_of_keywords(&[Keyword::ANY, Keyword::ALL, Keyword::SOME])
3537            {
3538                self.expect_token(&Token::LParen)?;
3539                let right = if self.peek_sub_query() {
3540                    // We have a subquery ahead (SELECT/WITH ...), so we need to rewind and
3541                    // use the parenthesis for parsing the subquery as an expression.
3542                    self.prev_token(); // LParen
3543                    self.parse_subexpr(precedence)?
3544                } else {
3545                    // Non-subquery expression
3546                    let right = self.parse_subexpr(precedence)?;
3547                    self.expect_token(&Token::RParen)?;
3548                    right
3549                };
3550
3551                if !matches!(
3552                    op,
3553                    BinaryOperator::Gt
3554                        | BinaryOperator::Lt
3555                        | BinaryOperator::GtEq
3556                        | BinaryOperator::LtEq
3557                        | BinaryOperator::Eq
3558                        | BinaryOperator::NotEq
3559                        | BinaryOperator::PGRegexMatch
3560                        | BinaryOperator::PGRegexIMatch
3561                        | BinaryOperator::PGRegexNotMatch
3562                        | BinaryOperator::PGRegexNotIMatch
3563                        | BinaryOperator::PGLikeMatch
3564                        | BinaryOperator::PGILikeMatch
3565                        | BinaryOperator::PGNotLikeMatch
3566                        | BinaryOperator::PGNotILikeMatch
3567                ) {
3568                    return parser_err!(
3569                        format!(
3570                        "Expected one of [=, >, <, >=, <=, !=, ~, ~*, !~, !~*, ~~, ~~*, !~~, !~~*] as comparison operator, found: {op}"
3571                    ),
3572                        span.start
3573                    );
3574                };
3575
3576                Ok(match keyword {
3577                    Keyword::ALL => Expr::AllOp {
3578                        left: Box::new(expr),
3579                        compare_op: op,
3580                        right: Box::new(right),
3581                    },
3582                    Keyword::ANY | Keyword::SOME => Expr::AnyOp {
3583                        left: Box::new(expr),
3584                        compare_op: op,
3585                        right: Box::new(right),
3586                        is_some: keyword == Keyword::SOME,
3587                    },
3588                    _ => unreachable!(),
3589                })
3590            } else {
3591                Ok(Expr::BinaryOp {
3592                    left: Box::new(expr),
3593                    op,
3594                    right: Box::new(self.parse_subexpr(precedence)?),
3595                })
3596            }
3597        } else if let Token::Word(w) = &tok.token {
3598            match w.keyword {
3599                Keyword::IS => {
3600                    if self.parse_keyword(Keyword::NULL) {
3601                        Ok(Expr::IsNull(Box::new(expr)))
3602                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
3603                        Ok(Expr::IsNotNull(Box::new(expr)))
3604                    } else if self.parse_keywords(&[Keyword::TRUE]) {
3605                        Ok(Expr::IsTrue(Box::new(expr)))
3606                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::TRUE]) {
3607                        Ok(Expr::IsNotTrue(Box::new(expr)))
3608                    } else if self.parse_keywords(&[Keyword::FALSE]) {
3609                        Ok(Expr::IsFalse(Box::new(expr)))
3610                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::FALSE]) {
3611                        Ok(Expr::IsNotFalse(Box::new(expr)))
3612                    } else if self.parse_keywords(&[Keyword::UNKNOWN]) {
3613                        Ok(Expr::IsUnknown(Box::new(expr)))
3614                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::UNKNOWN]) {
3615                        Ok(Expr::IsNotUnknown(Box::new(expr)))
3616                    } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::FROM]) {
3617                        let expr2 = self.parse_expr()?;
3618                        Ok(Expr::IsDistinctFrom(Box::new(expr), Box::new(expr2)))
3619                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::DISTINCT, Keyword::FROM])
3620                    {
3621                        let expr2 = self.parse_expr()?;
3622                        Ok(Expr::IsNotDistinctFrom(Box::new(expr), Box::new(expr2)))
3623                    } else if let Ok(is_normalized) = self.parse_unicode_is_normalized(expr) {
3624                        Ok(is_normalized)
3625                    } else {
3626                        self.expected(
3627                            "[NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS",
3628                            self.peek_token(),
3629                        )
3630                    }
3631                }
3632                Keyword::AT => {
3633                    self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
3634                    Ok(Expr::AtTimeZone {
3635                        timestamp: Box::new(expr),
3636                        time_zone: Box::new(self.parse_subexpr(precedence)?),
3637                    })
3638                }
3639                Keyword::NOT
3640                | Keyword::IN
3641                | Keyword::BETWEEN
3642                | Keyword::LIKE
3643                | Keyword::ILIKE
3644                | Keyword::SIMILAR
3645                | Keyword::REGEXP
3646                | Keyword::RLIKE => {
3647                    self.prev_token();
3648                    let negated = self.parse_keyword(Keyword::NOT);
3649                    let regexp = self.parse_keyword(Keyword::REGEXP);
3650                    let rlike = self.parse_keyword(Keyword::RLIKE);
3651                    let null = if !self.in_column_definition_state() {
3652                        self.parse_keyword(Keyword::NULL)
3653                    } else {
3654                        false
3655                    };
3656                    if regexp || rlike {
3657                        Ok(Expr::RLike {
3658                            negated,
3659                            expr: Box::new(expr),
3660                            pattern: Box::new(
3661                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3662                            ),
3663                            regexp,
3664                        })
3665                    } else if negated && null {
3666                        Ok(Expr::IsNotNull(Box::new(expr)))
3667                    } else if self.parse_keyword(Keyword::IN) {
3668                        self.parse_in(expr, negated)
3669                    } else if self.parse_keyword(Keyword::BETWEEN) {
3670                        self.parse_between(expr, negated)
3671                    } else if self.parse_keyword(Keyword::LIKE) {
3672                        Ok(Expr::Like {
3673                            negated,
3674                            any: self.parse_keyword(Keyword::ANY),
3675                            expr: Box::new(expr),
3676                            pattern: Box::new(
3677                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3678                            ),
3679                            escape_char: self.parse_escape_char()?,
3680                        })
3681                    } else if self.parse_keyword(Keyword::ILIKE) {
3682                        Ok(Expr::ILike {
3683                            negated,
3684                            any: self.parse_keyword(Keyword::ANY),
3685                            expr: Box::new(expr),
3686                            pattern: Box::new(
3687                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3688                            ),
3689                            escape_char: self.parse_escape_char()?,
3690                        })
3691                    } else if self.parse_keywords(&[Keyword::SIMILAR, Keyword::TO]) {
3692                        Ok(Expr::SimilarTo {
3693                            negated,
3694                            expr: Box::new(expr),
3695                            pattern: Box::new(
3696                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3697                            ),
3698                            escape_char: self.parse_escape_char()?,
3699                        })
3700                    } else {
3701                        self.expected("IN or BETWEEN after NOT", self.peek_token())
3702                    }
3703                }
3704                Keyword::NOTNULL if dialect.supports_notnull_operator() => {
3705                    Ok(Expr::IsNotNull(Box::new(expr)))
3706                }
3707                Keyword::MEMBER => {
3708                    if self.parse_keyword(Keyword::OF) {
3709                        self.expect_token(&Token::LParen)?;
3710                        let array = self.parse_expr()?;
3711                        self.expect_token(&Token::RParen)?;
3712                        Ok(Expr::MemberOf(MemberOf {
3713                            value: Box::new(expr),
3714                            array: Box::new(array),
3715                        }))
3716                    } else {
3717                        self.expected("OF after MEMBER", self.peek_token())
3718                    }
3719                }
3720                // Can only happen if `get_next_precedence` got out of sync with this function
3721                _ => parser_err!(
3722                    format!("No infix parser for token {:?}", tok.token),
3723                    tok.span.start
3724                ),
3725            }
3726        } else if Token::DoubleColon == *tok {
3727            Ok(Expr::Cast {
3728                kind: CastKind::DoubleColon,
3729                expr: Box::new(expr),
3730                data_type: self.parse_data_type()?,
3731                format: None,
3732            })
3733        } else if Token::ExclamationMark == *tok && self.dialect.supports_factorial_operator() {
3734            Ok(Expr::UnaryOp {
3735                op: UnaryOperator::PGPostfixFactorial,
3736                expr: Box::new(expr),
3737            })
3738        } else if Token::LBracket == *tok && self.dialect.supports_partiql()
3739            || (dialect_of!(self is SnowflakeDialect | GenericDialect) && Token::Colon == *tok)
3740        {
3741            self.prev_token();
3742            self.parse_json_access(expr)
3743        } else {
3744            // Can only happen if `get_next_precedence` got out of sync with this function
3745            parser_err!(
3746                format!("No infix parser for token {:?}", tok.token),
3747                tok.span.start
3748            )
3749        }
3750    }
3751
3752    /// Parse the `ESCAPE CHAR` portion of `LIKE`, `ILIKE`, and `SIMILAR TO`
3753    pub fn parse_escape_char(&mut self) -> Result<Option<Value>, ParserError> {
3754        if self.parse_keyword(Keyword::ESCAPE) {
3755            Ok(Some(self.parse_value()?.into()))
3756        } else {
3757            Ok(None)
3758        }
3759    }
3760
3761    /// Parses an array subscript like
3762    /// * `[:]`
3763    /// * `[l]`
3764    /// * `[l:]`
3765    /// * `[:u]`
3766    /// * `[l:u]`
3767    /// * `[l:u:s]`
3768    ///
3769    /// Parser is right after `[`
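    ///
    /// Although this helper assumes the `[` has already been consumed, the
    /// surface syntax can be sketched through the public API (assuming a
    /// dialect with array subscript support, e.g. PostgreSQL):
    ///
    /// ```rust,ignore
    /// # use sqlparser::dialect::PostgreSqlDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = PostgreSqlDialect {};
    /// let mut parser = Parser::new(&dialect)
    ///     .try_with_sql("arr[2:5]")
    ///     .unwrap();
    /// let expr = parser.parse_expr().unwrap();
    /// // The resulting access chain should contain a `Subscript::Slice` with
    /// // `lower_bound` 2, `upper_bound` 5, and no `stride`.
    /// ```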
3770    fn parse_subscript_inner(&mut self) -> Result<Subscript, ParserError> {
3771        // at either `<lower>:(rest)` or `:(rest)]`
3772        let lower_bound = if self.consume_token(&Token::Colon) {
3773            None
3774        } else {
3775            Some(self.parse_expr()?)
3776        };
3777
3778        // check for end
3779        if self.consume_token(&Token::RBracket) {
3780            if let Some(lower_bound) = lower_bound {
3781                return Ok(Subscript::Index { index: lower_bound });
3782            };
3783            return Ok(Subscript::Slice {
3784                lower_bound,
3785                upper_bound: None,
3786                stride: None,
3787            });
3788        }
3789
3790        // consume the `:`
3791        if lower_bound.is_some() {
3792            self.expect_token(&Token::Colon)?;
3793        }
3794
3795        // we are now at either `]` or `<upper>(rest)]`
3796        let upper_bound = if self.consume_token(&Token::RBracket) {
3797            return Ok(Subscript::Slice {
3798                lower_bound,
3799                upper_bound: None,
3800                stride: None,
3801            });
3802        } else {
3803            Some(self.parse_expr()?)
3804        };
3805
3806        // check for end
3807        if self.consume_token(&Token::RBracket) {
3808            return Ok(Subscript::Slice {
3809                lower_bound,
3810                upper_bound,
3811                stride: None,
3812            });
3813        }
3814
3815        // we are now at `:]` or `:stride]`
3816        self.expect_token(&Token::Colon)?;
3817        let stride = if self.consume_token(&Token::RBracket) {
3818            None
3819        } else {
3820            Some(self.parse_expr()?)
3821        };
3822
3823        if stride.is_some() {
3824            self.expect_token(&Token::RBracket)?;
3825        }
3826
3827        Ok(Subscript::Slice {
3828            lower_bound,
3829            upper_bound,
3830            stride,
3831        })
3832    }
3833
3834    /// Parse a multi-dimension array accessing like `[1:3][1][1]`
3835    pub fn parse_multi_dim_subscript(
3836        &mut self,
3837        chain: &mut Vec<AccessExpr>,
3838    ) -> Result<(), ParserError> {
3839        while self.consume_token(&Token::LBracket) {
3840            self.parse_subscript(chain)?;
3841        }
3842        Ok(())
3843    }
3844
3845    /// Parses an array subscript like `[1:3]`
3846    ///
3847    /// Parser is right after `[`
3848    fn parse_subscript(&mut self, chain: &mut Vec<AccessExpr>) -> Result<(), ParserError> {
3849        let subscript = self.parse_subscript_inner()?;
3850        chain.push(AccessExpr::Subscript(subscript));
3851        Ok(())
3852    }
3853
3854    fn parse_json_path_object_key(&mut self) -> Result<JsonPathElem, ParserError> {
3855        let token = self.next_token();
3856        match token.token {
3857            Token::Word(Word {
3858                value,
3859                // path segments in SF dot notation can be unquoted or double-quoted
3860                quote_style: quote_style @ (Some('"') | None),
3861                // some experimentation suggests that snowflake permits
3862                // any keyword here unquoted.
3863                keyword: _,
3864            }) => Ok(JsonPathElem::Dot {
3865                key: value,
3866                quoted: quote_style.is_some(),
3867            }),
3868
3869            // This token should never be generated on snowflake or generic
3870            // dialects, but we handle it just in case this is used on future
3871            // dialects.
3872            Token::DoubleQuotedString(key) => Ok(JsonPathElem::Dot { key, quoted: true }),
3873
3874            _ => self.expected("variant object key name", token),
3875        }
3876    }
3877
3878    fn parse_json_access(&mut self, expr: Expr) -> Result<Expr, ParserError> {
3879        let path = self.parse_json_path()?;
3880        Ok(Expr::JsonAccess {
3881            value: Box::new(expr),
3882            path,
3883        })
3884    }
3885
3886    fn parse_json_path(&mut self) -> Result<JsonPath, ParserError> {
3887        let mut path = Vec::new();
3888        loop {
3889            match self.next_token().token {
3890                Token::Colon if path.is_empty() => {
3891                    path.push(self.parse_json_path_object_key()?);
3892                }
3893                Token::Period if !path.is_empty() => {
3894                    path.push(self.parse_json_path_object_key()?);
3895                }
3896                Token::LBracket => {
3897                    let key = self.parse_expr()?;
3898                    self.expect_token(&Token::RBracket)?;
3899
3900                    path.push(JsonPathElem::Bracket { key });
3901                }
3902                _ => {
3903                    self.prev_token();
3904                    break;
3905                }
3906            };
3907        }
3908
3909        debug_assert!(!path.is_empty());
3910        Ok(JsonPath { path })
3911    }
3912
3913    /// Parses the parens following the `[ NOT ] IN` operator.
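    ///
    /// A small sketch of the `IN UNNEST` form handled below (assuming the
    /// BigQuery dialect; the identifiers are examples only):
    ///
    /// ```rust,ignore
    /// # use sqlparser::dialect::BigQueryDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = BigQueryDialect {};
    /// let mut parser = Parser::new(&dialect)
    ///     .try_with_sql("x IN UNNEST(numbers)")
    ///     .unwrap();
    /// let expr = parser.parse_expr().unwrap();
    /// // Expected shape: `Expr::InUnnest { negated: false, .. }`.
    /// ```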
3914    pub fn parse_in(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
3915        // BigQuery allows `IN UNNEST(array_expression)`
3916        // https://cloud.google.com/bigquery/docs/reference/standard-sql/operators#in_operators
3917        if self.parse_keyword(Keyword::UNNEST) {
3918            self.expect_token(&Token::LParen)?;
3919            let array_expr = self.parse_expr()?;
3920            self.expect_token(&Token::RParen)?;
3921            return Ok(Expr::InUnnest {
3922                expr: Box::new(expr),
3923                array_expr: Box::new(array_expr),
3924                negated,
3925            });
3926        }
3927        self.expect_token(&Token::LParen)?;
3928        let in_op = match self.maybe_parse(|p| p.parse_query())? {
3929            Some(subquery) => Expr::InSubquery {
3930                expr: Box::new(expr),
3931                subquery,
3932                negated,
3933            },
3934            None => Expr::InList {
3935                expr: Box::new(expr),
3936                list: if self.dialect.supports_in_empty_list() {
3937                    self.parse_comma_separated0(Parser::parse_expr, Token::RParen)?
3938                } else {
3939                    self.parse_comma_separated(Parser::parse_expr)?
3940                },
3941                negated,
3942            },
3943        };
3944        self.expect_token(&Token::RParen)?;
3945        Ok(in_op)
3946    }
3947
3948    /// Parses `BETWEEN <low> AND <high>`, assuming the `BETWEEN` keyword was already consumed.
3949    pub fn parse_between(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
3950        // Stop parsing subexpressions for <low> and <high> on tokens with
3951        // precedence lower than that of `BETWEEN`, such as `AND`, `IS`, etc.
3952        let low = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
3953        self.expect_keyword_is(Keyword::AND)?;
3954        let high = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
3955        Ok(Expr::Between {
3956            expr: Box::new(expr),
3957            negated,
3958            low: Box::new(low),
3959            high: Box::new(high),
3960        })
3961    }
3962
3963    /// Parse a PostgreSQL-style cast, which takes the form `expr::datatype`.
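    ///
    /// Normally reached via [`Self::parse_expr`]; an illustrative round-trip
    /// (using the PostgreSQL dialect, where `::` casts are supported):
    /// ```rust
    /// # use sqlparser::dialect::PostgreSqlDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = PostgreSqlDialect {};
    /// let expr = Parser::new(&dialect)
    ///     .try_with_sql("amount::BIGINT").unwrap()
    ///     .parse_expr().unwrap();
    /// assert_eq!(expr.to_string(), "amount::BIGINT");
    /// ```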
3964    pub fn parse_pg_cast(&mut self, expr: Expr) -> Result<Expr, ParserError> {
3965        Ok(Expr::Cast {
3966            kind: CastKind::DoubleColon,
3967            expr: Box::new(expr),
3968            data_type: self.parse_data_type()?,
3969            format: None,
3970        })
3971    }
3972
3973    /// Get the precedence of the next token
3974    pub fn get_next_precedence(&self) -> Result<u8, ParserError> {
3975        self.dialect.get_next_precedence_default(self)
3976    }
3977
3978    /// Return the token at the given location, or EOF if the index is beyond
3979    /// the length of the current set of tokens.
3980    pub fn token_at(&self, index: usize) -> &TokenWithSpan {
3981        self.tokens.get(index).unwrap_or(&EOF_TOKEN)
3982    }
3983
3984    /// Return the first non-whitespace token that has not yet been processed
3985    /// or Token::EOF
3986    ///
3987    /// See [`Self::peek_token_ref`] to avoid the copy.
3988    pub fn peek_token(&self) -> TokenWithSpan {
3989        self.peek_nth_token(0)
3990    }
3991
3992    /// Return a reference to the first non-whitespace token that has not yet
3993    /// been processed or Token::EOF
3994    pub fn peek_token_ref(&self) -> &TokenWithSpan {
3995        self.peek_nth_token_ref(0)
3996    }
3997
3998    /// Returns the `N` next non-whitespace tokens that have not yet been
3999    /// processed.
4000    ///
4001    /// Example:
4002    /// ```rust
4003    /// # use sqlparser::dialect::GenericDialect;
4004    /// # use sqlparser::parser::Parser;
4005    /// # use sqlparser::keywords::Keyword;
4006    /// # use sqlparser::tokenizer::{Token, Word};
4007    /// let dialect = GenericDialect {};
4008    /// let mut parser = Parser::new(&dialect).try_with_sql("ORDER BY foo, bar").unwrap();
4009    ///
4010    /// // Note that Rust infers the number of tokens to peek based on the
4011    /// // length of the slice pattern!
4012    /// assert!(matches!(
4013    ///     parser.peek_tokens(),
4014    ///     [
4015    ///         Token::Word(Word { keyword: Keyword::ORDER, .. }),
4016    ///         Token::Word(Word { keyword: Keyword::BY, .. }),
4017    ///     ]
4018    /// ));
4019    /// ```
4020    pub fn peek_tokens<const N: usize>(&self) -> [Token; N] {
4021        self.peek_tokens_with_location()
4022            .map(|with_loc| with_loc.token)
4023    }
4024
4025    /// Returns the `N` next non-whitespace tokens with locations that have not
4026    /// yet been processed.
4027    ///
4028    /// See [`Self::peek_tokens`] for an example.
4029    pub fn peek_tokens_with_location<const N: usize>(&self) -> [TokenWithSpan; N] {
4030        let mut index = self.index;
4031        core::array::from_fn(|_| loop {
4032            let token = self.tokens.get(index);
4033            index += 1;
4034            if let Some(TokenWithSpan {
4035                token: Token::Whitespace(_),
4036                span: _,
4037            }) = token
4038            {
4039                continue;
4040            }
4041            break token.cloned().unwrap_or(TokenWithSpan {
4042                token: Token::EOF,
4043                span: Span::empty(),
4044            });
4045        })
4046    }
4047
4048    /// Returns references to the `N` next non-whitespace tokens
4049    /// that have not yet been processed.
4050    ///
4051    /// See [`Self::peek_tokens`] for an example.
4052    pub fn peek_tokens_ref<const N: usize>(&self) -> [&TokenWithSpan; N] {
4053        let mut index = self.index;
4054        core::array::from_fn(|_| loop {
4055            let token = self.tokens.get(index);
4056            index += 1;
4057            if let Some(TokenWithSpan {
4058                token: Token::Whitespace(_),
4059                span: _,
4060            }) = token
4061            {
4062                continue;
4063            }
4064            break token.unwrap_or(&EOF_TOKEN);
4065        })
4066    }
4067
4068    /// Return nth non-whitespace token that has not yet been processed
4069    pub fn peek_nth_token(&self, n: usize) -> TokenWithSpan {
4070        self.peek_nth_token_ref(n).clone()
4071    }
4072
4073    /// Return nth non-whitespace token that has not yet been processed
4074    pub fn peek_nth_token_ref(&self, mut n: usize) -> &TokenWithSpan {
4075        let mut index = self.index;
4076        loop {
4077            index += 1;
4078            match self.tokens.get(index - 1) {
4079                Some(TokenWithSpan {
4080                    token: Token::Whitespace(_),
4081                    span: _,
4082                }) => continue,
4083                non_whitespace => {
4084                    if n == 0 {
4085                        return non_whitespace.unwrap_or(&EOF_TOKEN);
4086                    }
4087                    n -= 1;
4088                }
4089            }
4090        }
4091    }
4092
4093    /// Return the first token, possibly whitespace, that has not yet been processed
4094    /// (or None if reached end-of-file).
4095    pub fn peek_token_no_skip(&self) -> TokenWithSpan {
4096        self.peek_nth_token_no_skip(0)
4097    }
4098
4099    /// Return nth token, possibly whitespace, that has not yet been processed.
4100    pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithSpan {
4101        self.tokens
4102            .get(self.index + n)
4103            .cloned()
4104            .unwrap_or(TokenWithSpan {
4105                token: Token::EOF,
4106                span: Span::empty(),
4107            })
4108    }
4109
4110    /// Return true if the next tokens exactly match `expected`
4111    ///
4112    /// Does not advance the current token.
4113    fn peek_keywords(&mut self, expected: &[Keyword]) -> bool {
4114        let index = self.index;
4115        let matched = self.parse_keywords(expected);
4116        self.index = index;
4117        matched
4118    }
4119
4120    /// Advances to the next non-whitespace token and returns a copy.
4121    ///
4122    /// Please use [`Self::advance_token`] and [`Self::get_current_token`] to
4123    /// avoid the copy.
4124    pub fn next_token(&mut self) -> TokenWithSpan {
4125        self.advance_token();
4126        self.get_current_token().clone()
4127    }
4128
4129    /// Returns the index of the current token
4130    ///
4131    /// This can be used with APIs that expect an index, such as
4132    /// [`Self::token_at`]
4133    pub fn get_current_index(&self) -> usize {
4134        self.index.saturating_sub(1)
4135    }
4136
4137    /// Return the next unprocessed token, possibly whitespace.
4138    pub fn next_token_no_skip(&mut self) -> Option<&TokenWithSpan> {
4139        self.index += 1;
4140        self.tokens.get(self.index - 1)
4141    }
4142
4143    /// Advances the current token to the next non-whitespace token
4144    ///
4145    /// See [`Self::get_current_token`] to get the current token after advancing
4146    pub fn advance_token(&mut self) {
4147        loop {
4148            self.index += 1;
4149            match self.tokens.get(self.index - 1) {
4150                Some(TokenWithSpan {
4151                    token: Token::Whitespace(_),
4152                    span: _,
4153                }) => continue,
4154                _ => break,
4155            }
4156        }
4157    }
4158
4159    /// Returns a reference to the current token
4160    ///
4161    /// Does not advance the current token.
4162    pub fn get_current_token(&self) -> &TokenWithSpan {
4163        self.token_at(self.index.saturating_sub(1))
4164    }
4165
4166    /// Returns a reference to the previous token
4167    ///
4168    /// Does not advance the current token.
4169    pub fn get_previous_token(&self) -> &TokenWithSpan {
4170        self.token_at(self.index.saturating_sub(2))
4171    }
4172
4173    /// Returns a reference to the next token
4174    ///
4175    /// Does not advance the current token.
4176    pub fn get_next_token(&self) -> &TokenWithSpan {
4177        self.token_at(self.index)
4178    }
4179
4180    /// Seek back to the last non-whitespace token.
4181    ///
4182    /// Must be called after `next_token()`, otherwise it might panic. It is OK
4183    /// to call it after `next_token()` has returned EOF.
4184    ///
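    /// Example (illustrative sketch):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("SELECT 1").unwrap();
    /// let token = parser.next_token();
    /// parser.prev_token(); // seek back; the same token is returned again
    /// assert_eq!(parser.next_token(), token);
    /// ```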
4185    // TODO rename to backup_token and deprecate prev_token?
4186    pub fn prev_token(&mut self) {
4187        loop {
4188            assert!(self.index > 0);
4189            self.index -= 1;
4190            if let Some(TokenWithSpan {
4191                token: Token::Whitespace(_),
4192                span: _,
4193            }) = self.tokens.get(self.index)
4194            {
4195                continue;
4196            }
4197            return;
4198        }
4199    }
4200
4201    /// Report `found` was encountered instead of `expected`
4202    pub fn expected<T>(&self, expected: &str, found: TokenWithSpan) -> Result<T, ParserError> {
4203        parser_err!(
4204            format!("Expected: {expected}, found: {found}"),
4205            found.span.start
4206        )
4207    }
4208
4209    /// Report `found` was encountered instead of `expected`
4210    pub fn expected_ref<T>(&self, expected: &str, found: &TokenWithSpan) -> Result<T, ParserError> {
4211        parser_err!(
4212            format!("Expected: {expected}, found: {found}"),
4213            found.span.start
4214        )
4215    }
4216
4217    /// Report that the token at `index` was found instead of `expected`.
4218    pub fn expected_at<T>(&self, expected: &str, index: usize) -> Result<T, ParserError> {
4219        let found = self.tokens.get(index).unwrap_or(&EOF_TOKEN);
4220        parser_err!(
4221            format!("Expected: {expected}, found: {found}"),
4222            found.span.start
4223        )
4224    }
4225
4226    /// If the current token is the `expected` keyword, consume it and return
4227    /// true. Otherwise, consume no tokens and return false.
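    ///
    /// Example (illustrative sketch):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::keywords::Keyword;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("SELECT 1").unwrap();
    /// assert!(parser.parse_keyword(Keyword::SELECT)); // consumed
    /// assert!(!parser.parse_keyword(Keyword::FROM));  // not present, nothing consumed
    /// ```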
4228    #[must_use]
4229    pub fn parse_keyword(&mut self, expected: Keyword) -> bool {
4230        if self.peek_keyword(expected) {
4231            self.advance_token();
4232            true
4233        } else {
4234            false
4235        }
4236    }
4237
4238    #[must_use]
4239    pub fn peek_keyword(&self, expected: Keyword) -> bool {
4240        matches!(&self.peek_token_ref().token, Token::Word(w) if expected == w.keyword)
4241    }
4242
4243    /// If the current token is the `expected` keyword followed by the
4244    /// specified tokens, consume them and return true.
4245    /// Otherwise, consume no tokens and return false.
4246    ///
4247    /// Note that if `tokens` is long, this function is not particularly
4248    /// efficient, as it repeatedly peeks ahead with `peek_nth_token` for
4249    /// each token in the sequence.
4250    pub fn parse_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
4251        self.keyword_with_tokens(expected, tokens, true)
4252    }
4253
4254    /// Peeks to see if the current token is the `expected` keyword followed by specified tokens
4255    /// without consuming them.
4256    ///
4257    /// See [Self::parse_keyword_with_tokens] for details.
4258    pub(crate) fn peek_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
4259        self.keyword_with_tokens(expected, tokens, false)
4260    }
4261
4262    fn keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token], consume: bool) -> bool {
4263        match &self.peek_token_ref().token {
4264            Token::Word(w) if expected == w.keyword => {
4265                for (idx, token) in tokens.iter().enumerate() {
4266                    if self.peek_nth_token_ref(idx + 1).token != *token {
4267                        return false;
4268                    }
4269                }
4270
4271                if consume {
4272                    for _ in 0..(tokens.len() + 1) {
4273                        self.advance_token();
4274                    }
4275                }
4276
4277                true
4278            }
4279            _ => false,
4280        }
4281    }
4282
4283    /// If the current and subsequent tokens exactly match the `keywords`
4284    /// sequence, consume them and return true. Otherwise, consume no
4285    /// tokens and return false.
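    ///
    /// Example (illustrative sketch):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::keywords::Keyword;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("ORDER BY foo").unwrap();
    /// // A partial match consumes nothing:
    /// assert!(!parser.parse_keywords(&[Keyword::ORDER, Keyword::LIMIT]));
    /// assert!(parser.parse_keywords(&[Keyword::ORDER, Keyword::BY]));
    /// ```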
4286    #[must_use]
4287    pub fn parse_keywords(&mut self, keywords: &[Keyword]) -> bool {
4288        let index = self.index;
4289        for &keyword in keywords {
4290            if !self.parse_keyword(keyword) {
4291                // println!("parse_keywords aborting .. did not find {:?}", keyword);
4292                // reset index and return immediately
4293                self.index = index;
4294                return false;
4295            }
4296        }
4297        true
4298    }
4299
4300    /// If the current token is one of the given `keywords`, returns the keyword
4301    /// that matches, without consuming the token. Otherwise, returns [`None`].
4302    #[must_use]
4303    pub fn peek_one_of_keywords(&self, keywords: &[Keyword]) -> Option<Keyword> {
4304        for keyword in keywords {
4305            if self.peek_keyword(*keyword) {
4306                return Some(*keyword);
4307            }
4308        }
4309        None
4310    }
4311
4312    /// If the current token is one of the given `keywords`, consume the token
4313    /// and return the keyword that matches. Otherwise, consume no tokens
4314    /// and return [`None`].
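    ///
    /// Example (illustrative sketch):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::keywords::Keyword;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("LIMIT 10").unwrap();
    /// let kws = &[Keyword::ORDER, Keyword::LIMIT];
    /// assert_eq!(parser.parse_one_of_keywords(kws), Some(Keyword::LIMIT));
    /// assert_eq!(parser.parse_one_of_keywords(kws), None); // now at `10`
    /// ```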
4315    #[must_use]
4316    pub fn parse_one_of_keywords(&mut self, keywords: &[Keyword]) -> Option<Keyword> {
4317        match &self.peek_token_ref().token {
4318            Token::Word(w) => {
4319                keywords
4320                    .iter()
4321                    .find(|keyword| **keyword == w.keyword)
4322                    .map(|keyword| {
4323                        self.advance_token();
4324                        *keyword
4325                    })
4326            }
4327            _ => None,
4328        }
4329    }
4330
4331    /// If the current token is one of the expected keywords, consume the token
4332    /// and return the keyword that matches. Otherwise, return an error.
4333    pub fn expect_one_of_keywords(&mut self, keywords: &[Keyword]) -> Result<Keyword, ParserError> {
4334        if let Some(keyword) = self.parse_one_of_keywords(keywords) {
4335            Ok(keyword)
4336        } else {
4337            let keywords: Vec<String> = keywords.iter().map(|x| format!("{x:?}")).collect();
4338            self.expected_ref(
4339                &format!("one of {}", keywords.join(" or ")),
4340                self.peek_token_ref(),
4341            )
4342        }
4343    }
4344
4345    /// If the current token is the `expected` keyword, consume the token.
4346    /// Otherwise, return an error.
4347    ///
4348    // todo deprecate in favor of expect_keyword_is
4349    pub fn expect_keyword(&mut self, expected: Keyword) -> Result<TokenWithSpan, ParserError> {
4350        if self.parse_keyword(expected) {
4351            Ok(self.get_current_token().clone())
4352        } else {
4353            self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4354        }
4355    }
4356
4357    /// If the current token is the `expected` keyword, consume the token.
4358    /// Otherwise, return an error.
4359    ///
4360    /// This differs from expect_keyword only in that the matched keyword
4361    /// token is not returned.
4362    pub fn expect_keyword_is(&mut self, expected: Keyword) -> Result<(), ParserError> {
4363        if self.parse_keyword(expected) {
4364            Ok(())
4365        } else {
4366            self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4367        }
4368    }
4369
4370    /// If the current and subsequent tokens exactly match the `keywords`
4371    /// sequence, consume them and return `Ok(())`. Otherwise, return an error.
4372    pub fn expect_keywords(&mut self, expected: &[Keyword]) -> Result<(), ParserError> {
4373        for &kw in expected {
4374            self.expect_keyword_is(kw)?;
4375        }
4376        Ok(())
4377    }
4378
4379    /// Consume the next token if it matches the expected token, otherwise return false
4380    ///
4381    /// See [Self::advance_token] to consume the token unconditionally
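    ///
    /// Example (illustrative sketch):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::tokenizer::Token;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql(", 1").unwrap();
    /// assert!(parser.consume_token(&Token::Comma));
    /// assert!(!parser.consume_token(&Token::Comma)); // next token is `1`
    /// ```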
4382    #[must_use]
4383    pub fn consume_token(&mut self, expected: &Token) -> bool {
4384        if self.peek_token_ref() == expected {
4385            self.advance_token();
4386            true
4387        } else {
4388            false
4389        }
4390    }
4391
4392    /// If the current and subsequent tokens exactly match the `tokens`
4393    /// sequence, consume them and return true. Otherwise, consume no
4394    /// tokens and return false.
4395    #[must_use]
4396    pub fn consume_tokens(&mut self, tokens: &[Token]) -> bool {
4397        let index = self.index;
4398        for token in tokens {
4399            if !self.consume_token(token) {
4400                self.index = index;
4401                return false;
4402            }
4403        }
4404        true
4405    }
4406
4407    /// Bail out if the current token is not the `expected` token, or consume it if it is
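    ///
    /// Example (illustrative sketch):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::tokenizer::Token;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("(a)").unwrap();
    /// assert!(parser.expect_token(&Token::LParen).is_ok());
    /// assert!(parser.expect_token(&Token::LParen).is_err()); // next token is `a`
    /// ```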
4408    pub fn expect_token(&mut self, expected: &Token) -> Result<TokenWithSpan, ParserError> {
4409        if self.peek_token_ref() == expected {
4410            Ok(self.next_token())
4411        } else {
4412            self.expected_ref(&expected.to_string(), self.peek_token_ref())
4413        }
4414    }
4415
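    /// Parse the string `s` into a value of type `T`, producing a
    /// [`ParserError`] that includes the location `loc` if the conversion fails.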
4416    fn parse<T: FromStr>(s: String, loc: Location) -> Result<T, ParserError>
4417    where
4418        <T as FromStr>::Err: Display,
4419    {
4420        s.parse::<T>().map_err(|e| {
4421            ParserError::ParserError(format!(
4422                "Could not parse '{s}' as {}: {e}{loc}",
4423                core::any::type_name::<T>()
4424            ))
4425        })
4426    }
4427
4428    /// Parse a comma-separated list of 1+ SelectItem
4429    pub fn parse_projection(&mut self) -> Result<Vec<SelectItem>, ParserError> {
4430        // BigQuery and Snowflake allow trailing commas, but only in projection lists
4431        // e.g. `SELECT 1, 2, FROM t`
4432        // https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#trailing_commas
4433        // https://docs.snowflake.com/en/release-notes/2024/8_11#select-supports-trailing-commas
4434
4435        let trailing_commas =
4436            self.options.trailing_commas | self.dialect.supports_projection_trailing_commas();
4437
4438        self.parse_comma_separated_with_trailing_commas(
4439            |p| p.parse_select_item(),
4440            trailing_commas,
4441            Self::is_reserved_for_column_alias,
4442        )
4443    }
4444
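    /// Parse a comma-separated list of grant permissions (actions), allowing a
    /// trailing comma before `ON` or a closing token when `trailing_commas` is
    /// enabled in the parser options.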
4445    pub fn parse_actions_list(&mut self) -> Result<Vec<Action>, ParserError> {
4446        let mut values = vec![];
4447        loop {
4448            values.push(self.parse_grant_permission()?);
4449            if !self.consume_token(&Token::Comma) {
4450                break;
4451            } else if self.options.trailing_commas {
4452                match self.peek_token().token {
4453                    Token::Word(kw) if kw.keyword == Keyword::ON => {
4454                        break;
4455                    }
4456                    Token::RParen
4457                    | Token::SemiColon
4458                    | Token::EOF
4459                    | Token::RBracket
4460                    | Token::RBrace => break,
4461                    _ => continue,
4462                }
4463            }
4464        }
4465        Ok(values)
4466    }
4467
4468    /// Parse a list of [TableWithJoins]
4469    fn parse_table_with_joins(&mut self) -> Result<Vec<TableWithJoins>, ParserError> {
4470        let trailing_commas = self.dialect.supports_from_trailing_commas();
4471
4472        self.parse_comma_separated_with_trailing_commas(
4473            Parser::parse_table_and_joins,
4474            trailing_commas,
4475            |kw, parser| !self.dialect.is_table_factor(kw, parser),
4476        )
4477    }
4478
4479    /// Parse the comma of a comma-separated syntax element.
4480    /// `R` is a predicate that should return true if the next
4481    /// keyword is a reserved keyword.
4482    /// Allows for control over trailing commas.
4483    ///
4484    /// Returns true if the end of the list has been reached.
4485    fn is_parse_comma_separated_end_with_trailing_commas<R>(
4486        &mut self,
4487        trailing_commas: bool,
4488        is_reserved_keyword: &R,
4489    ) -> bool
4490    where
4491        R: Fn(&Keyword, &mut Parser) -> bool,
4492    {
4493        if !self.consume_token(&Token::Comma) {
4494            true
4495        } else if trailing_commas {
4496            let token = self.next_token().token;
4497            let is_end = match token {
4498                Token::Word(ref kw) if is_reserved_keyword(&kw.keyword, self) => true,
4499                Token::RParen | Token::SemiColon | Token::EOF | Token::RBracket | Token::RBrace => {
4500                    true
4501                }
4502                _ => false,
4503            };
4504            self.prev_token();
4505
4506            is_end
4507        } else {
4508            false
4509        }
4510    }
4511
4512    /// Parse the comma of a comma-separated syntax element.
4513    /// Returns true if the end of the list has been reached.
4514    fn is_parse_comma_separated_end(&mut self) -> bool {
4515        self.is_parse_comma_separated_end_with_trailing_commas(
4516            self.options.trailing_commas,
4517            &Self::is_reserved_for_column_alias,
4518        )
4519    }
4520
4521    /// Parse a comma-separated list of 1+ items accepted by `F`
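    ///
    /// Example (illustrative sketch):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("1, 2, 3").unwrap();
    /// let exprs = parser.parse_comma_separated(|p| p.parse_expr()).unwrap();
    /// assert_eq!(exprs.len(), 3);
    /// ```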
4522    pub fn parse_comma_separated<T, F>(&mut self, f: F) -> Result<Vec<T>, ParserError>
4523    where
4524        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4525    {
4526        self.parse_comma_separated_with_trailing_commas(
4527            f,
4528            self.options.trailing_commas,
4529            Self::is_reserved_for_column_alias,
4530        )
4531    }
4532
4533    /// Parse a comma-separated list of 1+ items accepted by `F`.
4534    /// `R` is a predicate that should return true if the next
4535    /// keyword is a reserved keyword.
4536    /// Allows for control over trailing commas.
4537    fn parse_comma_separated_with_trailing_commas<T, F, R>(
4538        &mut self,
4539        mut f: F,
4540        trailing_commas: bool,
4541        is_reserved_keyword: R,
4542    ) -> Result<Vec<T>, ParserError>
4543    where
4544        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4545        R: Fn(&Keyword, &mut Parser) -> bool,
4546    {
4547        let mut values = vec![];
4548        loop {
4549            values.push(f(self)?);
4550            if self.is_parse_comma_separated_end_with_trailing_commas(
4551                trailing_commas,
4552                &is_reserved_keyword,
4553            ) {
4554                break;
4555            }
4556        }
4557        Ok(values)
4558    }
4559
4560    /// Parse a period-separated list of 1+ items accepted by `F`
4561    fn parse_period_separated<T, F>(&mut self, mut f: F) -> Result<Vec<T>, ParserError>
4562    where
4563        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4564    {
4565        let mut values = vec![];
4566        loop {
4567            values.push(f(self)?);
4568            if !self.consume_token(&Token::Period) {
4569                break;
4570            }
4571        }
4572        Ok(values)
4573    }
4574
4575    /// Parse a keyword-separated list of 1+ items accepted by `F`
4576    pub fn parse_keyword_separated<T, F>(
4577        &mut self,
4578        keyword: Keyword,
4579        mut f: F,
4580    ) -> Result<Vec<T>, ParserError>
4581    where
4582        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4583    {
4584        let mut values = vec![];
4585        loop {
4586            values.push(f(self)?);
4587            if !self.parse_keyword(keyword) {
4588                break;
4589            }
4590        }
4591        Ok(values)
4592    }
4593
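    /// Parse a single item accepted by `F`, wrapped in parentheses: `( <item> )`.
    ///
    /// Example (illustrative sketch):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("(1 + 2)").unwrap();
    /// let expr = parser.parse_parenthesized(|p| p.parse_expr()).unwrap();
    /// assert_eq!(expr.to_string(), "1 + 2");
    /// ```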
4594    pub fn parse_parenthesized<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
4595    where
4596        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4597    {
4598        self.expect_token(&Token::LParen)?;
4599        let res = f(self)?;
4600        self.expect_token(&Token::RParen)?;
4601        Ok(res)
4602    }
4603
4604    /// Parse a comma-separated list of 0+ items accepted by `F`
4605    /// * `end_token` - expected end token for the closure (e.g. [Token::RParen], [Token::RBrace] ...)
4606    pub fn parse_comma_separated0<T, F>(
4607        &mut self,
4608        f: F,
4609        end_token: Token,
4610    ) -> Result<Vec<T>, ParserError>
4611    where
4612        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4613    {
4614        if self.peek_token().token == end_token {
4615            return Ok(vec![]);
4616        }
4617
4618        if self.options.trailing_commas && self.peek_tokens() == [Token::Comma, end_token] {
4619            let _ = self.consume_token(&Token::Comma);
4620            return Ok(vec![]);
4621        }
4622
4623        self.parse_comma_separated(f)
4624    }
4625
4626    /// Parses 0 or more statements, each followed by a semicolon.
4627    /// If the next token is any of `terminal_keywords` then no more
4628    /// statements will be parsed.
4629    pub(crate) fn parse_statement_list(
4630        &mut self,
4631        terminal_keywords: &[Keyword],
4632    ) -> Result<Vec<Statement>, ParserError> {
4633        let mut values = vec![];
4634        loop {
4635            match &self.peek_nth_token_ref(0).token {
4636                Token::EOF => break,
4637                Token::Word(w) => {
4638                    if w.quote_style.is_none() && terminal_keywords.contains(&w.keyword) {
4639                        break;
4640                    }
4641                }
4642                _ => {}
4643            }
4644
4645            values.push(self.parse_statement()?);
4646            self.expect_token(&Token::SemiColon)?;
4647        }
4648        Ok(values)
4649    }
4650
4651    /// Default implementation of a predicate that returns true if
4652    /// the specified keyword is reserved and cannot be used as a column alias.
4653    /// See [Dialect::is_column_alias]
4654    fn is_reserved_for_column_alias(kw: &Keyword, parser: &mut Parser) -> bool {
4655        !parser.dialect.is_column_alias(kw, parser)
4656    }
4657
4658    /// Run a parser method `f`, reverting to the original position if unsuccessful.
4659    /// Returns `ParserError::RecursionLimitExceeded` if `f` returns a `RecursionLimitExceeded`.
4660    /// Returns `Ok(None)` if `f` returns any other error.
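    ///
    /// Example (illustrative sketch):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::tokenizer::Token;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("1 + 2").unwrap();
    /// // The closure fails, so `Ok(None)` is returned and the position is reverted:
    /// assert!(parser.maybe_parse(|p| p.expect_token(&Token::LParen)).unwrap().is_none());
    /// assert_eq!(parser.parse_expr().unwrap().to_string(), "1 + 2");
    /// ```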
4661    pub fn maybe_parse<T, F>(&mut self, f: F) -> Result<Option<T>, ParserError>
4662    where
4663        F: FnMut(&mut Parser) -> Result<T, ParserError>,
4664    {
4665        match self.try_parse(f) {
4666            Ok(t) => Ok(Some(t)),
4667            Err(ParserError::RecursionLimitExceeded) => Err(ParserError::RecursionLimitExceeded),
4668            _ => Ok(None),
4669        }
4670    }
4671
4672    /// Run a parser method `f`, reverting to the original position if unsuccessful.
4673    pub fn try_parse<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
4674    where
4675        F: FnMut(&mut Parser) -> Result<T, ParserError>,
4676    {
4677        let index = self.index;
4678        match f(self) {
4679            Ok(t) => Ok(t),
4680            Err(e) => {
4681                // Unwind stack if limit exceeded
4682                self.index = index;
4683                Err(e)
4684            }
4685        }
4686    }
4687
4688    /// Parse either `ALL`, `DISTINCT` or `DISTINCT ON (...)`. Returns [`None`] if `ALL` is parsed
4689    /// and returns a [`ParserError`] if both `ALL` and `DISTINCT` are found.
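    ///
    /// Example (illustrative sketch):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("DISTINCT a, b").unwrap();
    /// assert!(matches!(parser.parse_all_or_distinct(), Ok(Some(_))));
    /// let mut parser = Parser::new(&dialect).try_with_sql("ALL a, b").unwrap();
    /// assert!(matches!(parser.parse_all_or_distinct(), Ok(None)));
    /// ```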
4690    pub fn parse_all_or_distinct(&mut self) -> Result<Option<Distinct>, ParserError> {
4691        let loc = self.peek_token().span.start;
4692        let all = self.parse_keyword(Keyword::ALL);
4693        let distinct = self.parse_keyword(Keyword::DISTINCT);
4694        if !distinct {
4695            return Ok(None);
4696        }
4697        if all {
4698            return parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc);
4699        }
4700        let on = self.parse_keyword(Keyword::ON);
4701        if !on {
4702            return Ok(Some(Distinct::Distinct));
4703        }
4704
4705        self.expect_token(&Token::LParen)?;
4706        let col_names = if self.consume_token(&Token::RParen) {
4707            self.prev_token();
4708            Vec::new()
4709        } else {
4710            self.parse_comma_separated(Parser::parse_expr)?
4711        };
4712        self.expect_token(&Token::RParen)?;
4713        Ok(Some(Distinct::On(col_names)))
4714    }
4715
4716    /// Parse a SQL `CREATE` statement, assuming the `CREATE` keyword has already been consumed
4717    pub fn parse_create(&mut self) -> Result<Statement, ParserError> {
4718        let or_replace = self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]);
4719        let or_alter = self.parse_keywords(&[Keyword::OR, Keyword::ALTER]);
4720        let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
4721        let global = self.parse_one_of_keywords(&[Keyword::GLOBAL]).is_some();
4722        let transient = self.parse_one_of_keywords(&[Keyword::TRANSIENT]).is_some();
4723        let global: Option<bool> = if global {
4724            Some(true)
4725        } else if local {
4726            Some(false)
4727        } else {
4728            None
4729        };
4730        let temporary = self
4731            .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
4732            .is_some();
4733        let persistent = dialect_of!(self is DuckDbDialect)
4734            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
4735        let create_view_params = self.parse_create_view_params()?;
4736        if self.parse_keyword(Keyword::TABLE) {
4737            self.parse_create_table(or_replace, temporary, global, transient)
4738        } else if self.peek_keyword(Keyword::MATERIALIZED)
4739            || self.peek_keyword(Keyword::VIEW)
4740            || self.peek_keywords(&[Keyword::SECURE, Keyword::MATERIALIZED, Keyword::VIEW])
4741            || self.peek_keywords(&[Keyword::SECURE, Keyword::VIEW])
4742        {
4743            self.parse_create_view(or_alter, or_replace, temporary, create_view_params)
4744        } else if self.parse_keyword(Keyword::POLICY) {
4745            self.parse_create_policy()
4746        } else if self.parse_keyword(Keyword::EXTERNAL) {
4747            self.parse_create_external_table(or_replace)
4748        } else if self.parse_keyword(Keyword::FUNCTION) {
4749            self.parse_create_function(or_alter, or_replace, temporary)
4750        } else if self.parse_keyword(Keyword::DOMAIN) {
4751            self.parse_create_domain()
4752        } else if self.parse_keyword(Keyword::TRIGGER) {
4753            self.parse_create_trigger(or_alter, or_replace, false)
4754        } else if self.parse_keywords(&[Keyword::CONSTRAINT, Keyword::TRIGGER]) {
4755            self.parse_create_trigger(or_alter, or_replace, true)
4756        } else if self.parse_keyword(Keyword::MACRO) {
4757            self.parse_create_macro(or_replace, temporary)
4758        } else if self.parse_keyword(Keyword::SECRET) {
4759            self.parse_create_secret(or_replace, temporary, persistent)
4760        } else if self.parse_keyword(Keyword::USER) {
4761            self.parse_create_user(or_replace)
4762        } else if or_replace {
4763            self.expected(
4764                "[EXTERNAL] TABLE or [MATERIALIZED] VIEW or FUNCTION after CREATE OR REPLACE",
4765                self.peek_token(),
4766            )
4767        } else if self.parse_keyword(Keyword::EXTENSION) {
4768            self.parse_create_extension()
4769        } else if self.parse_keyword(Keyword::INDEX) {
4770            self.parse_create_index(false)
4771        } else if self.parse_keywords(&[Keyword::UNIQUE, Keyword::INDEX]) {
4772            self.parse_create_index(true)
4773        } else if self.parse_keyword(Keyword::VIRTUAL) {
4774            self.parse_create_virtual_table()
4775        } else if self.parse_keyword(Keyword::SCHEMA) {
4776            self.parse_create_schema()
4777        } else if self.parse_keyword(Keyword::DATABASE) {
4778            self.parse_create_database()
4779        } else if self.parse_keyword(Keyword::ROLE) {
4780            self.parse_create_role()
4781        } else if self.parse_keyword(Keyword::SEQUENCE) {
4782            self.parse_create_sequence(temporary)
4783        } else if self.parse_keyword(Keyword::TYPE) {
4784            self.parse_create_type()
4785        } else if self.parse_keyword(Keyword::PROCEDURE) {
4786            self.parse_create_procedure(or_alter)
4787        } else if self.parse_keyword(Keyword::CONNECTOR) {
4788            self.parse_create_connector()
4789        } else if self.parse_keyword(Keyword::SERVER) {
4790            self.parse_pg_create_server()
4791        } else {
4792            self.expected("an object type after CREATE", self.peek_token())
4793        }
4794    }
4795
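    /// Parse a `CREATE USER` statement, assuming `CREATE [ OR REPLACE ] USER`
    /// has already been consumed.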
4796    fn parse_create_user(&mut self, or_replace: bool) -> Result<Statement, ParserError> {
4797        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
4798        let name = self.parse_identifier()?;
4799        let options = self.parse_key_value_options(false, &[Keyword::WITH, Keyword::TAG])?;
4800        let with_tags = self.parse_keyword(Keyword::WITH);
4801        let tags = if self.parse_keyword(Keyword::TAG) {
4802            self.parse_key_value_options(true, &[])?
4803        } else {
4804            vec![]
4805        };
4806        Ok(Statement::CreateUser(CreateUser {
4807            or_replace,
4808            if_not_exists,
4809            name,
4810            options: KeyValueOptions {
4811                options,
4812                delimiter: KeyValueOptionsDelimiter::Space,
4813            },
4814            with_tags,
4815            tags: KeyValueOptions {
4816                options: tags,
4817                delimiter: KeyValueOptionsDelimiter::Comma,
4818            },
4819        }))
4820    }
4821
4822    /// See [DuckDB Docs](https://duckdb.org/docs/sql/statements/create_secret.html) for more details.
4823    pub fn parse_create_secret(
4824        &mut self,
4825        or_replace: bool,
4826        temporary: bool,
4827        persistent: bool,
4828    ) -> Result<Statement, ParserError> {
4829        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
4830
4831        let mut storage_specifier = None;
4832        let mut name = None;
4833        if self.peek_token() != Token::LParen {
4834            if self.parse_keyword(Keyword::IN) {
4835                storage_specifier = self.parse_identifier().ok()
4836            } else {
4837                name = self.parse_identifier().ok();
4838            }
4839
4840            // Storage specifier may follow the name
4841            if storage_specifier.is_none()
4842                && self.peek_token() != Token::LParen
4843                && self.parse_keyword(Keyword::IN)
4844            {
4845                storage_specifier = self.parse_identifier().ok();
4846            }
4847        }
4848
4849        self.expect_token(&Token::LParen)?;
4850        self.expect_keyword_is(Keyword::TYPE)?;
4851        let secret_type = self.parse_identifier()?;
4852
4853        let mut options = Vec::new();
4854        if self.consume_token(&Token::Comma) {
4855            options.append(&mut self.parse_comma_separated(|p| {
4856                let key = p.parse_identifier()?;
4857                let value = p.parse_identifier()?;
4858                Ok(SecretOption { key, value })
4859            })?);
4860        }
4861        self.expect_token(&Token::RParen)?;
4862
4863        let temp = match (temporary, persistent) {
4864            (true, false) => Some(true),
4865            (false, true) => Some(false),
4866            (false, false) => None,
4867            _ => self.expected("TEMPORARY or PERSISTENT", self.peek_token())?,
4868        };
4869
4870        Ok(Statement::CreateSecret {
4871            or_replace,
4872            temporary: temp,
4873            if_not_exists,
4874            name,
4875            storage_specifier,
4876            secret_type,
4877            options,
4878        })
4879    }
4880
4881    /// Parse a CACHE TABLE statement
4882    pub fn parse_cache_table(&mut self) -> Result<Statement, ParserError> {
4883        let (mut table_flag, mut options, mut has_as, mut query) = (None, vec![], false, None);
4884        if self.parse_keyword(Keyword::TABLE) {
4885            let table_name = self.parse_object_name(false)?;
4886            if self.peek_token().token != Token::EOF {
4887                if let Token::Word(word) = self.peek_token().token {
4888                    if word.keyword == Keyword::OPTIONS {
4889                        options = self.parse_options(Keyword::OPTIONS)?
4890                    }
4891                };
4892
4893                if self.peek_token().token != Token::EOF {
4894                    let (a, q) = self.parse_as_query()?;
4895                    has_as = a;
4896                    query = Some(q);
4897                }
4898
4899                Ok(Statement::Cache {
4900                    table_flag,
4901                    table_name,
4902                    has_as,
4903                    options,
4904                    query,
4905                })
4906            } else {
4907                Ok(Statement::Cache {
4908                    table_flag,
4909                    table_name,
4910                    has_as,
4911                    options,
4912                    query,
4913                })
4914            }
4915        } else {
4916            table_flag = Some(self.parse_object_name(false)?);
4917            if self.parse_keyword(Keyword::TABLE) {
4918                let table_name = self.parse_object_name(false)?;
4919                if self.peek_token() != Token::EOF {
4920                    if let Token::Word(word) = self.peek_token().token {
4921                        if word.keyword == Keyword::OPTIONS {
4922                            options = self.parse_options(Keyword::OPTIONS)?
4923                        }
4924                    };
4925
4926                    if self.peek_token() != Token::EOF {
4927                        let (a, q) = self.parse_as_query()?;
4928                        has_as = a;
4929                        query = Some(q);
4930                    }
4931
4932                    Ok(Statement::Cache {
4933                        table_flag,
4934                        table_name,
4935                        has_as,
4936                        options,
4937                        query,
4938                    })
4939                } else {
4940                    Ok(Statement::Cache {
4941                        table_flag,
4942                        table_name,
4943                        has_as,
4944                        options,
4945                        query,
4946                    })
4947                }
4948            } else {
4949                if self.peek_token() == Token::EOF {
4950                    self.prev_token();
4951                }
4952                self.expected("a `TABLE` keyword", self.peek_token())
4953            }
4954        }
4955    }
4956
4957    /// Parse an optional `AS` before a query, such as in `WITH XXX AS SELECT XXX` or `CACHE TABLE AS SELECT XXX`
4958    pub fn parse_as_query(&mut self) -> Result<(bool, Box<Query>), ParserError> {
4959        match self.peek_token().token {
4960            Token::Word(word) => match word.keyword {
4961                Keyword::AS => {
4962                    self.next_token();
4963                    Ok((true, self.parse_query()?))
4964                }
4965                _ => Ok((false, self.parse_query()?)),
4966            },
4967            _ => self.expected("a QUERY statement", self.peek_token()),
4968        }
4969    }
4970
4971    /// Parse an `UNCACHE TABLE` statement
4972    pub fn parse_uncache_table(&mut self) -> Result<Statement, ParserError> {
4973        self.expect_keyword_is(Keyword::TABLE)?;
4974        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
4975        let table_name = self.parse_object_name(false)?;
4976        Ok(Statement::UNCache {
4977            table_name,
4978            if_exists,
4979        })
4980    }
4981
4982    /// SQLite-specific `CREATE VIRTUAL TABLE`
4983    pub fn parse_create_virtual_table(&mut self) -> Result<Statement, ParserError> {
4984        self.expect_keyword_is(Keyword::TABLE)?;
4985        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
4986        let table_name = self.parse_object_name(false)?;
4987        self.expect_keyword_is(Keyword::USING)?;
4988        let module_name = self.parse_identifier()?;
4989        // SQLite docs note that module "arguments syntax is sufficiently
4990        // general that the arguments can be made to appear as column
4991        // definitions in a traditional CREATE TABLE statement", but
4992        // we don't implement that.
4993        let module_args = self.parse_parenthesized_column_list(Optional, false)?;
4994        Ok(Statement::CreateVirtualTable {
4995            name: table_name,
4996            if_not_exists,
4997            module_name,
4998            module_args,
4999        })
5000    }
5001
5002    pub fn parse_create_schema(&mut self) -> Result<Statement, ParserError> {
5003        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5004
5005        let schema_name = self.parse_schema_name()?;
5006
5007        let default_collate_spec = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
5008            Some(self.parse_expr()?)
5009        } else {
5010            None
5011        };
5012
5013        let with = if self.peek_keyword(Keyword::WITH) {
5014            Some(self.parse_options(Keyword::WITH)?)
5015        } else {
5016            None
5017        };
5018
5019        let options = if self.peek_keyword(Keyword::OPTIONS) {
5020            Some(self.parse_options(Keyword::OPTIONS)?)
5021        } else {
5022            None
5023        };
5024
5025        let clone = if self.parse_keyword(Keyword::CLONE) {
5026            Some(self.parse_object_name(false)?)
5027        } else {
5028            None
5029        };
5030
5031        Ok(Statement::CreateSchema {
5032            schema_name,
5033            if_not_exists,
5034            with,
5035            options,
5036            default_collate_spec,
5037            clone,
5038        })
5039    }
5040
5041    fn parse_schema_name(&mut self) -> Result<SchemaName, ParserError> {
5042        if self.parse_keyword(Keyword::AUTHORIZATION) {
5043            Ok(SchemaName::UnnamedAuthorization(self.parse_identifier()?))
5044        } else {
5045            let name = self.parse_object_name(false)?;
5046
5047            if self.parse_keyword(Keyword::AUTHORIZATION) {
5048                Ok(SchemaName::NamedAuthorization(
5049                    name,
5050                    self.parse_identifier()?,
5051                ))
5052            } else {
5053                Ok(SchemaName::Simple(name))
5054            }
5055        }
5056    }
5057
5058    pub fn parse_create_database(&mut self) -> Result<Statement, ParserError> {
5059        let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5060        let db_name = self.parse_object_name(false)?;
5061        let mut location = None;
5062        let mut managed_location = None;
5063        loop {
5064            match self.parse_one_of_keywords(&[Keyword::LOCATION, Keyword::MANAGEDLOCATION]) {
5065                Some(Keyword::LOCATION) => location = Some(self.parse_literal_string()?),
5066                Some(Keyword::MANAGEDLOCATION) => {
5067                    managed_location = Some(self.parse_literal_string()?)
5068                }
5069                _ => break,
5070            }
5071        }
5072        let clone = if self.parse_keyword(Keyword::CLONE) {
5073            Some(self.parse_object_name(false)?)
5074        } else {
5075            None
5076        };
5077
5078        Ok(Statement::CreateDatabase {
5079            db_name,
5080            if_not_exists: ine,
5081            location,
5082            managed_location,
5083            or_replace: false,
5084            transient: false,
5085            clone,
5086            data_retention_time_in_days: None,
5087            max_data_extension_time_in_days: None,
5088            external_volume: None,
5089            catalog: None,
5090            replace_invalid_characters: None,
5091            default_ddl_collation: None,
5092            storage_serialization_policy: None,
5093            comment: None,
5094            catalog_sync: None,
5095            catalog_sync_namespace_mode: None,
5096            catalog_sync_namespace_flatten_delimiter: None,
5097            with_tags: None,
5098            with_contacts: None,
5099        })
5100    }
5101
5102    pub fn parse_optional_create_function_using(
5103        &mut self,
5104    ) -> Result<Option<CreateFunctionUsing>, ParserError> {
5105        if !self.parse_keyword(Keyword::USING) {
5106            return Ok(None);
5107        };
5108        let keyword =
5109            self.expect_one_of_keywords(&[Keyword::JAR, Keyword::FILE, Keyword::ARCHIVE])?;
5110
5111        let uri = self.parse_literal_string()?;
5112
5113        match keyword {
5114            Keyword::JAR => Ok(Some(CreateFunctionUsing::Jar(uri))),
5115            Keyword::FILE => Ok(Some(CreateFunctionUsing::File(uri))),
5116            Keyword::ARCHIVE => Ok(Some(CreateFunctionUsing::Archive(uri))),
5117            _ => self.expected(
5118                "JAR, FILE or ARCHIVE, got {:?}",
5119                TokenWithSpan::wrap(Token::make_keyword(format!("{keyword:?}").as_str())),
5120            ),
5121        }
5122    }
5123
5124    pub fn parse_create_function(
5125        &mut self,
5126        or_alter: bool,
5127        or_replace: bool,
5128        temporary: bool,
5129    ) -> Result<Statement, ParserError> {
5130        if dialect_of!(self is HiveDialect) {
5131            self.parse_hive_create_function(or_replace, temporary)
5132        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
5133            self.parse_postgres_create_function(or_replace, temporary)
5134        } else if dialect_of!(self is DuckDbDialect) {
5135            self.parse_create_macro(or_replace, temporary)
5136        } else if dialect_of!(self is BigQueryDialect) {
5137            self.parse_bigquery_create_function(or_replace, temporary)
5138        } else if dialect_of!(self is MsSqlDialect) {
5139            self.parse_mssql_create_function(or_alter, or_replace, temporary)
5140        } else {
5141            self.prev_token();
5142            self.expected("an object type after CREATE", self.peek_token())
5143        }
5144    }
5145
5146    /// Parse `CREATE FUNCTION` for [PostgreSQL]
5147    ///
5148    /// [PostgreSQL]: https://www.postgresql.org/docs/15/sql-createfunction.html
5149    fn parse_postgres_create_function(
5150        &mut self,
5151        or_replace: bool,
5152        temporary: bool,
5153    ) -> Result<Statement, ParserError> {
5154        let name = self.parse_object_name(false)?;
5155
5156        self.expect_token(&Token::LParen)?;
5157        let args = if Token::RParen != self.peek_token_ref().token {
5158            self.parse_comma_separated(Parser::parse_function_arg)?
5159        } else {
5160            vec![]
5161        };
5162        self.expect_token(&Token::RParen)?;
5163
5164        let return_type = if self.parse_keyword(Keyword::RETURNS) {
5165            Some(self.parse_data_type()?)
5166        } else {
5167            None
5168        };
5169
5170        #[derive(Default)]
5171        struct Body {
5172            language: Option<Ident>,
5173            behavior: Option<FunctionBehavior>,
5174            function_body: Option<CreateFunctionBody>,
5175            called_on_null: Option<FunctionCalledOnNull>,
5176            parallel: Option<FunctionParallel>,
5177        }
5178        let mut body = Body::default();
5179        loop {
5180            fn ensure_not_set<T>(field: &Option<T>, name: &str) -> Result<(), ParserError> {
5181                if field.is_some() {
5182                    return Err(ParserError::ParserError(format!(
5183                        "{name} specified more than once",
5184                    )));
5185                }
5186                Ok(())
5187            }
5188            if self.parse_keyword(Keyword::AS) {
5189                ensure_not_set(&body.function_body, "AS")?;
5190                body.function_body = Some(CreateFunctionBody::AsBeforeOptions(
5191                    self.parse_create_function_body_string()?,
5192                ));
5193            } else if self.parse_keyword(Keyword::LANGUAGE) {
5194                ensure_not_set(&body.language, "LANGUAGE")?;
5195                body.language = Some(self.parse_identifier()?);
5196            } else if self.parse_keyword(Keyword::IMMUTABLE) {
5197                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
5198                body.behavior = Some(FunctionBehavior::Immutable);
5199            } else if self.parse_keyword(Keyword::STABLE) {
5200                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
5201                body.behavior = Some(FunctionBehavior::Stable);
5202            } else if self.parse_keyword(Keyword::VOLATILE) {
5203                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
5204                body.behavior = Some(FunctionBehavior::Volatile);
5205            } else if self.parse_keywords(&[
5206                Keyword::CALLED,
5207                Keyword::ON,
5208                Keyword::NULL,
5209                Keyword::INPUT,
5210            ]) {
5211                ensure_not_set(
5212                    &body.called_on_null,
5213                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
5214                )?;
5215                body.called_on_null = Some(FunctionCalledOnNull::CalledOnNullInput);
5216            } else if self.parse_keywords(&[
5217                Keyword::RETURNS,
5218                Keyword::NULL,
5219                Keyword::ON,
5220                Keyword::NULL,
5221                Keyword::INPUT,
5222            ]) {
5223                ensure_not_set(
5224                    &body.called_on_null,
5225                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
5226                )?;
5227                body.called_on_null = Some(FunctionCalledOnNull::ReturnsNullOnNullInput);
5228            } else if self.parse_keyword(Keyword::STRICT) {
5229                ensure_not_set(
5230                    &body.called_on_null,
5231                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
5232                )?;
5233                body.called_on_null = Some(FunctionCalledOnNull::Strict);
5234            } else if self.parse_keyword(Keyword::PARALLEL) {
5235                ensure_not_set(&body.parallel, "PARALLEL { UNSAFE | RESTRICTED | SAFE }")?;
5236                if self.parse_keyword(Keyword::UNSAFE) {
5237                    body.parallel = Some(FunctionParallel::Unsafe);
5238                } else if self.parse_keyword(Keyword::RESTRICTED) {
5239                    body.parallel = Some(FunctionParallel::Restricted);
5240                } else if self.parse_keyword(Keyword::SAFE) {
5241                    body.parallel = Some(FunctionParallel::Safe);
5242                } else {
5243                    return self.expected("one of UNSAFE | RESTRICTED | SAFE", self.peek_token());
5244                }
5245            } else if self.parse_keyword(Keyword::RETURN) {
5246                ensure_not_set(&body.function_body, "RETURN")?;
5247                body.function_body = Some(CreateFunctionBody::Return(self.parse_expr()?));
5248            } else {
5249                break;
5250            }
5251        }
5252
5253        Ok(Statement::CreateFunction(CreateFunction {
5254            or_alter: false,
5255            or_replace,
5256            temporary,
5257            name,
5258            args: Some(args),
5259            return_type,
5260            behavior: body.behavior,
5261            called_on_null: body.called_on_null,
5262            parallel: body.parallel,
5263            language: body.language,
5264            function_body: body.function_body,
5265            if_not_exists: false,
5266            using: None,
5267            determinism_specifier: None,
5268            options: None,
5269            remote_connection: None,
5270        }))
5271    }
5272
5273    /// Parse `CREATE FUNCTION` for [Hive]
5274    ///
5275    /// [Hive]: https://cwiki.apache.org/confluence/display/hive/languagemanual+ddl#LanguageManualDDL-Create/Drop/ReloadFunction
5276    fn parse_hive_create_function(
5277        &mut self,
5278        or_replace: bool,
5279        temporary: bool,
5280    ) -> Result<Statement, ParserError> {
5281        let name = self.parse_object_name(false)?;
5282        self.expect_keyword_is(Keyword::AS)?;
5283
5284        let as_ = self.parse_create_function_body_string()?;
5285        let using = self.parse_optional_create_function_using()?;
5286
5287        Ok(Statement::CreateFunction(CreateFunction {
5288            or_alter: false,
5289            or_replace,
5290            temporary,
5291            name,
5292            function_body: Some(CreateFunctionBody::AsBeforeOptions(as_)),
5293            using,
5294            if_not_exists: false,
5295            args: None,
5296            return_type: None,
5297            behavior: None,
5298            called_on_null: None,
5299            parallel: None,
5300            language: None,
5301            determinism_specifier: None,
5302            options: None,
5303            remote_connection: None,
5304        }))
5305    }
5306
5307    /// Parse `CREATE FUNCTION` for [BigQuery]
5308    ///
5309    /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_function_statement
5310    fn parse_bigquery_create_function(
5311        &mut self,
5312        or_replace: bool,
5313        temporary: bool,
5314    ) -> Result<Statement, ParserError> {
5315        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5316        let (name, args) = self.parse_create_function_name_and_params()?;
5317
5318        let return_type = if self.parse_keyword(Keyword::RETURNS) {
5319            Some(self.parse_data_type()?)
5320        } else {
5321            None
5322        };
5323
5324        let determinism_specifier = if self.parse_keyword(Keyword::DETERMINISTIC) {
5325            Some(FunctionDeterminismSpecifier::Deterministic)
5326        } else if self.parse_keywords(&[Keyword::NOT, Keyword::DETERMINISTIC]) {
5327            Some(FunctionDeterminismSpecifier::NotDeterministic)
5328        } else {
5329            None
5330        };
5331
5332        let language = if self.parse_keyword(Keyword::LANGUAGE) {
5333            Some(self.parse_identifier()?)
5334        } else {
5335            None
5336        };
5337
5338        let remote_connection =
5339            if self.parse_keywords(&[Keyword::REMOTE, Keyword::WITH, Keyword::CONNECTION]) {
5340                Some(self.parse_object_name(false)?)
5341            } else {
5342                None
5343            };
5344
5345        // `OPTIONS` may come before or after the function body but
5346        // may be specified at most once.
5347        let mut options = self.maybe_parse_options(Keyword::OPTIONS)?;
5348
5349        let function_body = if remote_connection.is_none() {
5350            self.expect_keyword_is(Keyword::AS)?;
5351            let expr = self.parse_expr()?;
5352            if options.is_none() {
5353                options = self.maybe_parse_options(Keyword::OPTIONS)?;
5354                Some(CreateFunctionBody::AsBeforeOptions(expr))
5355            } else {
5356                Some(CreateFunctionBody::AsAfterOptions(expr))
5357            }
5358        } else {
5359            None
5360        };
5361
5362        Ok(Statement::CreateFunction(CreateFunction {
5363            or_alter: false,
5364            or_replace,
5365            temporary,
5366            if_not_exists,
5367            name,
5368            args: Some(args),
5369            return_type,
5370            function_body,
5371            language,
5372            determinism_specifier,
5373            options,
5374            remote_connection,
5375            using: None,
5376            behavior: None,
5377            called_on_null: None,
5378            parallel: None,
5379        }))
5380    }
5381
5382    /// Parse `CREATE FUNCTION` for [MsSql]
5383    ///
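    /// A minimal sketch of the kind of statement accepted here (object and
    /// column names are illustrative):
    ///
    /// ```sql
    /// CREATE FUNCTION dbo.max_order_id()
    /// RETURNS INT
    /// AS
    /// RETURN SELECT MAX(order_id) FROM orders;
    /// ```
    ///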
5384    /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/statements/create-function-transact-sql
5385    fn parse_mssql_create_function(
5386        &mut self,
5387        or_alter: bool,
5388        or_replace: bool,
5389        temporary: bool,
5390    ) -> Result<Statement, ParserError> {
5391        let (name, args) = self.parse_create_function_name_and_params()?;
5392
5393        self.expect_keyword(Keyword::RETURNS)?;
5394
5395        let return_table = self.maybe_parse(|p| {
5396            let return_table_name = p.parse_identifier()?;
5397
5398            p.expect_keyword_is(Keyword::TABLE)?;
5399            p.prev_token();
5400
5401            let table_column_defs = match p.parse_data_type()? {
5402                DataType::Table(Some(table_column_defs)) if !table_column_defs.is_empty() => {
5403                    table_column_defs
5404                }
5405                _ => parser_err!(
5406                    "Expected table column definitions after TABLE keyword",
5407                    p.peek_token().span.start
5408                )?,
5409            };
5410
5411            Ok(DataType::NamedTable {
5412                name: ObjectName(vec![ObjectNamePart::Identifier(return_table_name)]),
5413                columns: table_column_defs,
5414            })
5415        })?;
5416
5417        let return_type = if return_table.is_some() {
5418            return_table
5419        } else {
5420            Some(self.parse_data_type()?)
5421        };
5422
5423        let _ = self.parse_keyword(Keyword::AS);
5424
5425        let function_body = if self.peek_keyword(Keyword::BEGIN) {
5426            let begin_token = self.expect_keyword(Keyword::BEGIN)?;
5427            let statements = self.parse_statement_list(&[Keyword::END])?;
5428            let end_token = self.expect_keyword(Keyword::END)?;
5429
5430            Some(CreateFunctionBody::AsBeginEnd(BeginEndStatements {
5431                begin_token: AttachedToken(begin_token),
5432                statements,
5433                end_token: AttachedToken(end_token),
5434            }))
5435        } else if self.parse_keyword(Keyword::RETURN) {
5436            if self.peek_token() == Token::LParen {
5437                Some(CreateFunctionBody::AsReturnExpr(self.parse_expr()?))
5438            } else if self.peek_keyword(Keyword::SELECT) {
5439                let select = self.parse_select()?;
5440                Some(CreateFunctionBody::AsReturnSelect(select))
5441            } else {
5442                parser_err!(
5443                    "Expected a subquery (or bare SELECT statement) after RETURN",
5444                    self.peek_token().span.start
5445                )?
5446            }
5447        } else {
5448            parser_err!("Unparsable function body", self.peek_token().span.start)?
5449        };
5450
5451        Ok(Statement::CreateFunction(CreateFunction {
5452            or_alter,
5453            or_replace,
5454            temporary,
5455            if_not_exists: false,
5456            name,
5457            args: Some(args),
5458            return_type,
5459            function_body,
5460            language: None,
5461            determinism_specifier: None,
5462            options: None,
5463            remote_connection: None,
5464            using: None,
5465            behavior: None,
5466            called_on_null: None,
5467            parallel: None,
5468        }))
5469    }
5470
5471    fn parse_create_function_name_and_params(
5472        &mut self,
5473    ) -> Result<(ObjectName, Vec<OperateFunctionArg>), ParserError> {
5474        let name = self.parse_object_name(false)?;
5475        let parse_function_param =
5476            |parser: &mut Parser| -> Result<OperateFunctionArg, ParserError> {
5477                let name = parser.parse_identifier()?;
5478                let data_type = parser.parse_data_type()?;
5479                let default_expr = if parser.consume_token(&Token::Eq) {
5480                    Some(parser.parse_expr()?)
5481                } else {
5482                    None
5483                };
5484
5485                Ok(OperateFunctionArg {
5486                    mode: None,
5487                    name: Some(name),
5488                    data_type,
5489                    default_expr,
5490                })
5491            };
5492        self.expect_token(&Token::LParen)?;
5493        let args = self.parse_comma_separated0(parse_function_param, Token::RParen)?;
5494        self.expect_token(&Token::RParen)?;
5495        Ok((name, args))
5496    }
5497
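    /// Parse a single function argument as used in `DROP FUNCTION` and
    /// `DROP PROCEDURE` descriptors; a sketch of the grammar handled below:
    ///
    /// ```sql
    /// [ IN | OUT | INOUT ] [ argname ] argtype [ { DEFAULT | = } default_expr ]
    /// ```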
5498    fn parse_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
5499        let mode = if self.parse_keyword(Keyword::IN) {
5500            Some(ArgMode::In)
5501        } else if self.parse_keyword(Keyword::OUT) {
5502            Some(ArgMode::Out)
5503        } else if self.parse_keyword(Keyword::INOUT) {
5504            Some(ArgMode::InOut)
5505        } else {
5506            None
5507        };
5508
5509        // parse: [ argname ] argtype
5510        let mut name = None;
5511        let mut data_type = self.parse_data_type()?;
5512
5513        // To decide whether the first token is a name or a type, peek at the
5514        // next token: if it also parses as a data type, then the first token
5515        // must have been the argument name rather than a type.
5516        let data_type_idx = self.get_current_index();
5517        if let Some(next_data_type) = self.maybe_parse(|parser| parser.parse_data_type())? {
5518            let token = self.token_at(data_type_idx);
5519
5520            // We ensure that the token is a `Word` token, and not other special tokens.
5521            if !matches!(token.token, Token::Word(_)) {
5522                return self.expected("a name or type", token.clone());
5523            }
5524
5525            name = Some(Ident::new(token.to_string()));
5526            data_type = next_data_type;
5527        }
5528
5529        let default_expr = if self.parse_keyword(Keyword::DEFAULT) || self.consume_token(&Token::Eq)
5530        {
5531            Some(self.parse_expr()?)
5532        } else {
5533            None
5534        };
5535        Ok(OperateFunctionArg {
5536            mode,
5537            name,
5538            data_type,
5539            default_expr,
5540        })
5541    }
5542
5543    /// Parse a [Statement::DropTrigger] statement, such as:
5544    ///
5545    /// ```sql
5546    /// DROP TRIGGER [ IF EXISTS ] name ON table_name [ CASCADE | RESTRICT ]
5547    /// ```
5548    pub fn parse_drop_trigger(&mut self) -> Result<Statement, ParserError> {
5549        if !dialect_of!(self is PostgreSqlDialect | GenericDialect | MySqlDialect | MsSqlDialect) {
5550            self.prev_token();
5551            return self.expected("an object type after DROP", self.peek_token());
5552        }
5553        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5554        let trigger_name = self.parse_object_name(false)?;
5555        let table_name = if self.parse_keyword(Keyword::ON) {
5556            Some(self.parse_object_name(false)?)
5557        } else {
5558            None
5559        };
5560        let option = self
5561            .parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT])
5562            .map(|keyword| match keyword {
5563                Keyword::CASCADE => ReferentialAction::Cascade,
5564                Keyword::RESTRICT => ReferentialAction::Restrict,
5565                _ => unreachable!(),
5566            });
5567        Ok(Statement::DropTrigger(DropTrigger {
5568            if_exists,
5569            trigger_name,
5570            table_name,
5571            option,
5572        }))
5573    }
5574
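    /// Parse a `CREATE TRIGGER` statement.
    ///
    /// A minimal sketch of the PostgreSQL-style form handled here (names are
    /// illustrative):
    ///
    /// ```sql
    /// CREATE TRIGGER check_update
    /// BEFORE UPDATE ON accounts
    /// FOR EACH ROW
    /// EXECUTE FUNCTION check_account_update();
    /// ```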
5575    pub fn parse_create_trigger(
5576        &mut self,
5577        or_alter: bool,
5578        or_replace: bool,
5579        is_constraint: bool,
5580    ) -> Result<Statement, ParserError> {
5581        if !dialect_of!(self is PostgreSqlDialect | GenericDialect | MySqlDialect | MsSqlDialect) {
5582            self.prev_token();
5583            return self.expected("an object type after CREATE", self.peek_token());
5584        }
5585
5586        let name = self.parse_object_name(false)?;
5587        let period = self.parse_trigger_period()?;
5588
5589        let events = self.parse_keyword_separated(Keyword::OR, Parser::parse_trigger_event)?;
5590        self.expect_keyword_is(Keyword::ON)?;
5591        let table_name = self.parse_object_name(false)?;
5592
5593        let referenced_table_name = if self.parse_keyword(Keyword::FROM) {
5594            self.parse_object_name(true).ok()
5595        } else {
5596            None
5597        };
5598
5599        let characteristics = self.parse_constraint_characteristics()?;
5600
5601        let mut referencing = vec![];
5602        if self.parse_keyword(Keyword::REFERENCING) {
5603            while let Some(refer) = self.parse_trigger_referencing()? {
5604                referencing.push(refer);
5605            }
5606        }
5607
5608        self.expect_keyword_is(Keyword::FOR)?;
5609        let include_each = self.parse_keyword(Keyword::EACH);
5610        let trigger_object =
5611            match self.expect_one_of_keywords(&[Keyword::ROW, Keyword::STATEMENT])? {
5612                Keyword::ROW => TriggerObject::Row,
5613                Keyword::STATEMENT => TriggerObject::Statement,
5614                _ => unreachable!(),
5615            };
5616
5617        let condition = self
5618            .parse_keyword(Keyword::WHEN)
5619            .then(|| self.parse_expr())
5620            .transpose()?;
5621
5622        let mut exec_body = None;
5623        let mut statements = None;
5624        if self.parse_keyword(Keyword::EXECUTE) {
5625            exec_body = Some(self.parse_trigger_exec_body()?);
5626        } else {
5627            statements = Some(self.parse_conditional_statements(&[Keyword::END])?);
5628        }
5629
5630        Ok(Statement::CreateTrigger(CreateTrigger {
5631            or_alter,
5632            or_replace,
5633            is_constraint,
5634            name,
5635            period,
5636            period_before_table: true,
5637            events,
5638            table_name,
5639            referenced_table_name,
5640            referencing,
5641            trigger_object,
5642            include_each,
5643            condition,
5644            exec_body,
5645            statements_as: false,
5646            statements,
5647            characteristics,
5648        }))
5649    }
5650
5651    pub fn parse_trigger_period(&mut self) -> Result<TriggerPeriod, ParserError> {
5652        Ok(
5653            match self.expect_one_of_keywords(&[
5654                Keyword::FOR,
5655                Keyword::BEFORE,
5656                Keyword::AFTER,
5657                Keyword::INSTEAD,
5658            ])? {
5659                Keyword::FOR => TriggerPeriod::For,
5660                Keyword::BEFORE => TriggerPeriod::Before,
5661                Keyword::AFTER => TriggerPeriod::After,
5662                Keyword::INSTEAD => self
5663                    .expect_keyword_is(Keyword::OF)
5664                    .map(|_| TriggerPeriod::InsteadOf)?,
5665                _ => unreachable!(),
5666            },
5667        )
5668    }
5669
5670    pub fn parse_trigger_event(&mut self) -> Result<TriggerEvent, ParserError> {
5671        Ok(
5672            match self.expect_one_of_keywords(&[
5673                Keyword::INSERT,
5674                Keyword::UPDATE,
5675                Keyword::DELETE,
5676                Keyword::TRUNCATE,
5677            ])? {
5678                Keyword::INSERT => TriggerEvent::Insert,
5679                Keyword::UPDATE => {
5680                    if self.parse_keyword(Keyword::OF) {
5681                        let cols = self.parse_comma_separated(Parser::parse_identifier)?;
5682                        TriggerEvent::Update(cols)
5683                    } else {
5684                        TriggerEvent::Update(vec![])
5685                    }
5686                }
5687                Keyword::DELETE => TriggerEvent::Delete,
5688                Keyword::TRUNCATE => TriggerEvent::Truncate,
5689                _ => unreachable!(),
5690            },
5691        )
5692    }
5693
5694    pub fn parse_trigger_referencing(&mut self) -> Result<Option<TriggerReferencing>, ParserError> {
5695        let refer_type = match self.parse_one_of_keywords(&[Keyword::OLD, Keyword::NEW]) {
5696            Some(Keyword::OLD) if self.parse_keyword(Keyword::TABLE) => {
5697                TriggerReferencingType::OldTable
5698            }
5699            Some(Keyword::NEW) if self.parse_keyword(Keyword::TABLE) => {
5700                TriggerReferencingType::NewTable
5701            }
5702            _ => {
5703                return Ok(None);
5704            }
5705        };
5706
5707        let is_as = self.parse_keyword(Keyword::AS);
5708        let transition_relation_name = self.parse_object_name(false)?;
5709        Ok(Some(TriggerReferencing {
5710            refer_type,
5711            is_as,
5712            transition_relation_name,
5713        }))
5714    }
5715
5716    pub fn parse_trigger_exec_body(&mut self) -> Result<TriggerExecBody, ParserError> {
5717        Ok(TriggerExecBody {
5718            exec_type: match self
5719                .expect_one_of_keywords(&[Keyword::FUNCTION, Keyword::PROCEDURE])?
5720            {
5721                Keyword::FUNCTION => TriggerExecBodyType::Function,
5722                Keyword::PROCEDURE => TriggerExecBodyType::Procedure,
5723                _ => unreachable!(),
5724            },
5725            func_desc: self.parse_function_desc()?,
5726        })
5727    }
5728
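    /// Parse a DuckDB-style `CREATE MACRO` statement, e.g. (names and default
    /// are illustrative):
    ///
    /// ```sql
    /// CREATE MACRO add_default(a, b := 5) AS a + b;
    /// ```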
5729    pub fn parse_create_macro(
5730        &mut self,
5731        or_replace: bool,
5732        temporary: bool,
5733    ) -> Result<Statement, ParserError> {
5734        if dialect_of!(self is DuckDbDialect |  GenericDialect) {
5735            let name = self.parse_object_name(false)?;
5736            self.expect_token(&Token::LParen)?;
5737            let args = if self.consume_token(&Token::RParen) {
5738                self.prev_token();
5739                None
5740            } else {
5741                Some(self.parse_comma_separated(Parser::parse_macro_arg)?)
5742            };
5743
5744            self.expect_token(&Token::RParen)?;
5745            self.expect_keyword_is(Keyword::AS)?;
5746
5747            Ok(Statement::CreateMacro {
5748                or_replace,
5749                temporary,
5750                name,
5751                args,
5752                definition: if self.parse_keyword(Keyword::TABLE) {
5753                    MacroDefinition::Table(self.parse_query()?)
5754                } else {
5755                    MacroDefinition::Expr(self.parse_expr()?)
5756                },
5757            })
5758        } else {
5759            self.prev_token();
5760            self.expected("an object type after CREATE", self.peek_token())
5761        }
5762    }
5763
5764    fn parse_macro_arg(&mut self) -> Result<MacroArg, ParserError> {
5765        let name = self.parse_identifier()?;
5766
5767        let default_expr =
5768            if self.consume_token(&Token::Assignment) || self.consume_token(&Token::RArrow) {
5769                Some(self.parse_expr()?)
5770            } else {
5771                None
5772            };
5773        Ok(MacroArg { name, default_expr })
5774    }
5775
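    /// Parse the remainder of a Hive-style `CREATE EXTERNAL TABLE` statement
    /// (the leading `CREATE [OR REPLACE] EXTERNAL` tokens are consumed by the
    /// caller). A sketch of the shape handled, with illustrative names:
    ///
    /// ```sql
    /// CREATE EXTERNAL TABLE uk_cities (name VARCHAR(100), lat DOUBLE)
    /// STORED AS TEXTFILE
    /// LOCATION '/tmp/uk_cities.csv';
    /// ```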
5776    pub fn parse_create_external_table(
5777        &mut self,
5778        or_replace: bool,
5779    ) -> Result<Statement, ParserError> {
5780        self.expect_keyword_is(Keyword::TABLE)?;
5781        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5782        let table_name = self.parse_object_name(false)?;
5783        let (columns, constraints) = self.parse_columns()?;
5784
5785        let hive_distribution = self.parse_hive_distribution()?;
5786        let hive_formats = self.parse_hive_formats()?;
5787
5788        let file_format = if let Some(ff) = &hive_formats.storage {
5789            match ff {
5790                HiveIOFormat::FileFormat { format } => Some(*format),
5791                _ => None,
5792            }
5793        } else {
5794            None
5795        };
5796        let location = hive_formats.location.clone();
5797        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
5798        let table_options = if !table_properties.is_empty() {
5799            CreateTableOptions::TableProperties(table_properties)
5800        } else {
5801            CreateTableOptions::None
5802        };
5803        Ok(CreateTableBuilder::new(table_name)
5804            .columns(columns)
5805            .constraints(constraints)
5806            .hive_distribution(hive_distribution)
5807            .hive_formats(Some(hive_formats))
5808            .table_options(table_options)
5809            .or_replace(or_replace)
5810            .if_not_exists(if_not_exists)
5811            .external(true)
5812            .file_format(file_format)
5813            .location(location)
5814            .build())
5815    }
5816
5817    pub fn parse_file_format(&mut self) -> Result<FileFormat, ParserError> {
5818        let next_token = self.next_token();
5819        match &next_token.token {
5820            Token::Word(w) => match w.keyword {
5821                Keyword::AVRO => Ok(FileFormat::AVRO),
5822                Keyword::JSONFILE => Ok(FileFormat::JSONFILE),
5823                Keyword::ORC => Ok(FileFormat::ORC),
5824                Keyword::PARQUET => Ok(FileFormat::PARQUET),
5825                Keyword::RCFILE => Ok(FileFormat::RCFILE),
5826                Keyword::SEQUENCEFILE => Ok(FileFormat::SEQUENCEFILE),
5827                Keyword::TEXTFILE => Ok(FileFormat::TEXTFILE),
5828                _ => self.expected("fileformat", next_token),
5829            },
5830            _ => self.expected("fileformat", next_token),
5831        }
5832    }
5833
5834    fn parse_analyze_format_kind(&mut self) -> Result<AnalyzeFormatKind, ParserError> {
5835        if self.consume_token(&Token::Eq) {
5836            Ok(AnalyzeFormatKind::Assignment(self.parse_analyze_format()?))
5837        } else {
5838            Ok(AnalyzeFormatKind::Keyword(self.parse_analyze_format()?))
5839        }
5840    }
5841
5842    pub fn parse_analyze_format(&mut self) -> Result<AnalyzeFormat, ParserError> {
5843        let next_token = self.next_token();
5844        match &next_token.token {
5845            Token::Word(w) => match w.keyword {
5846                Keyword::TEXT => Ok(AnalyzeFormat::TEXT),
5847                Keyword::GRAPHVIZ => Ok(AnalyzeFormat::GRAPHVIZ),
5848                Keyword::JSON => Ok(AnalyzeFormat::JSON),
5849                _ => self.expected("TEXT, GRAPHVIZ or JSON", next_token),
5850            },
5851            _ => self.expected("TEXT, GRAPHVIZ or JSON", next_token),
5852        }
5853    }
5854
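    /// Parse a `CREATE VIEW` statement (the leading `CREATE [OR REPLACE]
    /// [TEMPORARY]` tokens are consumed by the caller). A simple sketch of an
    /// accepted form, with illustrative names:
    ///
    /// ```sql
    /// CREATE OR REPLACE VIEW analytics.active_users (id, name) AS
    /// SELECT id, name FROM users WHERE active = true;
    /// ```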
5855    pub fn parse_create_view(
5856        &mut self,
5857        or_alter: bool,
5858        or_replace: bool,
5859        temporary: bool,
5860        create_view_params: Option<CreateViewParams>,
5861    ) -> Result<Statement, ParserError> {
5862        let secure = self.parse_keyword(Keyword::SECURE);
5863        let materialized = self.parse_keyword(Keyword::MATERIALIZED);
5864        self.expect_keyword_is(Keyword::VIEW)?;
5865        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
5866        // Try to parse `IF NOT EXISTS` either before or after the view name.
5867        // Putting the name before `IF NOT EXISTS` is supported by Snowflake but undocumented.
5868        let if_not_exists_first =
5869            self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5870        let name = self.parse_object_name(allow_unquoted_hyphen)?;
5871        let name_before_not_exists = !if_not_exists_first
5872            && self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5873        let if_not_exists = if_not_exists_first || name_before_not_exists;
5874        // Many dialects support `OR ALTER` right after `CREATE`, but we don't (yet).
5875        // ANSI SQL and Postgres support RECURSIVE here, but we don't support it either.
5876        let columns = self.parse_view_columns()?;
5877        let mut options = CreateTableOptions::None;
5878        let with_options = self.parse_options(Keyword::WITH)?;
5879        if !with_options.is_empty() {
5880            options = CreateTableOptions::With(with_options);
5881        }
5882
5883        let cluster_by = if self.parse_keyword(Keyword::CLUSTER) {
5884            self.expect_keyword_is(Keyword::BY)?;
5885            self.parse_parenthesized_column_list(Optional, false)?
5886        } else {
5887            vec![]
5888        };
5889
5890        if dialect_of!(self is BigQueryDialect | GenericDialect) {
5891            if let Some(opts) = self.maybe_parse_options(Keyword::OPTIONS)? {
5892                if !opts.is_empty() {
5893                    options = CreateTableOptions::Options(opts);
5894                }
5895            };
5896        }
5897
5898        let to = if dialect_of!(self is ClickHouseDialect | GenericDialect)
5899            && self.parse_keyword(Keyword::TO)
5900        {
5901            Some(self.parse_object_name(false)?)
5902        } else {
5903            None
5904        };
5905
5906        let comment = if dialect_of!(self is SnowflakeDialect | GenericDialect)
5907            && self.parse_keyword(Keyword::COMMENT)
5908        {
5909            self.expect_token(&Token::Eq)?;
5910            Some(self.parse_comment_value()?)
5911        } else {
5912            None
5913        };
5914
5915        self.expect_keyword_is(Keyword::AS)?;
5916        let query = self.parse_query()?;
5917        // Optional `WITH [ CASCADED | LOCAL ] CHECK OPTION` is widely supported here.
5918
5919        let with_no_schema_binding = dialect_of!(self is RedshiftSqlDialect | GenericDialect)
5920            && self.parse_keywords(&[
5921                Keyword::WITH,
5922                Keyword::NO,
5923                Keyword::SCHEMA,
5924                Keyword::BINDING,
5925            ]);
5926
5927        Ok(Statement::CreateView {
5928            or_alter,
5929            name,
5930            columns,
5931            query,
5932            materialized,
5933            secure,
5934            or_replace,
5935            options,
5936            cluster_by,
5937            comment,
5938            with_no_schema_binding,
5939            if_not_exists,
5940            temporary,
5941            to,
5942            params: create_view_params,
5943            name_before_not_exists,
5944        })
5945    }
5946
5947    /// Parse optional parameters for the `CREATE VIEW` statement supported by [MySQL].
5948    ///
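    /// For example (the definer and view definition below are illustrative):
    ///
    /// ```sql
    /// CREATE ALGORITHM = MERGE DEFINER = admin SQL SECURITY INVOKER
    /// VIEW v AS SELECT 1;
    /// ```
    ///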
5949    /// [MySQL]: https://dev.mysql.com/doc/refman/9.1/en/create-view.html
5950    fn parse_create_view_params(&mut self) -> Result<Option<CreateViewParams>, ParserError> {
5951        let algorithm = if self.parse_keyword(Keyword::ALGORITHM) {
5952            self.expect_token(&Token::Eq)?;
5953            Some(
5954                match self.expect_one_of_keywords(&[
5955                    Keyword::UNDEFINED,
5956                    Keyword::MERGE,
5957                    Keyword::TEMPTABLE,
5958                ])? {
5959                    Keyword::UNDEFINED => CreateViewAlgorithm::Undefined,
5960                    Keyword::MERGE => CreateViewAlgorithm::Merge,
5961                    Keyword::TEMPTABLE => CreateViewAlgorithm::TempTable,
5962                    _ => {
5963                        self.prev_token();
5964                        let found = self.next_token();
5965                        return self
5966                            .expected("UNDEFINED or MERGE or TEMPTABLE after ALGORITHM =", found);
5967                    }
5968                },
5969            )
5970        } else {
5971            None
5972        };
5973        let definer = if self.parse_keyword(Keyword::DEFINER) {
5974            self.expect_token(&Token::Eq)?;
5975            Some(self.parse_grantee_name()?)
5976        } else {
5977            None
5978        };
5979        let security = if self.parse_keywords(&[Keyword::SQL, Keyword::SECURITY]) {
5980            Some(
5981                match self.expect_one_of_keywords(&[Keyword::DEFINER, Keyword::INVOKER])? {
5982                    Keyword::DEFINER => CreateViewSecurity::Definer,
5983                    Keyword::INVOKER => CreateViewSecurity::Invoker,
5984                    _ => {
5985                        self.prev_token();
5986                        let found = self.next_token();
5987                        return self.expected("DEFINER or INVOKER after SQL SECURITY", found);
5988                    }
5989                },
5990            )
5991        } else {
5992            None
5993        };
5994        if algorithm.is_some() || definer.is_some() || security.is_some() {
5995            Ok(Some(CreateViewParams {
5996                algorithm,
5997                definer,
5998                security,
5999            }))
6000        } else {
6001            Ok(None)
6002        }
6003    }
6004
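    /// Parse a `CREATE ROLE` statement. A sketch of a PostgreSQL-style form
    /// accepted here (role name and option values are illustrative):
    ///
    /// ```sql
    /// CREATE ROLE analyst WITH LOGIN PASSWORD 'secret' CONNECTION LIMIT 10;
    /// ```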
6005    pub fn parse_create_role(&mut self) -> Result<Statement, ParserError> {
6006        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6007        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
6008
6009        let _ = self.parse_keyword(Keyword::WITH); // [ WITH ]
6010
6011        let optional_keywords = if dialect_of!(self is MsSqlDialect) {
6012            vec![Keyword::AUTHORIZATION]
6013        } else if dialect_of!(self is PostgreSqlDialect) {
6014            vec![
6015                Keyword::LOGIN,
6016                Keyword::NOLOGIN,
6017                Keyword::INHERIT,
6018                Keyword::NOINHERIT,
6019                Keyword::BYPASSRLS,
6020                Keyword::NOBYPASSRLS,
6021                Keyword::PASSWORD,
6022                Keyword::CREATEDB,
6023                Keyword::NOCREATEDB,
6024                Keyword::CREATEROLE,
6025                Keyword::NOCREATEROLE,
6026                Keyword::SUPERUSER,
6027                Keyword::NOSUPERUSER,
6028                Keyword::REPLICATION,
6029                Keyword::NOREPLICATION,
6030                Keyword::CONNECTION,
6031                Keyword::VALID,
6032                Keyword::IN,
6033                Keyword::ROLE,
6034                Keyword::ADMIN,
6035                Keyword::USER,
6036            ]
6037        } else {
6038            vec![]
6039        };
6040
6041        // MSSQL
6042        let mut authorization_owner = None;
6043        // Postgres
6044        let mut login = None;
6045        let mut inherit = None;
6046        let mut bypassrls = None;
6047        let mut password = None;
6048        let mut create_db = None;
6049        let mut create_role = None;
6050        let mut superuser = None;
6051        let mut replication = None;
6052        let mut connection_limit = None;
6053        let mut valid_until = None;
6054        let mut in_role = vec![];
6055        let mut in_group = vec![];
6056        let mut role = vec![];
6057        let mut user = vec![];
6058        let mut admin = vec![];
6059
6060        while let Some(keyword) = self.parse_one_of_keywords(&optional_keywords) {
6061            let loc = self
6062                .tokens
6063                .get(self.index - 1)
6064                .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
6065            match keyword {
6066                Keyword::AUTHORIZATION => {
6067                    if authorization_owner.is_some() {
6068                        parser_err!("Found multiple AUTHORIZATION", loc)
6069                    } else {
6070                        authorization_owner = Some(self.parse_object_name(false)?);
6071                        Ok(())
6072                    }
6073                }
6074                Keyword::LOGIN | Keyword::NOLOGIN => {
6075                    if login.is_some() {
6076                        parser_err!("Found multiple LOGIN or NOLOGIN", loc)
6077                    } else {
6078                        login = Some(keyword == Keyword::LOGIN);
6079                        Ok(())
6080                    }
6081                }
6082                Keyword::INHERIT | Keyword::NOINHERIT => {
6083                    if inherit.is_some() {
6084                        parser_err!("Found multiple INHERIT or NOINHERIT", loc)
6085                    } else {
6086                        inherit = Some(keyword == Keyword::INHERIT);
6087                        Ok(())
6088                    }
6089                }
6090                Keyword::BYPASSRLS | Keyword::NOBYPASSRLS => {
6091                    if bypassrls.is_some() {
6092                        parser_err!("Found multiple BYPASSRLS or NOBYPASSRLS", loc)
6093                    } else {
6094                        bypassrls = Some(keyword == Keyword::BYPASSRLS);
6095                        Ok(())
6096                    }
6097                }
6098                Keyword::CREATEDB | Keyword::NOCREATEDB => {
6099                    if create_db.is_some() {
6100                        parser_err!("Found multiple CREATEDB or NOCREATEDB", loc)
6101                    } else {
6102                        create_db = Some(keyword == Keyword::CREATEDB);
6103                        Ok(())
6104                    }
6105                }
6106                Keyword::CREATEROLE | Keyword::NOCREATEROLE => {
6107                    if create_role.is_some() {
6108                        parser_err!("Found multiple CREATEROLE or NOCREATEROLE", loc)
6109                    } else {
6110                        create_role = Some(keyword == Keyword::CREATEROLE);
6111                        Ok(())
6112                    }
6113                }
6114                Keyword::SUPERUSER | Keyword::NOSUPERUSER => {
6115                    if superuser.is_some() {
6116                        parser_err!("Found multiple SUPERUSER or NOSUPERUSER", loc)
6117                    } else {
6118                        superuser = Some(keyword == Keyword::SUPERUSER);
6119                        Ok(())
6120                    }
6121                }
6122                Keyword::REPLICATION | Keyword::NOREPLICATION => {
6123                    if replication.is_some() {
6124                        parser_err!("Found multiple REPLICATION or NOREPLICATION", loc)
6125                    } else {
6126                        replication = Some(keyword == Keyword::REPLICATION);
6127                        Ok(())
6128                    }
6129                }
6130                Keyword::PASSWORD => {
6131                    if password.is_some() {
6132                        parser_err!("Found multiple PASSWORD", loc)
6133                    } else {
6134                        password = if self.parse_keyword(Keyword::NULL) {
6135                            Some(Password::NullPassword)
6136                        } else {
6137                            Some(Password::Password(Expr::Value(self.parse_value()?)))
6138                        };
6139                        Ok(())
6140                    }
6141                }
6142                Keyword::CONNECTION => {
6143                    self.expect_keyword_is(Keyword::LIMIT)?;
6144                    if connection_limit.is_some() {
6145                        parser_err!("Found multiple CONNECTION LIMIT", loc)
6146                    } else {
6147                        connection_limit = Some(Expr::Value(self.parse_number_value()?));
6148                        Ok(())
6149                    }
6150                }
6151                Keyword::VALID => {
6152                    self.expect_keyword_is(Keyword::UNTIL)?;
6153                    if valid_until.is_some() {
6154                        parser_err!("Found multiple VALID UNTIL", loc)
6155                    } else {
6156                        valid_until = Some(Expr::Value(self.parse_value()?));
6157                        Ok(())
6158                    }
6159                }
6160                Keyword::IN => {
6161                    if self.parse_keyword(Keyword::ROLE) {
6162                        if !in_role.is_empty() {
6163                            parser_err!("Found multiple IN ROLE", loc)
6164                        } else {
6165                            in_role = self.parse_comma_separated(|p| p.parse_identifier())?;
6166                            Ok(())
6167                        }
6168                    } else if self.parse_keyword(Keyword::GROUP) {
6169                        if !in_group.is_empty() {
6170                            parser_err!("Found multiple IN GROUP", loc)
6171                        } else {
6172                            in_group = self.parse_comma_separated(|p| p.parse_identifier())?;
6173                            Ok(())
6174                        }
6175                    } else {
6176                        self.expected("ROLE or GROUP after IN", self.peek_token())
6177                    }
6178                }
6179                Keyword::ROLE => {
6180                    if !role.is_empty() {
6181                        parser_err!("Found multiple ROLE", loc)
6182                    } else {
6183                        role = self.parse_comma_separated(|p| p.parse_identifier())?;
6184                        Ok(())
6185                    }
6186                }
6187                Keyword::USER => {
6188                    if !user.is_empty() {
6189                        parser_err!("Found multiple USER", loc)
6190                    } else {
6191                        user = self.parse_comma_separated(|p| p.parse_identifier())?;
6192                        Ok(())
6193                    }
6194                }
6195                Keyword::ADMIN => {
6196                    if !admin.is_empty() {
6197                        parser_err!("Found multiple ADMIN", loc)
6198                    } else {
6199                        admin = self.parse_comma_separated(|p| p.parse_identifier())?;
6200                        Ok(())
6201                    }
6202                }
6203                _ => break,
6204            }?
6205        }
6206
6207        Ok(Statement::CreateRole {
6208            names,
6209            if_not_exists,
6210            login,
6211            inherit,
6212            bypassrls,
6213            password,
6214            create_db,
6215            create_role,
6216            replication,
6217            superuser,
6218            connection_limit,
6219            valid_until,
6220            in_role,
6221            in_group,
6222            role,
6223            user,
6224            admin,
6225            authorization_owner,
6226        })
6227    }
6228
6229    pub fn parse_owner(&mut self) -> Result<Owner, ParserError> {
6230        let owner = match self.parse_one_of_keywords(&[Keyword::CURRENT_USER, Keyword::CURRENT_ROLE, Keyword::SESSION_USER]) {
6231            Some(Keyword::CURRENT_USER) => Owner::CurrentUser,
6232            Some(Keyword::CURRENT_ROLE) => Owner::CurrentRole,
6233            Some(Keyword::SESSION_USER) => Owner::SessionUser,
6234            Some(_) => unreachable!(),
6235            None => {
6236                match self.parse_identifier() {
6237                    Ok(ident) => Owner::Ident(ident),
6238                    Err(e) => {
6239                        return Err(ParserError::ParserError(format!("Expected: CURRENT_USER, CURRENT_ROLE, SESSION_USER or identifier after OWNER TO. {e}")))
6240                    }
6241                }
6242            }
6243        };
6244        Ok(owner)
6245    }
6246
6247    /// Parses a [Statement::CreateDomain] statement.
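    ///
    /// A sketch of the accepted form (names and values are illustrative):
    ///
    /// ```sql
    /// CREATE DOMAIN positive_int AS INTEGER DEFAULT 1 CHECK (VALUE > 0);
    /// ```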
6248    fn parse_create_domain(&mut self) -> Result<Statement, ParserError> {
6249        let name = self.parse_object_name(false)?;
6250        self.expect_keyword_is(Keyword::AS)?;
6251        let data_type = self.parse_data_type()?;
6252        let collation = if self.parse_keyword(Keyword::COLLATE) {
6253            Some(self.parse_identifier()?)
6254        } else {
6255            None
6256        };
6257        let default = if self.parse_keyword(Keyword::DEFAULT) {
6258            Some(self.parse_expr()?)
6259        } else {
6260            None
6261        };
6262        let mut constraints = Vec::new();
6263        while let Some(constraint) = self.parse_optional_table_constraint()? {
6264            constraints.push(constraint);
6265        }
6266
6267        Ok(Statement::CreateDomain(CreateDomain {
6268            name,
6269            data_type,
6270            collation,
6271            default,
6272            constraints,
6273        }))
6274    }
6275
6276    /// ```sql
6277    ///     CREATE POLICY name ON table_name [ AS { PERMISSIVE | RESTRICTIVE } ]
6278    ///     [ FOR { ALL | SELECT | INSERT | UPDATE | DELETE } ]
6279    ///     [ TO { role_name | PUBLIC | CURRENT_USER | CURRENT_ROLE | SESSION_USER } [, ...] ]
6280    ///     [ USING ( using_expression ) ]
6281    ///     [ WITH CHECK ( with_check_expression ) ]
6282    /// ```
6283    ///
6284    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createpolicy.html)
6285    pub fn parse_create_policy(&mut self) -> Result<Statement, ParserError> {
6286        let name = self.parse_identifier()?;
6287        self.expect_keyword_is(Keyword::ON)?;
6288        let table_name = self.parse_object_name(false)?;
6289
6290        let policy_type = if self.parse_keyword(Keyword::AS) {
6291            let keyword =
6292                self.expect_one_of_keywords(&[Keyword::PERMISSIVE, Keyword::RESTRICTIVE])?;
6293            Some(match keyword {
6294                Keyword::PERMISSIVE => CreatePolicyType::Permissive,
6295                Keyword::RESTRICTIVE => CreatePolicyType::Restrictive,
6296                _ => unreachable!(),
6297            })
6298        } else {
6299            None
6300        };
6301
6302        let command = if self.parse_keyword(Keyword::FOR) {
6303            let keyword = self.expect_one_of_keywords(&[
6304                Keyword::ALL,
6305                Keyword::SELECT,
6306                Keyword::INSERT,
6307                Keyword::UPDATE,
6308                Keyword::DELETE,
6309            ])?;
6310            Some(match keyword {
6311                Keyword::ALL => CreatePolicyCommand::All,
6312                Keyword::SELECT => CreatePolicyCommand::Select,
6313                Keyword::INSERT => CreatePolicyCommand::Insert,
6314                Keyword::UPDATE => CreatePolicyCommand::Update,
6315                Keyword::DELETE => CreatePolicyCommand::Delete,
6316                _ => unreachable!(),
6317            })
6318        } else {
6319            None
6320        };
6321
6322        let to = if self.parse_keyword(Keyword::TO) {
6323            Some(self.parse_comma_separated(|p| p.parse_owner())?)
6324        } else {
6325            None
6326        };
6327
6328        let using = if self.parse_keyword(Keyword::USING) {
6329            self.expect_token(&Token::LParen)?;
6330            let expr = self.parse_expr()?;
6331            self.expect_token(&Token::RParen)?;
6332            Some(expr)
6333        } else {
6334            None
6335        };
6336
6337        let with_check = if self.parse_keywords(&[Keyword::WITH, Keyword::CHECK]) {
6338            self.expect_token(&Token::LParen)?;
6339            let expr = self.parse_expr()?;
6340            self.expect_token(&Token::RParen)?;
6341            Some(expr)
6342        } else {
6343            None
6344        };
6345
6346        Ok(CreatePolicy {
6347            name,
6348            table_name,
6349            policy_type,
6350            command,
6351            to,
6352            using,
6353            with_check,
6354        })
6355    }
6356
6357    /// ```sql
6358    /// CREATE CONNECTOR [IF NOT EXISTS] connector_name
6359    /// [TYPE datasource_type]
6360    /// [URL datasource_url]
6361    /// [COMMENT connector_comment]
6362    /// [WITH DCPROPERTIES(property_name=property_value, ...)]
6363    /// ```
6364    ///
6365    /// [Hive Documentation](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362034#LanguageManualDDL-CreateDataConnectorCreateConnector)
6366    pub fn parse_create_connector(&mut self) -> Result<Statement, ParserError> {
6367        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6368        let name = self.parse_identifier()?;
6369
6370        let connector_type = if self.parse_keyword(Keyword::TYPE) {
6371            Some(self.parse_literal_string()?)
6372        } else {
6373            None
6374        };
6375
6376        let url = if self.parse_keyword(Keyword::URL) {
6377            Some(self.parse_literal_string()?)
6378        } else {
6379            None
6380        };
6381
6382        let comment = self.parse_optional_inline_comment()?;
6383
6384        let with_dcproperties =
6385            match self.parse_options_with_keywords(&[Keyword::WITH, Keyword::DCPROPERTIES])? {
6386                properties if !properties.is_empty() => Some(properties),
6387                _ => None,
6388            };
6389
6390        Ok(Statement::CreateConnector(CreateConnector {
6391            name,
6392            if_not_exists,
6393            connector_type,
6394            url,
6395            comment,
6396            with_dcproperties,
6397        }))
6398    }
6399
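    /// Parse a `DROP` statement, e.g. (illustrative names):
    ///
    /// ```sql
    /// DROP TABLE IF EXISTS t1, t2 CASCADE;
    /// ```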
6400    pub fn parse_drop(&mut self) -> Result<Statement, ParserError> {
6401        // MySQL supports `TEMPORARY` here; the generic and DuckDB dialects accept it as well
6402        let temporary = dialect_of!(self is MySqlDialect | GenericDialect | DuckDbDialect)
6403            && self.parse_keyword(Keyword::TEMPORARY);
6404        let persistent = dialect_of!(self is DuckDbDialect)
6405            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
6406
6407        let object_type = if self.parse_keyword(Keyword::TABLE) {
6408            ObjectType::Table
6409        } else if self.parse_keyword(Keyword::VIEW) {
6410            ObjectType::View
6411        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEW]) {
6412            ObjectType::MaterializedView
6413        } else if self.parse_keyword(Keyword::INDEX) {
6414            ObjectType::Index
6415        } else if self.parse_keyword(Keyword::ROLE) {
6416            ObjectType::Role
6417        } else if self.parse_keyword(Keyword::SCHEMA) {
6418            ObjectType::Schema
6419        } else if self.parse_keyword(Keyword::DATABASE) {
6420            ObjectType::Database
6421        } else if self.parse_keyword(Keyword::SEQUENCE) {
6422            ObjectType::Sequence
6423        } else if self.parse_keyword(Keyword::STAGE) {
6424            ObjectType::Stage
6425        } else if self.parse_keyword(Keyword::TYPE) {
6426            ObjectType::Type
6427        } else if self.parse_keyword(Keyword::USER) {
6428            ObjectType::User
6429        } else if self.parse_keyword(Keyword::STREAM) {
6430            ObjectType::Stream
6431        } else if self.parse_keyword(Keyword::FUNCTION) {
6432            return self.parse_drop_function();
6433        } else if self.parse_keyword(Keyword::POLICY) {
6434            return self.parse_drop_policy();
6435        } else if self.parse_keyword(Keyword::CONNECTOR) {
6436            return self.parse_drop_connector();
6437        } else if self.parse_keyword(Keyword::DOMAIN) {
6438            return self.parse_drop_domain();
6439        } else if self.parse_keyword(Keyword::PROCEDURE) {
6440            return self.parse_drop_procedure();
6441        } else if self.parse_keyword(Keyword::SECRET) {
6442            return self.parse_drop_secret(temporary, persistent);
6443        } else if self.parse_keyword(Keyword::TRIGGER) {
6444            return self.parse_drop_trigger();
6445        } else if self.parse_keyword(Keyword::EXTENSION) {
6446            return self.parse_drop_extension();
6447        } else {
6448            return self.expected(
6449                "CONNECTOR, DATABASE, EXTENSION, FUNCTION, INDEX, POLICY, PROCEDURE, ROLE, SCHEMA, SECRET, SEQUENCE, STAGE, TABLE, TRIGGER, TYPE, VIEW, MATERIALIZED VIEW or USER after DROP",
6450                self.peek_token(),
6451            );
6452        };
6453        // Many dialects support the non-standard `IF EXISTS` clause and allow
6454        // specifying multiple objects to delete in a single statement
6455        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6456        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
6457
6458        let loc = self.peek_token().span.start;
6459        let cascade = self.parse_keyword(Keyword::CASCADE);
6460        let restrict = self.parse_keyword(Keyword::RESTRICT);
6461        let purge = self.parse_keyword(Keyword::PURGE);
6462        if cascade && restrict {
6463            return parser_err!("Cannot specify both CASCADE and RESTRICT in DROP", loc);
6464        }
6465        if object_type == ObjectType::Role && (cascade || restrict || purge) {
6466            return parser_err!(
6467                "Cannot specify CASCADE, RESTRICT, or PURGE in DROP ROLE",
6468                loc
6469            );
6470        }
6471        let table = if self.parse_keyword(Keyword::ON) {
6472            Some(self.parse_object_name(false)?)
6473        } else {
6474            None
6475        };
6476        Ok(Statement::Drop {
6477            object_type,
6478            if_exists,
6479            names,
6480            cascade,
6481            restrict,
6482            purge,
6483            temporary,
6484            table,
6485        })
6486    }
6487
6488    fn parse_optional_drop_behavior(&mut self) -> Option<DropBehavior> {
6489        match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
6490            Some(Keyword::CASCADE) => Some(DropBehavior::Cascade),
6491            Some(Keyword::RESTRICT) => Some(DropBehavior::Restrict),
6492            _ => None,
6493        }
6494    }
6495
6496    /// ```sql
6497    /// DROP FUNCTION [ IF EXISTS ] name [ ( [ [ argmode ] [ argname ] argtype [, ...] ] ) ] [, ...]
6498    /// [ CASCADE | RESTRICT ]
6499    /// ```
6500    fn parse_drop_function(&mut self) -> Result<Statement, ParserError> {
6501        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6502        let func_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
6503        let drop_behavior = self.parse_optional_drop_behavior();
6504        Ok(Statement::DropFunction {
6505            if_exists,
6506            func_desc,
6507            drop_behavior,
6508        })
6509    }
6510
6511    /// ```sql
6512    /// DROP POLICY [ IF EXISTS ] name ON table_name [ CASCADE | RESTRICT ]
6513    /// ```
6514    ///
6515    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-droppolicy.html)
6516    fn parse_drop_policy(&mut self) -> Result<Statement, ParserError> {
6517        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6518        let name = self.parse_identifier()?;
6519        self.expect_keyword_is(Keyword::ON)?;
6520        let table_name = self.parse_object_name(false)?;
6521        let drop_behavior = self.parse_optional_drop_behavior();
6522        Ok(Statement::DropPolicy {
6523            if_exists,
6524            name,
6525            table_name,
6526            drop_behavior,
6527        })
6528    }
6529    /// ```sql
6530    /// DROP CONNECTOR [IF EXISTS] name
6531    /// ```
6532    ///
6533    /// See [Hive](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362034#LanguageManualDDL-DropConnector)
6534    fn parse_drop_connector(&mut self) -> Result<Statement, ParserError> {
6535        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6536        let name = self.parse_identifier()?;
6537        Ok(Statement::DropConnector { if_exists, name })
6538    }
6539
6540    /// ```sql
6541    /// DROP DOMAIN [ IF EXISTS ] name [ CASCADE | RESTRICT ]
6542    /// ```
6543    fn parse_drop_domain(&mut self) -> Result<Statement, ParserError> {
6544        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6545        let name = self.parse_object_name(false)?;
6546        let drop_behavior = self.parse_optional_drop_behavior();
6547        Ok(Statement::DropDomain(DropDomain {
6548            if_exists,
6549            name,
6550            drop_behavior,
6551        }))
6552    }
6553
6554    /// ```sql
6555    /// DROP PROCEDURE [ IF EXISTS ] name [ ( [ [ argmode ] [ argname ] argtype [, ...] ] ) ] [, ...]
6556    /// [ CASCADE | RESTRICT ]
6557    /// ```
6558    fn parse_drop_procedure(&mut self) -> Result<Statement, ParserError> {
6559        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6560        let proc_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
6561        let drop_behavior = self.parse_optional_drop_behavior();
6562        Ok(Statement::DropProcedure {
6563            if_exists,
6564            proc_desc,
6565            drop_behavior,
6566        })
6567    }
6568
6569    fn parse_function_desc(&mut self) -> Result<FunctionDesc, ParserError> {
6570        let name = self.parse_object_name(false)?;
6571
6572        let args = if self.consume_token(&Token::LParen) {
6573            if self.consume_token(&Token::RParen) {
6574                Some(vec![])
6575            } else {
6576                let args = self.parse_comma_separated(Parser::parse_function_arg)?;
6577                self.expect_token(&Token::RParen)?;
6578                Some(args)
6579            }
6580        } else {
6581            None
6582        };
6583
6584        Ok(FunctionDesc { name, args })
6585    }
6586
6587    /// See [DuckDB Docs](https://duckdb.org/docs/sql/statements/create_secret.html) for more details.
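    ///
    /// A sketch of the accepted form (the `PERSISTENT`/`TEMPORARY` keyword is
    /// consumed by the caller; the secret name is illustrative):
    ///
    /// ```sql
    /// DROP PERSISTENT SECRET IF EXISTS my_secret;
    /// ```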
6588    fn parse_drop_secret(
6589        &mut self,
6590        temporary: bool,
6591        persistent: bool,
6592    ) -> Result<Statement, ParserError> {
6593        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6594        let name = self.parse_identifier()?;
6595        let storage_specifier = if self.parse_keyword(Keyword::FROM) {
6596            self.parse_identifier().ok()
6597        } else {
6598            None
6599        };
6600        let temp = match (temporary, persistent) {
6601            (true, false) => Some(true),
6602            (false, true) => Some(false),
6603            (false, false) => None,
6604            _ => self.expected("TEMPORARY or PERSISTENT", self.peek_token())?,
6605        };
6606
6607        Ok(Statement::DropSecret {
6608            if_exists,
6609            temporary: temp,
6610            name,
6611            storage_specifier,
6612        })
6613    }
6614
6615    /// Parse a `DECLARE` statement.
6616    ///
6617    /// ```sql
6618    /// DECLARE name [ BINARY ] [ ASENSITIVE | INSENSITIVE ] [ [ NO ] SCROLL ]
6619    ///     CURSOR [ { WITH | WITHOUT } HOLD ] FOR query
6620    /// ```
6621    ///
6622    /// The syntax can vary significantly between warehouses. In such cases, see
6623    /// the warehouse-specific parse function for its grammar.
6624    pub fn parse_declare(&mut self) -> Result<Statement, ParserError> {
6625        if dialect_of!(self is BigQueryDialect) {
6626            return self.parse_big_query_declare();
6627        }
6628        if dialect_of!(self is SnowflakeDialect) {
6629            return self.parse_snowflake_declare();
6630        }
6631        if dialect_of!(self is MsSqlDialect) {
6632            return self.parse_mssql_declare();
6633        }
6634
6635        let name = self.parse_identifier()?;
6636
6637        let binary = Some(self.parse_keyword(Keyword::BINARY));
6638        let sensitive = if self.parse_keyword(Keyword::INSENSITIVE) {
6639            Some(true)
6640        } else if self.parse_keyword(Keyword::ASENSITIVE) {
6641            Some(false)
6642        } else {
6643            None
6644        };
6645        let scroll = if self.parse_keyword(Keyword::SCROLL) {
6646            Some(true)
6647        } else if self.parse_keywords(&[Keyword::NO, Keyword::SCROLL]) {
6648            Some(false)
6649        } else {
6650            None
6651        };
6652
6653        self.expect_keyword_is(Keyword::CURSOR)?;
6654        let declare_type = Some(DeclareType::Cursor);
6655
6656        let hold = match self.parse_one_of_keywords(&[Keyword::WITH, Keyword::WITHOUT]) {
6657            Some(keyword) => {
6658                self.expect_keyword_is(Keyword::HOLD)?;
6659
6660                match keyword {
6661                    Keyword::WITH => Some(true),
6662                    Keyword::WITHOUT => Some(false),
6663                    _ => unreachable!(),
6664                }
6665            }
6666            None => None,
6667        };
6668
6669        self.expect_keyword_is(Keyword::FOR)?;
6670
6671        let query = Some(self.parse_query()?);
6672
6673        Ok(Statement::Declare {
6674            stmts: vec![Declare {
6675                names: vec![name],
6676                data_type: None,
6677                assignment: None,
6678                declare_type,
6679                binary,
6680                sensitive,
6681                scroll,
6682                hold,
6683                for_query: query,
6684            }],
6685        })
6686    }
6687
6688    /// Parse a [BigQuery] `DECLARE` statement.
6689    ///
6690    /// Syntax:
6691    /// ```text
6692    /// DECLARE variable_name[, ...] [{ <variable_type> | <DEFAULT expression> }];
6693    /// ```
6694    /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#declare
6695    pub fn parse_big_query_declare(&mut self) -> Result<Statement, ParserError> {
6696        let names = self.parse_comma_separated(Parser::parse_identifier)?;
6697
6698        let data_type = match self.peek_token().token {
6699            Token::Word(w) if w.keyword == Keyword::DEFAULT => None,
6700            _ => Some(self.parse_data_type()?),
6701        };
6702
6703        let expr = if data_type.is_some() {
6704            if self.parse_keyword(Keyword::DEFAULT) {
6705                Some(self.parse_expr()?)
6706            } else {
6707                None
6708            }
6709        } else {
6710            // If no variable type is given, a default expression must be specified, per the BigQuery docs,
6711            // i.e. `DECLARE foo;` is invalid.
6712            self.expect_keyword_is(Keyword::DEFAULT)?;
6713            Some(self.parse_expr()?)
6714        };
6715
6716        Ok(Statement::Declare {
6717            stmts: vec![Declare {
6718                names,
6719                data_type,
6720                assignment: expr.map(|expr| DeclareAssignment::Default(Box::new(expr))),
6721                declare_type: None,
6722                binary: None,
6723                sensitive: None,
6724                scroll: None,
6725                hold: None,
6726                for_query: None,
6727            }],
6728        })
6729    }
6730
6731    /// Parse a [Snowflake] `DECLARE` statement.
6732    ///
6733    /// Syntax:
6734    /// ```text
6735    /// DECLARE
6736    ///   [{ <variable_declaration>
6737    ///      | <cursor_declaration>
6738    ///      | <resultset_declaration>
6739    ///      | <exception_declaration> }; ... ]
6740    ///
6741    /// <variable_declaration>
6742    /// <variable_name> [<type>] [ { DEFAULT | := } <expression>]
6743    ///
6744    /// <cursor_declaration>
6745    /// <cursor_name> CURSOR FOR <query>
6746    ///
6747    /// <resultset_declaration>
6748    /// <resultset_name> RESULTSET [ { DEFAULT | := } ( <query> ) ] ;
6749    ///
6750    /// <exception_declaration>
6751    /// <exception_name> EXCEPTION [ ( <exception_number> , '<exception_message>' ) ] ;
6752    /// ```
6753    ///
6754    /// [Snowflake]: https://docs.snowflake.com/en/sql-reference/snowflake-scripting/declare
6755    pub fn parse_snowflake_declare(&mut self) -> Result<Statement, ParserError> {
6756        let mut stmts = vec![];
6757        loop {
6758            let name = self.parse_identifier()?;
6759            let (declare_type, for_query, assigned_expr, data_type) =
6760                if self.parse_keyword(Keyword::CURSOR) {
6761                    self.expect_keyword_is(Keyword::FOR)?;
6762                    match self.peek_token().token {
6763                        Token::Word(w) if w.keyword == Keyword::SELECT => (
6764                            Some(DeclareType::Cursor),
6765                            Some(self.parse_query()?),
6766                            None,
6767                            None,
6768                        ),
6769                        _ => (
6770                            Some(DeclareType::Cursor),
6771                            None,
6772                            Some(DeclareAssignment::For(Box::new(self.parse_expr()?))),
6773                            None,
6774                        ),
6775                    }
6776                } else if self.parse_keyword(Keyword::RESULTSET) {
6777                    let assigned_expr = if self.peek_token().token != Token::SemiColon {
6778                        self.parse_snowflake_variable_declaration_expression()?
6779                    } else {
6780                        // Nothing more to do. The statement has no further parameters.
6781                        None
6782                    };
6783
6784                    (Some(DeclareType::ResultSet), None, assigned_expr, None)
6785                } else if self.parse_keyword(Keyword::EXCEPTION) {
6786                    let assigned_expr = if self.peek_token().token == Token::LParen {
6787                        Some(DeclareAssignment::Expr(Box::new(self.parse_expr()?)))
6788                    } else {
6789                        // Nothing more to do. The statement has no further parameters.
6790                        None
6791                    };
6792
6793                    (Some(DeclareType::Exception), None, assigned_expr, None)
6794                } else {
6795                    // Without an explicit keyword, the only valid option is variable declaration.
6796                    let (assigned_expr, data_type) = if let Some(assigned_expr) =
6797                        self.parse_snowflake_variable_declaration_expression()?
6798                    {
6799                        (Some(assigned_expr), None)
6800                    } else if let Token::Word(_) = self.peek_token().token {
6801                        let data_type = self.parse_data_type()?;
6802                        (
6803                            self.parse_snowflake_variable_declaration_expression()?,
6804                            Some(data_type),
6805                        )
6806                    } else {
6807                        (None, None)
6808                    };
6809                    (None, None, assigned_expr, data_type)
6810                };
6811            let stmt = Declare {
6812                names: vec![name],
6813                data_type,
6814                assignment: assigned_expr,
6815                declare_type,
6816                binary: None,
6817                sensitive: None,
6818                scroll: None,
6819                hold: None,
6820                for_query,
6821            };
6822
6823            stmts.push(stmt);
6824            if self.consume_token(&Token::SemiColon) {
6825                match self.peek_token().token {
6826                    Token::Word(w)
6827                        if ALL_KEYWORDS
6828                            .binary_search(&w.value.to_uppercase().as_str())
6829                            .is_err() =>
6830                    {
6831                        // Not a keyword - start of a new declaration.
6832                        continue;
6833                    }
6834                    _ => {
6835                        // Put back the semicolon, this is the end of the DECLARE statement.
6836                        self.prev_token();
6837                    }
6838                }
6839            }
6840
6841            break;
6842        }
6843
6844        Ok(Statement::Declare { stmts })
6845    }
6846
6847    /// Parse a [MsSql] `DECLARE` statement.
6848    ///
6849    /// Syntax:
6850    /// ```text
6851    /// DECLARE
6852    /// {
6853    ///   { @local_variable [AS] data_type [ = value ] }
6854    ///   | { @cursor_variable_name CURSOR [ FOR ] }
6855    /// } [ ,...n ]
6856    /// ```
6857    /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/language-elements/declare-local-variable-transact-sql?view=sql-server-ver16
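    ///
    /// A minimal usage sketch (illustrative, assuming the public `Parser::parse_sql`
    /// entry point dispatches `DECLARE` here for the MsSql dialect):
    ///
    /// ```
    /// use sqlparser::ast::Statement;
    /// use sqlparser::dialect::MsSqlDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// // Sketch: two comma-separated declarations, one with an `=` assignment.
    /// let sql = "DECLARE @foo INT = 5, @bar NVARCHAR(50);";
    /// let stmts = Parser::parse_sql(&MsSqlDialect {}, sql).unwrap();
    /// assert!(matches!(stmts[0], Statement::Declare { .. }));
    /// ```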
6858    pub fn parse_mssql_declare(&mut self) -> Result<Statement, ParserError> {
6859        let stmts = self.parse_comma_separated(Parser::parse_mssql_declare_stmt)?;
6860
6861        Ok(Statement::Declare { stmts })
6862    }
6863
6864    /// Parse the body of a [MsSql] `DECLARE` statement.
6865    ///
6866    /// Syntax:
6867    /// ```text
6868    /// {
6869    ///   { @local_variable [AS] data_type [ = value ] }
6870    ///   | { @cursor_variable_name CURSOR [ FOR ] }
6871    /// } [ ,...n ]
6872    /// ```
6873    /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/language-elements/declare-local-variable-transact-sql?view=sql-server-ver16
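    ///
    /// For example, a cursor variable (a sketch that goes through the public parser
    /// entry point rather than calling this helper directly):
    ///
    /// ```
    /// use sqlparser::ast::Statement;
    /// use sqlparser::dialect::MsSqlDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// // Sketch: a cursor variable declaration with a FOR query.
    /// let sql = "DECLARE @my_cursor CURSOR FOR SELECT 1;";
    /// let stmts = Parser::parse_sql(&MsSqlDialect {}, sql).unwrap();
    /// assert!(matches!(stmts[0], Statement::Declare { .. }));
    /// ```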
6874    pub fn parse_mssql_declare_stmt(&mut self) -> Result<Declare, ParserError> {
6875        let name = {
6876            let ident = self.parse_identifier()?;
6877            if !ident.value.starts_with('@')
6878                && !matches!(
6879                    self.peek_token().token,
6880                    Token::Word(w) if w.keyword == Keyword::CURSOR
6881                )
6882            {
6883                Err(ParserError::ParserError(
6884                    "Invalid MsSql variable declaration.".to_string(),
6885                ))
6886            } else {
6887                Ok(ident)
6888            }
6889        }?;
6890
6891        let (declare_type, data_type) = match self.peek_token().token {
6892            Token::Word(w) => match w.keyword {
6893                Keyword::CURSOR => {
6894                    self.next_token();
6895                    (Some(DeclareType::Cursor), None)
6896                }
6897                Keyword::AS => {
6898                    self.next_token();
6899                    (None, Some(self.parse_data_type()?))
6900                }
6901                _ => (None, Some(self.parse_data_type()?)),
6902            },
6903            _ => (None, Some(self.parse_data_type()?)),
6904        };
6905
6906        let (for_query, assignment) = if self.peek_keyword(Keyword::FOR) {
6907            self.next_token();
6908            let query = Some(self.parse_query()?);
6909            (query, None)
6910        } else {
6911            let assignment = self.parse_mssql_variable_declaration_expression()?;
6912            (None, assignment)
6913        };
6914
6915        Ok(Declare {
6916            names: vec![name],
6917            data_type,
6918            assignment,
6919            declare_type,
6920            binary: None,
6921            sensitive: None,
6922            scroll: None,
6923            hold: None,
6924            for_query,
6925        })
6926    }
6927
6928    /// Parses the assigned expression in a variable declaration.
6929    ///
6930    /// Syntax:
6931    /// ```text
6932    /// [ { DEFAULT | := } <expression>]
6933    /// ```
6934    /// <https://docs.snowflake.com/en/sql-reference/snowflake-scripting/declare#variable-declaration-syntax>
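    ///
    /// For example, both the `DEFAULT <expression>` and `:= <expression>` forms are
    /// recognized (a sketch via the public parser entry point):
    ///
    /// ```
    /// use sqlparser::ast::Statement;
    /// use sqlparser::dialect::SnowflakeDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// // Sketch: a variable declaration using the `:=` assignment form.
    /// let stmts = Parser::parse_sql(&SnowflakeDialect {}, "DECLARE x INT := 42;").unwrap();
    /// assert!(matches!(stmts[0], Statement::Declare { .. }));
    /// ```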
6935    pub fn parse_snowflake_variable_declaration_expression(
6936        &mut self,
6937    ) -> Result<Option<DeclareAssignment>, ParserError> {
6938        Ok(match self.peek_token().token {
6939            Token::Word(w) if w.keyword == Keyword::DEFAULT => {
6940                self.next_token(); // Skip `DEFAULT`
6941                Some(DeclareAssignment::Default(Box::new(self.parse_expr()?)))
6942            }
6943            Token::Assignment => {
6944                self.next_token(); // Skip `:=`
6945                Some(DeclareAssignment::DuckAssignment(Box::new(
6946                    self.parse_expr()?,
6947                )))
6948            }
6949            _ => None,
6950        })
6951    }
6952
6953    /// Parses the assigned expression in a variable declaration.
6954    ///
6955    /// Syntax:
6956    /// ```text
6957    /// [ = <expression>]
6958    /// ```
6959    pub fn parse_mssql_variable_declaration_expression(
6960        &mut self,
6961    ) -> Result<Option<DeclareAssignment>, ParserError> {
6962        Ok(match self.peek_token().token {
6963            Token::Eq => {
6964                self.next_token(); // Skip `=`
6965                Some(DeclareAssignment::MsSqlAssignment(Box::new(
6966                    self.parse_expr()?,
6967                )))
6968            }
6969            _ => None,
6970        })
6971    }
6972
6973    // FETCH [ direction { FROM | IN } ] cursor INTO target;
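    /// Parse a `FETCH` statement (syntax sketched in the comment above).
    ///
    /// A minimal sketch, assuming the top-level parser routes `FETCH` here for the
    /// generic dialect:
    ///
    /// ```
    /// use sqlparser::ast::Statement;
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// // Sketch: fetch the next row from a cursor into a target.
    /// let stmts = Parser::parse_sql(&GenericDialect {}, "FETCH NEXT FROM cur INTO dest").unwrap();
    /// assert!(matches!(stmts[0], Statement::Fetch { .. }));
    /// ```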
6974    pub fn parse_fetch_statement(&mut self) -> Result<Statement, ParserError> {
6975        let direction = if self.parse_keyword(Keyword::NEXT) {
6976            FetchDirection::Next
6977        } else if self.parse_keyword(Keyword::PRIOR) {
6978            FetchDirection::Prior
6979        } else if self.parse_keyword(Keyword::FIRST) {
6980            FetchDirection::First
6981        } else if self.parse_keyword(Keyword::LAST) {
6982            FetchDirection::Last
6983        } else if self.parse_keyword(Keyword::ABSOLUTE) {
6984            FetchDirection::Absolute {
6985                limit: self.parse_number_value()?.value,
6986            }
6987        } else if self.parse_keyword(Keyword::RELATIVE) {
6988            FetchDirection::Relative {
6989                limit: self.parse_number_value()?.value,
6990            }
6991        } else if self.parse_keyword(Keyword::FORWARD) {
6992            if self.parse_keyword(Keyword::ALL) {
6993                FetchDirection::ForwardAll
6994            } else {
6995                FetchDirection::Forward {
6996                    // TODO: Support optional
6997                    limit: Some(self.parse_number_value()?.value),
6998                }
6999            }
7000        } else if self.parse_keyword(Keyword::BACKWARD) {
7001            if self.parse_keyword(Keyword::ALL) {
7002                FetchDirection::BackwardAll
7003            } else {
7004                FetchDirection::Backward {
7005                    // TODO: Support optional
7006                    limit: Some(self.parse_number_value()?.value),
7007                }
7008            }
7009        } else if self.parse_keyword(Keyword::ALL) {
7010            FetchDirection::All
7011        } else {
7012            FetchDirection::Count {
7013                limit: self.parse_number_value()?.value,
7014            }
7015        };
7016
7017        let position = if self.peek_keyword(Keyword::FROM) {
7018            self.expect_keyword(Keyword::FROM)?;
7019            FetchPosition::From
7020        } else if self.peek_keyword(Keyword::IN) {
7021            self.expect_keyword(Keyword::IN)?;
7022            FetchPosition::In
7023        } else {
7024            return parser_err!("Expected FROM or IN", self.peek_token().span.start);
7025        };
7026
7027        let name = self.parse_identifier()?;
7028
7029        let into = if self.parse_keyword(Keyword::INTO) {
7030            Some(self.parse_object_name(false)?)
7031        } else {
7032            None
7033        };
7034
7035        Ok(Statement::Fetch {
7036            name,
7037            direction,
7038            position,
7039            into,
7040        })
7041    }
7042
7043    pub fn parse_discard(&mut self) -> Result<Statement, ParserError> {
7044        let object_type = if self.parse_keyword(Keyword::ALL) {
7045            DiscardObject::ALL
7046        } else if self.parse_keyword(Keyword::PLANS) {
7047            DiscardObject::PLANS
7048        } else if self.parse_keyword(Keyword::SEQUENCES) {
7049            DiscardObject::SEQUENCES
7050        } else if self.parse_keyword(Keyword::TEMP) || self.parse_keyword(Keyword::TEMPORARY) {
7051            DiscardObject::TEMP
7052        } else {
7053            return self.expected(
7054                "ALL, PLANS, SEQUENCES, TEMP or TEMPORARY after DISCARD",
7055                self.peek_token(),
7056            );
7057        };
7058        Ok(Statement::Discard { object_type })
7059    }
7060
7061    pub fn parse_create_index(&mut self, unique: bool) -> Result<Statement, ParserError> {
7062        let concurrently = self.parse_keyword(Keyword::CONCURRENTLY);
7063        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7064        let index_name = if if_not_exists || !self.parse_keyword(Keyword::ON) {
7065            let index_name = self.parse_object_name(false)?;
7066            self.expect_keyword_is(Keyword::ON)?;
7067            Some(index_name)
7068        } else {
7069            None
7070        };
7071        let table_name = self.parse_object_name(false)?;
7072        let using = if self.parse_keyword(Keyword::USING) {
7073            Some(self.parse_index_type()?)
7074        } else {
7075            None
7076        };
7077
7078        let columns = self.parse_parenthesized_index_column_list()?;
7079
7080        let include = if self.parse_keyword(Keyword::INCLUDE) {
7081            self.expect_token(&Token::LParen)?;
7082            let columns = self.parse_comma_separated(|p| p.parse_identifier())?;
7083            self.expect_token(&Token::RParen)?;
7084            columns
7085        } else {
7086            vec![]
7087        };
7088
7089        let nulls_distinct = if self.parse_keyword(Keyword::NULLS) {
7090            let not = self.parse_keyword(Keyword::NOT);
7091            self.expect_keyword_is(Keyword::DISTINCT)?;
7092            Some(!not)
7093        } else {
7094            None
7095        };
7096
7097        let with = if self.dialect.supports_create_index_with_clause()
7098            && self.parse_keyword(Keyword::WITH)
7099        {
7100            self.expect_token(&Token::LParen)?;
7101            let with_params = self.parse_comma_separated(Parser::parse_expr)?;
7102            self.expect_token(&Token::RParen)?;
7103            with_params
7104        } else {
7105            Vec::new()
7106        };
7107
7108        let predicate = if self.parse_keyword(Keyword::WHERE) {
7109            Some(self.parse_expr()?)
7110        } else {
7111            None
7112        };
7113
7114        // MySQL options (including the modern style of `USING` after the column list instead of
7115        // before, which is deprecated) shouldn't conflict with other preceding options (e.g. `WITH
7116        // PARSER` won't be caught by the above `WITH` clause parsing because MySQL doesn't set that
7117        // support flag). This is probably invalid syntax for other dialects, but it is simpler to
7118        // parse it anyway (as we do inside `ALTER TABLE` and `CREATE TABLE` parsing).
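        //
        // For example (MySQL-style, as a sketch): in `CREATE INDEX idx ON t (c) USING BTREE
        // COMMENT 'hint' ALGORITHM = INPLACE LOCK = NONE`, the trailing `USING`/`COMMENT`
        // parts are collected here, while the `ALGORITHM`/`LOCK` clauses are picked up as
        // `alter_options` below.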
7119        let index_options = self.parse_index_options()?;
7120
7121        // MySQL allows `ALGORITHM` and `LOCK` options. Unlike in `ALTER TABLE`, they need not be comma separated.
7122        let mut alter_options = Vec::new();
7123        while self
7124            .peek_one_of_keywords(&[Keyword::ALGORITHM, Keyword::LOCK])
7125            .is_some()
7126        {
7127            alter_options.push(self.parse_alter_table_operation()?)
7128        }
7129
7130        Ok(Statement::CreateIndex(CreateIndex {
7131            name: index_name,
7132            table_name,
7133            using,
7134            columns,
7135            unique,
7136            concurrently,
7137            if_not_exists,
7138            include,
7139            nulls_distinct,
7140            with,
7141            predicate,
7142            index_options,
7143            alter_options,
7144        }))
7145    }
7146
7147    pub fn parse_create_extension(&mut self) -> Result<Statement, ParserError> {
7148        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7149        let name = self.parse_identifier()?;
7150
7151        let (schema, version, cascade) = if self.parse_keyword(Keyword::WITH) {
7152            let schema = if self.parse_keyword(Keyword::SCHEMA) {
7153                Some(self.parse_identifier()?)
7154            } else {
7155                None
7156            };
7157
7158            let version = if self.parse_keyword(Keyword::VERSION) {
7159                Some(self.parse_identifier()?)
7160            } else {
7161                None
7162            };
7163
7164            let cascade = self.parse_keyword(Keyword::CASCADE);
7165
7166            (schema, version, cascade)
7167        } else {
7168            (None, None, false)
7169        };
7170
7171        Ok(Statement::CreateExtension {
7172            name,
7173            if_not_exists,
7174            schema,
7175            version,
7176            cascade,
7177        })
7178    }
7179
7180    /// Parse a PostgreSQL-specific [Statement::DropExtension] statement.
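    ///
    /// A minimal sketch (via the public `Parser::parse_sql` entry point with the
    /// PostgreSQL dialect):
    ///
    /// ```
    /// use sqlparser::ast::Statement;
    /// use sqlparser::dialect::PostgreSqlDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// // Sketch: drop an extension if it exists, cascading to dependent objects.
    /// let sql = "DROP EXTENSION IF EXISTS postgis CASCADE";
    /// let stmts = Parser::parse_sql(&PostgreSqlDialect {}, sql).unwrap();
    /// assert!(matches!(stmts[0], Statement::DropExtension { .. }));
    /// ```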
7181    pub fn parse_drop_extension(&mut self) -> Result<Statement, ParserError> {
7182        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7183        let names = self.parse_comma_separated(|p| p.parse_identifier())?;
7184        let cascade_or_restrict =
7185            self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]);
7186        Ok(Statement::DropExtension {
7187            names,
7188            if_exists,
7189            cascade_or_restrict: cascade_or_restrict
7190                .map(|k| match k {
7191                    Keyword::CASCADE => Ok(ReferentialAction::Cascade),
7192                    Keyword::RESTRICT => Ok(ReferentialAction::Restrict),
7193                    _ => self.expected("CASCADE or RESTRICT", self.peek_token()),
7194                })
7195                .transpose()?,
7196        })
7197    }
7198
7199    // TODO: Implement parsing for Skewed
7200    pub fn parse_hive_distribution(&mut self) -> Result<HiveDistributionStyle, ParserError> {
7201        if self.parse_keywords(&[Keyword::PARTITIONED, Keyword::BY]) {
7202            self.expect_token(&Token::LParen)?;
7203            let columns = self.parse_comma_separated(Parser::parse_column_def)?;
7204            self.expect_token(&Token::RParen)?;
7205            Ok(HiveDistributionStyle::PARTITIONED { columns })
7206        } else {
7207            Ok(HiveDistributionStyle::NONE)
7208        }
7209    }
7210
7211    pub fn parse_hive_formats(&mut self) -> Result<HiveFormat, ParserError> {
7212        let mut hive_format = HiveFormat::default();
7213        loop {
7214            match self.parse_one_of_keywords(&[
7215                Keyword::ROW,
7216                Keyword::STORED,
7217                Keyword::LOCATION,
7218                Keyword::WITH,
7219            ]) {
7220                Some(Keyword::ROW) => {
7221                    hive_format.row_format = Some(self.parse_row_format()?);
7222                }
7223                Some(Keyword::STORED) => {
7224                    self.expect_keyword_is(Keyword::AS)?;
7225                    if self.parse_keyword(Keyword::INPUTFORMAT) {
7226                        let input_format = self.parse_expr()?;
7227                        self.expect_keyword_is(Keyword::OUTPUTFORMAT)?;
7228                        let output_format = self.parse_expr()?;
7229                        hive_format.storage = Some(HiveIOFormat::IOF {
7230                            input_format,
7231                            output_format,
7232                        });
7233                    } else {
7234                        let format = self.parse_file_format()?;
7235                        hive_format.storage = Some(HiveIOFormat::FileFormat { format });
7236                    }
7237                }
7238                Some(Keyword::LOCATION) => {
7239                    hive_format.location = Some(self.parse_literal_string()?);
7240                }
7241                Some(Keyword::WITH) => {
7242                    self.prev_token();
7243                    let properties = self
7244                        .parse_options_with_keywords(&[Keyword::WITH, Keyword::SERDEPROPERTIES])?;
7245                    if !properties.is_empty() {
7246                        hive_format.serde_properties = Some(properties);
7247                    } else {
7248                        break;
7249                    }
7250                }
7251                None => break,
7252                _ => break,
7253            }
7254        }
7255
7256        Ok(hive_format)
7257    }
7258
7259    pub fn parse_row_format(&mut self) -> Result<HiveRowFormat, ParserError> {
7260        self.expect_keyword_is(Keyword::FORMAT)?;
7261        match self.parse_one_of_keywords(&[Keyword::SERDE, Keyword::DELIMITED]) {
7262            Some(Keyword::SERDE) => {
7263                let class = self.parse_literal_string()?;
7264                Ok(HiveRowFormat::SERDE { class })
7265            }
7266            _ => {
7267                let mut row_delimiters = vec![];
7268
7269                loop {
7270                    match self.parse_one_of_keywords(&[
7271                        Keyword::FIELDS,
7272                        Keyword::COLLECTION,
7273                        Keyword::MAP,
7274                        Keyword::LINES,
7275                        Keyword::NULL,
7276                    ]) {
7277                        Some(Keyword::FIELDS) => {
7278                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
7279                                row_delimiters.push(HiveRowDelimiter {
7280                                    delimiter: HiveDelimiter::FieldsTerminatedBy,
7281                                    char: self.parse_identifier()?,
7282                                });
7283
7284                                if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) {
7285                                    row_delimiters.push(HiveRowDelimiter {
7286                                        delimiter: HiveDelimiter::FieldsEscapedBy,
7287                                        char: self.parse_identifier()?,
7288                                    });
7289                                }
7290                            } else {
7291                                break;
7292                            }
7293                        }
7294                        Some(Keyword::COLLECTION) => {
7295                            if self.parse_keywords(&[
7296                                Keyword::ITEMS,
7297                                Keyword::TERMINATED,
7298                                Keyword::BY,
7299                            ]) {
7300                                row_delimiters.push(HiveRowDelimiter {
7301                                    delimiter: HiveDelimiter::CollectionItemsTerminatedBy,
7302                                    char: self.parse_identifier()?,
7303                                });
7304                            } else {
7305                                break;
7306                            }
7307                        }
7308                        Some(Keyword::MAP) => {
7309                            if self.parse_keywords(&[
7310                                Keyword::KEYS,
7311                                Keyword::TERMINATED,
7312                                Keyword::BY,
7313                            ]) {
7314                                row_delimiters.push(HiveRowDelimiter {
7315                                    delimiter: HiveDelimiter::MapKeysTerminatedBy,
7316                                    char: self.parse_identifier()?,
7317                                });
7318                            } else {
7319                                break;
7320                            }
7321                        }
7322                        Some(Keyword::LINES) => {
7323                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
7324                                row_delimiters.push(HiveRowDelimiter {
7325                                    delimiter: HiveDelimiter::LinesTerminatedBy,
7326                                    char: self.parse_identifier()?,
7327                                });
7328                            } else {
7329                                break;
7330                            }
7331                        }
7332                        Some(Keyword::NULL) => {
7333                            if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) {
7334                                row_delimiters.push(HiveRowDelimiter {
7335                                    delimiter: HiveDelimiter::NullDefinedAs,
7336                                    char: self.parse_identifier()?,
7337                                });
7338                            } else {
7339                                break;
7340                            }
7341                        }
7342                        _ => {
7343                            break;
7344                        }
7345                    }
7346                }
7347
7348                Ok(HiveRowFormat::DELIMITED {
7349                    delimiters: row_delimiters,
7350                })
7351            }
7352        }
7353    }
7354
7355    fn parse_optional_on_cluster(&mut self) -> Result<Option<Ident>, ParserError> {
7356        if self.parse_keywords(&[Keyword::ON, Keyword::CLUSTER]) {
7357            Ok(Some(self.parse_identifier()?))
7358        } else {
7359            Ok(None)
7360        }
7361    }
7362
7363    pub fn parse_create_table(
7364        &mut self,
7365        or_replace: bool,
7366        temporary: bool,
7367        global: Option<bool>,
7368        transient: bool,
7369    ) -> Result<Statement, ParserError> {
7370        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
7371        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7372        let table_name = self.parse_object_name(allow_unquoted_hyphen)?;
7373
7374        // Clickhouse has `ON CLUSTER 'cluster'` syntax for DDLs
7375        let on_cluster = self.parse_optional_on_cluster()?;
7376
7377        let like = self.maybe_parse_create_table_like(allow_unquoted_hyphen)?;
7378
7379        let clone = if self.parse_keyword(Keyword::CLONE) {
7380            self.parse_object_name(allow_unquoted_hyphen).ok()
7381        } else {
7382            None
7383        };
7384
7385        // parse optional column list (schema)
7386        let (columns, constraints) = self.parse_columns()?;
7387        let comment_after_column_def =
7388            if dialect_of!(self is HiveDialect) && self.parse_keyword(Keyword::COMMENT) {
7389                let next_token = self.next_token();
7390                match next_token.token {
7391                    Token::SingleQuotedString(str) => Some(CommentDef::WithoutEq(str)),
7392                    _ => self.expected("comment", next_token)?,
7393                }
7394            } else {
7395                None
7396            };
7397
7398        // SQLite supports `WITHOUT ROWID` at the end of `CREATE TABLE`
7399        let without_rowid = self.parse_keywords(&[Keyword::WITHOUT, Keyword::ROWID]);
7400
7401        let hive_distribution = self.parse_hive_distribution()?;
7402        let clustered_by = self.parse_optional_clustered_by()?;
7403        let hive_formats = self.parse_hive_formats()?;
7404
7405        let create_table_config = self.parse_optional_create_table_config()?;
7406
7407        // ClickHouse supports `PRIMARY KEY`, before `ORDER BY`
7408        // https://clickhouse.com/docs/en/sql-reference/statements/create/table#primary-key
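        // e.g. (ClickHouse, as a sketch):
        //   CREATE TABLE t (x UInt64) ENGINE = MergeTree PRIMARY KEY x ORDER BY x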
7409        let primary_key = if dialect_of!(self is ClickHouseDialect | GenericDialect)
7410            && self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY])
7411        {
7412            Some(Box::new(self.parse_expr()?))
7413        } else {
7414            None
7415        };
7416
7417        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
7418            if self.consume_token(&Token::LParen) {
7419                let columns = if self.peek_token() != Token::RParen {
7420                    self.parse_comma_separated(|p| p.parse_expr())?
7421                } else {
7422                    vec![]
7423                };
7424                self.expect_token(&Token::RParen)?;
7425                Some(OneOrManyWithParens::Many(columns))
7426            } else {
7427                Some(OneOrManyWithParens::One(self.parse_expr()?))
7428            }
7429        } else {
7430            None
7431        };
7432
7433        let on_commit = if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT]) {
7434            Some(self.parse_create_table_on_commit()?)
7435        } else {
7436            None
7437        };
7438
7439        let strict = self.parse_keyword(Keyword::STRICT);
7440
7441        // Parse optional `AS ( query )`
7442        let query = if self.parse_keyword(Keyword::AS) {
7443            Some(self.parse_query()?)
7444        } else if self.dialect.supports_create_table_select() && self.parse_keyword(Keyword::SELECT)
7445        {
7446            // rewind the SELECT keyword
7447            self.prev_token();
7448            Some(self.parse_query()?)
7449        } else {
7450            None
7451        };
7452
7453        Ok(CreateTableBuilder::new(table_name)
7454            .temporary(temporary)
7455            .columns(columns)
7456            .constraints(constraints)
7457            .or_replace(or_replace)
7458            .if_not_exists(if_not_exists)
7459            .transient(transient)
7460            .hive_distribution(hive_distribution)
7461            .hive_formats(Some(hive_formats))
7462            .global(global)
7463            .query(query)
7464            .without_rowid(without_rowid)
7465            .like(like)
7466            .clone_clause(clone)
7467            .comment_after_column_def(comment_after_column_def)
7468            .order_by(order_by)
7469            .on_commit(on_commit)
7470            .on_cluster(on_cluster)
7471            .clustered_by(clustered_by)
7472            .partition_by(create_table_config.partition_by)
7473            .cluster_by(create_table_config.cluster_by)
7474            .inherits(create_table_config.inherits)
7475            .table_options(create_table_config.table_options)
7476            .primary_key(primary_key)
7477            .strict(strict)
7478            .build())
7479    }
7480
7481    fn maybe_parse_create_table_like(
7482        &mut self,
7483        allow_unquoted_hyphen: bool,
7484    ) -> Result<Option<CreateTableLikeKind>, ParserError> {
7485        let like = if self.dialect.supports_create_table_like_parenthesized()
7486            && self.consume_token(&Token::LParen)
7487        {
7488            if self.parse_keyword(Keyword::LIKE) {
7489                let name = self.parse_object_name(allow_unquoted_hyphen)?;
7490                let defaults = if self.parse_keywords(&[Keyword::INCLUDING, Keyword::DEFAULTS]) {
7491                    Some(CreateTableLikeDefaults::Including)
7492                } else if self.parse_keywords(&[Keyword::EXCLUDING, Keyword::DEFAULTS]) {
7493                    Some(CreateTableLikeDefaults::Excluding)
7494                } else {
7495                    None
7496                };
7497                self.expect_token(&Token::RParen)?;
7498                Some(CreateTableLikeKind::Parenthesized(CreateTableLike {
7499                    name,
7500                    defaults,
7501                }))
7502            } else {
7503                // Roll back the '(': it's probably the start of the column list.
7504                self.prev_token();
7505                None
7506            }
7507        } else if self.parse_keyword(Keyword::LIKE) || self.parse_keyword(Keyword::ILIKE) {
7508            let name = self.parse_object_name(allow_unquoted_hyphen)?;
7509            Some(CreateTableLikeKind::Plain(CreateTableLike {
7510                name,
7511                defaults: None,
7512            }))
7513        } else {
7514            None
7515        };
7516        Ok(like)
7517    }
7518
7519    pub(crate) fn parse_create_table_on_commit(&mut self) -> Result<OnCommit, ParserError> {
7520        if self.parse_keywords(&[Keyword::DELETE, Keyword::ROWS]) {
7521            Ok(OnCommit::DeleteRows)
7522        } else if self.parse_keywords(&[Keyword::PRESERVE, Keyword::ROWS]) {
7523            Ok(OnCommit::PreserveRows)
7524        } else if self.parse_keyword(Keyword::DROP) {
7525            Ok(OnCommit::Drop)
7526        } else {
7527            parser_err!(
7528                "Expecting DELETE ROWS, PRESERVE ROWS or DROP",
7529                self.peek_token().span.start
7530            )
7531        }
7532    }
7533
7534    /// Parse table configuration such as inheritance, partitioning, and clustering information during table creation.
7535    ///
7536    /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#syntax_2)
7537    /// [PostgreSQL](https://www.postgresql.org/docs/current/ddl-partitioning.html)
7538    /// [MySql](https://dev.mysql.com/doc/refman/8.4/en/create-table.html)
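    ///
    /// A minimal sketch (BigQuery-style partitioning and clustering, parsed through
    /// the public `Parser::parse_sql` entry point):
    ///
    /// ```
    /// use sqlparser::ast::Statement;
    /// use sqlparser::dialect::BigQueryDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// // Sketch: PARTITION BY and CLUSTER BY are captured by this configuration step.
    /// let sql = "CREATE TABLE t (x INT64) PARTITION BY x CLUSTER BY x";
    /// let stmts = Parser::parse_sql(&BigQueryDialect {}, sql).unwrap();
    /// assert!(matches!(stmts[0], Statement::CreateTable(_)));
    /// ```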
7539    fn parse_optional_create_table_config(
7540        &mut self,
7541    ) -> Result<CreateTableConfiguration, ParserError> {
7542        let mut table_options = CreateTableOptions::None;
7543
7544        let inherits = if self.parse_keyword(Keyword::INHERITS) {
7545            Some(self.parse_parenthesized_qualified_column_list(IsOptional::Mandatory, false)?)
7546        } else {
7547            None
7548        };
7549
7550        // PostgreSQL supports `WITH ( options )`, before `AS`
7551        let with_options = self.parse_options(Keyword::WITH)?;
7552        if !with_options.is_empty() {
7553            table_options = CreateTableOptions::With(with_options)
7554        }
7555
7556        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
7557        if !table_properties.is_empty() {
7558            table_options = CreateTableOptions::TableProperties(table_properties);
7559        }
7560        let partition_by = if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
7561            && self.parse_keywords(&[Keyword::PARTITION, Keyword::BY])
7562        {
7563            Some(Box::new(self.parse_expr()?))
7564        } else {
7565            None
7566        };
7567
7568        let mut cluster_by = None;
7569        if dialect_of!(self is BigQueryDialect | GenericDialect) {
7570            if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
7571                cluster_by = Some(WrappedCollection::NoWrapping(
7572                    self.parse_comma_separated(|p| p.parse_expr())?,
7573                ));
7574            };
7575
7576            if let Token::Word(word) = self.peek_token().token {
7577                if word.keyword == Keyword::OPTIONS {
7578                    table_options =
7579                        CreateTableOptions::Options(self.parse_options(Keyword::OPTIONS)?)
7580                }
7581            };
7582        }
7583
7584        if !dialect_of!(self is HiveDialect) && table_options == CreateTableOptions::None {
7585            let plain_options = self.parse_plain_options()?;
7586            if !plain_options.is_empty() {
7587                table_options = CreateTableOptions::Plain(plain_options)
7588            }
7589        };
7590
7591        Ok(CreateTableConfiguration {
7592            partition_by,
7593            cluster_by,
7594            inherits,
7595            table_options,
7596        })
7597    }
7598
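    /// Parse a single `CREATE TABLE` option given in plain (unparenthesized) form,
    /// e.g. MySQL's `ENGINE = InnoDB`, `DEFAULT CHARSET = utf8mb4` or `COMMENT 'c'`,
    /// returning `Ok(None)` if no known option follows.
    ///
    /// A minimal sketch (via the public `Parser::parse_sql` entry point with the
    /// MySQL dialect):
    ///
    /// ```
    /// use sqlparser::ast::Statement;
    /// use sqlparser::dialect::MySqlDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// // Sketch: several plain table options after the column list.
    /// let sql = "CREATE TABLE t (a INT) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4 COMMENT 'my table'";
    /// let stmts = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
    /// assert!(matches!(stmts[0], Statement::CreateTable(_)));
    /// ```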
7599    fn parse_plain_option(&mut self) -> Result<Option<SqlOption>, ParserError> {
7600        // Single parameter option
7601        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
7602        if self.parse_keywords(&[Keyword::START, Keyword::TRANSACTION]) {
7603            return Ok(Some(SqlOption::Ident(Ident::new("START TRANSACTION"))));
7604        }
7605
7606        // Custom option
7607        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
7608        if self.parse_keywords(&[Keyword::COMMENT]) {
7609            let has_eq = self.consume_token(&Token::Eq);
7610            let value = self.next_token();
7611
7612            let comment = match (has_eq, value.token) {
7613                (true, Token::SingleQuotedString(s)) => {
7614                    Ok(Some(SqlOption::Comment(CommentDef::WithEq(s))))
7615                }
7616                (false, Token::SingleQuotedString(s)) => {
7617                    Ok(Some(SqlOption::Comment(CommentDef::WithoutEq(s))))
7618                }
7619                (_, token) => {
7620                    self.expected("Token::SingleQuotedString", TokenWithSpan::wrap(token))
7621                }
7622            };
7623            return comment;
7624        }
7625
7626        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
7627        // <https://clickhouse.com/docs/sql-reference/statements/create/table>
7628        if self.parse_keywords(&[Keyword::ENGINE]) {
7629            let _ = self.consume_token(&Token::Eq);
7630            let value = self.next_token();
7631
7632            let engine = match value.token {
7633                Token::Word(w) => {
7634                    let parameters = if self.peek_token() == Token::LParen {
7635                        self.parse_parenthesized_identifiers()?
7636                    } else {
7637                        vec![]
7638                    };
7639
7640                    Ok(Some(SqlOption::NamedParenthesizedList(
7641                        NamedParenthesizedList {
7642                            key: Ident::new("ENGINE"),
7643                            name: Some(Ident::new(w.value)),
7644                            values: parameters,
7645                        },
7646                    )))
7647                }
7648                _ => {
7649                    return self.expected("Token::Word", value)?;
7650                }
7651            };
7652
7653            return engine;
7654        }
7655
7656        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
7657        if self.parse_keywords(&[Keyword::TABLESPACE]) {
7658            let _ = self.consume_token(&Token::Eq);
7659            let value = self.next_token();
7660
7661            let tablespace = match value.token {
7662                Token::Word(Word { value: name, .. }) | Token::SingleQuotedString(name) => {
7663                    let storage = match self.parse_keyword(Keyword::STORAGE) {
7664                        true => {
7665                            let _ = self.consume_token(&Token::Eq);
7666                            let storage_token = self.next_token();
7667                            match &storage_token.token {
7668                                Token::Word(w) => match w.value.to_uppercase().as_str() {
7669                                    "DISK" => Some(StorageType::Disk),
7670                                    "MEMORY" => Some(StorageType::Memory),
7671                                    _ => self
7672                                        .expected("Storage type (DISK or MEMORY)", storage_token)?,
7673                                },
7674                                _ => self.expected("Token::Word", storage_token)?,
7675                            }
7676                        }
7677                        false => None,
7678                    };
7679
7680                    Ok(Some(SqlOption::TableSpace(TablespaceOption {
7681                        name,
7682                        storage,
7683                    })))
7684                }
7685                _ => {
7686                    return self.expected("Token::Word", value)?;
7687                }
7688            };
7689
7690            return tablespace;
7691        }
7692
7693        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
7694        if self.parse_keyword(Keyword::UNION) {
7695            let _ = self.consume_token(&Token::Eq);
7696            let value = self.next_token();
7697
7698            match value.token {
7699                Token::LParen => {
7700                    let tables: Vec<Ident> =
7701                        self.parse_comma_separated0(Parser::parse_identifier, Token::RParen)?;
7702                    self.expect_token(&Token::RParen)?;
7703
7704                    return Ok(Some(SqlOption::NamedParenthesizedList(
7705                        NamedParenthesizedList {
7706                            key: Ident::new("UNION"),
7707                            name: None,
7708                            values: tables,
7709                        },
7710                    )));
7711                }
7712                _ => {
7713                    return self.expected("Token::LParen", value)?;
7714                }
7715            }
7716        }
7717
7718        // Key/Value parameter option
7719        let key = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARSET]) {
7720            Ident::new("DEFAULT CHARSET")
7721        } else if self.parse_keyword(Keyword::CHARSET) {
7722            Ident::new("CHARSET")
7723        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARACTER, Keyword::SET]) {
7724            Ident::new("DEFAULT CHARACTER SET")
7725        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
7726            Ident::new("CHARACTER SET")
7727        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
7728            Ident::new("DEFAULT COLLATE")
7729        } else if self.parse_keyword(Keyword::COLLATE) {
7730            Ident::new("COLLATE")
7731        } else if self.parse_keywords(&[Keyword::DATA, Keyword::DIRECTORY]) {
7732            Ident::new("DATA DIRECTORY")
7733        } else if self.parse_keywords(&[Keyword::INDEX, Keyword::DIRECTORY]) {
7734            Ident::new("INDEX DIRECTORY")
7735        } else if self.parse_keyword(Keyword::KEY_BLOCK_SIZE) {
7736            Ident::new("KEY_BLOCK_SIZE")
7737        } else if self.parse_keyword(Keyword::ROW_FORMAT) {
7738            Ident::new("ROW_FORMAT")
7739        } else if self.parse_keyword(Keyword::PACK_KEYS) {
7740            Ident::new("PACK_KEYS")
7741        } else if self.parse_keyword(Keyword::STATS_AUTO_RECALC) {
7742            Ident::new("STATS_AUTO_RECALC")
7743        } else if self.parse_keyword(Keyword::STATS_PERSISTENT) {
7744            Ident::new("STATS_PERSISTENT")
7745        } else if self.parse_keyword(Keyword::STATS_SAMPLE_PAGES) {
7746            Ident::new("STATS_SAMPLE_PAGES")
7747        } else if self.parse_keyword(Keyword::DELAY_KEY_WRITE) {
7748            Ident::new("DELAY_KEY_WRITE")
7749        } else if self.parse_keyword(Keyword::COMPRESSION) {
7750            Ident::new("COMPRESSION")
7751        } else if self.parse_keyword(Keyword::ENCRYPTION) {
7752            Ident::new("ENCRYPTION")
7753        } else if self.parse_keyword(Keyword::MAX_ROWS) {
7754            Ident::new("MAX_ROWS")
7755        } else if self.parse_keyword(Keyword::MIN_ROWS) {
7756            Ident::new("MIN_ROWS")
7757        } else if self.parse_keyword(Keyword::AUTOEXTEND_SIZE) {
7758            Ident::new("AUTOEXTEND_SIZE")
7759        } else if self.parse_keyword(Keyword::AVG_ROW_LENGTH) {
7760            Ident::new("AVG_ROW_LENGTH")
7761        } else if self.parse_keyword(Keyword::CHECKSUM) {
7762            Ident::new("CHECKSUM")
7763        } else if self.parse_keyword(Keyword::CONNECTION) {
7764            Ident::new("CONNECTION")
7765        } else if self.parse_keyword(Keyword::ENGINE_ATTRIBUTE) {
7766            Ident::new("ENGINE_ATTRIBUTE")
7767        } else if self.parse_keyword(Keyword::PASSWORD) {
7768            Ident::new("PASSWORD")
7769        } else if self.parse_keyword(Keyword::SECONDARY_ENGINE_ATTRIBUTE) {
7770            Ident::new("SECONDARY_ENGINE_ATTRIBUTE")
7771        } else if self.parse_keyword(Keyword::INSERT_METHOD) {
7772            Ident::new("INSERT_METHOD")
7773        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
7774            Ident::new("AUTO_INCREMENT")
7775        } else {
7776            return Ok(None);
7777        };
7778
7779        let _ = self.consume_token(&Token::Eq);
7780
7781        let value = match self
7782            .maybe_parse(|parser| parser.parse_value())?
7783            .map(Expr::Value)
7784        {
7785            Some(expr) => expr,
7786            None => Expr::Identifier(self.parse_identifier()?),
7787        };
7788
7789        Ok(Some(SqlOption::KeyValue { key, value }))
7790    }
7791
7792    pub fn parse_plain_options(&mut self) -> Result<Vec<SqlOption>, ParserError> {
7793        let mut options = Vec::new();
7794
7795        while let Some(option) = self.parse_plain_option()? {
7796            options.push(option);
7797            // Some dialects support comma-separated options; consuming the comma here for
7798            // all dialects shouldn't introduce ambiguity.
7799            let _ = self.consume_token(&Token::Comma);
7800        }
7801
7802        Ok(options)
7803    }
7804
7805    pub fn parse_optional_inline_comment(&mut self) -> Result<Option<CommentDef>, ParserError> {
7806        let comment = if self.parse_keyword(Keyword::COMMENT) {
7807            let has_eq = self.consume_token(&Token::Eq);
7808            let comment = self.parse_comment_value()?;
7809            Some(if has_eq {
7810                CommentDef::WithEq(comment)
7811            } else {
7812                CommentDef::WithoutEq(comment)
7813            })
7814        } else {
7815            None
7816        };
7817        Ok(comment)
7818    }
7819
7820    pub fn parse_comment_value(&mut self) -> Result<String, ParserError> {
7821        let next_token = self.next_token();
7822        let value = match next_token.token {
7823            Token::SingleQuotedString(str) => str,
7824            Token::DollarQuotedString(str) => str.value,
7825            _ => self.expected("string literal", next_token)?,
7826        };
7827        Ok(value)
7828    }
7829
7830    pub fn parse_optional_procedure_parameters(
7831        &mut self,
7832    ) -> Result<Option<Vec<ProcedureParam>>, ParserError> {
7833        let mut params = vec![];
7834        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
7835            return Ok(Some(params));
7836        }
7837        loop {
7838            if let Token::Word(_) = self.peek_token().token {
7839                params.push(self.parse_procedure_param()?)
7840            }
7841            let comma = self.consume_token(&Token::Comma);
7842            if self.consume_token(&Token::RParen) {
7843                // allow a trailing comma, even though it's not in the standard
7844                break;
7845            } else if !comma {
7846                return self.expected("',' or ')' after parameter definition", self.peek_token());
7847            }
7848        }
7849        Ok(Some(params))
7850    }
7851
7852    pub fn parse_columns(&mut self) -> Result<(Vec<ColumnDef>, Vec<TableConstraint>), ParserError> {
7853        let mut columns = vec![];
7854        let mut constraints = vec![];
7855        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
7856            return Ok((columns, constraints));
7857        }
7858
7859        loop {
7860            if let Some(constraint) = self.parse_optional_table_constraint()? {
7861                constraints.push(constraint);
7862            } else if let Token::Word(_) = self.peek_token().token {
7863                columns.push(self.parse_column_def()?);
7864            } else {
7865                return self.expected("column name or constraint definition", self.peek_token());
7866            }
7867
7868            let comma = self.consume_token(&Token::Comma);
7869            let rparen = self.peek_token().token == Token::RParen;
7870
7871            if !comma && !rparen {
7872                return self.expected("',' or ')' after column definition", self.peek_token());
7873            };
7874
7875            if rparen
7876                && (!comma
7877                    || self.dialect.supports_column_definition_trailing_commas()
7878                    || self.options.trailing_commas)
7879            {
7880                let _ = self.consume_token(&Token::RParen);
7881                break;
7882            }
7883        }
7884
7885        Ok((columns, constraints))
7886    }
7887
7888    pub fn parse_procedure_param(&mut self) -> Result<ProcedureParam, ParserError> {
7889        let mode = if self.parse_keyword(Keyword::IN) {
7890            Some(ArgMode::In)
7891        } else if self.parse_keyword(Keyword::OUT) {
7892            Some(ArgMode::Out)
7893        } else if self.parse_keyword(Keyword::INOUT) {
7894            Some(ArgMode::InOut)
7895        } else {
7896            None
7897        };
7898        let name = self.parse_identifier()?;
7899        let data_type = self.parse_data_type()?;
7900        Ok(ProcedureParam {
7901            name,
7902            data_type,
7903            mode,
7904        })
7905    }
7906
7907    pub fn parse_column_def(&mut self) -> Result<ColumnDef, ParserError> {
7908        let name = self.parse_identifier()?;
7909        let data_type = if self.is_column_type_sqlite_unspecified() {
7910            DataType::Unspecified
7911        } else {
7912            self.parse_data_type()?
7913        };
7914        let mut options = vec![];
7915        loop {
7916            if self.parse_keyword(Keyword::CONSTRAINT) {
7917                let name = Some(self.parse_identifier()?);
7918                if let Some(option) = self.parse_optional_column_option()? {
7919                    options.push(ColumnOptionDef { name, option });
7920                } else {
7921                    return self.expected(
7922                        "constraint details after CONSTRAINT <name>",
7923                        self.peek_token(),
7924                    );
7925                }
7926            } else if let Some(option) = self.parse_optional_column_option()? {
7927                options.push(ColumnOptionDef { name: None, option });
7928            } else {
7929                break;
7930            };
7931        }
7932        Ok(ColumnDef {
7933            name,
7934            data_type,
7935            options,
7936        })
7937    }
7938
7939    fn is_column_type_sqlite_unspecified(&mut self) -> bool {
7940        if dialect_of!(self is SQLiteDialect) {
7941            match self.peek_token().token {
7942                Token::Word(word) => matches!(
7943                    word.keyword,
7944                    Keyword::CONSTRAINT
7945                        | Keyword::PRIMARY
7946                        | Keyword::NOT
7947                        | Keyword::UNIQUE
7948                        | Keyword::CHECK
7949                        | Keyword::DEFAULT
7950                        | Keyword::COLLATE
7951                        | Keyword::REFERENCES
7952                        | Keyword::GENERATED
7953                        | Keyword::AS
7954                ),
7955                _ => true, // e.g. comma immediately after column name
7956            }
7957        } else {
7958            false
7959        }
7960    }
7961
7962    pub fn parse_optional_column_option(&mut self) -> Result<Option<ColumnOption>, ParserError> {
7963        if let Some(option) = self.dialect.parse_column_option(self)? {
7964            return option;
7965        }
7966
7967        self.with_state(
7968            ColumnDefinition,
7969            |parser| -> Result<Option<ColumnOption>, ParserError> {
7970                parser.parse_optional_column_option_inner()
7971            },
7972        )
7973    }
7974
7975    fn parse_optional_column_option_inner(&mut self) -> Result<Option<ColumnOption>, ParserError> {
7976        if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
7977            Ok(Some(ColumnOption::CharacterSet(
7978                self.parse_object_name(false)?,
7979            )))
7980        } else if self.parse_keywords(&[Keyword::COLLATE]) {
7981            Ok(Some(ColumnOption::Collation(
7982                self.parse_object_name(false)?,
7983            )))
7984        } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
7985            Ok(Some(ColumnOption::NotNull))
7986        } else if self.parse_keywords(&[Keyword::COMMENT]) {
7987            Ok(Some(ColumnOption::Comment(self.parse_comment_value()?)))
7988        } else if self.parse_keyword(Keyword::NULL) {
7989            Ok(Some(ColumnOption::Null))
7990        } else if self.parse_keyword(Keyword::DEFAULT) {
7991            Ok(Some(ColumnOption::Default(
7992                self.parse_column_option_expr()?,
7993            )))
7994        } else if dialect_of!(self is ClickHouseDialect | GenericDialect)
7995            && self.parse_keyword(Keyword::MATERIALIZED)
7996        {
7997            Ok(Some(ColumnOption::Materialized(
7998                self.parse_column_option_expr()?,
7999            )))
8000        } else if dialect_of!(self is ClickHouseDialect | GenericDialect)
8001            && self.parse_keyword(Keyword::ALIAS)
8002        {
8003            Ok(Some(ColumnOption::Alias(self.parse_column_option_expr()?)))
8004        } else if dialect_of!(self is ClickHouseDialect | GenericDialect)
8005            && self.parse_keyword(Keyword::EPHEMERAL)
8006        {
8007            // The expression is optional for the EPHEMERAL syntax, so we need to check
8008            // if the column definition has remaining tokens before parsing the expression.
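            // e.g. (ClickHouse, as a sketch): both `c UInt64 EPHEMERAL` and
            // `c UInt64 EPHEMERAL 17` are accepted here.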
8009            if matches!(self.peek_token().token, Token::Comma | Token::RParen) {
8010                Ok(Some(ColumnOption::Ephemeral(None)))
8011            } else {
8012                Ok(Some(ColumnOption::Ephemeral(Some(
8013                    self.parse_column_option_expr()?,
8014                ))))
8015            }
8016        } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
8017            let characteristics = self.parse_constraint_characteristics()?;
8018            Ok(Some(ColumnOption::Unique {
8019                is_primary: true,
8020                characteristics,
8021            }))
8022        } else if self.parse_keyword(Keyword::UNIQUE) {
8023            let characteristics = self.parse_constraint_characteristics()?;
8024            Ok(Some(ColumnOption::Unique {
8025                is_primary: false,
8026                characteristics,
8027            }))
8028        } else if self.parse_keyword(Keyword::REFERENCES) {
8029            let foreign_table = self.parse_object_name(false)?;
8030            // PostgreSQL allows omitting the column list and
8031            // uses the primary key column of the foreign table by default
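            // e.g. `customer_id INT REFERENCES customers` as well as
            // `customer_id INT REFERENCES customers (id)`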
8032            let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
8033            let mut on_delete = None;
8034            let mut on_update = None;
8035            loop {
8036                if on_delete.is_none() && self.parse_keywords(&[Keyword::ON, Keyword::DELETE]) {
8037                    on_delete = Some(self.parse_referential_action()?);
8038                } else if on_update.is_none()
8039                    && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
8040                {
8041                    on_update = Some(self.parse_referential_action()?);
8042                } else {
8043                    break;
8044                }
8045            }
8046            let characteristics = self.parse_constraint_characteristics()?;
8047
8048            Ok(Some(ColumnOption::ForeignKey {
8049                foreign_table,
8050                referred_columns,
8051                on_delete,
8052                on_update,
8053                characteristics,
8054            }))
8055        } else if self.parse_keyword(Keyword::CHECK) {
8056            self.expect_token(&Token::LParen)?;
8057            // since `CHECK` requires parentheses, we can parse the inner expression in ParserState::Normal
8058            let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
8059            self.expect_token(&Token::RParen)?;
8060            Ok(Some(ColumnOption::Check(expr)))
8061        } else if self.parse_keyword(Keyword::AUTO_INCREMENT)
8062            && dialect_of!(self is MySqlDialect | GenericDialect)
8063        {
8064            // Support AUTO_INCREMENT for MySQL
8065            Ok(Some(ColumnOption::DialectSpecific(vec![
8066                Token::make_keyword("AUTO_INCREMENT"),
8067            ])))
8068        } else if self.parse_keyword(Keyword::AUTOINCREMENT)
8069            && dialect_of!(self is SQLiteDialect | GenericDialect)
8070        {
8071            // Support AUTOINCREMENT for SQLite
8072            Ok(Some(ColumnOption::DialectSpecific(vec![
8073                Token::make_keyword("AUTOINCREMENT"),
8074            ])))
8075        } else if self.parse_keyword(Keyword::ASC)
8076            && self.dialect.supports_asc_desc_in_column_definition()
8077        {
8078            // Support ASC for SQLite
8079            Ok(Some(ColumnOption::DialectSpecific(vec![
8080                Token::make_keyword("ASC"),
8081            ])))
8082        } else if self.parse_keyword(Keyword::DESC)
8083            && self.dialect.supports_asc_desc_in_column_definition()
8084        {
8085            // Support DESC for SQLite
8086            Ok(Some(ColumnOption::DialectSpecific(vec![
8087                Token::make_keyword("DESC"),
8088            ])))
8089        } else if self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
8090            && dialect_of!(self is MySqlDialect | GenericDialect)
8091        {
8092            let expr = self.parse_column_option_expr()?;
8093            Ok(Some(ColumnOption::OnUpdate(expr)))
8094        } else if self.parse_keyword(Keyword::GENERATED) {
8095            self.parse_optional_column_option_generated()
8096        } else if dialect_of!(self is BigQueryDialect | GenericDialect)
8097            && self.parse_keyword(Keyword::OPTIONS)
8098        {
8099            self.prev_token();
8100            Ok(Some(ColumnOption::Options(
8101                self.parse_options(Keyword::OPTIONS)?,
8102            )))
8103        } else if self.parse_keyword(Keyword::AS)
8104            && dialect_of!(self is MySqlDialect | SQLiteDialect | DuckDbDialect | GenericDialect)
8105        {
8106            self.parse_optional_column_option_as()
8107        } else if self.parse_keyword(Keyword::SRID)
8108            && dialect_of!(self is MySqlDialect | GenericDialect)
8109        {
8110            Ok(Some(ColumnOption::Srid(Box::new(
8111                self.parse_column_option_expr()?,
8112            ))))
8113        } else if self.parse_keyword(Keyword::IDENTITY)
8114            && dialect_of!(self is MsSqlDialect | GenericDialect)
8115        {
8116            let parameters = if self.consume_token(&Token::LParen) {
8117                let seed = self.parse_number()?;
8118                self.expect_token(&Token::Comma)?;
8119                let increment = self.parse_number()?;
8120                self.expect_token(&Token::RParen)?;
8121
8122                Some(IdentityPropertyFormatKind::FunctionCall(
8123                    IdentityParameters { seed, increment },
8124                ))
8125            } else {
8126                None
8127            };
8128            Ok(Some(ColumnOption::Identity(
8129                IdentityPropertyKind::Identity(IdentityProperty {
8130                    parameters,
8131                    order: None,
8132                }),
8133            )))
8134        } else if dialect_of!(self is SQLiteDialect | GenericDialect)
8135            && self.parse_keywords(&[Keyword::ON, Keyword::CONFLICT])
8136        {
8137            // Support ON CONFLICT for SQLite
8138            Ok(Some(ColumnOption::OnConflict(
8139                self.expect_one_of_keywords(&[
8140                    Keyword::ROLLBACK,
8141                    Keyword::ABORT,
8142                    Keyword::FAIL,
8143                    Keyword::IGNORE,
8144                    Keyword::REPLACE,
8145                ])?,
8146            )))
8147        } else {
8148            Ok(None)
8149        }
8150    }
8151
8152    /// When parsing some column option expressions we need to revert to [ParserState::Normal], since
8153    /// inside a parenthesized expression `NOT NULL` should be parsed as `IS NOT NULL`.
8154    /// In those cases we use this helper instead of calling [Parser::parse_expr] directly.
8155    ///
8156    /// For example, consider these `CREATE TABLE` statements:
8157    /// ```sql
8158    /// CREATE TABLE foo (abc BOOL DEFAULT (42 NOT NULL) NOT NULL);
8159    /// ```
8160    /// vs
8161    /// ```sql
8162    /// CREATE TABLE foo (abc BOOL NOT NULL);
8163    /// ```
8164    ///
8165    /// In the first statement the inner portion of `(42 NOT NULL)` should be parsed as [Expr::IsNotNull],
8166    /// whereas in both statements the trailing `NOT NULL` should only be parsed as a
8167    /// [ColumnOption::NotNull].
8168    fn parse_column_option_expr(&mut self) -> Result<Expr, ParserError> {
8169        if self.peek_token_ref().token == Token::LParen {
8170            let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_prefix())?;
8171            Ok(expr)
8172        } else {
8173            Ok(self.parse_expr()?)
8174        }
8175    }
8176
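    /// Parse a `name = 'value'` tag pair, e.g. `my_tag = 'some value'`
    /// (an object name, an `=` token, and a string literal).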
8177    pub(crate) fn parse_tag(&mut self) -> Result<Tag, ParserError> {
8178        let name = self.parse_object_name(false)?;
8179        self.expect_token(&Token::Eq)?;
8180        let value = self.parse_literal_string()?;
8181
8182        Ok(Tag::new(name, value))
8183    }
8184
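    /// Parse the remainder of a `GENERATED ...` column option, the `GENERATED`
    /// keyword having already been consumed by the caller. This covers forms such as:
    /// ```sql
    /// GENERATED ALWAYS AS IDENTITY (START WITH 1)
    /// GENERATED BY DEFAULT AS IDENTITY
    /// GENERATED ALWAYS AS (price * quantity) STORED
    /// ```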
8185    fn parse_optional_column_option_generated(
8186        &mut self,
8187    ) -> Result<Option<ColumnOption>, ParserError> {
8188        if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS, Keyword::IDENTITY]) {
8189            let mut sequence_options = vec![];
8190            if self.expect_token(&Token::LParen).is_ok() {
8191                sequence_options = self.parse_create_sequence_options()?;
8192                self.expect_token(&Token::RParen)?;
8193            }
8194            Ok(Some(ColumnOption::Generated {
8195                generated_as: GeneratedAs::Always,
8196                sequence_options: Some(sequence_options),
8197                generation_expr: None,
8198                generation_expr_mode: None,
8199                generated_keyword: true,
8200            }))
8201        } else if self.parse_keywords(&[
8202            Keyword::BY,
8203            Keyword::DEFAULT,
8204            Keyword::AS,
8205            Keyword::IDENTITY,
8206        ]) {
8207            let mut sequence_options = vec![];
8208            if self.expect_token(&Token::LParen).is_ok() {
8209                sequence_options = self.parse_create_sequence_options()?;
8210                self.expect_token(&Token::RParen)?;
8211            }
8212            Ok(Some(ColumnOption::Generated {
8213                generated_as: GeneratedAs::ByDefault,
8214                sequence_options: Some(sequence_options),
8215                generation_expr: None,
8216                generation_expr_mode: None,
8217                generated_keyword: true,
8218            }))
8219        } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS]) {
8220            if self.expect_token(&Token::LParen).is_ok() {
8221                let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
8222                self.expect_token(&Token::RParen)?;
8223                let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
8224                    Ok((
8225                        GeneratedAs::ExpStored,
8226                        Some(GeneratedExpressionMode::Stored),
8227                    ))
8228                } else if dialect_of!(self is PostgreSqlDialect) {
8229                    // Postgres' `AS IDENTITY` forms are handled above; a generated expression column requires STORED
8230                    self.expected("STORED", self.peek_token())
8231                } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
8232                    Ok((GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual)))
8233                } else {
8234                    Ok((GeneratedAs::Always, None))
8235                }?;
8236
8237                Ok(Some(ColumnOption::Generated {
8238                    generated_as: gen_as,
8239                    sequence_options: None,
8240                    generation_expr: Some(expr),
8241                    generation_expr_mode: expr_mode,
8242                    generated_keyword: true,
8243                }))
8244            } else {
8245                Ok(None)
8246            }
8247        } else {
8248            Ok(None)
8249        }
8250    }
8251
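    /// Parse the remainder of an `AS (expr)` generated-column shorthand (the `AS`
    /// keyword having already been consumed), optionally followed by `STORED` or
    /// `VIRTUAL`, e.g. `b INT AS (a * 2) STORED`.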
8252    fn parse_optional_column_option_as(&mut self) -> Result<Option<ColumnOption>, ParserError> {
8253        // Some DBs allow 'AS (expr)', shorthand for GENERATED ALWAYS AS
8254        self.expect_token(&Token::LParen)?;
8255        let expr = self.parse_expr()?;
8256        self.expect_token(&Token::RParen)?;
8257
8258        let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
8259            (
8260                GeneratedAs::ExpStored,
8261                Some(GeneratedExpressionMode::Stored),
8262            )
8263        } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
8264            (GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual))
8265        } else {
8266            (GeneratedAs::Always, None)
8267        };
8268
8269        Ok(Some(ColumnOption::Generated {
8270            generated_as: gen_as,
8271            sequence_options: None,
8272            generation_expr: Some(expr),
8273            generation_expr_mode: expr_mode,
8274            generated_keyword: false,
8275        }))
8276    }
8277
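    /// Parse an optional Hive-style `CLUSTERED BY` clause, for example:
    /// ```sql
    /// CLUSTERED BY (user_id) SORTED BY (ts ASC) INTO 32 BUCKETS
    /// ```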
8278    pub fn parse_optional_clustered_by(&mut self) -> Result<Option<ClusteredBy>, ParserError> {
8279        let clustered_by = if dialect_of!(self is HiveDialect|GenericDialect)
8280            && self.parse_keywords(&[Keyword::CLUSTERED, Keyword::BY])
8281        {
8282            let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
8283
8284            let sorted_by = if self.parse_keywords(&[Keyword::SORTED, Keyword::BY]) {
8285                self.expect_token(&Token::LParen)?;
8286                let sorted_by_columns = self.parse_comma_separated(|p| p.parse_order_by_expr())?;
8287                self.expect_token(&Token::RParen)?;
8288                Some(sorted_by_columns)
8289            } else {
8290                None
8291            };
8292
8293            self.expect_keyword_is(Keyword::INTO)?;
8294            let num_buckets = self.parse_number_value()?.value;
8295            self.expect_keyword_is(Keyword::BUCKETS)?;
8296            Some(ClusteredBy {
8297                columns,
8298                sorted_by,
8299                num_buckets,
8300            })
8301        } else {
8302            None
8303        };
8304        Ok(clustered_by)
8305    }
8306
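    /// Parse the referential action of an `ON DELETE` or `ON UPDATE` clause:
    /// one of `RESTRICT`, `CASCADE`, `SET NULL`, `NO ACTION`, or `SET DEFAULT`.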
8307    pub fn parse_referential_action(&mut self) -> Result<ReferentialAction, ParserError> {
8308        if self.parse_keyword(Keyword::RESTRICT) {
8309            Ok(ReferentialAction::Restrict)
8310        } else if self.parse_keyword(Keyword::CASCADE) {
8311            Ok(ReferentialAction::Cascade)
8312        } else if self.parse_keywords(&[Keyword::SET, Keyword::NULL]) {
8313            Ok(ReferentialAction::SetNull)
8314        } else if self.parse_keywords(&[Keyword::NO, Keyword::ACTION]) {
8315            Ok(ReferentialAction::NoAction)
8316        } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
8317            Ok(ReferentialAction::SetDefault)
8318        } else {
8319            self.expected(
8320                "one of RESTRICT, CASCADE, SET NULL, NO ACTION or SET DEFAULT",
8321                self.peek_token(),
8322            )
8323        }
8324    }
8325
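    /// Parse optional constraint characteristics (`[NOT] DEFERRABLE`,
    /// `INITIALLY { DEFERRED | IMMEDIATE }`, `[NOT] ENFORCED`), in any order.
    /// Returns `None` if none of them are present.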
8326    pub fn parse_constraint_characteristics(
8327        &mut self,
8328    ) -> Result<Option<ConstraintCharacteristics>, ParserError> {
8329        let mut cc = ConstraintCharacteristics::default();
8330
8331        loop {
8332            if cc.deferrable.is_none() && self.parse_keywords(&[Keyword::NOT, Keyword::DEFERRABLE])
8333            {
8334                cc.deferrable = Some(false);
8335            } else if cc.deferrable.is_none() && self.parse_keyword(Keyword::DEFERRABLE) {
8336                cc.deferrable = Some(true);
8337            } else if cc.initially.is_none() && self.parse_keyword(Keyword::INITIALLY) {
8338                if self.parse_keyword(Keyword::DEFERRED) {
8339                    cc.initially = Some(DeferrableInitial::Deferred);
8340                } else if self.parse_keyword(Keyword::IMMEDIATE) {
8341                    cc.initially = Some(DeferrableInitial::Immediate);
8342                } else {
8343                    self.expected("one of DEFERRED or IMMEDIATE", self.peek_token())?;
8344                }
8345            } else if cc.enforced.is_none() && self.parse_keyword(Keyword::ENFORCED) {
8346                cc.enforced = Some(true);
8347            } else if cc.enforced.is_none()
8348                && self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED])
8349            {
8350                cc.enforced = Some(false);
8351            } else {
8352                break;
8353            }
8354        }
8355
8356        if cc.deferrable.is_some() || cc.initially.is_some() || cc.enforced.is_some() {
8357            Ok(Some(cc))
8358        } else {
8359            Ok(None)
8360        }
8361    }
8362
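    /// Parse an optional table-level constraint, e.g.
    /// `[CONSTRAINT name] { UNIQUE | PRIMARY KEY | FOREIGN KEY | CHECK } ...`,
    /// as well as MySQL-style `{ INDEX | KEY }` and `{ FULLTEXT | SPATIAL }` definitions.
    /// Returns `Ok(None)` (rewinding the parser) if the next token does not start a
    /// constraint and no `CONSTRAINT name` prefix was given.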
8363    pub fn parse_optional_table_constraint(
8364        &mut self,
8365    ) -> Result<Option<TableConstraint>, ParserError> {
8366        let name = if self.parse_keyword(Keyword::CONSTRAINT) {
8367            Some(self.parse_identifier()?)
8368        } else {
8369            None
8370        };
8371
8372        let next_token = self.next_token();
8373        match next_token.token {
8374            Token::Word(w) if w.keyword == Keyword::UNIQUE => {
8375                let index_type_display = self.parse_index_type_display();
8376                if !dialect_of!(self is GenericDialect | MySqlDialect)
8377                    && !index_type_display.is_none()
8378                {
8379                    return self
8380                        .expected("`index_name` or `(column_name [, ...])`", self.peek_token());
8381                }
8382
8383                let nulls_distinct = self.parse_optional_nulls_distinct()?;
8384
8385                // optional index name
8386                let index_name = self.parse_optional_ident()?;
8387                let index_type = self.parse_optional_using_then_index_type()?;
8388
8389                let columns = self.parse_parenthesized_index_column_list()?;
8390                let index_options = self.parse_index_options()?;
8391                let characteristics = self.parse_constraint_characteristics()?;
8392                Ok(Some(TableConstraint::Unique {
8393                    name,
8394                    index_name,
8395                    index_type_display,
8396                    index_type,
8397                    columns,
8398                    index_options,
8399                    characteristics,
8400                    nulls_distinct,
8401                }))
8402            }
8403            Token::Word(w) if w.keyword == Keyword::PRIMARY => {
8404                // `PRIMARY` must always be followed by `KEY`
8405                self.expect_keyword_is(Keyword::KEY)?;
8406
8407                // optional index name
8408                let index_name = self.parse_optional_ident()?;
8409                let index_type = self.parse_optional_using_then_index_type()?;
8410
8411                let columns = self.parse_parenthesized_index_column_list()?;
8412                let index_options = self.parse_index_options()?;
8413                let characteristics = self.parse_constraint_characteristics()?;
8414                Ok(Some(TableConstraint::PrimaryKey {
8415                    name,
8416                    index_name,
8417                    index_type,
8418                    columns,
8419                    index_options,
8420                    characteristics,
8421                }))
8422            }
8423            Token::Word(w) if w.keyword == Keyword::FOREIGN => {
8424                self.expect_keyword_is(Keyword::KEY)?;
8425                let index_name = self.parse_optional_ident()?;
8426                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
8427                self.expect_keyword_is(Keyword::REFERENCES)?;
8428                let foreign_table = self.parse_object_name(false)?;
8429                let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
8430                let mut on_delete = None;
8431                let mut on_update = None;
8432                loop {
8433                    if on_delete.is_none() && self.parse_keywords(&[Keyword::ON, Keyword::DELETE]) {
8434                        on_delete = Some(self.parse_referential_action()?);
8435                    } else if on_update.is_none()
8436                        && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
8437                    {
8438                        on_update = Some(self.parse_referential_action()?);
8439                    } else {
8440                        break;
8441                    }
8442                }
8443
8444                let characteristics = self.parse_constraint_characteristics()?;
8445
8446                Ok(Some(TableConstraint::ForeignKey {
8447                    name,
8448                    index_name,
8449                    columns,
8450                    foreign_table,
8451                    referred_columns,
8452                    on_delete,
8453                    on_update,
8454                    characteristics,
8455                }))
8456            }
8457            Token::Word(w) if w.keyword == Keyword::CHECK => {
8458                self.expect_token(&Token::LParen)?;
8459                let expr = Box::new(self.parse_expr()?);
8460                self.expect_token(&Token::RParen)?;
8461
8462                let enforced = if self.parse_keyword(Keyword::ENFORCED) {
8463                    Some(true)
8464                } else if self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED]) {
8465                    Some(false)
8466                } else {
8467                    None
8468                };
8469
8470                Ok(Some(TableConstraint::Check {
8471                    name,
8472                    expr,
8473                    enforced,
8474                }))
8475            }
8476            Token::Word(w)
8477                if (w.keyword == Keyword::INDEX || w.keyword == Keyword::KEY)
8478                    && dialect_of!(self is GenericDialect | MySqlDialect)
8479                    && name.is_none() =>
8480            {
8481                let display_as_key = w.keyword == Keyword::KEY;
8482
8483                let name = match self.peek_token().token {
8484                    Token::Word(word) if word.keyword == Keyword::USING => None,
8485                    _ => self.parse_optional_ident()?,
8486                };
8487
8488                let index_type = self.parse_optional_using_then_index_type()?;
8489                let columns = self.parse_parenthesized_index_column_list()?;
8490                let index_options = self.parse_index_options()?;
8491
8492                Ok(Some(TableConstraint::Index {
8493                    display_as_key,
8494                    name,
8495                    index_type,
8496                    columns,
8497                    index_options,
8498                }))
8499            }
8500            Token::Word(w)
8501                if (w.keyword == Keyword::FULLTEXT || w.keyword == Keyword::SPATIAL)
8502                    && dialect_of!(self is GenericDialect | MySqlDialect) =>
8503            {
8504                if let Some(name) = name {
8505                    return self.expected(
8506                        "FULLTEXT or SPATIAL option without constraint name",
8507                        TokenWithSpan {
8508                            token: Token::make_keyword(&name.to_string()),
8509                            span: next_token.span,
8510                        },
8511                    );
8512                }
8513
8514                let fulltext = w.keyword == Keyword::FULLTEXT;
8515
8516                let index_type_display = self.parse_index_type_display();
8517
8518                let opt_index_name = self.parse_optional_ident()?;
8519
8520                let columns = self.parse_parenthesized_index_column_list()?;
8521
8522                Ok(Some(TableConstraint::FulltextOrSpatial {
8523                    fulltext,
8524                    index_type_display,
8525                    opt_index_name,
8526                    columns,
8527                }))
8528            }
8529            _ => {
8530                if name.is_some() {
8531                    self.expected("PRIMARY, UNIQUE, FOREIGN, or CHECK", next_token)
8532                } else {
8533                    self.prev_token();
8534                    Ok(None)
8535                }
8536            }
8537        }
8538    }
8539
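    /// Parse an optional `NULLS [NOT] DISTINCT` clause of a `UNIQUE` constraint.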
8540    fn parse_optional_nulls_distinct(&mut self) -> Result<NullsDistinctOption, ParserError> {
8541        Ok(if self.parse_keyword(Keyword::NULLS) {
8542            let not = self.parse_keyword(Keyword::NOT);
8543            self.expect_keyword_is(Keyword::DISTINCT)?;
8544            if not {
8545                NullsDistinctOption::NotDistinct
8546            } else {
8547                NullsDistinctOption::Distinct
8548            }
8549        } else {
8550            NullsDistinctOption::None
8551        })
8552    }
8553
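    /// If the next keyword is `keyword`, parse the parenthesized, comma-separated
    /// list of [SqlOption]s that follows it; otherwise return `None` without
    /// consuming any tokens.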
8554    pub fn maybe_parse_options(
8555        &mut self,
8556        keyword: Keyword,
8557    ) -> Result<Option<Vec<SqlOption>>, ParserError> {
8558        if let Token::Word(word) = self.peek_token().token {
8559            if word.keyword == keyword {
8560                return Ok(Some(self.parse_options(keyword)?));
8561            }
8562        };
8563        Ok(None)
8564    }
8565
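    /// Parse `keyword ( <option> [, ...] )`, e.g. `WITH (option_a = 1, option_b = 'two')`,
    /// returning an empty list if `keyword` is not present.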
8566    pub fn parse_options(&mut self, keyword: Keyword) -> Result<Vec<SqlOption>, ParserError> {
8567        if self.parse_keyword(keyword) {
8568            self.expect_token(&Token::LParen)?;
8569            let options = self.parse_comma_separated0(Parser::parse_sql_option, Token::RParen)?;
8570            self.expect_token(&Token::RParen)?;
8571            Ok(options)
8572        } else {
8573            Ok(vec![])
8574        }
8575    }
8576
8577    pub fn parse_options_with_keywords(
8578        &mut self,
8579        keywords: &[Keyword],
8580    ) -> Result<Vec<SqlOption>, ParserError> {
8581        if self.parse_keywords(keywords) {
8582            self.expect_token(&Token::LParen)?;
8583            let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8584            self.expect_token(&Token::RParen)?;
8585            Ok(options)
8586        } else {
8587            Ok(vec![])
8588        }
8589    }
8590
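    /// Parse an index type, either one of the known keywords
    /// (`BTREE`, `HASH`, `GIN`, `GIST`, `SPGIST`, `BRIN`, `BLOOM`) or a custom identifier.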
8591    pub fn parse_index_type(&mut self) -> Result<IndexType, ParserError> {
8592        Ok(if self.parse_keyword(Keyword::BTREE) {
8593            IndexType::BTree
8594        } else if self.parse_keyword(Keyword::HASH) {
8595            IndexType::Hash
8596        } else if self.parse_keyword(Keyword::GIN) {
8597            IndexType::GIN
8598        } else if self.parse_keyword(Keyword::GIST) {
8599            IndexType::GiST
8600        } else if self.parse_keyword(Keyword::SPGIST) {
8601            IndexType::SPGiST
8602        } else if self.parse_keyword(Keyword::BRIN) {
8603            IndexType::BRIN
8604        } else if self.parse_keyword(Keyword::BLOOM) {
8605            IndexType::Bloom
8606        } else {
8607            IndexType::Custom(self.parse_identifier()?)
8608        })
8609    }
8610
8611    /// Optionally parse the `USING` keyword, followed by an [IndexType]
8612    /// Example:
8613    /// ```sql
8614    /// USING BTREE (name, age DESC)
8615    /// ```
8616    pub fn parse_optional_using_then_index_type(
8617        &mut self,
8618    ) -> Result<Option<IndexType>, ParserError> {
8619        if self.parse_keyword(Keyword::USING) {
8620            Ok(Some(self.parse_index_type()?))
8621        } else {
8622            Ok(None)
8623        }
8624    }
8625
8626    /// Parse an optional `ident`, which is typically a name such as
8627    /// `window_name`, `index_name`, ...
8628    pub fn parse_optional_ident(&mut self) -> Result<Option<Ident>, ParserError> {
8629        self.maybe_parse(|parser| parser.parse_identifier())
8630    }
8631
8632    #[must_use]
8633    pub fn parse_index_type_display(&mut self) -> KeyOrIndexDisplay {
8634        if self.parse_keyword(Keyword::KEY) {
8635            KeyOrIndexDisplay::Key
8636        } else if self.parse_keyword(Keyword::INDEX) {
8637            KeyOrIndexDisplay::Index
8638        } else {
8639            KeyOrIndexDisplay::None
8640        }
8641    }
8642
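    /// Parse a single index option, either `USING <index_type>` or `COMMENT '<string>'`,
    /// returning `None` if neither is present.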
8643    pub fn parse_optional_index_option(&mut self) -> Result<Option<IndexOption>, ParserError> {
8644        if let Some(index_type) = self.parse_optional_using_then_index_type()? {
8645            Ok(Some(IndexOption::Using(index_type)))
8646        } else if self.parse_keyword(Keyword::COMMENT) {
8647            let s = self.parse_literal_string()?;
8648            Ok(Some(IndexOption::Comment(s)))
8649        } else {
8650            Ok(None)
8651        }
8652    }
8653
8654    pub fn parse_index_options(&mut self) -> Result<Vec<IndexOption>, ParserError> {
8655        let mut options = Vec::new();
8656
8657        loop {
8658            match self.parse_optional_index_option()? {
8659                Some(index_option) => options.push(index_option),
8660                None => return Ok(options),
8661            }
8662        }
8663    }
8664
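    /// Parse a single [SqlOption], usually a `key = value` pair, with special
    /// handling for the MSSQL `HEAP`, `PARTITION`, and `CLUSTERED` table options.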
8665    pub fn parse_sql_option(&mut self) -> Result<SqlOption, ParserError> {
8666        let is_mssql = dialect_of!(self is MsSqlDialect|GenericDialect);
8667
8668        match self.peek_token().token {
8669            Token::Word(w) if w.keyword == Keyword::HEAP && is_mssql => {
8670                Ok(SqlOption::Ident(self.parse_identifier()?))
8671            }
8672            Token::Word(w) if w.keyword == Keyword::PARTITION && is_mssql => {
8673                self.parse_option_partition()
8674            }
8675            Token::Word(w) if w.keyword == Keyword::CLUSTERED && is_mssql => {
8676                self.parse_option_clustered()
8677            }
8678            _ => {
8679                let name = self.parse_identifier()?;
8680                self.expect_token(&Token::Eq)?;
8681                let value = self.parse_expr()?;
8682
8683                Ok(SqlOption::KeyValue { key: name, value })
8684            }
8685        }
8686    }
8687
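    /// Parse an MSSQL `CLUSTERED` table option, for example:
    /// ```sql
    /// CLUSTERED COLUMNSTORE INDEX
    /// CLUSTERED COLUMNSTORE INDEX ORDER (a, b)
    /// CLUSTERED INDEX (a ASC, b DESC)
    /// ```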
8688    pub fn parse_option_clustered(&mut self) -> Result<SqlOption, ParserError> {
8689        if self.parse_keywords(&[
8690            Keyword::CLUSTERED,
8691            Keyword::COLUMNSTORE,
8692            Keyword::INDEX,
8693            Keyword::ORDER,
8694        ]) {
8695            Ok(SqlOption::Clustered(
8696                TableOptionsClustered::ColumnstoreIndexOrder(
8697                    self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
8698                ),
8699            ))
8700        } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::COLUMNSTORE, Keyword::INDEX]) {
8701            Ok(SqlOption::Clustered(
8702                TableOptionsClustered::ColumnstoreIndex,
8703            ))
8704        } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::INDEX]) {
8705            self.expect_token(&Token::LParen)?;
8706
8707            let columns = self.parse_comma_separated(|p| {
8708                let name = p.parse_identifier()?;
8709                let asc = p.parse_asc_desc();
8710
8711                Ok(ClusteredIndex { name, asc })
8712            })?;
8713
8714            self.expect_token(&Token::RParen)?;
8715
8716            Ok(SqlOption::Clustered(TableOptionsClustered::Index(columns)))
8717        } else {
8718            Err(ParserError::ParserError(
8719                "invalid CLUSTERED sequence".to_string(),
8720            ))
8721        }
8722    }
8723
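    /// Parse an MSSQL `PARTITION` table option, for example:
    /// ```sql
    /// PARTITION (a RANGE LEFT FOR VALUES (10, 20, 30))
    /// ```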
8724    pub fn parse_option_partition(&mut self) -> Result<SqlOption, ParserError> {
8725        self.expect_keyword_is(Keyword::PARTITION)?;
8726        self.expect_token(&Token::LParen)?;
8727        let column_name = self.parse_identifier()?;
8728
8729        self.expect_keyword_is(Keyword::RANGE)?;
8730        let range_direction = if self.parse_keyword(Keyword::LEFT) {
8731            Some(PartitionRangeDirection::Left)
8732        } else if self.parse_keyword(Keyword::RIGHT) {
8733            Some(PartitionRangeDirection::Right)
8734        } else {
8735            None
8736        };
8737
8738        self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;
8739        self.expect_token(&Token::LParen)?;
8740
8741        let for_values = self.parse_comma_separated(Parser::parse_expr)?;
8742
8743        self.expect_token(&Token::RParen)?;
8744        self.expect_token(&Token::RParen)?;
8745
8746        Ok(SqlOption::Partition {
8747            column_name,
8748            range_direction,
8749            for_values,
8750        })
8751    }
8752
8753    pub fn parse_partition(&mut self) -> Result<Partition, ParserError> {
8754        self.expect_token(&Token::LParen)?;
8755        let partitions = self.parse_comma_separated(Parser::parse_expr)?;
8756        self.expect_token(&Token::RParen)?;
8757        Ok(Partition::Partitions(partitions))
8758    }
8759
8760    pub fn parse_projection_select(&mut self) -> Result<ProjectionSelect, ParserError> {
8761        self.expect_token(&Token::LParen)?;
8762        self.expect_keyword_is(Keyword::SELECT)?;
8763        let projection = self.parse_projection()?;
8764        let group_by = self.parse_optional_group_by()?;
8765        let order_by = self.parse_optional_order_by()?;
8766        self.expect_token(&Token::RParen)?;
8767        Ok(ProjectionSelect {
8768            projection,
8769            group_by,
8770            order_by,
8771        })
8772    }
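
    /// Parse the remainder of a ClickHouse `ALTER TABLE ... ADD PROJECTION` operation,
    /// i.e. `[IF NOT EXISTS] name (SELECT ... [GROUP BY ...] [ORDER BY ...])`, the
    /// `ADD PROJECTION` keywords having already been consumed by the caller.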
8773    pub fn parse_alter_table_add_projection(&mut self) -> Result<AlterTableOperation, ParserError> {
8774        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
8775        let name = self.parse_identifier()?;
8776        let query = self.parse_projection_select()?;
8777        Ok(AlterTableOperation::AddProjection {
8778            if_not_exists,
8779            name,
8780            select: query,
8781        })
8782    }
8783
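    /// Parse a single operation in an `ALTER TABLE` statement, e.g. `ADD [COLUMN]`,
    /// `RENAME`, `DROP`, `CHANGE`, `MODIFY`, `ALTER COLUMN`, or one of the
    /// dialect-specific operations handled below.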
8784    pub fn parse_alter_table_operation(&mut self) -> Result<AlterTableOperation, ParserError> {
8785        let operation = if self.parse_keyword(Keyword::ADD) {
8786            if let Some(constraint) = self.parse_optional_table_constraint()? {
8787                let not_valid = self.parse_keywords(&[Keyword::NOT, Keyword::VALID]);
8788                AlterTableOperation::AddConstraint {
8789                    constraint,
8790                    not_valid,
8791                }
8792            } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
8793                && self.parse_keyword(Keyword::PROJECTION)
8794            {
8795                return self.parse_alter_table_add_projection();
8796            } else {
8797                let if_not_exists =
8798                    self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
8799                let mut new_partitions = vec![];
8800                loop {
8801                    if self.parse_keyword(Keyword::PARTITION) {
8802                        new_partitions.push(self.parse_partition()?);
8803                    } else {
8804                        break;
8805                    }
8806                }
8807                if !new_partitions.is_empty() {
8808                    AlterTableOperation::AddPartitions {
8809                        if_not_exists,
8810                        new_partitions,
8811                    }
8812                } else {
8813                    let column_keyword = self.parse_keyword(Keyword::COLUMN);
8814
8815                    let if_not_exists = if dialect_of!(self is PostgreSqlDialect | BigQueryDialect | DuckDbDialect | GenericDialect)
8816                    {
8817                        self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS])
8818                            || if_not_exists
8819                    } else {
8820                        false
8821                    };
8822
8823                    let column_def = self.parse_column_def()?;
8824
8825                    let column_position = self.parse_column_position()?;
8826
8827                    AlterTableOperation::AddColumn {
8828                        column_keyword,
8829                        if_not_exists,
8830                        column_def,
8831                        column_position,
8832                    }
8833                }
8834            }
8835        } else if self.parse_keyword(Keyword::RENAME) {
8836            if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::CONSTRAINT) {
8837                let old_name = self.parse_identifier()?;
8838                self.expect_keyword_is(Keyword::TO)?;
8839                let new_name = self.parse_identifier()?;
8840                AlterTableOperation::RenameConstraint { old_name, new_name }
8841            } else if self.parse_keyword(Keyword::TO) {
8842                let table_name = self.parse_object_name(false)?;
8843                AlterTableOperation::RenameTable {
8844                    table_name: RenameTableNameKind::To(table_name),
8845                }
8846            } else if self.parse_keyword(Keyword::AS) {
8847                let table_name = self.parse_object_name(false)?;
8848                AlterTableOperation::RenameTable {
8849                    table_name: RenameTableNameKind::As(table_name),
8850                }
8851            } else {
8852                let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
8853                let old_column_name = self.parse_identifier()?;
8854                self.expect_keyword_is(Keyword::TO)?;
8855                let new_column_name = self.parse_identifier()?;
8856                AlterTableOperation::RenameColumn {
8857                    old_column_name,
8858                    new_column_name,
8859                }
8860            }
8861        } else if self.parse_keyword(Keyword::DISABLE) {
8862            if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
8863                AlterTableOperation::DisableRowLevelSecurity {}
8864            } else if self.parse_keyword(Keyword::RULE) {
8865                let name = self.parse_identifier()?;
8866                AlterTableOperation::DisableRule { name }
8867            } else if self.parse_keyword(Keyword::TRIGGER) {
8868                let name = self.parse_identifier()?;
8869                AlterTableOperation::DisableTrigger { name }
8870            } else {
8871                return self.expected(
8872                    "ROW LEVEL SECURITY, RULE, or TRIGGER after DISABLE",
8873                    self.peek_token(),
8874                );
8875            }
8876        } else if self.parse_keyword(Keyword::ENABLE) {
8877            if self.parse_keywords(&[Keyword::ALWAYS, Keyword::RULE]) {
8878                let name = self.parse_identifier()?;
8879                AlterTableOperation::EnableAlwaysRule { name }
8880            } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::TRIGGER]) {
8881                let name = self.parse_identifier()?;
8882                AlterTableOperation::EnableAlwaysTrigger { name }
8883            } else if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
8884                AlterTableOperation::EnableRowLevelSecurity {}
8885            } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::RULE]) {
8886                let name = self.parse_identifier()?;
8887                AlterTableOperation::EnableReplicaRule { name }
8888            } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::TRIGGER]) {
8889                let name = self.parse_identifier()?;
8890                AlterTableOperation::EnableReplicaTrigger { name }
8891            } else if self.parse_keyword(Keyword::RULE) {
8892                let name = self.parse_identifier()?;
8893                AlterTableOperation::EnableRule { name }
8894            } else if self.parse_keyword(Keyword::TRIGGER) {
8895                let name = self.parse_identifier()?;
8896                AlterTableOperation::EnableTrigger { name }
8897            } else {
8898                return self.expected(
8899                    "ALWAYS, REPLICA, ROW LEVEL SECURITY, RULE, or TRIGGER after ENABLE",
8900                    self.peek_token(),
8901                );
8902            }
8903        } else if self.parse_keywords(&[Keyword::CLEAR, Keyword::PROJECTION])
8904            && dialect_of!(self is ClickHouseDialect|GenericDialect)
8905        {
8906            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8907            let name = self.parse_identifier()?;
8908            let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
8909                Some(self.parse_identifier()?)
8910            } else {
8911                None
8912            };
8913            AlterTableOperation::ClearProjection {
8914                if_exists,
8915                name,
8916                partition,
8917            }
8918        } else if self.parse_keywords(&[Keyword::MATERIALIZE, Keyword::PROJECTION])
8919            && dialect_of!(self is ClickHouseDialect|GenericDialect)
8920        {
8921            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8922            let name = self.parse_identifier()?;
8923            let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
8924                Some(self.parse_identifier()?)
8925            } else {
8926                None
8927            };
8928            AlterTableOperation::MaterializeProjection {
8929                if_exists,
8930                name,
8931                partition,
8932            }
8933        } else if self.parse_keyword(Keyword::DROP) {
8934            if self.parse_keywords(&[Keyword::IF, Keyword::EXISTS, Keyword::PARTITION]) {
8935                self.expect_token(&Token::LParen)?;
8936                let partitions = self.parse_comma_separated(Parser::parse_expr)?;
8937                self.expect_token(&Token::RParen)?;
8938                AlterTableOperation::DropPartitions {
8939                    partitions,
8940                    if_exists: true,
8941                }
8942            } else if self.parse_keyword(Keyword::PARTITION) {
8943                self.expect_token(&Token::LParen)?;
8944                let partitions = self.parse_comma_separated(Parser::parse_expr)?;
8945                self.expect_token(&Token::RParen)?;
8946                AlterTableOperation::DropPartitions {
8947                    partitions,
8948                    if_exists: false,
8949                }
8950            } else if self.parse_keyword(Keyword::CONSTRAINT) {
8951                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8952                let name = self.parse_identifier()?;
8953                let drop_behavior = self.parse_optional_drop_behavior();
8954                AlterTableOperation::DropConstraint {
8955                    if_exists,
8956                    name,
8957                    drop_behavior,
8958                }
8959            } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
8960                let drop_behavior = self.parse_optional_drop_behavior();
8961                AlterTableOperation::DropPrimaryKey { drop_behavior }
8962            } else if self.parse_keywords(&[Keyword::FOREIGN, Keyword::KEY]) {
8963                let name = self.parse_identifier()?;
8964                let drop_behavior = self.parse_optional_drop_behavior();
8965                AlterTableOperation::DropForeignKey {
8966                    name,
8967                    drop_behavior,
8968                }
8969            } else if self.parse_keyword(Keyword::INDEX) {
8970                let name = self.parse_identifier()?;
8971                AlterTableOperation::DropIndex { name }
8972            } else if self.parse_keyword(Keyword::PROJECTION)
8973                && dialect_of!(self is ClickHouseDialect|GenericDialect)
8974            {
8975                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8976                let name = self.parse_identifier()?;
8977                AlterTableOperation::DropProjection { if_exists, name }
8978            } else if self.parse_keywords(&[Keyword::CLUSTERING, Keyword::KEY]) {
8979                AlterTableOperation::DropClusteringKey
8980            } else {
8981                let has_column_keyword = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
8982                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8983                let column_names = if self.dialect.supports_comma_separated_drop_column_list() {
8984                    self.parse_comma_separated(Parser::parse_identifier)?
8985                } else {
8986                    vec![self.parse_identifier()?]
8987                };
8988                let drop_behavior = self.parse_optional_drop_behavior();
8989                AlterTableOperation::DropColumn {
8990                    has_column_keyword,
8991                    column_names,
8992                    if_exists,
8993                    drop_behavior,
8994                }
8995            }
8996        } else if self.parse_keyword(Keyword::PARTITION) {
8997            self.expect_token(&Token::LParen)?;
8998            let before = self.parse_comma_separated(Parser::parse_expr)?;
8999            self.expect_token(&Token::RParen)?;
9000            self.expect_keyword_is(Keyword::RENAME)?;
9001            self.expect_keywords(&[Keyword::TO, Keyword::PARTITION])?;
9002            self.expect_token(&Token::LParen)?;
9003            let renames = self.parse_comma_separated(Parser::parse_expr)?;
9004            self.expect_token(&Token::RParen)?;
9005            AlterTableOperation::RenamePartitions {
9006                old_partitions: before,
9007                new_partitions: renames,
9008            }
9009        } else if self.parse_keyword(Keyword::CHANGE) {
9010            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
9011            let old_name = self.parse_identifier()?;
9012            let new_name = self.parse_identifier()?;
9013            let data_type = self.parse_data_type()?;
9014            let mut options = vec![];
9015            while let Some(option) = self.parse_optional_column_option()? {
9016                options.push(option);
9017            }
9018
9019            let column_position = self.parse_column_position()?;
9020
9021            AlterTableOperation::ChangeColumn {
9022                old_name,
9023                new_name,
9024                data_type,
9025                options,
9026                column_position,
9027            }
9028        } else if self.parse_keyword(Keyword::MODIFY) {
9029            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
9030            let col_name = self.parse_identifier()?;
9031            let data_type = self.parse_data_type()?;
9032            let mut options = vec![];
9033            while let Some(option) = self.parse_optional_column_option()? {
9034                options.push(option);
9035            }
9036
9037            let column_position = self.parse_column_position()?;
9038
9039            AlterTableOperation::ModifyColumn {
9040                col_name,
9041                data_type,
9042                options,
9043                column_position,
9044            }
9045        } else if self.parse_keyword(Keyword::ALTER) {
9046            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
9047            let column_name = self.parse_identifier()?;
9048            let is_postgresql = dialect_of!(self is PostgreSqlDialect);
9049
9050            let op: AlterColumnOperation = if self.parse_keywords(&[
9051                Keyword::SET,
9052                Keyword::NOT,
9053                Keyword::NULL,
9054            ]) {
9055                AlterColumnOperation::SetNotNull {}
9056            } else if self.parse_keywords(&[Keyword::DROP, Keyword::NOT, Keyword::NULL]) {
9057                AlterColumnOperation::DropNotNull {}
9058            } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
9059                AlterColumnOperation::SetDefault {
9060                    value: self.parse_expr()?,
9061                }
9062            } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
9063                AlterColumnOperation::DropDefault {}
9064            } else if self.parse_keywords(&[Keyword::SET, Keyword::DATA, Keyword::TYPE]) {
9065                self.parse_set_data_type(true)?
9066            } else if self.parse_keyword(Keyword::TYPE) {
9067                self.parse_set_data_type(false)?
9068            } else if self.parse_keywords(&[Keyword::ADD, Keyword::GENERATED]) {
9069                let generated_as = if self.parse_keyword(Keyword::ALWAYS) {
9070                    Some(GeneratedAs::Always)
9071                } else if self.parse_keywords(&[Keyword::BY, Keyword::DEFAULT]) {
9072                    Some(GeneratedAs::ByDefault)
9073                } else {
9074                    None
9075                };
9076
9077                self.expect_keywords(&[Keyword::AS, Keyword::IDENTITY])?;
9078
9079                let mut sequence_options: Option<Vec<SequenceOptions>> = None;
9080
9081                if self.peek_token().token == Token::LParen {
9082                    self.expect_token(&Token::LParen)?;
9083                    sequence_options = Some(self.parse_create_sequence_options()?);
9084                    self.expect_token(&Token::RParen)?;
9085                }
9086
9087                AlterColumnOperation::AddGenerated {
9088                    generated_as,
9089                    sequence_options,
9090                }
9091            } else {
9092                let message = if is_postgresql {
9093                    "SET/DROP NOT NULL, SET DEFAULT, SET DATA TYPE, or ADD GENERATED after ALTER COLUMN"
9094                } else {
9095                    "SET/DROP NOT NULL, SET DEFAULT, or SET DATA TYPE after ALTER COLUMN"
9096                };
9097
9098                return self.expected(message, self.peek_token());
9099            };
9100            AlterTableOperation::AlterColumn { column_name, op }
9101        } else if self.parse_keyword(Keyword::SWAP) {
9102            self.expect_keyword_is(Keyword::WITH)?;
9103            let table_name = self.parse_object_name(false)?;
9104            AlterTableOperation::SwapWith { table_name }
9105        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect)
9106            && self.parse_keywords(&[Keyword::OWNER, Keyword::TO])
9107        {
9108            let new_owner = self.parse_owner()?;
9109            AlterTableOperation::OwnerTo { new_owner }
9110        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
9111            && self.parse_keyword(Keyword::ATTACH)
9112        {
9113            AlterTableOperation::AttachPartition {
9114                partition: self.parse_part_or_partition()?,
9115            }
9116        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
9117            && self.parse_keyword(Keyword::DETACH)
9118        {
9119            AlterTableOperation::DetachPartition {
9120                partition: self.parse_part_or_partition()?,
9121            }
9122        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
9123            && self.parse_keyword(Keyword::FREEZE)
9124        {
9125            let partition = self.parse_part_or_partition()?;
9126            let with_name = if self.parse_keyword(Keyword::WITH) {
9127                self.expect_keyword_is(Keyword::NAME)?;
9128                Some(self.parse_identifier()?)
9129            } else {
9130                None
9131            };
9132            AlterTableOperation::FreezePartition {
9133                partition,
9134                with_name,
9135            }
9136        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
9137            && self.parse_keyword(Keyword::UNFREEZE)
9138        {
9139            let partition = self.parse_part_or_partition()?;
9140            let with_name = if self.parse_keyword(Keyword::WITH) {
9141                self.expect_keyword_is(Keyword::NAME)?;
9142                Some(self.parse_identifier()?)
9143            } else {
9144                None
9145            };
9146            AlterTableOperation::UnfreezePartition {
9147                partition,
9148                with_name,
9149            }
9150        } else if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
9151            self.expect_token(&Token::LParen)?;
9152            let exprs = self.parse_comma_separated(|parser| parser.parse_expr())?;
9153            self.expect_token(&Token::RParen)?;
9154            AlterTableOperation::ClusterBy { exprs }
9155        } else if self.parse_keywords(&[Keyword::SUSPEND, Keyword::RECLUSTER]) {
9156            AlterTableOperation::SuspendRecluster
9157        } else if self.parse_keywords(&[Keyword::RESUME, Keyword::RECLUSTER]) {
9158            AlterTableOperation::ResumeRecluster
9159        } else if self.parse_keyword(Keyword::LOCK) {
9160            let equals = self.consume_token(&Token::Eq);
9161            let lock = match self.parse_one_of_keywords(&[
9162                Keyword::DEFAULT,
9163                Keyword::EXCLUSIVE,
9164                Keyword::NONE,
9165                Keyword::SHARED,
9166            ]) {
9167                Some(Keyword::DEFAULT) => AlterTableLock::Default,
9168                Some(Keyword::EXCLUSIVE) => AlterTableLock::Exclusive,
9169                Some(Keyword::NONE) => AlterTableLock::None,
9170                Some(Keyword::SHARED) => AlterTableLock::Shared,
9171                _ => self.expected(
9172                    "DEFAULT, EXCLUSIVE, NONE or SHARED after LOCK [=]",
9173                    self.peek_token(),
9174                )?,
9175            };
9176            AlterTableOperation::Lock { equals, lock }
9177        } else if self.parse_keyword(Keyword::ALGORITHM) {
9178            let equals = self.consume_token(&Token::Eq);
9179            let algorithm = match self.parse_one_of_keywords(&[
9180                Keyword::DEFAULT,
9181                Keyword::INSTANT,
9182                Keyword::INPLACE,
9183                Keyword::COPY,
9184            ]) {
9185                Some(Keyword::DEFAULT) => AlterTableAlgorithm::Default,
9186                Some(Keyword::INSTANT) => AlterTableAlgorithm::Instant,
9187                Some(Keyword::INPLACE) => AlterTableAlgorithm::Inplace,
9188                Some(Keyword::COPY) => AlterTableAlgorithm::Copy,
9189                _ => self.expected(
9190                    "DEFAULT, INSTANT, INPLACE, or COPY after ALGORITHM [=]",
9191                    self.peek_token(),
9192                )?,
9193            };
9194            AlterTableOperation::Algorithm { equals, algorithm }
9195        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
9196            let equals = self.consume_token(&Token::Eq);
9197            let value = self.parse_number_value()?;
9198            AlterTableOperation::AutoIncrement { equals, value }
9199        } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::IDENTITY]) {
9200            let identity = if self.parse_keyword(Keyword::NONE) {
9201                ReplicaIdentity::None
9202            } else if self.parse_keyword(Keyword::FULL) {
9203                ReplicaIdentity::Full
9204            } else if self.parse_keyword(Keyword::DEFAULT) {
9205                ReplicaIdentity::Default
9206            } else if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
9207                ReplicaIdentity::Index(self.parse_identifier()?)
9208            } else {
9209                return self.expected(
9210                    "NONE, FULL, DEFAULT, or USING INDEX index_name after REPLICA IDENTITY",
9211                    self.peek_token(),
9212                );
9213            };
9214
9215            AlterTableOperation::ReplicaIdentity { identity }
9216        } else if self.parse_keywords(&[Keyword::VALIDATE, Keyword::CONSTRAINT]) {
9217            let name = self.parse_identifier()?;
9218            AlterTableOperation::ValidateConstraint { name }
9219        } else {
9220            let mut options =
9221                self.parse_options_with_keywords(&[Keyword::SET, Keyword::TBLPROPERTIES])?;
9222            if !options.is_empty() {
9223                AlterTableOperation::SetTblProperties {
9224                    table_properties: options,
9225                }
9226            } else {
9227                options = self.parse_options(Keyword::SET)?;
9228                if !options.is_empty() {
9229                    AlterTableOperation::SetOptionsParens { options }
9230                } else {
9231                    return self.expected(
9232                        "ADD, RENAME, PARTITION, SWAP, DROP, REPLICA IDENTITY, SET, or SET TBLPROPERTIES after ALTER TABLE",
9233                        self.peek_token(),
9234                    );
9235                }
9236            }
9237        };
9238        Ok(operation)
9239    }
9240
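    /// Parse the data type (and optional `USING <expr>` conversion) of an
    /// `ALTER COLUMN ... [SET DATA] TYPE` operation; `had_set` records whether
    /// the `SET DATA` form was used.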
9241    fn parse_set_data_type(&mut self, had_set: bool) -> Result<AlterColumnOperation, ParserError> {
9242        let data_type = self.parse_data_type()?;
9243        let using = if self.dialect.supports_alter_column_type_using()
9244            && self.parse_keyword(Keyword::USING)
9245        {
9246            Some(self.parse_expr()?)
9247        } else {
9248            None
9249        };
9250        Ok(AlterColumnOperation::SetDataType {
9251            data_type,
9252            using,
9253            had_set,
9254        })
9255    }
9256
9257    fn parse_part_or_partition(&mut self) -> Result<Partition, ParserError> {
9258        let keyword = self.expect_one_of_keywords(&[Keyword::PART, Keyword::PARTITION])?;
9259        match keyword {
9260            Keyword::PART => Ok(Partition::Part(self.parse_expr()?)),
9261            Keyword::PARTITION => Ok(Partition::Expr(self.parse_expr()?)),
9262            // unreachable because expect_one_of_keywords used above
9263            _ => unreachable!(),
9264        }
9265    }
9266
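    /// Parse a statement following the `ALTER` keyword, dispatching on the object type
    /// (`VIEW`, `TYPE`, `TABLE`, `INDEX`, `ROLE`, `POLICY`, `CONNECTOR`,
    /// `ICEBERG TABLE`, or `SCHEMA`).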
9267    pub fn parse_alter(&mut self) -> Result<Statement, ParserError> {
9268        let object_type = self.expect_one_of_keywords(&[
9269            Keyword::VIEW,
9270            Keyword::TYPE,
9271            Keyword::TABLE,
9272            Keyword::INDEX,
9273            Keyword::ROLE,
9274            Keyword::POLICY,
9275            Keyword::CONNECTOR,
9276            Keyword::ICEBERG,
9277            Keyword::SCHEMA,
9278        ])?;
9279        match object_type {
9280            Keyword::SCHEMA => {
9281                self.prev_token();
9282                self.prev_token();
9283                self.parse_alter_schema()
9284            }
9285            Keyword::VIEW => self.parse_alter_view(),
9286            Keyword::TYPE => self.parse_alter_type(),
9287            Keyword::TABLE => self.parse_alter_table(false),
9288            Keyword::ICEBERG => {
9289                self.expect_keyword(Keyword::TABLE)?;
9290                self.parse_alter_table(true)
9291            }
9292            Keyword::INDEX => {
9293                let index_name = self.parse_object_name(false)?;
9294                let operation = if self.parse_keyword(Keyword::RENAME) {
9295                    if self.parse_keyword(Keyword::TO) {
9296                        let index_name = self.parse_object_name(false)?;
9297                        AlterIndexOperation::RenameIndex { index_name }
9298                    } else {
9299                        return self.expected("TO after RENAME", self.peek_token());
9300                    }
9301                } else {
9302                    return self.expected("RENAME after ALTER INDEX", self.peek_token());
9303                };
9304
9305                Ok(Statement::AlterIndex {
9306                    name: index_name,
9307                    operation,
9308                })
9309            }
9310            Keyword::ROLE => self.parse_alter_role(),
9311            Keyword::POLICY => self.parse_alter_policy(),
9312            Keyword::CONNECTOR => self.parse_alter_connector(),
9313            // unreachable because expect_one_of_keywords used above
9314            _ => unreachable!(),
9315        }
9316    }
9317
9318    /// Parse a [Statement::AlterTable]
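    ///
    /// A usage sketch (illustrative): the statement is reached through the
    /// top-level `Parser::parse_sql` entry point, which consumes the leading
    /// `ALTER TABLE` keywords before delegating here.
    ///
    /// ```
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let sql = "ALTER TABLE users ADD COLUMN last_login TIMESTAMP";
    /// let statements = Parser::parse_sql(&GenericDialect {}, sql).unwrap();
    /// assert_eq!(statements.len(), 1);
    /// ```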
9319    pub fn parse_alter_table(&mut self, iceberg: bool) -> Result<Statement, ParserError> {
9320        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9321        let only = self.parse_keyword(Keyword::ONLY); // [ ONLY ]
9322        let table_name = self.parse_object_name(false)?;
9323        let on_cluster = self.parse_optional_on_cluster()?;
9324        let operations = self.parse_comma_separated(Parser::parse_alter_table_operation)?;
9325
9326        let mut location = None;
9327        if self.parse_keyword(Keyword::LOCATION) {
9328            location = Some(HiveSetLocation {
9329                has_set: false,
9330                location: self.parse_identifier()?,
9331            });
9332        } else if self.parse_keywords(&[Keyword::SET, Keyword::LOCATION]) {
9333            location = Some(HiveSetLocation {
9334                has_set: true,
9335                location: self.parse_identifier()?,
9336            });
9337        }
9338
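        // Capture the statement's end token: the trailing semicolon if present,
        // otherwise the last token consumed.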
9339        let end_token = if self.peek_token_ref().token == Token::SemiColon {
9340            self.peek_token_ref().clone()
9341        } else {
9342            self.get_current_token().clone()
9343        };
9344
9345        Ok(Statement::AlterTable {
9346            name: table_name,
9347            if_exists,
9348            only,
9349            operations,
9350            location,
9351            on_cluster,
9352            iceberg,
9353            end_token: AttachedToken(end_token),
9354        })
9355    }
9356
9357    pub fn parse_alter_view(&mut self) -> Result<Statement, ParserError> {
9358        let name = self.parse_object_name(false)?;
9359        let columns = self.parse_parenthesized_column_list(Optional, false)?;
9360
9361        let with_options = self.parse_options(Keyword::WITH)?;
9362
9363        self.expect_keyword_is(Keyword::AS)?;
9364        let query = self.parse_query()?;
9365
9366        Ok(Statement::AlterView {
9367            name,
9368            columns,
9369            query,
9370            with_options,
9371        })
9372    }
9373
9374    /// Parse a [Statement::AlterType]
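    ///
    /// For example (a sketch going through `Parser::parse_sql`, which
    /// dispatches here via [`Parser::parse_alter`] after the `TYPE` keyword):
    ///
    /// ```
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let sql = "ALTER TYPE mood RENAME TO sentiment";
    /// let statements = Parser::parse_sql(&GenericDialect {}, sql).unwrap();
    /// assert_eq!(statements.len(), 1);
    /// ```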
9375    pub fn parse_alter_type(&mut self) -> Result<Statement, ParserError> {
9376        let name = self.parse_object_name(false)?;
9377
9378        if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
9379            let new_name = self.parse_identifier()?;
9380            Ok(Statement::AlterType(AlterType {
9381                name,
9382                operation: AlterTypeOperation::Rename(AlterTypeRename { new_name }),
9383            }))
9384        } else if self.parse_keywords(&[Keyword::ADD, Keyword::VALUE]) {
9385            let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
9386            let new_enum_value = self.parse_identifier()?;
9387            let position = if self.parse_keyword(Keyword::BEFORE) {
9388                Some(AlterTypeAddValuePosition::Before(self.parse_identifier()?))
9389            } else if self.parse_keyword(Keyword::AFTER) {
9390                Some(AlterTypeAddValuePosition::After(self.parse_identifier()?))
9391            } else {
9392                None
9393            };
9394
9395            Ok(Statement::AlterType(AlterType {
9396                name,
9397                operation: AlterTypeOperation::AddValue(AlterTypeAddValue {
9398                    if_not_exists,
9399                    value: new_enum_value,
9400                    position,
9401                }),
9402            }))
9403        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::VALUE]) {
9404            let existing_enum_value = self.parse_identifier()?;
9405            self.expect_keyword(Keyword::TO)?;
9406            let new_enum_value = self.parse_identifier()?;
9407
9408            Ok(Statement::AlterType(AlterType {
9409                name,
9410                operation: AlterTypeOperation::RenameValue(AlterTypeRenameValue {
9411                    from: existing_enum_value,
9412                    to: new_enum_value,
9413                }),
9414            }))
9415        } else {
9416            self.expected_ref(
9417                "{RENAME TO | { RENAME | ADD } VALUE}",
9418                self.peek_token_ref(),
9419            )
9420        }
9421    }
9422
9423    /// Parse a [Statement::AlterSchema]
9424    /// `ALTER SCHEMA [ IF EXISTS ] schema_name <operation>`
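    ///
    /// For example (a sketch via `Parser::parse_sql`; note that this method
    /// itself re-consumes the `ALTER SCHEMA` keywords):
    ///
    /// ```
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let sql = "ALTER SCHEMA finance RENAME TO accounting";
    /// let statements = Parser::parse_sql(&GenericDialect {}, sql).unwrap();
    /// assert_eq!(statements.len(), 1);
    /// ```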
9425    pub fn parse_alter_schema(&mut self) -> Result<Statement, ParserError> {
9426        self.expect_keywords(&[Keyword::ALTER, Keyword::SCHEMA])?;
9427        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9428        let name = self.parse_object_name(false)?;
9429        let operation = if self.parse_keywords(&[Keyword::SET, Keyword::OPTIONS]) {
9430            self.prev_token();
9431            let options = self.parse_options(Keyword::OPTIONS)?;
9432            AlterSchemaOperation::SetOptionsParens { options }
9433        } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT, Keyword::COLLATE]) {
9434            let collate = self.parse_expr()?;
9435            AlterSchemaOperation::SetDefaultCollate { collate }
9436        } else if self.parse_keywords(&[Keyword::ADD, Keyword::REPLICA]) {
9437            let replica = self.parse_identifier()?;
9438            let options = if self.peek_keyword(Keyword::OPTIONS) {
9439                Some(self.parse_options(Keyword::OPTIONS)?)
9440            } else {
9441                None
9442            };
9443            AlterSchemaOperation::AddReplica { replica, options }
9444        } else if self.parse_keywords(&[Keyword::DROP, Keyword::REPLICA]) {
9445            let replica = self.parse_identifier()?;
9446            AlterSchemaOperation::DropReplica { replica }
9447        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
9448            let new_name = self.parse_object_name(false)?;
9449            AlterSchemaOperation::Rename { name: new_name }
9450        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
9451            let owner = self.parse_owner()?;
9452            AlterSchemaOperation::OwnerTo { owner }
9453        } else {
9454            return self.expected_ref("ALTER SCHEMA operation", self.peek_token_ref());
9455        };
9456        Ok(Statement::AlterSchema(AlterSchema {
9457            name,
9458            if_exists,
9459            operations: vec![operation],
9460        }))
9461    }
9462
9463    /// Parse a `CALL procedure_name(arg1, arg2, ...)`
9464    /// or `CALL procedure_name` statement
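    ///
    /// For example (a sketch via `Parser::parse_sql`; both forms yield a
    /// [Statement::Call]):
    ///
    /// ```
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let with_args = Parser::parse_sql(&GenericDialect {}, "CALL my_proc(1, 'a')").unwrap();
    /// let no_args = Parser::parse_sql(&GenericDialect {}, "CALL my_proc").unwrap();
    /// assert_eq!(with_args.len(), 1);
    /// assert_eq!(no_args.len(), 1);
    /// ```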
9465    pub fn parse_call(&mut self) -> Result<Statement, ParserError> {
9466        let object_name = self.parse_object_name(false)?;
9467        if self.peek_token().token == Token::LParen {
9468            match self.parse_function(object_name)? {
9469                Expr::Function(f) => Ok(Statement::Call(f)),
9470                other => parser_err!(
9471                    format!("Expected a simple procedure call but found: {other}"),
9472                    self.peek_token().span.start
9473                ),
9474            }
9475        } else {
9476            Ok(Statement::Call(Function {
9477                name: object_name,
9478                uses_odbc_syntax: false,
9479                parameters: FunctionArguments::None,
9480                args: FunctionArguments::None,
9481                over: None,
9482                filter: None,
9483                null_treatment: None,
9484                within_group: vec![],
9485            }))
9486        }
9487    }
9488
9489    /// Parse a copy statement
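    ///
    /// For example (a sketch; `COPY` is most commonly exercised with the
    /// PostgreSQL dialect):
    ///
    /// ```
    /// # use sqlparser::dialect::PostgreSqlDialect;
    /// # use sqlparser::parser::Parser;
    /// let sql = "COPY users (id, name) TO STDOUT";
    /// let statements = Parser::parse_sql(&PostgreSqlDialect {}, sql).unwrap();
    /// assert_eq!(statements.len(), 1);
    /// ```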
9490    pub fn parse_copy(&mut self) -> Result<Statement, ParserError> {
9491        let source;
9492        if self.consume_token(&Token::LParen) {
9493            source = CopySource::Query(self.parse_query()?);
9494            self.expect_token(&Token::RParen)?;
9495        } else {
9496            let table_name = self.parse_object_name(false)?;
9497            let columns = self.parse_parenthesized_column_list(Optional, false)?;
9498            source = CopySource::Table {
9499                table_name,
9500                columns,
9501            };
9502        }
9503        let to = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::TO]) {
9504            Some(Keyword::FROM) => false,
9505            Some(Keyword::TO) => true,
9506            _ => self.expected("FROM or TO", self.peek_token())?,
9507        };
9508        if !to {
9509            // Use a separate if statement to prevent the Rust compiler from complaining about
9510            // "if statement in this position is unstable": https://github.com/rust-lang/rust/issues/53667
9511            if let CopySource::Query(_) = source {
9512                return Err(ParserError::ParserError(
9513                    "COPY ... FROM does not support query as a source".to_string(),
9514                ));
9515            }
9516        }
9517        let target = if self.parse_keyword(Keyword::STDIN) {
9518            CopyTarget::Stdin
9519        } else if self.parse_keyword(Keyword::STDOUT) {
9520            CopyTarget::Stdout
9521        } else if self.parse_keyword(Keyword::PROGRAM) {
9522            CopyTarget::Program {
9523                command: self.parse_literal_string()?,
9524            }
9525        } else {
9526            CopyTarget::File {
9527                filename: self.parse_literal_string()?,
9528            }
9529        };
9530        let _ = self.parse_keyword(Keyword::WITH); // [ WITH ]
9531        let mut options = vec![];
9532        if self.consume_token(&Token::LParen) {
9533            options = self.parse_comma_separated(Parser::parse_copy_option)?;
9534            self.expect_token(&Token::RParen)?;
9535        }
9536        let mut legacy_options = vec![];
9537        while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
9538            legacy_options.push(opt);
9539        }
9540        let values = if let CopyTarget::Stdin = target {
9541            self.expect_token(&Token::SemiColon)?;
9542            self.parse_tsv()
9543        } else {
9544            vec![]
9545        };
9546        Ok(Statement::Copy {
9547            source,
9548            to,
9549            target,
9550            options,
9551            legacy_options,
9552            values,
9553        })
9554    }
9555
9556    /// Parse [Statement::Open]
9557    fn parse_open(&mut self) -> Result<Statement, ParserError> {
9558        self.expect_keyword(Keyword::OPEN)?;
9559        Ok(Statement::Open(OpenStatement {
9560            cursor_name: self.parse_identifier()?,
9561        }))
9562    }
9563
9564    pub fn parse_close(&mut self) -> Result<Statement, ParserError> {
9565        let cursor = if self.parse_keyword(Keyword::ALL) {
9566            CloseCursor::All
9567        } else {
9568            let name = self.parse_identifier()?;
9569
9570            CloseCursor::Specific { name }
9571        };
9572
9573        Ok(Statement::Close { cursor })
9574    }
9575
9576    fn parse_copy_option(&mut self) -> Result<CopyOption, ParserError> {
9577        let ret = match self.parse_one_of_keywords(&[
9578            Keyword::FORMAT,
9579            Keyword::FREEZE,
9580            Keyword::DELIMITER,
9581            Keyword::NULL,
9582            Keyword::HEADER,
9583            Keyword::QUOTE,
9584            Keyword::ESCAPE,
9585            Keyword::FORCE_QUOTE,
9586            Keyword::FORCE_NOT_NULL,
9587            Keyword::FORCE_NULL,
9588            Keyword::ENCODING,
9589        ]) {
9590            Some(Keyword::FORMAT) => CopyOption::Format(self.parse_identifier()?),
9591            Some(Keyword::FREEZE) => CopyOption::Freeze(!matches!(
9592                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
9593                Some(Keyword::FALSE)
9594            )),
9595            Some(Keyword::DELIMITER) => CopyOption::Delimiter(self.parse_literal_char()?),
9596            Some(Keyword::NULL) => CopyOption::Null(self.parse_literal_string()?),
9597            Some(Keyword::HEADER) => CopyOption::Header(!matches!(
9598                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
9599                Some(Keyword::FALSE)
9600            )),
9601            Some(Keyword::QUOTE) => CopyOption::Quote(self.parse_literal_char()?),
9602            Some(Keyword::ESCAPE) => CopyOption::Escape(self.parse_literal_char()?),
9603            Some(Keyword::FORCE_QUOTE) => {
9604                CopyOption::ForceQuote(self.parse_parenthesized_column_list(Mandatory, false)?)
9605            }
9606            Some(Keyword::FORCE_NOT_NULL) => {
9607                CopyOption::ForceNotNull(self.parse_parenthesized_column_list(Mandatory, false)?)
9608            }
9609            Some(Keyword::FORCE_NULL) => {
9610                CopyOption::ForceNull(self.parse_parenthesized_column_list(Mandatory, false)?)
9611            }
9612            Some(Keyword::ENCODING) => CopyOption::Encoding(self.parse_literal_string()?),
9613            _ => self.expected("option", self.peek_token())?,
9614        };
9615        Ok(ret)
9616    }
9617
9618    fn parse_copy_legacy_option(&mut self) -> Result<CopyLegacyOption, ParserError> {
9619        // FORMAT [ AS ] is optional
9620        if self.parse_keyword(Keyword::FORMAT) {
9621            let _ = self.parse_keyword(Keyword::AS);
9622        }
9623
9624        let ret = match self.parse_one_of_keywords(&[
9625            Keyword::ACCEPTANYDATE,
9626            Keyword::ACCEPTINVCHARS,
9627            Keyword::ADDQUOTES,
9628            Keyword::ALLOWOVERWRITE,
9629            Keyword::BINARY,
9630            Keyword::BLANKSASNULL,
9631            Keyword::BZIP2,
9632            Keyword::CLEANPATH,
9633            Keyword::CSV,
9634            Keyword::DATEFORMAT,
9635            Keyword::DELIMITER,
9636            Keyword::EMPTYASNULL,
9637            Keyword::ENCRYPTED,
9638            Keyword::ESCAPE,
9639            Keyword::EXTENSION,
9640            Keyword::FIXEDWIDTH,
9641            Keyword::GZIP,
9642            Keyword::HEADER,
9643            Keyword::IAM_ROLE,
9644            Keyword::IGNOREHEADER,
9645            Keyword::JSON,
9646            Keyword::MANIFEST,
9647            Keyword::MAXFILESIZE,
9648            Keyword::NULL,
9649            Keyword::PARALLEL,
9650            Keyword::PARQUET,
9651            Keyword::PARTITION,
9652            Keyword::REGION,
9653            Keyword::ROWGROUPSIZE,
9654            Keyword::TIMEFORMAT,
9655            Keyword::TRUNCATECOLUMNS,
9656            Keyword::ZSTD,
9657        ]) {
9658            Some(Keyword::ACCEPTANYDATE) => CopyLegacyOption::AcceptAnyDate,
9659            Some(Keyword::ACCEPTINVCHARS) => {
9660                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
9661                let ch = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) {
9662                    Some(self.parse_literal_string()?)
9663                } else {
9664                    None
9665                };
9666                CopyLegacyOption::AcceptInvChars(ch)
9667            }
9668            Some(Keyword::ADDQUOTES) => CopyLegacyOption::AddQuotes,
9669            Some(Keyword::ALLOWOVERWRITE) => CopyLegacyOption::AllowOverwrite,
9670            Some(Keyword::BINARY) => CopyLegacyOption::Binary,
9671            Some(Keyword::BLANKSASNULL) => CopyLegacyOption::BlankAsNull,
9672            Some(Keyword::BZIP2) => CopyLegacyOption::Bzip2,
9673            Some(Keyword::CLEANPATH) => CopyLegacyOption::CleanPath,
9674            Some(Keyword::CSV) => CopyLegacyOption::Csv({
9675                let mut opts = vec![];
9676                while let Some(opt) =
9677                    self.maybe_parse(|parser| parser.parse_copy_legacy_csv_option())?
9678                {
9679                    opts.push(opt);
9680                }
9681                opts
9682            }),
9683            Some(Keyword::DATEFORMAT) => {
9684                let _ = self.parse_keyword(Keyword::AS);
9685                let fmt = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) {
9686                    Some(self.parse_literal_string()?)
9687                } else {
9688                    None
9689                };
9690                CopyLegacyOption::DateFormat(fmt)
9691            }
9692            Some(Keyword::DELIMITER) => {
9693                let _ = self.parse_keyword(Keyword::AS);
9694                CopyLegacyOption::Delimiter(self.parse_literal_char()?)
9695            }
9696            Some(Keyword::EMPTYASNULL) => CopyLegacyOption::EmptyAsNull,
9697            Some(Keyword::ENCRYPTED) => {
9698                let auto = self.parse_keyword(Keyword::AUTO);
9699                CopyLegacyOption::Encrypted { auto }
9700            }
9701            Some(Keyword::ESCAPE) => CopyLegacyOption::Escape,
9702            Some(Keyword::EXTENSION) => {
9703                let ext = self.parse_literal_string()?;
9704                CopyLegacyOption::Extension(ext)
9705            }
9706            Some(Keyword::FIXEDWIDTH) => {
9707                let spec = self.parse_literal_string()?;
9708                CopyLegacyOption::FixedWidth(spec)
9709            }
9710            Some(Keyword::GZIP) => CopyLegacyOption::Gzip,
9711            Some(Keyword::HEADER) => CopyLegacyOption::Header,
9712            Some(Keyword::IAM_ROLE) => CopyLegacyOption::IamRole(self.parse_iam_role_kind()?),
9713            Some(Keyword::IGNOREHEADER) => {
9714                let _ = self.parse_keyword(Keyword::AS);
9715                let num_rows = self.parse_literal_uint()?;
9716                CopyLegacyOption::IgnoreHeader(num_rows)
9717            }
9718            Some(Keyword::JSON) => CopyLegacyOption::Json,
9719            Some(Keyword::MANIFEST) => {
9720                let verbose = self.parse_keyword(Keyword::VERBOSE);
9721                CopyLegacyOption::Manifest { verbose }
9722            }
9723            Some(Keyword::MAXFILESIZE) => {
9724                let _ = self.parse_keyword(Keyword::AS);
9725                let size = self.parse_number_value()?.value;
9726                let unit = match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
9727                    Some(Keyword::MB) => Some(FileSizeUnit::MB),
9728                    Some(Keyword::GB) => Some(FileSizeUnit::GB),
9729                    _ => None,
9730                };
9731                CopyLegacyOption::MaxFileSize(FileSize { size, unit })
9732            }
9733            Some(Keyword::NULL) => {
9734                let _ = self.parse_keyword(Keyword::AS);
9735                CopyLegacyOption::Null(self.parse_literal_string()?)
9736            }
9737            Some(Keyword::PARALLEL) => {
9738                let enabled = match self.parse_one_of_keywords(&[
9739                    Keyword::TRUE,
9740                    Keyword::FALSE,
9741                    Keyword::ON,
9742                    Keyword::OFF,
9743                ]) {
9744                    Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
9745                    Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
9746                    _ => None,
9747                };
9748                CopyLegacyOption::Parallel(enabled)
9749            }
9750            Some(Keyword::PARQUET) => CopyLegacyOption::Parquet,
9751            Some(Keyword::PARTITION) => {
9752                self.expect_keyword(Keyword::BY)?;
9753                let columns = self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?;
9754                let include = self.parse_keyword(Keyword::INCLUDE);
9755                CopyLegacyOption::PartitionBy(UnloadPartitionBy { columns, include })
9756            }
9757            Some(Keyword::REGION) => {
9758                let _ = self.parse_keyword(Keyword::AS);
9759                let region = self.parse_literal_string()?;
9760                CopyLegacyOption::Region(region)
9761            }
9762            Some(Keyword::ROWGROUPSIZE) => {
9763                let _ = self.parse_keyword(Keyword::AS);
9764                let file_size = self.parse_file_size()?;
9765                CopyLegacyOption::RowGroupSize(file_size)
9766            }
9767            Some(Keyword::TIMEFORMAT) => {
9768                let _ = self.parse_keyword(Keyword::AS);
9769                let fmt = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) {
9770                    Some(self.parse_literal_string()?)
9771                } else {
9772                    None
9773                };
9774                CopyLegacyOption::TimeFormat(fmt)
9775            }
9776            Some(Keyword::TRUNCATECOLUMNS) => CopyLegacyOption::TruncateColumns,
9777            Some(Keyword::ZSTD) => CopyLegacyOption::Zstd,
9778            _ => self.expected("option", self.peek_token())?,
9779        };
9780        Ok(ret)
9781    }
9782
9783    fn parse_file_size(&mut self) -> Result<FileSize, ParserError> {
9784        let size = self.parse_number_value()?.value;
9785        let unit = self.maybe_parse_file_size_unit();
9786        Ok(FileSize { size, unit })
9787    }
9788
9789    fn maybe_parse_file_size_unit(&mut self) -> Option<FileSizeUnit> {
9790        match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
9791            Some(Keyword::MB) => Some(FileSizeUnit::MB),
9792            Some(Keyword::GB) => Some(FileSizeUnit::GB),
9793            _ => None,
9794        }
9795    }
9796
9797    fn parse_iam_role_kind(&mut self) -> Result<IamRoleKind, ParserError> {
9798        if self.parse_keyword(Keyword::DEFAULT) {
9799            Ok(IamRoleKind::Default)
9800        } else {
9801            let arn = self.parse_literal_string()?;
9802            Ok(IamRoleKind::Arn(arn))
9803        }
9804    }
9805
9806    fn parse_copy_legacy_csv_option(&mut self) -> Result<CopyLegacyCsvOption, ParserError> {
9807        let ret = match self.parse_one_of_keywords(&[
9808            Keyword::HEADER,
9809            Keyword::QUOTE,
9810            Keyword::ESCAPE,
9811            Keyword::FORCE,
9812        ]) {
9813            Some(Keyword::HEADER) => CopyLegacyCsvOption::Header,
9814            Some(Keyword::QUOTE) => {
9815                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
9816                CopyLegacyCsvOption::Quote(self.parse_literal_char()?)
9817            }
9818            Some(Keyword::ESCAPE) => {
9819                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
9820                CopyLegacyCsvOption::Escape(self.parse_literal_char()?)
9821            }
9822            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) => {
9823                CopyLegacyCsvOption::ForceNotNull(
9824                    self.parse_comma_separated(|p| p.parse_identifier())?,
9825                )
9826            }
9827            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::QUOTE]) => {
9828                CopyLegacyCsvOption::ForceQuote(
9829                    self.parse_comma_separated(|p| p.parse_identifier())?,
9830                )
9831            }
9832            _ => self.expected("csv option", self.peek_token())?,
9833        };
9834        Ok(ret)
9835    }
9836
9837    fn parse_literal_char(&mut self) -> Result<char, ParserError> {
9838        let s = self.parse_literal_string()?;
9839        if s.len() != 1 {
9840            let loc = self
9841                .tokens
9842                .get(self.index - 1)
9843                .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
9844            return parser_err!(format!("Expected a char, found {s:?}"), loc);
9845        }
9846        Ok(s.chars().next().unwrap())
9847    }
9848
9849    /// Parse tab-separated values in a
9850    /// COPY payload
9851    pub fn parse_tsv(&mut self) -> Vec<Option<String>> {
9852        self.parse_tab_value()
9853    }
9854
9855    pub fn parse_tab_value(&mut self) -> Vec<Option<String>> {
9856        let mut values = vec![];
9857        let mut content = String::from("");
9858        while let Some(t) = self.next_token_no_skip().map(|t| &t.token) {
9859            match t {
9860                Token::Whitespace(Whitespace::Tab) => {
9861                    values.push(Some(content.to_string()));
9862                    content.clear();
9863                }
9864                Token::Whitespace(Whitespace::Newline) => {
9865                    values.push(Some(content.to_string()));
9866                    content.clear();
9867                }
9868                Token::Backslash => {
9869                    if self.consume_token(&Token::Period) {
9870                        return values;
9871                    }
9872                    if let Token::Word(w) = self.next_token().token {
9873                        if w.value == "N" {
9874                            values.push(None);
9875                        }
9876                    }
9877                }
9878                _ => {
9879                    content.push_str(&t.to_string());
9880                }
9881            }
9882        }
9883        values
9884    }
9885
9886    /// Parse a literal value (numbers, strings, date/time, booleans)
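    ///
    /// A direct-usage sketch (assumes the `Parser::new` / `try_with_sql`
    /// builder API; the parser must be positioned at the start of the literal):
    ///
    /// ```
    /// # use sqlparser::ast::Value;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("'hello'").unwrap();
    /// let value = parser.parse_value().unwrap();
    /// assert_eq!(value.value, Value::SingleQuotedString("hello".to_string()));
    /// ```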
9887    pub fn parse_value(&mut self) -> Result<ValueWithSpan, ParserError> {
9888        let next_token = self.next_token();
9889        let span = next_token.span;
9890        let ok_value = |value: Value| Ok(value.with_span(span));
9891        match next_token.token {
9892            Token::Word(w) => match w.keyword {
9893                Keyword::TRUE if self.dialect.supports_boolean_literals() => {
9894                    ok_value(Value::Boolean(true))
9895                }
9896                Keyword::FALSE if self.dialect.supports_boolean_literals() => {
9897                    ok_value(Value::Boolean(false))
9898                }
9899                Keyword::NULL => ok_value(Value::Null),
9900                Keyword::NoKeyword if w.quote_style.is_some() => match w.quote_style {
9901                    Some('"') => ok_value(Value::DoubleQuotedString(w.value)),
9902                    Some('\'') => ok_value(Value::SingleQuotedString(w.value)),
9903                    _ => self.expected(
9904                        "A value?",
9905                        TokenWithSpan {
9906                            token: Token::Word(w),
9907                            span,
9908                        },
9909                    )?,
9910                },
9911                _ => self.expected(
9912                    "a concrete value",
9913                    TokenWithSpan {
9914                        token: Token::Word(w),
9915                        span,
9916                    },
9917                ),
9918            },
9919            // The call to n.parse() returns a bigdecimal when the
9920            // bigdecimal feature is enabled, and is otherwise a no-op
9921            // (i.e., it returns the input string).
9922            Token::Number(n, l) => ok_value(Value::Number(Self::parse(n, span.start)?, l)),
9923            Token::SingleQuotedString(ref s) => ok_value(Value::SingleQuotedString(
9924                self.maybe_concat_string_literal(s.to_string()),
9925            )),
9926            Token::DoubleQuotedString(ref s) => ok_value(Value::DoubleQuotedString(
9927                self.maybe_concat_string_literal(s.to_string()),
9928            )),
9929            Token::TripleSingleQuotedString(ref s) => {
9930                ok_value(Value::TripleSingleQuotedString(s.to_string()))
9931            }
9932            Token::TripleDoubleQuotedString(ref s) => {
9933                ok_value(Value::TripleDoubleQuotedString(s.to_string()))
9934            }
9935            Token::DollarQuotedString(ref s) => ok_value(Value::DollarQuotedString(s.clone())),
9936            Token::SingleQuotedByteStringLiteral(ref s) => {
9937                ok_value(Value::SingleQuotedByteStringLiteral(s.clone()))
9938            }
9939            Token::DoubleQuotedByteStringLiteral(ref s) => {
9940                ok_value(Value::DoubleQuotedByteStringLiteral(s.clone()))
9941            }
9942            Token::TripleSingleQuotedByteStringLiteral(ref s) => {
9943                ok_value(Value::TripleSingleQuotedByteStringLiteral(s.clone()))
9944            }
9945            Token::TripleDoubleQuotedByteStringLiteral(ref s) => {
9946                ok_value(Value::TripleDoubleQuotedByteStringLiteral(s.clone()))
9947            }
9948            Token::SingleQuotedRawStringLiteral(ref s) => {
9949                ok_value(Value::SingleQuotedRawStringLiteral(s.clone()))
9950            }
9951            Token::DoubleQuotedRawStringLiteral(ref s) => {
9952                ok_value(Value::DoubleQuotedRawStringLiteral(s.clone()))
9953            }
9954            Token::TripleSingleQuotedRawStringLiteral(ref s) => {
9955                ok_value(Value::TripleSingleQuotedRawStringLiteral(s.clone()))
9956            }
9957            Token::TripleDoubleQuotedRawStringLiteral(ref s) => {
9958                ok_value(Value::TripleDoubleQuotedRawStringLiteral(s.clone()))
9959            }
9960            Token::NationalStringLiteral(ref s) => {
9961                ok_value(Value::NationalStringLiteral(s.to_string()))
9962            }
9963            Token::EscapedStringLiteral(ref s) => {
9964                ok_value(Value::EscapedStringLiteral(s.to_string()))
9965            }
9966            Token::UnicodeStringLiteral(ref s) => {
9967                ok_value(Value::UnicodeStringLiteral(s.to_string()))
9968            }
9969            Token::HexStringLiteral(ref s) => ok_value(Value::HexStringLiteral(s.to_string())),
9970            Token::Placeholder(ref s) => ok_value(Value::Placeholder(s.to_string())),
9971            tok @ Token::Colon | tok @ Token::AtSign => {
9972                // 1. Not calling self.parse_identifier()?
9973                //    because only for placeholders do we want to accept
9974                //    numbers as identifiers; this is because Snowflake
9975                //    allows numbers as placeholders.
9976                // 2. Not calling self.next_token() in order to enforce that
9977                //    `tok` is followed immediately by a word/number, i.e.
9978                //    without any whitespace in between.
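                // For example, `:name` and `@1` each produce a single
                // Value::Placeholder (":name" / "@1") whose span covers the
                // sigil and the word or number that follows it.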
9979                let next_token = self.next_token_no_skip().unwrap_or(&EOF_TOKEN).clone();
9980                let ident = match next_token.token {
9981                    Token::Word(w) => Ok(w.into_ident(next_token.span)),
9982                    Token::Number(w, false) => Ok(Ident::with_span(next_token.span, w)),
9983                    _ => self.expected("placeholder", next_token),
9984                }?;
9985                Ok(Value::Placeholder(tok.to_string() + &ident.value)
9986                    .with_span(Span::new(span.start, ident.span.end)))
9987            }
9988            unexpected => self.expected(
9989                "a value",
9990                TokenWithSpan {
9991                    token: unexpected,
9992                    span,
9993                },
9994            ),
9995        }
9996    }
9997
9998    fn maybe_concat_string_literal(&mut self, mut str: String) -> String {
9999        if self.dialect.supports_string_literal_concatenation() {
10000            while let Token::SingleQuotedString(ref s) | Token::DoubleQuotedString(ref s) =
10001                self.peek_token_ref().token
10002            {
10003                str.push_str(s.clone().as_str());
10004                self.advance_token();
10005            }
10006        }
10007        str
10008    }
10009
10010    /// Parse an unsigned numeric literal
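    ///
    /// For example (a sketch using the `try_with_sql` builder):
    ///
    /// ```
    /// # use sqlparser::ast::Value;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("1000").unwrap();
    /// let number = parser.parse_number_value().unwrap();
    /// assert!(matches!(number.value, Value::Number(_, _)));
    /// ```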
10011    pub fn parse_number_value(&mut self) -> Result<ValueWithSpan, ParserError> {
10012        let value_wrapper = self.parse_value()?;
10013        match &value_wrapper.value {
10014            Value::Number(_, _) => Ok(value_wrapper),
10015            Value::Placeholder(_) => Ok(value_wrapper),
10016            _ => {
10017                self.prev_token();
10018                self.expected("literal number", self.peek_token())
10019            }
10020        }
10021    }
10022
10023    /// Parse a numeric literal as an expression. Returns an [`Expr::UnaryOp`] if the number is signed,
10024    /// otherwise an [`Expr::Value`]
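    ///
    /// For example (a sketch using the `try_with_sql` builder):
    ///
    /// ```
    /// # use sqlparser::ast::Expr;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("-42").unwrap();
    /// let expr = parser.parse_number().unwrap();
    /// assert!(matches!(expr, Expr::UnaryOp { .. }));
    /// ```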
10025    pub fn parse_number(&mut self) -> Result<Expr, ParserError> {
10026        let next_token = self.next_token();
10027        match next_token.token {
10028            Token::Plus => Ok(Expr::UnaryOp {
10029                op: UnaryOperator::Plus,
10030                expr: Box::new(Expr::Value(self.parse_number_value()?)),
10031            }),
10032            Token::Minus => Ok(Expr::UnaryOp {
10033                op: UnaryOperator::Minus,
10034                expr: Box::new(Expr::Value(self.parse_number_value()?)),
10035            }),
10036            _ => {
10037                self.prev_token();
10038                Ok(Expr::Value(self.parse_number_value()?))
10039            }
10040        }
10041    }
10042
10043    fn parse_introduced_string_expr(&mut self) -> Result<Expr, ParserError> {
10044        let next_token = self.next_token();
10045        let span = next_token.span;
10046        match next_token.token {
10047            Token::SingleQuotedString(ref s) => Ok(Expr::Value(
10048                Value::SingleQuotedString(s.to_string()).with_span(span),
10049            )),
10050            Token::DoubleQuotedString(ref s) => Ok(Expr::Value(
10051                Value::DoubleQuotedString(s.to_string()).with_span(span),
10052            )),
10053            Token::HexStringLiteral(ref s) => Ok(Expr::Value(
10054                Value::HexStringLiteral(s.to_string()).with_span(span),
10055            )),
10056            unexpected => self.expected(
10057                "a string value",
10058                TokenWithSpan {
10059                    token: unexpected,
10060                    span,
10061                },
10062            ),
10063        }
10064    }
10065
10066    /// Parse an unsigned literal integer/long
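    ///
    /// For example (a sketch using the `try_with_sql` builder):
    ///
    /// ```
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("42").unwrap();
    /// assert_eq!(parser.parse_literal_uint().unwrap(), 42);
    /// ```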
10067    pub fn parse_literal_uint(&mut self) -> Result<u64, ParserError> {
10068        let next_token = self.next_token();
10069        match next_token.token {
10070            Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start),
10071            _ => self.expected("literal int", next_token),
10072        }
10073    }
10074
10075    /// Parse the body of a `CREATE FUNCTION` specified as a string.
10076    /// e.g. `CREATE FUNCTION ... AS $$ body $$`.
10077    fn parse_create_function_body_string(&mut self) -> Result<Expr, ParserError> {
10078        let peek_token = self.peek_token();
10079        let span = peek_token.span;
10080        match peek_token.token {
10081            Token::DollarQuotedString(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
10082            {
10083                self.next_token();
10084                Ok(Expr::Value(Value::DollarQuotedString(s).with_span(span)))
10085            }
10086            _ => Ok(Expr::Value(
10087                Value::SingleQuotedString(self.parse_literal_string()?).with_span(span),
10088            )),
10089        }
10090    }
10091
10092    /// Parse a literal string
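    ///
    /// For example (a sketch using the `try_with_sql` builder; unquoted words,
    /// single-quoted, and double-quoted strings are all accepted):
    ///
    /// ```
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("'hello'").unwrap();
    /// assert_eq!(parser.parse_literal_string().unwrap(), "hello");
    /// ```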
10093    pub fn parse_literal_string(&mut self) -> Result<String, ParserError> {
10094        let next_token = self.next_token();
10095        match next_token.token {
10096            Token::Word(Word {
10097                value,
10098                keyword: Keyword::NoKeyword,
10099                ..
10100            }) => Ok(value),
10101            Token::SingleQuotedString(s) => Ok(s),
10102            Token::DoubleQuotedString(s) => Ok(s),
10103            Token::EscapedStringLiteral(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
10104                Ok(s)
10105            }
10106            Token::UnicodeStringLiteral(s) => Ok(s),
10107            _ => self.expected("literal string", next_token),
10108        }
10109    }
10110
10111    /// Parse a boolean keyword: `TRUE` or `FALSE`
10112    pub(crate) fn parse_boolean_string(&mut self) -> Result<bool, ParserError> {
10113        match self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]) {
10114            Some(Keyword::TRUE) => Ok(true),
10115            Some(Keyword::FALSE) => Ok(false),
10116            _ => self.expected("TRUE or FALSE", self.peek_token()),
10117        }
10118    }
10119
10120    /// Parse an `IS [NOT] [NFC | NFD | NFKC | NFKD] NORMALIZED` clause applied to `expr` (the `IS` has already been consumed)
10121    pub fn parse_unicode_is_normalized(&mut self, expr: Expr) -> Result<Expr, ParserError> {
10122        let neg = self.parse_keyword(Keyword::NOT);
10123        let normalized_form = self.maybe_parse(|parser| {
10124            match parser.parse_one_of_keywords(&[
10125                Keyword::NFC,
10126                Keyword::NFD,
10127                Keyword::NFKC,
10128                Keyword::NFKD,
10129            ]) {
10130                Some(Keyword::NFC) => Ok(NormalizationForm::NFC),
10131                Some(Keyword::NFD) => Ok(NormalizationForm::NFD),
10132                Some(Keyword::NFKC) => Ok(NormalizationForm::NFKC),
10133                Some(Keyword::NFKD) => Ok(NormalizationForm::NFKD),
10134                _ => parser.expected("unicode normalization form", parser.peek_token()),
10135            }
10136        })?;
10137        if self.parse_keyword(Keyword::NORMALIZED) {
10138            return Ok(Expr::IsNormalized {
10139                expr: Box::new(expr),
10140                form: normalized_form,
10141                negated: neg,
10142            });
10143        }
10144        self.expected("unicode normalization form", self.peek_token())
10145    }
10146
10147    pub fn parse_enum_values(&mut self) -> Result<Vec<EnumMember>, ParserError> {
10148        self.expect_token(&Token::LParen)?;
10149        let values = self.parse_comma_separated(|parser| {
10150            let name = parser.parse_literal_string()?;
10151            let e = if parser.consume_token(&Token::Eq) {
10152                let value = parser.parse_number()?;
10153                EnumMember::NamedValue(name, value)
10154            } else {
10155                EnumMember::Name(name)
10156            };
10157            Ok(e)
10158        })?;
10159        self.expect_token(&Token::RParen)?;
10160
10161        Ok(values)
10162    }
10163
10164    /// Parse a SQL datatype (in the context of a CREATE TABLE statement for example)
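    ///
    /// For example (a sketch using the `try_with_sql` builder; the exact
    /// variants accepted depend on the dialect):
    ///
    /// ```
    /// # use sqlparser::ast::DataType;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("BIGINT UNSIGNED").unwrap();
    /// assert_eq!(parser.parse_data_type().unwrap(), DataType::BigIntUnsigned(None));
    /// ```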
10165    pub fn parse_data_type(&mut self) -> Result<DataType, ParserError> {
10166        let (ty, trailing_bracket) = self.parse_data_type_helper()?;
10167        if trailing_bracket.0 {
10168            return parser_err!(
10169                format!("unmatched > after parsing data type {ty}"),
10170                self.peek_token()
10171            );
10172        }
10173
10174        Ok(ty)
10175    }
10176
10177    fn parse_data_type_helper(
10178        &mut self,
10179    ) -> Result<(DataType, MatchedTrailingBracket), ParserError> {
10180        let dialect = self.dialect;
10181        self.advance_token();
10182        let next_token = self.get_current_token();
10183        let next_token_index = self.get_current_index();
10184
10185        let mut trailing_bracket: MatchedTrailingBracket = false.into();
10186        let mut data = match &next_token.token {
10187            Token::Word(w) => match w.keyword {
10188                Keyword::BOOLEAN => Ok(DataType::Boolean),
10189                Keyword::BOOL => Ok(DataType::Bool),
10190                Keyword::FLOAT => {
10191                    let precision = self.parse_exact_number_optional_precision_scale()?;
10192
10193                    if self.parse_keyword(Keyword::UNSIGNED) {
10194                        Ok(DataType::FloatUnsigned(precision))
10195                    } else {
10196                        Ok(DataType::Float(precision))
10197                    }
10198                }
10199                Keyword::REAL => {
10200                    if self.parse_keyword(Keyword::UNSIGNED) {
10201                        Ok(DataType::RealUnsigned)
10202                    } else {
10203                        Ok(DataType::Real)
10204                    }
10205                }
10206                Keyword::FLOAT4 => Ok(DataType::Float4),
10207                Keyword::FLOAT32 => Ok(DataType::Float32),
10208                Keyword::FLOAT64 => Ok(DataType::Float64),
10209                Keyword::FLOAT8 => Ok(DataType::Float8),
10210                Keyword::DOUBLE => {
10211                    if self.parse_keyword(Keyword::PRECISION) {
10212                        if self.parse_keyword(Keyword::UNSIGNED) {
10213                            Ok(DataType::DoublePrecisionUnsigned)
10214                        } else {
10215                            Ok(DataType::DoublePrecision)
10216                        }
10217                    } else {
10218                        let precision = self.parse_exact_number_optional_precision_scale()?;
10219
10220                        if self.parse_keyword(Keyword::UNSIGNED) {
10221                            Ok(DataType::DoubleUnsigned(precision))
10222                        } else {
10223                            Ok(DataType::Double(precision))
10224                        }
10225                    }
10226                }
10227                Keyword::TINYINT => {
10228                    let optional_precision = self.parse_optional_precision();
10229                    if self.parse_keyword(Keyword::UNSIGNED) {
10230                        Ok(DataType::TinyIntUnsigned(optional_precision?))
10231                    } else {
10232                        if dialect.supports_data_type_signed_suffix() {
10233                            let _ = self.parse_keyword(Keyword::SIGNED);
10234                        }
10235                        Ok(DataType::TinyInt(optional_precision?))
10236                    }
10237                }
10238                Keyword::INT2 => {
10239                    let optional_precision = self.parse_optional_precision();
10240                    if self.parse_keyword(Keyword::UNSIGNED) {
10241                        Ok(DataType::Int2Unsigned(optional_precision?))
10242                    } else {
10243                        Ok(DataType::Int2(optional_precision?))
10244                    }
10245                }
10246                Keyword::SMALLINT => {
10247                    let optional_precision = self.parse_optional_precision();
10248                    if self.parse_keyword(Keyword::UNSIGNED) {
10249                        Ok(DataType::SmallIntUnsigned(optional_precision?))
10250                    } else {
10251                        if dialect.supports_data_type_signed_suffix() {
10252                            let _ = self.parse_keyword(Keyword::SIGNED);
10253                        }
10254                        Ok(DataType::SmallInt(optional_precision?))
10255                    }
10256                }
10257                Keyword::MEDIUMINT => {
10258                    let optional_precision = self.parse_optional_precision();
10259                    if self.parse_keyword(Keyword::UNSIGNED) {
10260                        Ok(DataType::MediumIntUnsigned(optional_precision?))
10261                    } else {
10262                        if dialect.supports_data_type_signed_suffix() {
10263                            let _ = self.parse_keyword(Keyword::SIGNED);
10264                        }
10265                        Ok(DataType::MediumInt(optional_precision?))
10266                    }
10267                }
10268                Keyword::INT => {
10269                    let optional_precision = self.parse_optional_precision();
10270                    if self.parse_keyword(Keyword::UNSIGNED) {
10271                        Ok(DataType::IntUnsigned(optional_precision?))
10272                    } else {
10273                        if dialect.supports_data_type_signed_suffix() {
10274                            let _ = self.parse_keyword(Keyword::SIGNED);
10275                        }
10276                        Ok(DataType::Int(optional_precision?))
10277                    }
10278                }
10279                Keyword::INT4 => {
10280                    let optional_precision = self.parse_optional_precision();
10281                    if self.parse_keyword(Keyword::UNSIGNED) {
10282                        Ok(DataType::Int4Unsigned(optional_precision?))
10283                    } else {
10284                        Ok(DataType::Int4(optional_precision?))
10285                    }
10286                }
10287                Keyword::INT8 => {
10288                    let optional_precision = self.parse_optional_precision();
10289                    if self.parse_keyword(Keyword::UNSIGNED) {
10290                        Ok(DataType::Int8Unsigned(optional_precision?))
10291                    } else {
10292                        Ok(DataType::Int8(optional_precision?))
10293                    }
10294                }
10295                Keyword::INT16 => Ok(DataType::Int16),
10296                Keyword::INT32 => Ok(DataType::Int32),
10297                Keyword::INT64 => Ok(DataType::Int64),
10298                Keyword::INT128 => Ok(DataType::Int128),
10299                Keyword::INT256 => Ok(DataType::Int256),
10300                Keyword::INTEGER => {
10301                    let optional_precision = self.parse_optional_precision();
10302                    if self.parse_keyword(Keyword::UNSIGNED) {
10303                        Ok(DataType::IntegerUnsigned(optional_precision?))
10304                    } else {
10305                        if dialect.supports_data_type_signed_suffix() {
10306                            let _ = self.parse_keyword(Keyword::SIGNED);
10307                        }
10308                        Ok(DataType::Integer(optional_precision?))
10309                    }
10310                }
10311                Keyword::BIGINT => {
10312                    let optional_precision = self.parse_optional_precision();
10313                    if self.parse_keyword(Keyword::UNSIGNED) {
10314                        Ok(DataType::BigIntUnsigned(optional_precision?))
10315                    } else {
10316                        if dialect.supports_data_type_signed_suffix() {
10317                            let _ = self.parse_keyword(Keyword::SIGNED);
10318                        }
10319                        Ok(DataType::BigInt(optional_precision?))
10320                    }
10321                }
10322                Keyword::HUGEINT => Ok(DataType::HugeInt),
10323                Keyword::UBIGINT => Ok(DataType::UBigInt),
10324                Keyword::UHUGEINT => Ok(DataType::UHugeInt),
10325                Keyword::USMALLINT => Ok(DataType::USmallInt),
10326                Keyword::UTINYINT => Ok(DataType::UTinyInt),
10327                Keyword::UINT8 => Ok(DataType::UInt8),
10328                Keyword::UINT16 => Ok(DataType::UInt16),
10329                Keyword::UINT32 => Ok(DataType::UInt32),
10330                Keyword::UINT64 => Ok(DataType::UInt64),
10331                Keyword::UINT128 => Ok(DataType::UInt128),
10332                Keyword::UINT256 => Ok(DataType::UInt256),
10333                Keyword::VARCHAR => Ok(DataType::Varchar(self.parse_optional_character_length()?)),
10334                Keyword::NVARCHAR => {
10335                    Ok(DataType::Nvarchar(self.parse_optional_character_length()?))
10336                }
10337                Keyword::CHARACTER => {
10338                    if self.parse_keyword(Keyword::VARYING) {
10339                        Ok(DataType::CharacterVarying(
10340                            self.parse_optional_character_length()?,
10341                        ))
10342                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
10343                        Ok(DataType::CharacterLargeObject(
10344                            self.parse_optional_precision()?,
10345                        ))
10346                    } else {
10347                        Ok(DataType::Character(self.parse_optional_character_length()?))
10348                    }
10349                }
10350                Keyword::CHAR => {
10351                    if self.parse_keyword(Keyword::VARYING) {
10352                        Ok(DataType::CharVarying(
10353                            self.parse_optional_character_length()?,
10354                        ))
10355                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
10356                        Ok(DataType::CharLargeObject(self.parse_optional_precision()?))
10357                    } else {
10358                        Ok(DataType::Char(self.parse_optional_character_length()?))
10359                    }
10360                }
10361                Keyword::CLOB => Ok(DataType::Clob(self.parse_optional_precision()?)),
10362                Keyword::BINARY => Ok(DataType::Binary(self.parse_optional_precision()?)),
10363                Keyword::VARBINARY => Ok(DataType::Varbinary(self.parse_optional_binary_length()?)),
10364                Keyword::BLOB => Ok(DataType::Blob(self.parse_optional_precision()?)),
10365                Keyword::TINYBLOB => Ok(DataType::TinyBlob),
10366                Keyword::MEDIUMBLOB => Ok(DataType::MediumBlob),
10367                Keyword::LONGBLOB => Ok(DataType::LongBlob),
10368                Keyword::BYTES => Ok(DataType::Bytes(self.parse_optional_precision()?)),
10369                Keyword::BIT => {
10370                    if self.parse_keyword(Keyword::VARYING) {
10371                        Ok(DataType::BitVarying(self.parse_optional_precision()?))
10372                    } else {
10373                        Ok(DataType::Bit(self.parse_optional_precision()?))
10374                    }
10375                }
10376                Keyword::VARBIT => Ok(DataType::VarBit(self.parse_optional_precision()?)),
10377                Keyword::UUID => Ok(DataType::Uuid),
10378                Keyword::DATE => Ok(DataType::Date),
10379                Keyword::DATE32 => Ok(DataType::Date32),
10380                Keyword::DATETIME => Ok(DataType::Datetime(self.parse_optional_precision()?)),
10381                Keyword::DATETIME64 => {
10382                    self.prev_token();
10383                    let (precision, time_zone) = self.parse_datetime_64()?;
10384                    Ok(DataType::Datetime64(precision, time_zone))
10385                }
10386                Keyword::TIMESTAMP => {
10387                    let precision = self.parse_optional_precision()?;
10388                    let tz = if self.parse_keyword(Keyword::WITH) {
10389                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
10390                        TimezoneInfo::WithTimeZone
10391                    } else if self.parse_keyword(Keyword::WITHOUT) {
10392                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
10393                        TimezoneInfo::WithoutTimeZone
10394                    } else {
10395                        TimezoneInfo::None
10396                    };
10397                    Ok(DataType::Timestamp(precision, tz))
10398                }
10399                Keyword::TIMESTAMPTZ => Ok(DataType::Timestamp(
10400                    self.parse_optional_precision()?,
10401                    TimezoneInfo::Tz,
10402                )),
10403                Keyword::TIMESTAMP_NTZ => Ok(DataType::TimestampNtz),
10404                Keyword::TIME => {
10405                    let precision = self.parse_optional_precision()?;
10406                    let tz = if self.parse_keyword(Keyword::WITH) {
10407                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
10408                        TimezoneInfo::WithTimeZone
10409                    } else if self.parse_keyword(Keyword::WITHOUT) {
10410                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
10411                        TimezoneInfo::WithoutTimeZone
10412                    } else {
10413                        TimezoneInfo::None
10414                    };
10415                    Ok(DataType::Time(precision, tz))
10416                }
10417                Keyword::TIMETZ => Ok(DataType::Time(
10418                    self.parse_optional_precision()?,
10419                    TimezoneInfo::Tz,
10420                )),
10421                Keyword::INTERVAL => {
10422                    if self.dialect.supports_interval_options() {
10423                        let fields = self.maybe_parse_optional_interval_fields()?;
10424                        let precision = self.parse_optional_precision()?;
10425                        Ok(DataType::Interval { fields, precision })
10426                    } else {
10427                        Ok(DataType::Interval {
10428                            fields: None,
10429                            precision: None,
10430                        })
10431                    }
10432                }
10433                Keyword::JSON => Ok(DataType::JSON),
10434                Keyword::JSONB => Ok(DataType::JSONB),
10435                Keyword::REGCLASS => Ok(DataType::Regclass),
10436                Keyword::STRING => Ok(DataType::String(self.parse_optional_precision()?)),
10437                Keyword::FIXEDSTRING => {
10438                    self.expect_token(&Token::LParen)?;
10439                    let character_length = self.parse_literal_uint()?;
10440                    self.expect_token(&Token::RParen)?;
10441                    Ok(DataType::FixedString(character_length))
10442                }
10443                Keyword::TEXT => Ok(DataType::Text),
10444                Keyword::TINYTEXT => Ok(DataType::TinyText),
10445                Keyword::MEDIUMTEXT => Ok(DataType::MediumText),
10446                Keyword::LONGTEXT => Ok(DataType::LongText),
10447                Keyword::BYTEA => Ok(DataType::Bytea),
10448                Keyword::NUMERIC => Ok(DataType::Numeric(
10449                    self.parse_exact_number_optional_precision_scale()?,
10450                )),
10451                Keyword::DECIMAL => {
10452                    let precision = self.parse_exact_number_optional_precision_scale()?;
10453
10454                    if self.parse_keyword(Keyword::UNSIGNED) {
10455                        Ok(DataType::DecimalUnsigned(precision))
10456                    } else {
10457                        Ok(DataType::Decimal(precision))
10458                    }
10459                }
10460                Keyword::DEC => {
10461                    let precision = self.parse_exact_number_optional_precision_scale()?;
10462
10463                    if self.parse_keyword(Keyword::UNSIGNED) {
10464                        Ok(DataType::DecUnsigned(precision))
10465                    } else {
10466                        Ok(DataType::Dec(precision))
10467                    }
10468                }
10469                Keyword::BIGNUMERIC => Ok(DataType::BigNumeric(
10470                    self.parse_exact_number_optional_precision_scale()?,
10471                )),
10472                Keyword::BIGDECIMAL => Ok(DataType::BigDecimal(
10473                    self.parse_exact_number_optional_precision_scale()?,
10474                )),
10475                Keyword::ENUM => Ok(DataType::Enum(self.parse_enum_values()?, None)),
10476                Keyword::ENUM8 => Ok(DataType::Enum(self.parse_enum_values()?, Some(8))),
10477                Keyword::ENUM16 => Ok(DataType::Enum(self.parse_enum_values()?, Some(16))),
10478                Keyword::SET => Ok(DataType::Set(self.parse_string_values()?)),
10479                Keyword::ARRAY => {
10480                    if dialect_of!(self is SnowflakeDialect) {
10481                        Ok(DataType::Array(ArrayElemTypeDef::None))
10482                    } else if dialect_of!(self is ClickHouseDialect) {
10483                        Ok(self.parse_sub_type(|internal_type| {
10484                            DataType::Array(ArrayElemTypeDef::Parenthesis(internal_type))
10485                        })?)
10486                    } else {
10487                        self.expect_token(&Token::Lt)?;
10488                        let (inside_type, _trailing_bracket) = self.parse_data_type_helper()?;
10489                        trailing_bracket = self.expect_closing_angle_bracket(_trailing_bracket)?;
10490                        Ok(DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
10491                            inside_type,
10492                        ))))
10493                    }
10494                }
10495                Keyword::STRUCT if dialect_is!(dialect is DuckDbDialect) => {
10496                    self.prev_token();
10497                    let field_defs = self.parse_duckdb_struct_type_def()?;
10498                    Ok(DataType::Struct(field_defs, StructBracketKind::Parentheses))
10499                }
10500                Keyword::STRUCT if dialect_is!(dialect is BigQueryDialect | GenericDialect) => {
10501                    self.prev_token();
10502                    let (field_defs, _trailing_bracket) =
10503                        self.parse_struct_type_def(Self::parse_struct_field_def)?;
10504                    trailing_bracket = _trailing_bracket;
10505                    Ok(DataType::Struct(
10506                        field_defs,
10507                        StructBracketKind::AngleBrackets,
10508                    ))
10509                }
10510                Keyword::UNION if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
10511                    self.prev_token();
10512                    let fields = self.parse_union_type_def()?;
10513                    Ok(DataType::Union(fields))
10514                }
10515                Keyword::NULLABLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
10516                    Ok(self.parse_sub_type(DataType::Nullable)?)
10517                }
10518                Keyword::LOWCARDINALITY if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
10519                    Ok(self.parse_sub_type(DataType::LowCardinality)?)
10520                }
10521                Keyword::MAP if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
10522                    self.prev_token();
10523                    let (key_data_type, value_data_type) = self.parse_click_house_map_def()?;
10524                    Ok(DataType::Map(
10525                        Box::new(key_data_type),
10526                        Box::new(value_data_type),
10527                    ))
10528                }
10529                Keyword::NESTED if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
10530                    self.expect_token(&Token::LParen)?;
10531                    let field_defs = self.parse_comma_separated(Parser::parse_column_def)?;
10532                    self.expect_token(&Token::RParen)?;
10533                    Ok(DataType::Nested(field_defs))
10534                }
10535                Keyword::TUPLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
10536                    self.prev_token();
10537                    let field_defs = self.parse_click_house_tuple_def()?;
10538                    Ok(DataType::Tuple(field_defs))
10539                }
10540                Keyword::TRIGGER => Ok(DataType::Trigger),
10541                Keyword::ANY if self.peek_keyword(Keyword::TYPE) => {
10542                    let _ = self.parse_keyword(Keyword::TYPE);
10543                    Ok(DataType::AnyType)
10544                }
10545                Keyword::TABLE => {
10546                    // an LParen after the TABLE keyword indicates that table columns are being defined
10547                    // whereas no LParen indicates that an anonymous table expression will be returned
10548                    if self.peek_token() == Token::LParen {
10549                        let columns = self.parse_returns_table_columns()?;
10550                        Ok(DataType::Table(Some(columns)))
10551                    } else {
10552                        Ok(DataType::Table(None))
10553                    }
10554                }
10555                Keyword::SIGNED => {
10556                    if self.parse_keyword(Keyword::INTEGER) {
10557                        Ok(DataType::SignedInteger)
10558                    } else {
10559                        Ok(DataType::Signed)
10560                    }
10561                }
10562                Keyword::UNSIGNED => {
10563                    if self.parse_keyword(Keyword::INTEGER) {
10564                        Ok(DataType::UnsignedInteger)
10565                    } else {
10566                        Ok(DataType::Unsigned)
10567                    }
10568                }
10569                Keyword::TSVECTOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
10570                    Ok(DataType::TsVector)
10571                }
10572                Keyword::TSQUERY if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
10573                    Ok(DataType::TsQuery)
10574                }
10575                _ => {
10576                    self.prev_token();
10577                    let type_name = self.parse_object_name(false)?;
10578                    if let Some(modifiers) = self.parse_optional_type_modifiers()? {
10579                        Ok(DataType::Custom(type_name, modifiers))
10580                    } else {
10581                        Ok(DataType::Custom(type_name, vec![]))
10582                    }
10583                }
10584            },
10585            _ => self.expected_at("a data type name", next_token_index),
10586        }?;
10587
10588        if self.dialect.supports_array_typedef_with_brackets() {
10589            while self.consume_token(&Token::LBracket) {
10590                // Parse optional array data type size
10591                let size = self.maybe_parse(|p| p.parse_literal_uint())?;
10592                self.expect_token(&Token::RBracket)?;
10593                data = DataType::Array(ArrayElemTypeDef::SquareBracket(Box::new(data), size))
10594            }
10595        }
10596        Ok((data, trailing_bracket))
10597    }
10598
10599    fn parse_returns_table_column(&mut self) -> Result<ColumnDef, ParserError> {
10600        self.parse_column_def()
10601    }
10602
10603    fn parse_returns_table_columns(&mut self) -> Result<Vec<ColumnDef>, ParserError> {
10604        self.expect_token(&Token::LParen)?;
10605        let columns = self.parse_comma_separated(Parser::parse_returns_table_column)?;
10606        self.expect_token(&Token::RParen)?;
10607        Ok(columns)
10608    }
10609
10610    pub fn parse_string_values(&mut self) -> Result<Vec<String>, ParserError> {
10611        self.expect_token(&Token::LParen)?;
10612        let mut values = Vec::new();
10613        loop {
10614            let next_token = self.next_token();
10615            match next_token.token {
10616                Token::SingleQuotedString(value) => values.push(value),
10617                _ => self.expected("a string", next_token)?,
10618            }
10619            let next_token = self.next_token();
10620            match next_token.token {
10621                Token::Comma => (),
10622                Token::RParen => break,
10623                _ => self.expected(", or )", next_token)?,
10624            }
10625        }
10626        Ok(values)
10627    }
10628
10629    /// Strictly parse `identifier AS identifier`
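    ///
    /// A minimal usage sketch; the dialect and input below are illustrative assumptions:
    ///
    /// ```rust
    /// use sqlparser::ast::Ident;
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// // "foo AS bar" is an arbitrary example input
    /// let mut parser = Parser::new(&dialect).try_with_sql("foo AS bar").unwrap();
    /// let with_alias = parser.parse_identifier_with_alias().unwrap();
    /// assert_eq!(with_alias.ident, Ident::new("foo"));
    /// assert_eq!(with_alias.alias, Ident::new("bar"));
    /// ```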
10630    pub fn parse_identifier_with_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
10631        let ident = self.parse_identifier()?;
10632        self.expect_keyword_is(Keyword::AS)?;
10633        let alias = self.parse_identifier()?;
10634        Ok(IdentWithAlias { ident, alias })
10635    }
10636
10637    /// Parse `identifier [AS] identifier` where the AS keyword is optional
10638    fn parse_identifier_with_optional_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
10639        let ident = self.parse_identifier()?;
10640        let _after_as = self.parse_keyword(Keyword::AS);
10641        let alias = self.parse_identifier()?;
10642        Ok(IdentWithAlias { ident, alias })
10643    }
10644
10645    /// Parse comma-separated list of parenthesized queries for pipe operators
10646    fn parse_pipe_operator_queries(&mut self) -> Result<Vec<Query>, ParserError> {
10647        self.parse_comma_separated(|parser| {
10648            parser.expect_token(&Token::LParen)?;
10649            let query = parser.parse_query()?;
10650            parser.expect_token(&Token::RParen)?;
10651            Ok(*query)
10652        })
10653    }
10654
10655    /// Parse set quantifier for pipe operators that require DISTINCT. E.g. INTERSECT and EXCEPT
10656    fn parse_distinct_required_set_quantifier(
10657        &mut self,
10658        operator_name: &str,
10659    ) -> Result<SetQuantifier, ParserError> {
10660        let quantifier = self.parse_set_quantifier(&Some(SetOperator::Intersect));
10661        match quantifier {
10662            SetQuantifier::Distinct | SetQuantifier::DistinctByName => Ok(quantifier),
10663            _ => Err(ParserError::ParserError(format!(
10664                "{operator_name} pipe operator requires DISTINCT modifier",
10665            ))),
10666        }
10667    }
10668
10669    /// Parse optional identifier alias (with or without AS keyword)
10670    fn parse_identifier_optional_alias(&mut self) -> Result<Option<Ident>, ParserError> {
10671        if self.parse_keyword(Keyword::AS) {
10672            Ok(Some(self.parse_identifier()?))
10673        } else {
10674            // Check if the next token is an identifier (implicit alias)
10675            self.maybe_parse(|parser| parser.parse_identifier())
10676        }
10677    }
10678
10679    /// Optionally parses an alias for a select list item
10680    fn maybe_parse_select_item_alias(&mut self) -> Result<Option<Ident>, ParserError> {
10681        fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
10682            parser.dialect.is_select_item_alias(explicit, kw, parser)
10683        }
10684        self.parse_optional_alias_inner(None, validator)
10685    }
10686
10687    /// Optionally parses an alias for a table like in `... FROM generate_series(1, 10) AS t (col)`.
10688    /// In this case, the alias is allowed to optionally name the columns in the table, in
10689    /// addition to the table itself.
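    ///
    /// A minimal sketch of calling this directly on just the alias portion, assuming the
    /// `GenericDialect` (the SQL fragment is an illustrative input):
    ///
    /// ```rust
    /// use sqlparser::ast::Ident;
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// // An alias naming a single column, as it would appear after a table factor
    /// let mut parser = Parser::new(&dialect).try_with_sql("AS t (col)").unwrap();
    /// let alias = parser.maybe_parse_table_alias().unwrap().unwrap();
    /// assert_eq!(alias.name, Ident::new("t"));
    /// assert_eq!(alias.columns.len(), 1);
    /// ```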
10690    pub fn maybe_parse_table_alias(&mut self) -> Result<Option<TableAlias>, ParserError> {
10691        fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
10692            parser.dialect.is_table_factor_alias(explicit, kw, parser)
10693        }
10694        match self.parse_optional_alias_inner(None, validator)? {
10695            Some(name) => {
10696                let columns = self.parse_table_alias_column_defs()?;
10697                Ok(Some(TableAlias { name, columns }))
10698            }
10699            None => Ok(None),
10700        }
10701    }
10702
10703    fn parse_table_index_hints(&mut self) -> Result<Vec<TableIndexHints>, ParserError> {
10704        let mut hints = vec![];
10705        while let Some(hint_type) =
10706            self.parse_one_of_keywords(&[Keyword::USE, Keyword::IGNORE, Keyword::FORCE])
10707        {
10708            let hint_type = match hint_type {
10709                Keyword::USE => TableIndexHintType::Use,
10710                Keyword::IGNORE => TableIndexHintType::Ignore,
10711                Keyword::FORCE => TableIndexHintType::Force,
10712                _ => {
10713                    return self.expected(
10714                        "expected to match USE/IGNORE/FORCE keyword",
10715                        self.peek_token(),
10716                    )
10717                }
10718            };
10719            let index_type = match self.parse_one_of_keywords(&[Keyword::INDEX, Keyword::KEY]) {
10720                Some(Keyword::INDEX) => TableIndexType::Index,
10721                Some(Keyword::KEY) => TableIndexType::Key,
10722                _ => {
10723                    return self.expected("expected to match INDEX/KEY keyword", self.peek_token())
10724                }
10725            };
10726            let for_clause = if self.parse_keyword(Keyword::FOR) {
10727                let clause = if self.parse_keyword(Keyword::JOIN) {
10728                    TableIndexHintForClause::Join
10729                } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
10730                    TableIndexHintForClause::OrderBy
10731                } else if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
10732                    TableIndexHintForClause::GroupBy
10733                } else {
10734                    return self.expected(
10735                        "expected to match JOIN/ORDER BY/GROUP BY table hint in for clause",
10736                        self.peek_token(),
10737                    );
10738                };
10739                Some(clause)
10740            } else {
10741                None
10742            };
10743
10744            self.expect_token(&Token::LParen)?;
10745            let index_names = if self.peek_token().token != Token::RParen {
10746                self.parse_comma_separated(Parser::parse_identifier)?
10747            } else {
10748                vec![]
10749            };
10750            self.expect_token(&Token::RParen)?;
10751            hints.push(TableIndexHints {
10752                hint_type,
10753                index_type,
10754                for_clause,
10755                index_names,
10756            });
10757        }
10758        Ok(hints)
10759    }
10760
10761    /// Wrapper for `parse_optional_alias_inner`, kept for backwards compatibility;
10762    /// new flows should use the context-specific methods such as `maybe_parse_select_item_alias`
10763    /// and `maybe_parse_table_alias`.
10764    pub fn parse_optional_alias(
10765        &mut self,
10766        reserved_kwds: &[Keyword],
10767    ) -> Result<Option<Ident>, ParserError> {
10768        fn validator(_explicit: bool, _kw: &Keyword, _parser: &mut Parser) -> bool {
10769            false
10770        }
10771        self.parse_optional_alias_inner(Some(reserved_kwds), validator)
10772    }
10773
10774    /// Parses an optional alias after a SQL element such as a select list item
10775    /// or a table name.
10776    ///
10777    /// This method accepts an optional list of reserved keywords or a function
10778    /// to call to validate if a keyword should be parsed as an alias, to allow
10779    /// callers to customize the parsing logic based on their context.
10780    fn parse_optional_alias_inner<F>(
10781        &mut self,
10782        reserved_kwds: Option<&[Keyword]>,
10783        validator: F,
10784    ) -> Result<Option<Ident>, ParserError>
10785    where
10786        F: Fn(bool, &Keyword, &mut Parser) -> bool,
10787    {
10788        let after_as = self.parse_keyword(Keyword::AS);
10789
10790        let next_token = self.next_token();
10791        match next_token.token {
10792            // By default, if a word is located after the `AS` keyword we consider it an alias
10793            // as long as it's not reserved.
10794            Token::Word(w)
10795                if after_as || reserved_kwds.is_some_and(|x| !x.contains(&w.keyword)) =>
10796            {
10797                Ok(Some(w.into_ident(next_token.span)))
10798            }
10799            // This pattern allows for customizing the acceptance of words as aliases based on the caller's
10800            // context, such as to what SQL element this word is a potential alias of (select item alias, table name
10801            // alias, etc.) or dialect-specific logic that goes beyond a simple list of reserved keywords.
10802            Token::Word(w) if validator(after_as, &w.keyword, self) => {
10803                Ok(Some(w.into_ident(next_token.span)))
10804            }
10805            // For backwards-compatibility, we accept quoted strings as aliases regardless of the context.
10806            Token::SingleQuotedString(s) => Ok(Some(Ident::with_quote('\'', s))),
10807            Token::DoubleQuotedString(s) => Ok(Some(Ident::with_quote('\"', s))),
10808            _ => {
10809                if after_as {
10810                    return self.expected("an identifier after AS", next_token);
10811                }
10812                self.prev_token();
10813                Ok(None) // no alias found
10814            }
10815        }
10816    }
10817
10818    pub fn parse_optional_group_by(&mut self) -> Result<Option<GroupByExpr>, ParserError> {
10819        if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
10820            let expressions = if self.parse_keyword(Keyword::ALL) {
10821                None
10822            } else {
10823                Some(self.parse_comma_separated(Parser::parse_group_by_expr)?)
10824            };
10825
10826            let mut modifiers = vec![];
10827            if self.dialect.supports_group_by_with_modifier() {
10828                loop {
10829                    if !self.parse_keyword(Keyword::WITH) {
10830                        break;
10831                    }
10832                    let keyword = self.expect_one_of_keywords(&[
10833                        Keyword::ROLLUP,
10834                        Keyword::CUBE,
10835                        Keyword::TOTALS,
10836                    ])?;
10837                    modifiers.push(match keyword {
10838                        Keyword::ROLLUP => GroupByWithModifier::Rollup,
10839                        Keyword::CUBE => GroupByWithModifier::Cube,
10840                        Keyword::TOTALS => GroupByWithModifier::Totals,
10841                        _ => {
10842                            return parser_err!(
10843                                "BUG: expected to match GroupBy modifier keyword",
10844                                self.peek_token().span.start
10845                            )
10846                        }
10847                    });
10848                }
10849            }
10850            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
10851                self.expect_token(&Token::LParen)?;
10852                let result = self.parse_comma_separated(|p| {
10853                    if p.peek_token_ref().token == Token::LParen {
10854                        p.parse_tuple(true, true)
10855                    } else {
10856                        Ok(vec![p.parse_expr()?])
10857                    }
10858                })?;
10859                self.expect_token(&Token::RParen)?;
10860                modifiers.push(GroupByWithModifier::GroupingSets(Expr::GroupingSets(
10861                    result,
10862                )));
10863            };
10864            let group_by = match expressions {
10865                None => GroupByExpr::All(modifiers),
10866                Some(exprs) => GroupByExpr::Expressions(exprs, modifiers),
10867            };
10868            Ok(Some(group_by))
10869        } else {
10870            Ok(None)
10871        }
10872    }
10873
10874    pub fn parse_optional_order_by(&mut self) -> Result<Option<OrderBy>, ParserError> {
10875        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
10876            let order_by =
10877                if self.dialect.supports_order_by_all() && self.parse_keyword(Keyword::ALL) {
10878                    let order_by_options = self.parse_order_by_options()?;
10879                    OrderBy {
10880                        kind: OrderByKind::All(order_by_options),
10881                        interpolate: None,
10882                    }
10883                } else {
10884                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
10885                    let interpolate = if dialect_of!(self is ClickHouseDialect | GenericDialect) {
10886                        self.parse_interpolations()?
10887                    } else {
10888                        None
10889                    };
10890                    OrderBy {
10891                        kind: OrderByKind::Expressions(exprs),
10892                        interpolate,
10893                    }
10894                };
10895            Ok(Some(order_by))
10896        } else {
10897            Ok(None)
10898        }
10899    }
10900
10901    fn parse_optional_limit_clause(&mut self) -> Result<Option<LimitClause>, ParserError> {
10902        let mut offset = if self.parse_keyword(Keyword::OFFSET) {
10903            Some(self.parse_offset()?)
10904        } else {
10905            None
10906        };
10907
10908        let (limit, limit_by) = if self.parse_keyword(Keyword::LIMIT) {
10909            let expr = self.parse_limit()?;
10910
10911            if self.dialect.supports_limit_comma()
10912                && offset.is_none()
10913                && expr.is_some() // ALL not supported with comma
10914                && self.consume_token(&Token::Comma)
10915            {
10916                let offset = expr.ok_or_else(|| {
10917                    ParserError::ParserError(
10918                        "Missing offset for LIMIT <offset>, <limit>".to_string(),
10919                    )
10920                })?;
10921                return Ok(Some(LimitClause::OffsetCommaLimit {
10922                    offset,
10923                    limit: self.parse_expr()?,
10924                }));
10925            }
10926
10927            let limit_by = if dialect_of!(self is ClickHouseDialect | GenericDialect)
10928                && self.parse_keyword(Keyword::BY)
10929            {
10930                Some(self.parse_comma_separated(Parser::parse_expr)?)
10931            } else {
10932                None
10933            };
10934
10935            (Some(expr), limit_by)
10936        } else {
10937            (None, None)
10938        };
10939
10940        if offset.is_none() && limit.is_some() && self.parse_keyword(Keyword::OFFSET) {
10941            offset = Some(self.parse_offset()?);
10942        }
10943
10944        if offset.is_some() || (limit.is_some() && limit != Some(None)) || limit_by.is_some() {
10945            Ok(Some(LimitClause::LimitOffset {
10946                limit: limit.unwrap_or_default(),
10947                offset,
10948                limit_by: limit_by.unwrap_or_default(),
10949            }))
10950        } else {
10951            Ok(None)
10952        }
10953    }
10954
10955    /// Parse a table object for insertion
10956    /// e.g. `some_database.some_table` or `FUNCTION some_table_func(...)`
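    ///
    /// A minimal usage sketch, assuming the `GenericDialect` (the table name is an
    /// illustrative input):
    ///
    /// ```rust
    /// use sqlparser::ast::TableObject;
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect)
    ///     .try_with_sql("some_database.some_table")
    ///     .unwrap();
    /// // A plain name parses to `TableObject::TableName`
    /// match parser.parse_table_object().unwrap() {
    ///     TableObject::TableName(name) => {
    ///         assert_eq!(name.to_string(), "some_database.some_table")
    ///     }
    ///     other => panic!("expected a table name, got {:?}", other),
    /// }
    /// ```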
10957    pub fn parse_table_object(&mut self) -> Result<TableObject, ParserError> {
10958        if self.dialect.supports_insert_table_function() && self.parse_keyword(Keyword::FUNCTION) {
10959            let fn_name = self.parse_object_name(false)?;
10960            self.parse_function_call(fn_name)
10961                .map(TableObject::TableFunction)
10962        } else {
10963            self.parse_object_name(false).map(TableObject::TableName)
10964        }
10965    }
10966
10967    /// Parse a possibly qualified, possibly quoted identifier, e.g.
10968    /// `foo` or `myschema."table"`
10969    ///
10970    /// The `in_table_clause` parameter indicates whether the object name is a table in a FROM, JOIN,
10971    /// or similar table clause. Currently, this is used only to support unquoted hyphenated identifiers
10972    /// in this context on BigQuery.
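    ///
    /// A minimal usage sketch, assuming the `GenericDialect` (the object name is an
    /// illustrative input):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect)
    ///     .try_with_sql(r#"myschema."table""#)
    ///     .unwrap();
    /// let name = parser.parse_object_name(false).unwrap();
    /// // The quoted part keeps its quote style when displayed
    /// assert_eq!(name.to_string(), r#"myschema."table""#);
    /// ```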
10973    pub fn parse_object_name(&mut self, in_table_clause: bool) -> Result<ObjectName, ParserError> {
10974        self.parse_object_name_inner(in_table_clause, false)
10975    }
10976
10977    /// Parse a possibly qualified, possibly quoted identifier, e.g.
10978    /// `foo` or `myschema."table"`
10979    ///
10980    /// The `in_table_clause` parameter indicates whether the object name is a table in a FROM, JOIN,
10981    /// or similar table clause. Currently, this is used only to support unquoted hyphenated identifiers
10982    /// in this context on BigQuery.
10983    ///
10984    /// The `allow_wildcards` parameter indicates whether to allow for wildcards in the object name
10985    /// e.g. *, *.*, `foo`.*, or "foo"."bar"
10986    fn parse_object_name_inner(
10987        &mut self,
10988        in_table_clause: bool,
10989        allow_wildcards: bool,
10990    ) -> Result<ObjectName, ParserError> {
10991        let mut parts = vec![];
10992        if dialect_of!(self is BigQueryDialect) && in_table_clause {
10993            loop {
10994                let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
10995                parts.push(ObjectNamePart::Identifier(ident));
10996                if !self.consume_token(&Token::Period) && !end_with_period {
10997                    break;
10998                }
10999            }
11000        } else {
11001            loop {
11002                if allow_wildcards && self.peek_token().token == Token::Mul {
11003                    let span = self.next_token().span;
11004                    parts.push(ObjectNamePart::Identifier(Ident {
11005                        value: Token::Mul.to_string(),
11006                        quote_style: None,
11007                        span,
11008                    }));
11009                } else if dialect_of!(self is BigQueryDialect) && in_table_clause {
11010                    let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
11011                    parts.push(ObjectNamePart::Identifier(ident));
11012                    if !self.consume_token(&Token::Period) && !end_with_period {
11013                        break;
11014                    }
11015                } else if self.dialect.supports_object_name_double_dot_notation()
11016                    && parts.len() == 1
11017                    && matches!(self.peek_token().token, Token::Period)
11018                {
11019                    // Empty string here means default schema
11020                    parts.push(ObjectNamePart::Identifier(Ident::new("")));
11021                } else {
11022                    let ident = self.parse_identifier()?;
11023                    let part = if self
11024                        .dialect
11025                        .is_identifier_generating_function_name(&ident, &parts)
11026                    {
11027                        self.expect_token(&Token::LParen)?;
11028                        let args: Vec<FunctionArg> =
11029                            self.parse_comma_separated0(Self::parse_function_args, Token::RParen)?;
11030                        self.expect_token(&Token::RParen)?;
11031                        ObjectNamePart::Function(ObjectNamePartFunction { name: ident, args })
11032                    } else {
11033                        ObjectNamePart::Identifier(ident)
11034                    };
11035                    parts.push(part);
11036                }
11037
11038                if !self.consume_token(&Token::Period) {
11039                    break;
11040                }
11041            }
11042        }
11043
11044        // BigQuery accepts any number of quoted identifiers of a table name.
11045        // https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#quoted_identifiers
11046        if dialect_of!(self is BigQueryDialect)
11047            && parts.iter().any(|part| {
11048                part.as_ident()
11049                    .is_some_and(|ident| ident.value.contains('.'))
11050            })
11051        {
11052            parts = parts
11053                .into_iter()
11054                .flat_map(|part| match part.as_ident() {
11055                    Some(ident) => ident
11056                        .value
11057                        .split('.')
11058                        .map(|value| {
11059                            ObjectNamePart::Identifier(Ident {
11060                                value: value.into(),
11061                                quote_style: ident.quote_style,
11062                                span: ident.span,
11063                            })
11064                        })
11065                        .collect::<Vec<_>>(),
11066                    None => vec![part],
11067                })
11068                .collect()
11069        }
11070
11071        Ok(ObjectName(parts))
11072    }
11073
11074    /// Parse identifiers
11075    pub fn parse_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
11076        let mut idents = vec![];
11077        loop {
11078            match &self.peek_token_ref().token {
11079                Token::Word(w) => {
11080                    idents.push(w.clone().into_ident(self.peek_token_ref().span));
11081                }
11082                Token::EOF | Token::Eq => break,
11083                _ => {}
11084            }
11085            self.advance_token();
11086        }
11087        Ok(idents)
11088    }
11089
11090    /// Parse identifiers of form ident1[.identN]*
11091    ///
11092    /// Similar in functionality to [parse_identifiers], the difference being that this
11093    /// function is much stricter about parsing a valid multipart identifier: it does not
11094    /// allow extraneous tokens and fails if any are encountered.
11095    ///
11096    /// For example:
11097    ///
11098    /// ```rust
11099    /// use sqlparser::ast::Ident;
11100    /// use sqlparser::dialect::GenericDialect;
11101    /// use sqlparser::parser::Parser;
11102    ///
11103    /// let dialect = GenericDialect {};
11104    /// let expected = vec![Ident::new("one"), Ident::new("two")];
11105    ///
11106    /// // expected usage
11107    /// let sql = "one.two";
11108    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
11109    /// let actual = parser.parse_multipart_identifier().unwrap();
11110    /// assert_eq!(&actual, &expected);
11111    ///
11112    /// // parse_identifiers is more loose on what it allows, parsing successfully
11113    /// let sql = "one + two";
11114    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
11115    /// let actual = parser.parse_identifiers().unwrap();
11116    /// assert_eq!(&actual, &expected);
11117    ///
11118    /// // expected to strictly fail due to + separator
11119    /// let sql = "one + two";
11120    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
11121    /// let actual = parser.parse_multipart_identifier().unwrap_err();
11122    /// assert_eq!(
11123    ///     actual.to_string(),
11124    ///     "sql parser error: Unexpected token in identifier: +"
11125    /// );
11126    /// ```
11127    ///
11128    /// [parse_identifiers]: Parser::parse_identifiers
11129    pub fn parse_multipart_identifier(&mut self) -> Result<Vec<Ident>, ParserError> {
11130        let mut idents = vec![];
11131
11132        // expecting at least one word for identifier
11133        let next_token = self.next_token();
11134        match next_token.token {
11135            Token::Word(w) => idents.push(w.into_ident(next_token.span)),
11136            Token::EOF => {
11137                return Err(ParserError::ParserError(
11138                    "Empty input when parsing identifier".to_string(),
11139                ))?
11140            }
11141            token => {
11142                return Err(ParserError::ParserError(format!(
11143                    "Unexpected token in identifier: {token}"
11144                )))?
11145            }
11146        };
11147
11148        // parse optional subsequent parts, if any exist
11149        loop {
11150            match self.next_token().token {
11151                // ensure that optional period is succeeded by another identifier
11152                Token::Period => {
11153                    let next_token = self.next_token();
11154                    match next_token.token {
11155                        Token::Word(w) => idents.push(w.into_ident(next_token.span)),
11156                        Token::EOF => {
11157                            return Err(ParserError::ParserError(
11158                                "Trailing period in identifier".to_string(),
11159                            ))?
11160                        }
11161                        token => {
11162                            return Err(ParserError::ParserError(format!(
11163                                "Unexpected token following period in identifier: {token}"
11164                            )))?
11165                        }
11166                    }
11167                }
11168                Token::EOF => break,
11169                token => {
11170                    return Err(ParserError::ParserError(format!(
11171                        "Unexpected token in identifier: {token}"
11172                    )))?
11173                }
11174            }
11175        }
11176
11177        Ok(idents)
11178    }
11179
11180    /// Parse a simple one-word identifier (possibly quoted, possibly a keyword)
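    ///
    /// A minimal usage sketch, assuming the `GenericDialect` (the quoted identifier is an
    /// illustrative input):
    ///
    /// ```rust
    /// use sqlparser::ast::Ident;
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql(r#""column name""#).unwrap();
    /// let ident = parser.parse_identifier().unwrap();
    /// assert_eq!(ident, Ident::with_quote('"', "column name"));
    /// ```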
11181    pub fn parse_identifier(&mut self) -> Result<Ident, ParserError> {
11182        let next_token = self.next_token();
11183        match next_token.token {
11184            Token::Word(w) => Ok(w.into_ident(next_token.span)),
11185            Token::SingleQuotedString(s) => Ok(Ident::with_quote('\'', s)),
11186            Token::DoubleQuotedString(s) => Ok(Ident::with_quote('\"', s)),
11187            _ => self.expected("identifier", next_token),
11188        }
11189    }
11190
11191    /// On BigQuery, hyphens are permitted in unquoted identifiers inside of a FROM or
11192    /// TABLE clause.
11193    ///
11194    /// The first segment must be an ordinary unquoted identifier, e.g. it must not start
11195    /// with a digit. Subsequent segments must be either valid identifiers or
11196    /// integers, e.g. foo-123 is allowed, but foo-123a is not.
11197    ///
11198    /// [BigQuery-lexical](https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical)
11199    ///
11200    /// Return a tuple of the identifier and a boolean indicating it ends with a period.
11201    fn parse_unquoted_hyphenated_identifier(&mut self) -> Result<(Ident, bool), ParserError> {
11202        match self.peek_token().token {
11203            Token::Word(w) => {
11204                let quote_style_is_none = w.quote_style.is_none();
11205                let mut requires_whitespace = false;
11206                let mut ident = w.into_ident(self.next_token().span);
11207                if quote_style_is_none {
11208                    while matches!(self.peek_token_no_skip().token, Token::Minus) {
11209                        self.next_token();
11210                        ident.value.push('-');
11211
11212                        let token = self
11213                            .next_token_no_skip()
11214                            .cloned()
11215                            .unwrap_or(TokenWithSpan::wrap(Token::EOF));
11216                        requires_whitespace = match token.token {
11217                            Token::Word(next_word) if next_word.quote_style.is_none() => {
11218                                ident.value.push_str(&next_word.value);
11219                                false
11220                            }
11221                            Token::Number(s, false) => {
11222                                // A number token can represent a decimal value ending with a period, e.g., `Number('123.')`.
11223                                // However, for an [ObjectName], it is part of a hyphenated identifier, e.g., `foo-123.bar`.
11224                                //
11225                                // If a number token is followed by a period, it is part of an [ObjectName].
11226                                // Return the identifier with `true` if the number token is followed by a period, indicating that
11227                                // parsing should continue for the next part of the hyphenated identifier.
11228                                if s.ends_with('.') {
11229                                    let Some(s) = s.split('.').next().filter(|s| {
11230                                        !s.is_empty() && s.chars().all(|c| c.is_ascii_digit())
11231                                    }) else {
11232                                        return self.expected(
11233                                            "continuation of hyphenated identifier",
11234                                            TokenWithSpan::new(Token::Number(s, false), token.span),
11235                                        );
11236                                    };
11237                                    ident.value.push_str(s);
11238                                    return Ok((ident, true));
11239                                } else {
11240                                    ident.value.push_str(&s);
11241                                }
11242                                // If next token is period, then it is part of an ObjectName and we don't expect whitespace
11243                                // after the number.
11244                                !matches!(self.peek_token().token, Token::Period)
11245                            }
11246                            _ => {
11247                                return self
11248                                    .expected("continuation of hyphenated identifier", token);
11249                            }
11250                        }
11251                    }
11252
11253                    // If the last segment was a number, we must check that it's followed by whitespace,
11254                    // otherwise foo-123a will be parsed as `foo-123` with the alias `a`.
11255                    if requires_whitespace {
11256                        let token = self.next_token();
11257                        if !matches!(token.token, Token::EOF | Token::Whitespace(_)) {
11258                            return self
11259                                .expected("whitespace following hyphenated identifier", token);
11260                        }
11261                    }
11262                }
11263                Ok((ident, false))
11264            }
11265            _ => Ok((self.parse_identifier()?, false)),
11266        }
11267    }
11268
11269    /// Parses a parenthesized, comma-separated list of column definitions within a view.
11270    fn parse_view_columns(&mut self) -> Result<Vec<ViewColumnDef>, ParserError> {
11271        if self.consume_token(&Token::LParen) {
11272            if self.peek_token().token == Token::RParen {
11273                self.next_token();
11274                Ok(vec![])
11275            } else {
11276                let cols = self.parse_comma_separated_with_trailing_commas(
11277                    Parser::parse_view_column,
11278                    self.dialect.supports_column_definition_trailing_commas(),
11279                    Self::is_reserved_for_column_alias,
11280                )?;
11281                self.expect_token(&Token::RParen)?;
11282                Ok(cols)
11283            }
11284        } else {
11285            Ok(vec![])
11286        }
11287    }
11288
11289    /// Parses a column definition within a view.
11290    fn parse_view_column(&mut self) -> Result<ViewColumnDef, ParserError> {
11291        let name = self.parse_identifier()?;
11292        let options = self.parse_view_column_options()?;
11293        let data_type = if dialect_of!(self is ClickHouseDialect) {
11294            Some(self.parse_data_type()?)
11295        } else {
11296            None
11297        };
11298        Ok(ViewColumnDef {
11299            name,
11300            data_type,
11301            options,
11302        })
11303    }
11304
11305    fn parse_view_column_options(&mut self) -> Result<Option<ColumnOptions>, ParserError> {
11306        let mut options = Vec::new();
11307        loop {
11308            let option = self.parse_optional_column_option()?;
11309            if let Some(option) = option {
11310                options.push(option);
11311            } else {
11312                break;
11313            }
11314        }
11315        if options.is_empty() {
11316            Ok(None)
11317        } else if self.dialect.supports_space_separated_column_options() {
11318            Ok(Some(ColumnOptions::SpaceSeparated(options)))
11319        } else {
11320            Ok(Some(ColumnOptions::CommaSeparated(options)))
11321        }
11322    }
11323
11324    /// Parses a parenthesized comma-separated list of unqualified, possibly quoted identifiers.
11325    /// For example: `(col1, "col 2", ...)`
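    ///
    /// A minimal usage sketch, assuming the `GenericDialect` (the column list is an
    /// illustrative input):
    ///
    /// ```rust
    /// use sqlparser::ast::Ident;
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::{IsOptional, Parser};
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect)
    ///     .try_with_sql(r#"(col1, "col 2")"#)
    ///     .unwrap();
    /// let cols = parser
    ///     .parse_parenthesized_column_list(IsOptional::Mandatory, false)
    ///     .unwrap();
    /// assert_eq!(cols, vec![Ident::new("col1"), Ident::with_quote('"', "col 2")]);
    /// ```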
11326    pub fn parse_parenthesized_column_list(
11327        &mut self,
11328        optional: IsOptional,
11329        allow_empty: bool,
11330    ) -> Result<Vec<Ident>, ParserError> {
11331        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| p.parse_identifier())
11332    }
11333
11334    pub fn parse_parenthesized_compound_identifier_list(
11335        &mut self,
11336        optional: IsOptional,
11337        allow_empty: bool,
11338    ) -> Result<Vec<Expr>, ParserError> {
11339        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
11340            Ok(Expr::CompoundIdentifier(
11341                p.parse_period_separated(|p| p.parse_identifier())?,
11342            ))
11343        })
11344    }
11345
11346    /// Parses a parenthesized comma-separated list of index columns, which can be arbitrary
11347    /// expressions with ordering information (and an opclass in some dialects).
11348    fn parse_parenthesized_index_column_list(&mut self) -> Result<Vec<IndexColumn>, ParserError> {
11349        self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
11350            p.parse_create_index_expr()
11351        })
11352    }
11353
11354    /// Parses a parenthesized comma-separated list of qualified, possibly quoted identifiers.
11355    /// For example: `(db1.sc1.tbl1.col1, db1.sc1.tbl1."col 2", ...)`
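    ///
    /// A minimal usage sketch, assuming the `GenericDialect` (the column list is an
    /// illustrative input):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::{IsOptional, Parser};
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect)
    ///     .try_with_sql("(db1.sc1.tbl1.col1, db1.sc1.tbl1.col2)")
    ///     .unwrap();
    /// let cols = parser
    ///     .parse_parenthesized_qualified_column_list(IsOptional::Mandatory, false)
    ///     .unwrap();
    /// assert_eq!(cols.len(), 2);
    /// assert_eq!(cols[0].to_string(), "db1.sc1.tbl1.col1");
    /// ```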
11356    pub fn parse_parenthesized_qualified_column_list(
11357        &mut self,
11358        optional: IsOptional,
11359        allow_empty: bool,
11360    ) -> Result<Vec<ObjectName>, ParserError> {
11361        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
11362            p.parse_object_name(true)
11363        })
11364    }
11365
11366    /// Parses a parenthesized comma-separated list of columns using
11367    /// the provided function to parse each element.
11368    fn parse_parenthesized_column_list_inner<F, T>(
11369        &mut self,
11370        optional: IsOptional,
11371        allow_empty: bool,
11372        mut f: F,
11373    ) -> Result<Vec<T>, ParserError>
11374    where
11375        F: FnMut(&mut Parser) -> Result<T, ParserError>,
11376    {
11377        if self.consume_token(&Token::LParen) {
11378            if allow_empty && self.peek_token().token == Token::RParen {
11379                self.next_token();
11380                Ok(vec![])
11381            } else {
11382                let cols = self.parse_comma_separated(|p| f(p))?;
11383                self.expect_token(&Token::RParen)?;
11384                Ok(cols)
11385            }
11386        } else if optional == Optional {
11387            Ok(vec![])
11388        } else {
11389            self.expected("a list of columns in parentheses", self.peek_token())
11390        }
11391    }
11392
11393    /// Parses a parenthesized comma-separated list of table alias column definitions.
11394    fn parse_table_alias_column_defs(&mut self) -> Result<Vec<TableAliasColumnDef>, ParserError> {
11395        if self.consume_token(&Token::LParen) {
11396            let cols = self.parse_comma_separated(|p| {
11397                let name = p.parse_identifier()?;
11398                let data_type = p.maybe_parse(|p| p.parse_data_type())?;
11399                Ok(TableAliasColumnDef { name, data_type })
11400            })?;
11401            self.expect_token(&Token::RParen)?;
11402            Ok(cols)
11403        } else {
11404            Ok(vec![])
11405        }
11406    }
11407
11408    pub fn parse_precision(&mut self) -> Result<u64, ParserError> {
11409        self.expect_token(&Token::LParen)?;
11410        let n = self.parse_literal_uint()?;
11411        self.expect_token(&Token::RParen)?;
11412        Ok(n)
11413    }
11414
11415    pub fn parse_optional_precision(&mut self) -> Result<Option<u64>, ParserError> {
11416        if self.consume_token(&Token::LParen) {
11417            let n = self.parse_literal_uint()?;
11418            self.expect_token(&Token::RParen)?;
11419            Ok(Some(n))
11420        } else {
11421            Ok(None)
11422        }
11423    }
11424
11425    fn maybe_parse_optional_interval_fields(
11426        &mut self,
11427    ) -> Result<Option<IntervalFields>, ParserError> {
11428        match self.parse_one_of_keywords(&[
11429            // Can be followed by `TO` option
11430            Keyword::YEAR,
11431            Keyword::DAY,
11432            Keyword::HOUR,
11433            Keyword::MINUTE,
11434            // No `TO` option
11435            Keyword::MONTH,
11436            Keyword::SECOND,
11437        ]) {
11438            Some(Keyword::YEAR) => {
11439                if self.peek_keyword(Keyword::TO) {
11440                    self.expect_keyword(Keyword::TO)?;
11441                    self.expect_keyword(Keyword::MONTH)?;
11442                    Ok(Some(IntervalFields::YearToMonth))
11443                } else {
11444                    Ok(Some(IntervalFields::Year))
11445                }
11446            }
11447            Some(Keyword::DAY) => {
11448                if self.peek_keyword(Keyword::TO) {
11449                    self.expect_keyword(Keyword::TO)?;
11450                    match self.expect_one_of_keywords(&[
11451                        Keyword::HOUR,
11452                        Keyword::MINUTE,
11453                        Keyword::SECOND,
11454                    ])? {
11455                        Keyword::HOUR => Ok(Some(IntervalFields::DayToHour)),
11456                        Keyword::MINUTE => Ok(Some(IntervalFields::DayToMinute)),
11457                        Keyword::SECOND => Ok(Some(IntervalFields::DayToSecond)),
11458                        _ => {
11459                            self.prev_token();
11460                            self.expected("HOUR, MINUTE, or SECOND", self.peek_token())
11461                        }
11462                    }
11463                } else {
11464                    Ok(Some(IntervalFields::Day))
11465                }
11466            }
11467            Some(Keyword::HOUR) => {
11468                if self.peek_keyword(Keyword::TO) {
11469                    self.expect_keyword(Keyword::TO)?;
11470                    match self.expect_one_of_keywords(&[Keyword::MINUTE, Keyword::SECOND])? {
11471                        Keyword::MINUTE => Ok(Some(IntervalFields::HourToMinute)),
11472                        Keyword::SECOND => Ok(Some(IntervalFields::HourToSecond)),
11473                        _ => {
11474                            self.prev_token();
11475                            self.expected("MINUTE or SECOND", self.peek_token())
11476                        }
11477                    }
11478                } else {
11479                    Ok(Some(IntervalFields::Hour))
11480                }
11481            }
11482            Some(Keyword::MINUTE) => {
11483                if self.peek_keyword(Keyword::TO) {
11484                    self.expect_keyword(Keyword::TO)?;
11485                    self.expect_keyword(Keyword::SECOND)?;
11486                    Ok(Some(IntervalFields::MinuteToSecond))
11487                } else {
11488                    Ok(Some(IntervalFields::Minute))
11489                }
11490            }
11491            Some(Keyword::MONTH) => Ok(Some(IntervalFields::Month)),
11492            Some(Keyword::SECOND) => Ok(Some(IntervalFields::Second)),
11493            Some(_) => {
11494                self.prev_token();
11495                self.expected(
11496                    "YEAR, MONTH, DAY, HOUR, MINUTE, or SECOND",
11497                    self.peek_token(),
11498                )
11499            }
11500            None => Ok(None),
11501        }
11502    }
11503
11504    /// Parse datetime64 [1]
11505    /// Syntax
11506    /// ```sql
11507    /// DateTime64(precision[, timezone])
11508    /// ```
11509    ///
11510    /// [1]: https://clickhouse.com/docs/en/sql-reference/data-types/datetime64
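    ///
    /// A minimal usage sketch, assuming the ClickHouse dialect (the precision and time zone
    /// are illustrative inputs):
    ///
    /// ```rust
    /// use sqlparser::dialect::ClickHouseDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = ClickHouseDialect {};
    /// let mut parser = Parser::new(&dialect)
    ///     .try_with_sql("DateTime64(3, 'UTC')")
    ///     .unwrap();
    /// let (precision, time_zone) = parser.parse_datetime_64().unwrap();
    /// assert_eq!(precision, 3);
    /// assert_eq!(time_zone, Some("UTC".to_string()));
    /// ```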
11511    pub fn parse_datetime_64(&mut self) -> Result<(u64, Option<String>), ParserError> {
11512        self.expect_keyword_is(Keyword::DATETIME64)?;
11513        self.expect_token(&Token::LParen)?;
11514        let precision = self.parse_literal_uint()?;
11515        let time_zone = if self.consume_token(&Token::Comma) {
11516            Some(self.parse_literal_string()?)
11517        } else {
11518            None
11519        };
11520        self.expect_token(&Token::RParen)?;
11521        Ok((precision, time_zone))
11522    }
11523
11524    pub fn parse_optional_character_length(
11525        &mut self,
11526    ) -> Result<Option<CharacterLength>, ParserError> {
11527        if self.consume_token(&Token::LParen) {
11528            let character_length = self.parse_character_length()?;
11529            self.expect_token(&Token::RParen)?;
11530            Ok(Some(character_length))
11531        } else {
11532            Ok(None)
11533        }
11534    }
11535
11536    pub fn parse_optional_binary_length(&mut self) -> Result<Option<BinaryLength>, ParserError> {
11537        if self.consume_token(&Token::LParen) {
11538            let binary_length = self.parse_binary_length()?;
11539            self.expect_token(&Token::RParen)?;
11540            Ok(Some(binary_length))
11541        } else {
11542            Ok(None)
11543        }
11544    }
11545
11546    pub fn parse_character_length(&mut self) -> Result<CharacterLength, ParserError> {
11547        if self.parse_keyword(Keyword::MAX) {
11548            return Ok(CharacterLength::Max);
11549        }
11550        let length = self.parse_literal_uint()?;
11551        let unit = if self.parse_keyword(Keyword::CHARACTERS) {
11552            Some(CharLengthUnits::Characters)
11553        } else if self.parse_keyword(Keyword::OCTETS) {
11554            Some(CharLengthUnits::Octets)
11555        } else {
11556            None
11557        };
11558        Ok(CharacterLength::IntegerLength { length, unit })
11559    }
11560
11561    pub fn parse_binary_length(&mut self) -> Result<BinaryLength, ParserError> {
11562        if self.parse_keyword(Keyword::MAX) {
11563            return Ok(BinaryLength::Max);
11564        }
11565        let length = self.parse_literal_uint()?;
11566        Ok(BinaryLength::IntegerLength { length })
11567    }
11568
11569    pub fn parse_optional_precision_scale(
11570        &mut self,
11571    ) -> Result<(Option<u64>, Option<u64>), ParserError> {
11572        if self.consume_token(&Token::LParen) {
11573            let n = self.parse_literal_uint()?;
11574            let scale = if self.consume_token(&Token::Comma) {
11575                Some(self.parse_literal_uint()?)
11576            } else {
11577                None
11578            };
11579            self.expect_token(&Token::RParen)?;
11580            Ok((Some(n), scale))
11581        } else {
11582            Ok((None, None))
11583        }
11584    }
11585
11586    pub fn parse_exact_number_optional_precision_scale(
11587        &mut self,
11588    ) -> Result<ExactNumberInfo, ParserError> {
11589        if self.consume_token(&Token::LParen) {
11590            let precision = self.parse_literal_uint()?;
11591            let scale = if self.consume_token(&Token::Comma) {
11592                Some(self.parse_signed_integer()?)
11593            } else {
11594                None
11595            };
11596
11597            self.expect_token(&Token::RParen)?;
11598
11599            match scale {
11600                None => Ok(ExactNumberInfo::Precision(precision)),
11601                Some(scale) => Ok(ExactNumberInfo::PrecisionAndScale(precision, scale)),
11602            }
11603        } else {
11604            Ok(ExactNumberInfo::None)
11605        }
11606    }
11607
11608    /// Parse an optionally signed integer literal.
11609    fn parse_signed_integer(&mut self) -> Result<i64, ParserError> {
11610        let is_negative = self.consume_token(&Token::Minus);
11611
11612        if !is_negative {
11613            let _ = self.consume_token(&Token::Plus);
11614        }
11615
11616        let current_token = self.peek_token_ref();
11617        match &current_token.token {
11618            Token::Number(s, _) => {
11619                let s = s.clone();
11620                let span_start = current_token.span.start;
11621                self.advance_token();
11622                let value = Self::parse::<i64>(s, span_start)?;
11623                Ok(if is_negative { -value } else { value })
11624            }
11625            _ => self.expected_ref("number", current_token),
11626        }
11627    }
11628
11629    pub fn parse_optional_type_modifiers(&mut self) -> Result<Option<Vec<String>>, ParserError> {
11630        if self.consume_token(&Token::LParen) {
11631            let mut modifiers = Vec::new();
11632            loop {
11633                let next_token = self.next_token();
11634                match next_token.token {
11635                    Token::Word(w) => modifiers.push(w.to_string()),
11636                    Token::Number(n, _) => modifiers.push(n),
11637                    Token::SingleQuotedString(s) => modifiers.push(s),
11638
11639                    Token::Comma => {
11640                        continue;
11641                    }
11642                    Token::RParen => {
11643                        break;
11644                    }
11645                    _ => self.expected("type modifiers", next_token)?,
11646                }
11647            }
11648
11649            Ok(Some(modifiers))
11650        } else {
11651            Ok(None)
11652        }
11653    }
11654
11655    /// Parse a parenthesized sub data type
11656    fn parse_sub_type<F>(&mut self, parent_type: F) -> Result<DataType, ParserError>
11657    where
11658        F: FnOnce(Box<DataType>) -> DataType,
11659    {
11660        self.expect_token(&Token::LParen)?;
11661        let inside_type = self.parse_data_type()?;
11662        self.expect_token(&Token::RParen)?;
11663        Ok(parent_type(inside_type.into()))
11664    }
11665
11666    /// Parse a DELETE statement, returning a `Box`ed SetExpr
11667    ///
11668    /// This is used to reduce the size of the stack frames in debug builds
11669    fn parse_delete_setexpr_boxed(&mut self) -> Result<Box<SetExpr>, ParserError> {
11670        Ok(Box::new(SetExpr::Delete(self.parse_delete()?)))
11671    }
11672
11673    /// Parse a MERGE statement, returning a `Box`ed SetExpr
11674    ///
11675    /// This is used to reduce the size of the stack frames in debug builds
11676    fn parse_merge_setexpr_boxed(&mut self) -> Result<Box<SetExpr>, ParserError> {
11677        Ok(Box::new(SetExpr::Merge(self.parse_merge()?)))
11678    }
11679
11680    pub fn parse_delete(&mut self) -> Result<Statement, ParserError> {
11681        let (tables, with_from_keyword) = if !self.parse_keyword(Keyword::FROM) {
11682            // `FROM` keyword is optional in BigQuery SQL.
11683            // https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax#delete_statement
11684            if dialect_of!(self is BigQueryDialect | GenericDialect) {
11685                (vec![], false)
11686            } else {
11687                let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
11688                self.expect_keyword_is(Keyword::FROM)?;
11689                (tables, true)
11690            }
11691        } else {
11692            (vec![], true)
11693        };
11694
11695        let from = self.parse_comma_separated(Parser::parse_table_and_joins)?;
11696        let using = if self.parse_keyword(Keyword::USING) {
11697            Some(self.parse_comma_separated(Parser::parse_table_and_joins)?)
11698        } else {
11699            None
11700        };
11701        let selection = if self.parse_keyword(Keyword::WHERE) {
11702            Some(self.parse_expr()?)
11703        } else {
11704            None
11705        };
11706        let returning = if self.parse_keyword(Keyword::RETURNING) {
11707            Some(self.parse_comma_separated(Parser::parse_select_item)?)
11708        } else {
11709            None
11710        };
11711        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
11712            self.parse_comma_separated(Parser::parse_order_by_expr)?
11713        } else {
11714            vec![]
11715        };
11716        let limit = if self.parse_keyword(Keyword::LIMIT) {
11717            self.parse_limit()?
11718        } else {
11719            None
11720        };
11721
11722        Ok(Statement::Delete(Delete {
11723            tables,
11724            from: if with_from_keyword {
11725                FromTable::WithFromKeyword(from)
11726            } else {
11727                FromTable::WithoutKeyword(from)
11728            },
11729            using,
11730            selection,
11731            returning,
11732            order_by,
11733            limit,
11734        }))
11735    }
11736
11737    /// `KILL [CONNECTION | QUERY | MUTATION] processlist_id`
11738    pub fn parse_kill(&mut self) -> Result<Statement, ParserError> {
11739        let modifier_keyword =
11740            self.parse_one_of_keywords(&[Keyword::CONNECTION, Keyword::QUERY, Keyword::MUTATION]);
11741
11742        let id = self.parse_literal_uint()?;
11743
11744        let modifier = match modifier_keyword {
11745            Some(Keyword::CONNECTION) => Some(KillType::Connection),
11746            Some(Keyword::QUERY) => Some(KillType::Query),
11747            Some(Keyword::MUTATION) => {
11748                if dialect_of!(self is ClickHouseDialect | GenericDialect) {
11749                    Some(KillType::Mutation)
11750                } else {
11751                    self.expected(
11752                        "Unsupported type for KILL, allowed: CONNECTION | QUERY",
11753                        self.peek_token(),
11754                    )?
11755                }
11756            }
11757            _ => None,
11758        };
11759
11760        Ok(Statement::Kill { modifier, id })
11761    }
11762
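    /// Parse an `EXPLAIN` / `DESCRIBE` statement; the initial keyword has
    /// already been consumed and is passed in as `describe_alias`. A few
    /// illustrative inputs (a sketch; option support depends on the dialect):
    ///
    /// ```sql
    /// EXPLAIN SELECT * FROM t;
    /// EXPLAIN ANALYZE VERBOSE SELECT * FROM t;
    /// DESCRIBE my_table;
    /// ```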
11763    pub fn parse_explain(
11764        &mut self,
11765        describe_alias: DescribeAlias,
11766    ) -> Result<Statement, ParserError> {
11767        let mut analyze = false;
11768        let mut verbose = false;
11769        let mut query_plan = false;
11770        let mut estimate = false;
11771        let mut format = None;
11772        let mut options = None;
11773
11774        // Note: DuckDB is compatible with PostgreSQL syntax for this statement,
11775        // although not all features may be implemented.
11776        if describe_alias == DescribeAlias::Explain
11777            && self.dialect.supports_explain_with_utility_options()
11778            && self.peek_token().token == Token::LParen
11779        {
11780            options = Some(self.parse_utility_options()?)
11781        } else if self.parse_keywords(&[Keyword::QUERY, Keyword::PLAN]) {
11782            query_plan = true;
11783        } else if self.parse_keyword(Keyword::ESTIMATE) {
11784            estimate = true;
11785        } else {
11786            analyze = self.parse_keyword(Keyword::ANALYZE);
11787            verbose = self.parse_keyword(Keyword::VERBOSE);
11788            if self.parse_keyword(Keyword::FORMAT) {
11789                format = Some(self.parse_analyze_format_kind()?);
11790            }
11791        }
11792
11793        match self.maybe_parse(|parser| parser.parse_statement())? {
11794            Some(Statement::Explain { .. }) | Some(Statement::ExplainTable { .. }) => Err(
11795                ParserError::ParserError("Explain must be root of the plan".to_string()),
11796            ),
11797            Some(statement) => Ok(Statement::Explain {
11798                describe_alias,
11799                analyze,
11800                verbose,
11801                query_plan,
11802                estimate,
11803                statement: Box::new(statement),
11804                format,
11805                options,
11806            }),
11807            _ => {
11808                let hive_format =
11809                    match self.parse_one_of_keywords(&[Keyword::EXTENDED, Keyword::FORMATTED]) {
11810                        Some(Keyword::EXTENDED) => Some(HiveDescribeFormat::Extended),
11811                        Some(Keyword::FORMATTED) => Some(HiveDescribeFormat::Formatted),
11812                        _ => None,
11813                    };
11814
11815                let has_table_keyword = if self.dialect.describe_requires_table_keyword() {
11816                    // only allow the TABLE keyword for DESC|DESCRIBE statements
11817                    self.parse_keyword(Keyword::TABLE)
11818                } else {
11819                    false
11820                };
11821
11822                let table_name = self.parse_object_name(false)?;
11823                Ok(Statement::ExplainTable {
11824                    describe_alias,
11825                    hive_format,
11826                    has_table_keyword,
11827                    table_name,
11828                })
11829            }
11830        }
11831    }
11832
11833    /// Parse a query expression, i.e. a `SELECT` statement optionally
11834    /// preceded by some `WITH` CTE declarations and optionally followed
11835    /// by `ORDER BY`. Unlike some other `parse_...` methods, this one does not
11836    /// expect the initial keyword to have been consumed already.
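    ///
    /// Illustrative input showing the overall shape handled here (a sketch):
    ///
    /// ```sql
    /// WITH recent AS (SELECT * FROM orders ORDER BY created_at DESC LIMIT 10)
    /// SELECT customer_id, amount FROM recent ORDER BY customer_id
    /// ```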
11837    pub fn parse_query(&mut self) -> Result<Box<Query>, ParserError> {
11838        let _guard = self.recursion_counter.try_decrease()?;
11839        let with = if self.parse_keyword(Keyword::WITH) {
11840            let with_token = self.get_current_token();
11841            Some(With {
11842                with_token: with_token.clone().into(),
11843                recursive: self.parse_keyword(Keyword::RECURSIVE),
11844                cte_tables: self.parse_comma_separated(Parser::parse_cte)?,
11845            })
11846        } else {
11847            None
11848        };
11849        if self.parse_keyword(Keyword::INSERT) {
11850            Ok(Query {
11851                with,
11852                body: self.parse_insert_setexpr_boxed()?,
11853                order_by: None,
11854                limit_clause: None,
11855                fetch: None,
11856                locks: vec![],
11857                for_clause: None,
11858                settings: None,
11859                format_clause: None,
11860                pipe_operators: vec![],
11861            }
11862            .into())
11863        } else if self.parse_keyword(Keyword::UPDATE) {
11864            Ok(Query {
11865                with,
11866                body: self.parse_update_setexpr_boxed()?,
11867                order_by: None,
11868                limit_clause: None,
11869                fetch: None,
11870                locks: vec![],
11871                for_clause: None,
11872                settings: None,
11873                format_clause: None,
11874                pipe_operators: vec![],
11875            }
11876            .into())
11877        } else if self.parse_keyword(Keyword::DELETE) {
11878            Ok(Query {
11879                with,
11880                body: self.parse_delete_setexpr_boxed()?,
11881                limit_clause: None,
11882                order_by: None,
11883                fetch: None,
11884                locks: vec![],
11885                for_clause: None,
11886                settings: None,
11887                format_clause: None,
11888                pipe_operators: vec![],
11889            }
11890            .into())
11891        } else if self.parse_keyword(Keyword::MERGE) {
11892            Ok(Query {
11893                with,
11894                body: self.parse_merge_setexpr_boxed()?,
11895                limit_clause: None,
11896                order_by: None,
11897                fetch: None,
11898                locks: vec![],
11899                for_clause: None,
11900                settings: None,
11901                format_clause: None,
11902                pipe_operators: vec![],
11903            }
11904            .into())
11905        } else {
11906            let body = self.parse_query_body(self.dialect.prec_unknown())?;
11907
11908            let order_by = self.parse_optional_order_by()?;
11909
11910            let limit_clause = self.parse_optional_limit_clause()?;
11911
11912            let settings = self.parse_settings()?;
11913
11914            let fetch = if self.parse_keyword(Keyword::FETCH) {
11915                Some(self.parse_fetch()?)
11916            } else {
11917                None
11918            };
11919
11920            let mut for_clause = None;
11921            let mut locks = Vec::new();
11922            while self.parse_keyword(Keyword::FOR) {
11923                if let Some(parsed_for_clause) = self.parse_for_clause()? {
11924                    for_clause = Some(parsed_for_clause);
11925                    break;
11926                } else {
11927                    locks.push(self.parse_lock()?);
11928                }
11929            }
11930            let format_clause = if dialect_of!(self is ClickHouseDialect | GenericDialect)
11931                && self.parse_keyword(Keyword::FORMAT)
11932            {
11933                if self.parse_keyword(Keyword::NULL) {
11934                    Some(FormatClause::Null)
11935                } else {
11936                    let ident = self.parse_identifier()?;
11937                    Some(FormatClause::Identifier(ident))
11938                }
11939            } else {
11940                None
11941            };
11942
11943            let pipe_operators = if self.dialect.supports_pipe_operator() {
11944                self.parse_pipe_operators()?
11945            } else {
11946                Vec::new()
11947            };
11948
11949            Ok(Query {
11950                with,
11951                body,
11952                order_by,
11953                limit_clause,
11954                fetch,
11955                locks,
11956                for_clause,
11957                settings,
11958                format_clause,
11959                pipe_operators,
11960            }
11961            .into())
11962        }
11963    }
11964
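    /// Parse the pipe operators (`|>`) that may follow a query in dialects
    /// with pipe-syntax support. An illustrative pipeline, as a sketch only:
    ///
    /// ```sql
    /// FROM orders
    /// |> WHERE amount > 0
    /// |> AGGREGATE SUM(amount) AS total GROUP BY customer_id
    /// |> ORDER BY total DESC
    /// |> LIMIT 10
    /// ```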
11965    fn parse_pipe_operators(&mut self) -> Result<Vec<PipeOperator>, ParserError> {
11966        let mut pipe_operators = Vec::new();
11967
11968        while self.consume_token(&Token::VerticalBarRightAngleBracket) {
11969            let kw = self.expect_one_of_keywords(&[
11970                Keyword::SELECT,
11971                Keyword::EXTEND,
11972                Keyword::SET,
11973                Keyword::DROP,
11974                Keyword::AS,
11975                Keyword::WHERE,
11976                Keyword::LIMIT,
11977                Keyword::AGGREGATE,
11978                Keyword::ORDER,
11979                Keyword::TABLESAMPLE,
11980                Keyword::RENAME,
11981                Keyword::UNION,
11982                Keyword::INTERSECT,
11983                Keyword::EXCEPT,
11984                Keyword::CALL,
11985                Keyword::PIVOT,
11986                Keyword::UNPIVOT,
11987                Keyword::JOIN,
11988                Keyword::INNER,
11989                Keyword::LEFT,
11990                Keyword::RIGHT,
11991                Keyword::FULL,
11992                Keyword::CROSS,
11993            ])?;
11994            match kw {
11995                Keyword::SELECT => {
11996                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
11997                    pipe_operators.push(PipeOperator::Select { exprs })
11998                }
11999                Keyword::EXTEND => {
12000                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
12001                    pipe_operators.push(PipeOperator::Extend { exprs })
12002                }
12003                Keyword::SET => {
12004                    let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
12005                    pipe_operators.push(PipeOperator::Set { assignments })
12006                }
12007                Keyword::DROP => {
12008                    let columns = self.parse_identifiers()?;
12009                    pipe_operators.push(PipeOperator::Drop { columns })
12010                }
12011                Keyword::AS => {
12012                    let alias = self.parse_identifier()?;
12013                    pipe_operators.push(PipeOperator::As { alias })
12014                }
12015                Keyword::WHERE => {
12016                    let expr = self.parse_expr()?;
12017                    pipe_operators.push(PipeOperator::Where { expr })
12018                }
12019                Keyword::LIMIT => {
12020                    let expr = self.parse_expr()?;
12021                    let offset = if self.parse_keyword(Keyword::OFFSET) {
12022                        Some(self.parse_expr()?)
12023                    } else {
12024                        None
12025                    };
12026                    pipe_operators.push(PipeOperator::Limit { expr, offset })
12027                }
12028                Keyword::AGGREGATE => {
12029                    let full_table_exprs = if self.peek_keyword(Keyword::GROUP) {
12030                        vec![]
12031                    } else {
12032                        self.parse_comma_separated(|parser| {
12033                            parser.parse_expr_with_alias_and_order_by()
12034                        })?
12035                    };
12036
12037                    let group_by_expr = if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
12038                        self.parse_comma_separated(|parser| {
12039                            parser.parse_expr_with_alias_and_order_by()
12040                        })?
12041                    } else {
12042                        vec![]
12043                    };
12044
12045                    pipe_operators.push(PipeOperator::Aggregate {
12046                        full_table_exprs,
12047                        group_by_expr,
12048                    })
12049                }
12050                Keyword::ORDER => {
12051                    self.expect_one_of_keywords(&[Keyword::BY])?;
12052                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
12053                    pipe_operators.push(PipeOperator::OrderBy { exprs })
12054                }
12055                Keyword::TABLESAMPLE => {
12056                    let sample = self.parse_table_sample(TableSampleModifier::TableSample)?;
12057                    pipe_operators.push(PipeOperator::TableSample { sample });
12058                }
12059                Keyword::RENAME => {
12060                    let mappings =
12061                        self.parse_comma_separated(Parser::parse_identifier_with_optional_alias)?;
12062                    pipe_operators.push(PipeOperator::Rename { mappings });
12063                }
12064                Keyword::UNION => {
12065                    let set_quantifier = self.parse_set_quantifier(&Some(SetOperator::Union));
12066                    let queries = self.parse_pipe_operator_queries()?;
12067                    pipe_operators.push(PipeOperator::Union {
12068                        set_quantifier,
12069                        queries,
12070                    });
12071                }
12072                Keyword::INTERSECT => {
12073                    let set_quantifier =
12074                        self.parse_distinct_required_set_quantifier("INTERSECT")?;
12075                    let queries = self.parse_pipe_operator_queries()?;
12076                    pipe_operators.push(PipeOperator::Intersect {
12077                        set_quantifier,
12078                        queries,
12079                    });
12080                }
12081                Keyword::EXCEPT => {
12082                    let set_quantifier = self.parse_distinct_required_set_quantifier("EXCEPT")?;
12083                    let queries = self.parse_pipe_operator_queries()?;
12084                    pipe_operators.push(PipeOperator::Except {
12085                        set_quantifier,
12086                        queries,
12087                    });
12088                }
12089                Keyword::CALL => {
12090                    let function_name = self.parse_object_name(false)?;
12091                    let function_expr = self.parse_function(function_name)?;
12092                    if let Expr::Function(function) = function_expr {
12093                        let alias = self.parse_identifier_optional_alias()?;
12094                        pipe_operators.push(PipeOperator::Call { function, alias });
12095                    } else {
12096                        return Err(ParserError::ParserError(
12097                            "Expected function call after CALL".to_string(),
12098                        ));
12099                    }
12100                }
12101                Keyword::PIVOT => {
12102                    self.expect_token(&Token::LParen)?;
12103                    let aggregate_functions =
12104                        self.parse_comma_separated(Self::parse_aliased_function_call)?;
12105                    self.expect_keyword_is(Keyword::FOR)?;
12106                    let value_column = self.parse_period_separated(|p| p.parse_identifier())?;
12107                    self.expect_keyword_is(Keyword::IN)?;
12108
12109                    self.expect_token(&Token::LParen)?;
12110                    let value_source = if self.parse_keyword(Keyword::ANY) {
12111                        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
12112                            self.parse_comma_separated(Parser::parse_order_by_expr)?
12113                        } else {
12114                            vec![]
12115                        };
12116                        PivotValueSource::Any(order_by)
12117                    } else if self.peek_sub_query() {
12118                        PivotValueSource::Subquery(self.parse_query()?)
12119                    } else {
12120                        PivotValueSource::List(
12121                            self.parse_comma_separated(Self::parse_expr_with_alias)?,
12122                        )
12123                    };
12124                    self.expect_token(&Token::RParen)?;
12125                    self.expect_token(&Token::RParen)?;
12126
12127                    let alias = self.parse_identifier_optional_alias()?;
12128
12129                    pipe_operators.push(PipeOperator::Pivot {
12130                        aggregate_functions,
12131                        value_column,
12132                        value_source,
12133                        alias,
12134                    });
12135                }
12136                Keyword::UNPIVOT => {
12137                    self.expect_token(&Token::LParen)?;
12138                    let value_column = self.parse_identifier()?;
12139                    self.expect_keyword(Keyword::FOR)?;
12140                    let name_column = self.parse_identifier()?;
12141                    self.expect_keyword(Keyword::IN)?;
12142
12143                    self.expect_token(&Token::LParen)?;
12144                    let unpivot_columns = self.parse_comma_separated(Parser::parse_identifier)?;
12145                    self.expect_token(&Token::RParen)?;
12146
12147                    self.expect_token(&Token::RParen)?;
12148
12149                    let alias = self.parse_identifier_optional_alias()?;
12150
12151                    pipe_operators.push(PipeOperator::Unpivot {
12152                        value_column,
12153                        name_column,
12154                        unpivot_columns,
12155                        alias,
12156                    });
12157                }
12158                Keyword::JOIN
12159                | Keyword::INNER
12160                | Keyword::LEFT
12161                | Keyword::RIGHT
12162                | Keyword::FULL
12163                | Keyword::CROSS => {
12164                    self.prev_token();
12165                    let mut joins = self.parse_joins()?;
12166                    if joins.len() != 1 {
12167                        return Err(ParserError::ParserError(
12168                            "Join pipe operator must have a single join".to_string(),
12169                        ));
12170                    }
12171                    let join = joins.swap_remove(0);
12172                    pipe_operators.push(PipeOperator::Join(join))
12173                }
12174                unhandled => {
12175                    return Err(ParserError::ParserError(format!(
12176                    "`expect_one_of_keywords` further up allowed unhandled keyword: {unhandled:?}"
12177                )))
12178                }
12179            }
12180        }
12181        Ok(pipe_operators)
12182    }
12183
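    /// Parse an optional ClickHouse-style `SETTINGS key = value, ...` clause.
    /// Illustrative input (a sketch):
    ///
    /// ```sql
    /// SELECT * FROM t SETTINGS max_threads = 4, max_block_size = 1024
    /// ```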
12184    fn parse_settings(&mut self) -> Result<Option<Vec<Setting>>, ParserError> {
12185        let settings = if dialect_of!(self is ClickHouseDialect|GenericDialect)
12186            && self.parse_keyword(Keyword::SETTINGS)
12187        {
12188            let key_values = self.parse_comma_separated(|p| {
12189                let key = p.parse_identifier()?;
12190                p.expect_token(&Token::Eq)?;
12191                let value = p.parse_expr()?;
12192                Ok(Setting { key, value })
12193            })?;
12194            Some(key_values)
12195        } else {
12196            None
12197        };
12198        Ok(settings)
12199    }
12200
12201    /// Parse an MSSQL `FOR [XML | JSON | BROWSE]` clause
12202    pub fn parse_for_clause(&mut self) -> Result<Option<ForClause>, ParserError> {
12203        if self.parse_keyword(Keyword::XML) {
12204            Ok(Some(self.parse_for_xml()?))
12205        } else if self.parse_keyword(Keyword::JSON) {
12206            Ok(Some(self.parse_for_json()?))
12207        } else if self.parse_keyword(Keyword::BROWSE) {
12208            Ok(Some(ForClause::Browse))
12209        } else {
12210            Ok(None)
12211        }
12212    }
12213
12214    /// Parse an MSSQL `FOR XML` clause
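    ///
    /// Illustrative inputs, shown in the context of a full query (a sketch):
    ///
    /// ```sql
    /// SELECT id, name FROM t FOR XML PATH('row'), ROOT('rows'), ELEMENTS
    /// SELECT id FROM t FOR XML AUTO, BINARY BASE64, TYPE
    /// ```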
12215    pub fn parse_for_xml(&mut self) -> Result<ForClause, ParserError> {
12216        let for_xml = if self.parse_keyword(Keyword::RAW) {
12217            let mut element_name = None;
12218            if self.peek_token().token == Token::LParen {
12219                self.expect_token(&Token::LParen)?;
12220                element_name = Some(self.parse_literal_string()?);
12221                self.expect_token(&Token::RParen)?;
12222            }
12223            ForXml::Raw(element_name)
12224        } else if self.parse_keyword(Keyword::AUTO) {
12225            ForXml::Auto
12226        } else if self.parse_keyword(Keyword::EXPLICIT) {
12227            ForXml::Explicit
12228        } else if self.parse_keyword(Keyword::PATH) {
12229            let mut element_name = None;
12230            if self.peek_token().token == Token::LParen {
12231                self.expect_token(&Token::LParen)?;
12232                element_name = Some(self.parse_literal_string()?);
12233                self.expect_token(&Token::RParen)?;
12234            }
12235            ForXml::Path(element_name)
12236        } else {
12237            return Err(ParserError::ParserError(
12238                "Expected FOR XML [RAW | AUTO | EXPLICIT | PATH ]".to_string(),
12239            ));
12240        };
12241        let mut elements = false;
12242        let mut binary_base64 = false;
12243        let mut root = None;
12244        let mut r#type = false;
12245        while self.peek_token().token == Token::Comma {
12246            self.next_token();
12247            if self.parse_keyword(Keyword::ELEMENTS) {
12248                elements = true;
12249            } else if self.parse_keyword(Keyword::BINARY) {
12250                self.expect_keyword_is(Keyword::BASE64)?;
12251                binary_base64 = true;
12252            } else if self.parse_keyword(Keyword::ROOT) {
12253                self.expect_token(&Token::LParen)?;
12254                root = Some(self.parse_literal_string()?);
12255                self.expect_token(&Token::RParen)?;
12256            } else if self.parse_keyword(Keyword::TYPE) {
12257                r#type = true;
12258            }
12259        }
12260        Ok(ForClause::Xml {
12261            for_xml,
12262            elements,
12263            binary_base64,
12264            root,
12265            r#type,
12266        })
12267    }
12268
12269    /// Parse an MSSQL `FOR JSON` clause
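    ///
    /// Illustrative input, shown in the context of a full query (a sketch):
    ///
    /// ```sql
    /// SELECT id, name FROM t FOR JSON PATH, ROOT('data'), INCLUDE_NULL_VALUES
    /// ```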
12270    pub fn parse_for_json(&mut self) -> Result<ForClause, ParserError> {
12271        let for_json = if self.parse_keyword(Keyword::AUTO) {
12272            ForJson::Auto
12273        } else if self.parse_keyword(Keyword::PATH) {
12274            ForJson::Path
12275        } else {
12276            return Err(ParserError::ParserError(
12277                "Expected FOR JSON [AUTO | PATH ]".to_string(),
12278            ));
12279        };
12280        let mut root = None;
12281        let mut include_null_values = false;
12282        let mut without_array_wrapper = false;
12283        while self.peek_token().token == Token::Comma {
12284            self.next_token();
12285            if self.parse_keyword(Keyword::ROOT) {
12286                self.expect_token(&Token::LParen)?;
12287                root = Some(self.parse_literal_string()?);
12288                self.expect_token(&Token::RParen)?;
12289            } else if self.parse_keyword(Keyword::INCLUDE_NULL_VALUES) {
12290                include_null_values = true;
12291            } else if self.parse_keyword(Keyword::WITHOUT_ARRAY_WRAPPER) {
12292                without_array_wrapper = true;
12293            }
12294        }
12295        Ok(ForClause::Json {
12296            for_json,
12297            root,
12298            include_null_values,
12299            without_array_wrapper,
12300        })
12301    }
12302
12303    /// Parse a CTE (`alias [( col1, col2, ... )] AS (subquery)`)
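    ///
    /// Illustrative CTE definitions accepted here (a sketch):
    ///
    /// ```sql
    /// cte_1 AS (SELECT 1)
    /// cte_2 (a, b) AS (SELECT 1, 2)
    /// cte_3 AS MATERIALIZED (SELECT * FROM t) -- PostgreSQL only
    /// ```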
12304    pub fn parse_cte(&mut self) -> Result<Cte, ParserError> {
12305        let name = self.parse_identifier()?;
12306
12307        let mut cte = if self.parse_keyword(Keyword::AS) {
12308            let mut is_materialized = None;
12309            if dialect_of!(self is PostgreSqlDialect) {
12310                if self.parse_keyword(Keyword::MATERIALIZED) {
12311                    is_materialized = Some(CteAsMaterialized::Materialized);
12312                } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
12313                    is_materialized = Some(CteAsMaterialized::NotMaterialized);
12314                }
12315            }
12316            self.expect_token(&Token::LParen)?;
12317
12318            let query = self.parse_query()?;
12319            let closing_paren_token = self.expect_token(&Token::RParen)?;
12320
12321            let alias = TableAlias {
12322                name,
12323                columns: vec![],
12324            };
12325            Cte {
12326                alias,
12327                query,
12328                from: None,
12329                materialized: is_materialized,
12330                closing_paren_token: closing_paren_token.into(),
12331            }
12332        } else {
12333            let columns = self.parse_table_alias_column_defs()?;
12334            self.expect_keyword_is(Keyword::AS)?;
12335            let mut is_materialized = None;
12336            if dialect_of!(self is PostgreSqlDialect) {
12337                if self.parse_keyword(Keyword::MATERIALIZED) {
12338                    is_materialized = Some(CteAsMaterialized::Materialized);
12339                } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
12340                    is_materialized = Some(CteAsMaterialized::NotMaterialized);
12341                }
12342            }
12343            self.expect_token(&Token::LParen)?;
12344
12345            let query = self.parse_query()?;
12346            let closing_paren_token = self.expect_token(&Token::RParen)?;
12347
12348            let alias = TableAlias { name, columns };
12349            Cte {
12350                alias,
12351                query,
12352                from: None,
12353                materialized: is_materialized,
12354                closing_paren_token: closing_paren_token.into(),
12355            }
12356        };
12357        if self.parse_keyword(Keyword::FROM) {
12358            cte.from = Some(self.parse_identifier()?);
12359        }
12360        Ok(cte)
12361    }
12362
12363    /// Parse a "query body", which is an expression with roughly the
12364    /// following grammar:
12365    /// ```sql
12366    ///   query_body ::= restricted_select | '(' subquery ')' | set_operation
12367    ///   restricted_select ::= 'SELECT' [expr_list] [ from ] [ where ] [ groupby_having ]
12368    ///   subquery ::= query_body [ order_by_limit ]
12369    ///   set_operation ::= query_body { 'UNION' | 'EXCEPT' | 'INTERSECT' } [ 'ALL' ] query_body
12370    /// ```
12371    pub fn parse_query_body(&mut self, precedence: u8) -> Result<Box<SetExpr>, ParserError> {
12372        // We parse the expression using a Pratt parser, as in `parse_expr()`.
12373        // Start by parsing a restricted SELECT or a `(subquery)`:
12374        let expr = if self.peek_keyword(Keyword::SELECT)
12375            || (self.peek_keyword(Keyword::FROM) && self.dialect.supports_from_first_select())
12376        {
12377            SetExpr::Select(self.parse_select().map(Box::new)?)
12378        } else if self.consume_token(&Token::LParen) {
12379            // CTEs are not allowed here, but the parser currently accepts them
12380            let subquery = self.parse_query()?;
12381            self.expect_token(&Token::RParen)?;
12382            SetExpr::Query(subquery)
12383        } else if self.parse_keyword(Keyword::VALUES) {
12384            let is_mysql = dialect_of!(self is MySqlDialect);
12385            SetExpr::Values(self.parse_values(is_mysql)?)
12386        } else if self.parse_keyword(Keyword::TABLE) {
12387            SetExpr::Table(Box::new(self.parse_as_table()?))
12388        } else {
12389            return self.expected(
12390                "SELECT, VALUES, or a subquery in the query body",
12391                self.peek_token(),
12392            );
12393        };
12394
12395        self.parse_remaining_set_exprs(expr, precedence)
12396    }
12397
12398    /// Parse any extra set expressions that may be present in a query body
12399    ///
12400    /// (this is its own function to reduce required stack size in debug builds)
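    ///
    /// Set operators are folded left-to-right, with `INTERSECT` binding
    /// tighter than `UNION` / `EXCEPT` / `MINUS`. As a sketch of the
    /// resulting grouping:
    ///
    /// ```sql
    /// SELECT 1 UNION SELECT 2 INTERSECT SELECT 3
    /// -- is parsed as: SELECT 1 UNION (SELECT 2 INTERSECT SELECT 3)
    /// ```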
12401    fn parse_remaining_set_exprs(
12402        &mut self,
12403        mut expr: SetExpr,
12404        precedence: u8,
12405    ) -> Result<Box<SetExpr>, ParserError> {
12406        loop {
12407            // The query can be optionally followed by a set operator:
12408            let op = self.parse_set_operator(&self.peek_token().token);
12409            let next_precedence = match op {
12410                // UNION and EXCEPT have the same binding power and evaluate left-to-right
12411                Some(SetOperator::Union) | Some(SetOperator::Except) | Some(SetOperator::Minus) => {
12412                    10
12413                }
12414                // INTERSECT has higher precedence than UNION/EXCEPT
12415                Some(SetOperator::Intersect) => 20,
12416                // Unexpected token or EOF => stop parsing the query body
12417                None => break,
12418            };
12419            if precedence >= next_precedence {
12420                break;
12421            }
12422            self.next_token(); // skip past the set operator
12423            let set_quantifier = self.parse_set_quantifier(&op);
12424            expr = SetExpr::SetOperation {
12425                left: Box::new(expr),
12426                op: op.unwrap(),
12427                set_quantifier,
12428                right: self.parse_query_body(next_precedence)?,
12429            };
12430        }
12431
12432        Ok(expr.into())
12433    }
12434
12435    pub fn parse_set_operator(&mut self, token: &Token) -> Option<SetOperator> {
12436        match token {
12437            Token::Word(w) if w.keyword == Keyword::UNION => Some(SetOperator::Union),
12438            Token::Word(w) if w.keyword == Keyword::EXCEPT => Some(SetOperator::Except),
12439            Token::Word(w) if w.keyword == Keyword::INTERSECT => Some(SetOperator::Intersect),
12440            Token::Word(w) if w.keyword == Keyword::MINUS => Some(SetOperator::Minus),
12441            _ => None,
12442        }
12443    }
12444
12445    pub fn parse_set_quantifier(&mut self, op: &Option<SetOperator>) -> SetQuantifier {
12446        match op {
12447            Some(
12448                SetOperator::Except
12449                | SetOperator::Intersect
12450                | SetOperator::Union
12451                | SetOperator::Minus,
12452            ) => {
12453                if self.parse_keywords(&[Keyword::DISTINCT, Keyword::BY, Keyword::NAME]) {
12454                    SetQuantifier::DistinctByName
12455                } else if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
12456                    SetQuantifier::ByName
12457                } else if self.parse_keyword(Keyword::ALL) {
12458                    if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
12459                        SetQuantifier::AllByName
12460                    } else {
12461                        SetQuantifier::All
12462                    }
12463                } else if self.parse_keyword(Keyword::DISTINCT) {
12464                    SetQuantifier::Distinct
12465                } else {
12466                    SetQuantifier::None
12467                }
12468            }
12469            _ => SetQuantifier::None,
12470        }
12471    }
12472
12473    /// Parse a restricted `SELECT` statement (no CTEs / `UNION` / `ORDER BY`)
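    ///
    /// Illustrative input showing the clause order handled here (a sketch):
    ///
    /// ```sql
    /// SELECT DISTINCT a, SUM(b) AS total
    /// FROM t
    /// WHERE a > 0
    /// GROUP BY a
    /// HAVING SUM(b) > 10
    /// ```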
12474    pub fn parse_select(&mut self) -> Result<Select, ParserError> {
12475        let mut from_first = None;
12476
12477        if self.dialect.supports_from_first_select() && self.peek_keyword(Keyword::FROM) {
12478            let from_token = self.expect_keyword(Keyword::FROM)?;
12479            let from = self.parse_table_with_joins()?;
12480            if !self.peek_keyword(Keyword::SELECT) {
12481                return Ok(Select {
12482                    select_token: AttachedToken(from_token),
12483                    distinct: None,
12484                    top: None,
12485                    top_before_distinct: false,
12486                    projection: vec![],
12487                    exclude: None,
12488                    into: None,
12489                    from,
12490                    lateral_views: vec![],
12491                    prewhere: None,
12492                    selection: None,
12493                    group_by: GroupByExpr::Expressions(vec![], vec![]),
12494                    cluster_by: vec![],
12495                    distribute_by: vec![],
12496                    sort_by: vec![],
12497                    having: None,
12498                    named_window: vec![],
12499                    window_before_qualify: false,
12500                    qualify: None,
12501                    value_table_mode: None,
12502                    connect_by: None,
12503                    flavor: SelectFlavor::FromFirstNoSelect,
12504                });
12505            }
12506            from_first = Some(from);
12507        }
12508
12509        let select_token = self.expect_keyword(Keyword::SELECT)?;
12510        let value_table_mode = self.parse_value_table_mode()?;
12511
12512        let mut top_before_distinct = false;
12513        let mut top = None;
12514        if self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
12515            top = Some(self.parse_top()?);
12516            top_before_distinct = true;
12517        }
12518        let distinct = self.parse_all_or_distinct()?;
12519        if !self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
12520            top = Some(self.parse_top()?);
12521        }
12522
12523        let projection =
12524            if self.dialect.supports_empty_projections() && self.peek_keyword(Keyword::FROM) {
12525                vec![]
12526            } else {
12527                self.parse_projection()?
12528            };
12529
12530        let exclude = if self.dialect.supports_select_exclude() {
12531            self.parse_optional_select_item_exclude()?
12532        } else {
12533            None
12534        };
12535
12536        let into = if self.parse_keyword(Keyword::INTO) {
12537            Some(self.parse_select_into()?)
12538        } else {
12539            None
12540        };
12541
12542        // Note that for keywords to be properly handled here, they need to be
12543        // added to `RESERVED_FOR_COLUMN_ALIAS` / `RESERVED_FOR_TABLE_ALIAS`,
12544        // otherwise they may be parsed as an alias as part of the `projection`
12545        // or `from`.
12546
12547        let (from, from_first) = if let Some(from) = from_first.take() {
12548            (from, true)
12549        } else if self.parse_keyword(Keyword::FROM) {
12550            (self.parse_table_with_joins()?, false)
12551        } else {
12552            (vec![], false)
12553        };
12554
12555        let mut lateral_views = vec![];
12556        loop {
12557            if self.parse_keywords(&[Keyword::LATERAL, Keyword::VIEW]) {
12558                let outer = self.parse_keyword(Keyword::OUTER);
12559                let lateral_view = self.parse_expr()?;
12560                let lateral_view_name = self.parse_object_name(false)?;
12561                let lateral_col_alias = self
12562                    .parse_comma_separated(|parser| {
12563                        parser.parse_optional_alias(&[
12564                            Keyword::WHERE,
12565                            Keyword::GROUP,
12566                            Keyword::CLUSTER,
12567                            Keyword::HAVING,
12568                            Keyword::LATERAL,
12569                        ]) // keywords that must not be consumed as lateral view column aliases
12570                    })?
12571                    .into_iter()
12572                    .flatten()
12573                    .collect();
12574
12575                lateral_views.push(LateralView {
12576                    lateral_view,
12577                    lateral_view_name,
12578                    lateral_col_alias,
12579                    outer,
12580                });
12581            } else {
12582                break;
12583            }
12584        }
12585
12586        let prewhere = if dialect_of!(self is ClickHouseDialect|GenericDialect)
12587            && self.parse_keyword(Keyword::PREWHERE)
12588        {
12589            Some(self.parse_expr()?)
12590        } else {
12591            None
12592        };
12593
12594        let selection = if self.parse_keyword(Keyword::WHERE) {
12595            Some(self.parse_expr()?)
12596        } else {
12597            None
12598        };
12599
12600        let group_by = self
12601            .parse_optional_group_by()?
12602            .unwrap_or_else(|| GroupByExpr::Expressions(vec![], vec![]));
12603
12604        let cluster_by = if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
12605            self.parse_comma_separated(Parser::parse_expr)?
12606        } else {
12607            vec![]
12608        };
12609
12610        let distribute_by = if self.parse_keywords(&[Keyword::DISTRIBUTE, Keyword::BY]) {
12611            self.parse_comma_separated(Parser::parse_expr)?
12612        } else {
12613            vec![]
12614        };
12615
12616        let sort_by = if self.parse_keywords(&[Keyword::SORT, Keyword::BY]) {
12617            self.parse_comma_separated(Parser::parse_order_by_expr)?
12618        } else {
12619            vec![]
12620        };
12621
12622        let having = if self.parse_keyword(Keyword::HAVING) {
12623            Some(self.parse_expr()?)
12624        } else {
12625            None
12626        };
12627
12628        // Accept QUALIFY and WINDOW in any order and flag accordingly.
12629        let (named_windows, qualify, window_before_qualify) = if self.parse_keyword(Keyword::WINDOW)
12630        {
12631            let named_windows = self.parse_comma_separated(Parser::parse_named_window)?;
12632            if self.parse_keyword(Keyword::QUALIFY) {
12633                (named_windows, Some(self.parse_expr()?), true)
12634            } else {
12635                (named_windows, None, true)
12636            }
12637        } else if self.parse_keyword(Keyword::QUALIFY) {
12638            let qualify = Some(self.parse_expr()?);
12639            if self.parse_keyword(Keyword::WINDOW) {
12640                (
12641                    self.parse_comma_separated(Parser::parse_named_window)?,
12642                    qualify,
12643                    false,
12644                )
12645            } else {
12646                (Default::default(), qualify, false)
12647            }
12648        } else {
12649            Default::default()
12650        };
12651
12652        let connect_by = if self.dialect.supports_connect_by()
12653            && self
12654                .parse_one_of_keywords(&[Keyword::START, Keyword::CONNECT])
12655                .is_some()
12656        {
12657            self.prev_token();
12658            Some(self.parse_connect_by()?)
12659        } else {
12660            None
12661        };
12662
12663        Ok(Select {
12664            select_token: AttachedToken(select_token),
12665            distinct,
12666            top,
12667            top_before_distinct,
12668            projection,
12669            exclude,
12670            into,
12671            from,
12672            lateral_views,
12673            prewhere,
12674            selection,
12675            group_by,
12676            cluster_by,
12677            distribute_by,
12678            sort_by,
12679            having,
12680            named_window: named_windows,
12681            window_before_qualify,
12682            qualify,
12683            value_table_mode,
12684            connect_by,
12685            flavor: if from_first {
12686                SelectFlavor::FromFirst
12687            } else {
12688                SelectFlavor::Standard
12689            },
12690        })
12691    }
12692
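    /// Parse BigQuery's value-table modifiers that may follow `SELECT`, for
    /// example (a sketch):
    ///
    /// ```sql
    /// SELECT AS STRUCT a, b FROM t
    /// SELECT DISTINCT AS VALUE s FROM t
    /// ```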
12693    fn parse_value_table_mode(&mut self) -> Result<Option<ValueTableMode>, ParserError> {
12694        if !dialect_of!(self is BigQueryDialect) {
12695            return Ok(None);
12696        }
12697
12698        let mode = if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::VALUE]) {
12699            Some(ValueTableMode::DistinctAsValue)
12700        } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::STRUCT]) {
12701            Some(ValueTableMode::DistinctAsStruct)
12702        } else if self.parse_keywords(&[Keyword::AS, Keyword::VALUE])
12703            || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::VALUE])
12704        {
12705            Some(ValueTableMode::AsValue)
12706        } else if self.parse_keywords(&[Keyword::AS, Keyword::STRUCT])
12707            || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::STRUCT])
12708        {
12709            Some(ValueTableMode::AsStruct)
12710        } else if self.parse_keyword(Keyword::AS) {
12711            self.expected("VALUE or STRUCT", self.peek_token())?
12712        } else {
12713            None
12714        };
12715
12716        Ok(mode)
12717    }
12718
12719    /// Invoke `f` after first setting the parser's `ParserState` to `state`.
12720    ///
12721    /// Upon return, restores the parser's state to what it started at.
12722    fn with_state<T, F>(&mut self, state: ParserState, mut f: F) -> Result<T, ParserError>
12723    where
12724        F: FnMut(&mut Parser) -> Result<T, ParserError>,
12725    {
12726        let current_state = self.state;
12727        self.state = state;
12728        let res = f(self);
12729        self.state = current_state;
12730        res
12731    }
12732
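    /// Parse a hierarchical-query clause; both keyword orders are accepted.
    /// Illustrative inputs (a sketch):
    ///
    /// ```sql
    /// START WITH manager_id IS NULL CONNECT BY manager_id = PRIOR employee_id
    /// CONNECT BY manager_id = PRIOR employee_id START WITH manager_id IS NULL
    /// ```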
12733    pub fn parse_connect_by(&mut self) -> Result<ConnectBy, ParserError> {
12734        let (condition, relationships) = if self.parse_keywords(&[Keyword::CONNECT, Keyword::BY]) {
12735            let relationships = self.with_state(ParserState::ConnectBy, |parser| {
12736                parser.parse_comma_separated(Parser::parse_expr)
12737            })?;
12738            self.expect_keywords(&[Keyword::START, Keyword::WITH])?;
12739            let condition = self.parse_expr()?;
12740            (condition, relationships)
12741        } else {
12742            self.expect_keywords(&[Keyword::START, Keyword::WITH])?;
12743            let condition = self.parse_expr()?;
12744            self.expect_keywords(&[Keyword::CONNECT, Keyword::BY])?;
12745            let relationships = self.with_state(ParserState::ConnectBy, |parser| {
12746                parser.parse_comma_separated(Parser::parse_expr)
12747            })?;
12748            (condition, relationships)
12749        };
12750        Ok(ConnectBy {
12751            condition,
12752            relationships,
12753        })
12754    }
12755
12756    /// Parse the table reference (`y` or `schema.y`) in `CREATE TABLE x AS TABLE y`
12757    pub fn parse_as_table(&mut self) -> Result<Table, ParserError> {
12758        let token1 = self.next_token();
12759        let token2 = self.next_token();
12760        let token3 = self.next_token();
12761
12762        let table_name;
12763        let schema_name;
12764        if token2 == Token::Period {
12765            match token1.token {
12766                Token::Word(w) => {
12767                    schema_name = w.value;
12768                }
12769                _ => {
12770                    return self.expected("Schema name", token1);
12771                }
12772            }
12773            match token3.token {
12774                Token::Word(w) => {
12775                    table_name = w.value;
12776                }
12777                _ => {
12778                    return self.expected("Table name", token3);
12779                }
12780            }
12781            Ok(Table {
12782                table_name: Some(table_name),
12783                schema_name: Some(schema_name),
12784            })
12785        } else {
12786            match token1.token {
12787                Token::Word(w) => {
12788                    table_name = w.value;
12789                }
12790                _ => {
12791                    return self.expected("Table name", token1);
12792                }
12793            }
12794            Ok(Table {
12795                table_name: Some(table_name),
12796                schema_name: None,
12797            })
12798        }
12799    }
12800
12801    /// Parse a `SET ROLE` statement. Expects SET to be consumed already.
12802    fn parse_set_role(
12803        &mut self,
12804        modifier: Option<ContextModifier>,
12805    ) -> Result<Statement, ParserError> {
12806        self.expect_keyword_is(Keyword::ROLE)?;
12807
12808        let role_name = if self.parse_keyword(Keyword::NONE) {
12809            None
12810        } else {
12811            Some(self.parse_identifier()?)
12812        };
12813        Ok(Statement::Set(Set::SetRole {
12814            context_modifier: modifier,
12815            role_name,
12816        }))
12817    }
12818
12819    fn parse_set_values(
12820        &mut self,
12821        parenthesized_assignment: bool,
12822    ) -> Result<Vec<Expr>, ParserError> {
12823        let mut values = vec![];
12824
12825        if parenthesized_assignment {
12826            self.expect_token(&Token::LParen)?;
12827        }
12828
12829        loop {
12830            let value = if let Some(expr) = self.try_parse_expr_sub_query()? {
12831                expr
12832            } else if let Ok(expr) = self.parse_expr() {
12833                expr
12834            } else {
12835                self.expected("variable value", self.peek_token())?
12836            };
12837
12838            values.push(value);
12839            if self.consume_token(&Token::Comma) {
12840                continue;
12841            }
12842
12843            if parenthesized_assignment {
12844                self.expect_token(&Token::RParen)?;
12845            }
12846            return Ok(values);
12847        }
12848    }
12849
12850    fn parse_context_modifier(&mut self) -> Option<ContextModifier> {
12851        let modifier =
12852            self.parse_one_of_keywords(&[Keyword::SESSION, Keyword::LOCAL, Keyword::GLOBAL])?;
12853
12854        Self::keyword_to_modifier(modifier)
12855    }
12856
12857    /// Parse a single SET statement assignment `var = expr`.
12858    fn parse_set_assignment(&mut self) -> Result<SetAssignment, ParserError> {
12859        let scope = self.parse_context_modifier();
12860
12861        let name = if self.dialect.supports_parenthesized_set_variables()
12862            && self.consume_token(&Token::LParen)
12863        {
12864            // Parenthesized assignments are handled in `parse_set` after trying to
12865            // parse a list of assignments using this function. If a dialect supports
12866            // both and we find an `LParen`, we exit early from this function.
12867            self.expected("Unparenthesized assignment", self.peek_token())?
12868        } else {
12869            self.parse_object_name(false)?
12870        };
12871
12872        if !(self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO)) {
12873            return self.expected("assignment operator", self.peek_token());
12874        }
12875
12876        let value = self.parse_expr()?;
12877
12878        Ok(SetAssignment { scope, name, value })
12879    }
12880
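    /// Parse the body of a `SET` statement; the `SET` keyword is expected to
    /// have been consumed already. A few illustrative forms handled here
    /// (a sketch; dialect support varies):
    ///
    /// ```sql
    /// SET TIME ZONE 'UTC'
    /// SET NAMES utf8mb4 COLLATE 'utf8mb4_bin'
    /// SET TRANSACTION READ ONLY
    /// SET search_path = 'public'
    /// ```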
12881    fn parse_set(&mut self) -> Result<Statement, ParserError> {
12882        let hivevar = self.parse_keyword(Keyword::HIVEVAR);
12883
12884        // The modifier is either `HIVEVAR:` or a context modifier (LOCAL, SESSION, etc.), not both
12885        let scope = if !hivevar {
12886            self.parse_context_modifier()
12887        } else {
12888            None
12889        };
12890
12891        if hivevar {
12892            self.expect_token(&Token::Colon)?;
12893        }
12894
12895        if let Some(set_role_stmt) = self.maybe_parse(|parser| parser.parse_set_role(scope))? {
12896            return Ok(set_role_stmt);
12897        }
12898
12899        // Handle special cases first
12900        if self.parse_keywords(&[Keyword::TIME, Keyword::ZONE])
12901            || self.parse_keyword(Keyword::TIMEZONE)
12902        {
12903            if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
12904                return Ok(Set::SingleAssignment {
12905                    scope,
12906                    hivevar,
12907                    variable: ObjectName::from(vec!["TIMEZONE".into()]),
12908                    values: self.parse_set_values(false)?,
12909                }
12910                .into());
12911            } else {
12912                // A shorthand alias for SET TIME ZONE that doesn't require
12913                // the assignment operator. It's originally PostgreSQL-specific,
12914                // but we allow it for all dialects.
12915                return Ok(Set::SetTimeZone {
12916                    local: scope == Some(ContextModifier::Local),
12917                    value: self.parse_expr()?,
12918                }
12919                .into());
12920            }
12921        } else if self.dialect.supports_set_names() && self.parse_keyword(Keyword::NAMES) {
12922            if self.parse_keyword(Keyword::DEFAULT) {
12923                return Ok(Set::SetNamesDefault {}.into());
12924            }
12925            let charset_name = self.parse_identifier()?;
12926            let collation_name = if self.parse_one_of_keywords(&[Keyword::COLLATE]).is_some() {
12927                Some(self.parse_literal_string()?)
12928            } else {
12929                None
12930            };
12931
12932            return Ok(Set::SetNames {
12933                charset_name,
12934                collation_name,
12935            }
12936            .into());
12937        } else if self.parse_keyword(Keyword::CHARACTERISTICS) {
12938            self.expect_keywords(&[Keyword::AS, Keyword::TRANSACTION])?;
12939            return Ok(Set::SetTransaction {
12940                modes: self.parse_transaction_modes()?,
12941                snapshot: None,
12942                session: true,
12943            }
12944            .into());
12945        } else if self.parse_keyword(Keyword::TRANSACTION) {
12946            if self.parse_keyword(Keyword::SNAPSHOT) {
12947                let snapshot_id = self.parse_value()?.value;
12948                return Ok(Set::SetTransaction {
12949                    modes: vec![],
12950                    snapshot: Some(snapshot_id),
12951                    session: false,
12952                }
12953                .into());
12954            }
12955            return Ok(Set::SetTransaction {
12956                modes: self.parse_transaction_modes()?,
12957                snapshot: None,
12958                session: false,
12959            }
12960            .into());
12961        }
12962
12963        if self.dialect.supports_comma_separated_set_assignments() {
12964            if scope.is_some() {
12965                self.prev_token();
12966            }
12967
12968            if let Some(assignments) = self
12969                .maybe_parse(|parser| parser.parse_comma_separated(Parser::parse_set_assignment))?
12970            {
12971                return if assignments.len() > 1 {
12972                    Ok(Set::MultipleAssignments { assignments }.into())
12973                } else {
12974                    let SetAssignment { scope, name, value } =
12975                        assignments.into_iter().next().ok_or_else(|| {
12976                            ParserError::ParserError("Expected at least one assignment".to_string())
12977                        })?;
12978
12979                    Ok(Set::SingleAssignment {
12980                        scope,
12981                        hivevar,
12982                        variable: name,
12983                        values: vec![value],
12984                    }
12985                    .into())
12986                };
12987            }
12988        }
12989
12990        let variables = if self.dialect.supports_parenthesized_set_variables()
12991            && self.consume_token(&Token::LParen)
12992        {
12993            let vars = OneOrManyWithParens::Many(
12994                self.parse_comma_separated(|parser: &mut Parser<'a>| parser.parse_identifier())?
12995                    .into_iter()
12996                    .map(|ident| ObjectName::from(vec![ident]))
12997                    .collect(),
12998            );
12999            self.expect_token(&Token::RParen)?;
13000            vars
13001        } else {
13002            OneOrManyWithParens::One(self.parse_object_name(false)?)
13003        };
13004
13005        if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
13006            let stmt = match variables {
13007                OneOrManyWithParens::One(var) => Set::SingleAssignment {
13008                    scope,
13009                    hivevar,
13010                    variable: var,
13011                    values: self.parse_set_values(false)?,
13012                },
13013                OneOrManyWithParens::Many(vars) => Set::ParenthesizedAssignments {
13014                    variables: vars,
13015                    values: self.parse_set_values(true)?,
13016                },
13017            };
13018
13019            return Ok(stmt.into());
13020        }
13021
13022        if self.dialect.supports_set_stmt_without_operator() {
13023            self.prev_token();
13024            return self.parse_set_session_params();
13025        };
13026
13027        self.expected("equals sign or TO", self.peek_token())
13028    }
13029
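    /// Parse `SET` session parameters that take `ON`/`OFF` or a bare value
    /// instead of an assignment operator, e.g. (a sketch):
    ///
    /// ```sql
    /// SET STATISTICS IO ON
    /// SET IDENTITY_INSERT dbo.my_table ON
    /// SET OFFSETS SELECT, FROM OFF
    /// ```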
13030    pub fn parse_set_session_params(&mut self) -> Result<Statement, ParserError> {
13031        if self.parse_keyword(Keyword::STATISTICS) {
13032            let topic = match self.parse_one_of_keywords(&[
13033                Keyword::IO,
13034                Keyword::PROFILE,
13035                Keyword::TIME,
13036                Keyword::XML,
13037            ]) {
13038                Some(Keyword::IO) => SessionParamStatsTopic::IO,
13039                Some(Keyword::PROFILE) => SessionParamStatsTopic::Profile,
13040                Some(Keyword::TIME) => SessionParamStatsTopic::Time,
13041                Some(Keyword::XML) => SessionParamStatsTopic::Xml,
13042                _ => return self.expected("IO, PROFILE, TIME or XML", self.peek_token()),
13043            };
13044            let value = self.parse_session_param_value()?;
13045            Ok(
13046                Set::SetSessionParam(SetSessionParamKind::Statistics(SetSessionParamStatistics {
13047                    topic,
13048                    value,
13049                }))
13050                .into(),
13051            )
13052        } else if self.parse_keyword(Keyword::IDENTITY_INSERT) {
13053            let obj = self.parse_object_name(false)?;
13054            let value = self.parse_session_param_value()?;
13055            Ok(Set::SetSessionParam(SetSessionParamKind::IdentityInsert(
13056                SetSessionParamIdentityInsert { obj, value },
13057            ))
13058            .into())
13059        } else if self.parse_keyword(Keyword::OFFSETS) {
13060            let keywords = self.parse_comma_separated(|parser| {
13061                let next_token = parser.next_token();
13062                match &next_token.token {
13063                    Token::Word(w) => Ok(w.to_string()),
13064                    _ => parser.expected("SQL keyword", next_token),
13065                }
13066            })?;
13067            let value = self.parse_session_param_value()?;
13068            Ok(
13069                Set::SetSessionParam(SetSessionParamKind::Offsets(SetSessionParamOffsets {
13070                    keywords,
13071                    value,
13072                }))
13073                .into(),
13074            )
13075        } else {
13076            let names = self.parse_comma_separated(|parser| {
13077                let next_token = parser.next_token();
13078                match next_token.token {
13079                    Token::Word(w) => Ok(w.to_string()),
13080                    _ => parser.expected("Session param name", next_token),
13081                }
13082            })?;
13083            let value = self.parse_expr()?.to_string();
13084            Ok(
13085                Set::SetSessionParam(SetSessionParamKind::Generic(SetSessionParamGeneric {
13086                    names,
13087                    value,
13088                }))
13089                .into(),
13090            )
13091        }
13092    }
13093
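    /// Parses the `ON`/`OFF` value of a session parameter, e.g. the trailing
    /// keyword in `SET STATISTICS IO ON`.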
13094    fn parse_session_param_value(&mut self) -> Result<SessionParamValue, ParserError> {
13095        if self.parse_keyword(Keyword::ON) {
13096            Ok(SessionParamValue::On)
13097        } else if self.parse_keyword(Keyword::OFF) {
13098            Ok(SessionParamValue::Off)
13099        } else {
13100            self.expected("ON or OFF", self.peek_token())
13101        }
13102    }
13103
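    /// Parses the body of a `SHOW ...` statement, assuming the `SHOW` keyword has
    /// already been consumed, and dispatches on the following keyword(s), e.g.
    /// `SHOW TABLES`, `SHOW FULL COLUMNS FROM t`, `SHOW CREATE VIEW v`, or (for
    /// MySQL-like dialects) `SHOW VARIABLES LIKE 'sql_mode'`.
    ///
    /// Minimal illustration via the crate's public entry point:
    ///
    /// ```
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let stmts = Parser::parse_sql(&GenericDialect {}, "SHOW TABLES LIKE 'a%'").unwrap();
    /// assert_eq!(stmts.len(), 1);
    /// ```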
13104    pub fn parse_show(&mut self) -> Result<Statement, ParserError> {
13105        let terse = self.parse_keyword(Keyword::TERSE);
13106        let extended = self.parse_keyword(Keyword::EXTENDED);
13107        let full = self.parse_keyword(Keyword::FULL);
13108        let session = self.parse_keyword(Keyword::SESSION);
13109        let global = self.parse_keyword(Keyword::GLOBAL);
13110        let external = self.parse_keyword(Keyword::EXTERNAL);
13111        if self
13112            .parse_one_of_keywords(&[Keyword::COLUMNS, Keyword::FIELDS])
13113            .is_some()
13114        {
13115            Ok(self.parse_show_columns(extended, full)?)
13116        } else if self.parse_keyword(Keyword::TABLES) {
13117            Ok(self.parse_show_tables(terse, extended, full, external)?)
13118        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEWS]) {
13119            Ok(self.parse_show_views(terse, true)?)
13120        } else if self.parse_keyword(Keyword::VIEWS) {
13121            Ok(self.parse_show_views(terse, false)?)
13122        } else if self.parse_keyword(Keyword::FUNCTIONS) {
13123            Ok(self.parse_show_functions()?)
13124        } else if extended || full {
13125            Err(ParserError::ParserError(
13126                "EXTENDED/FULL are not supported with this type of SHOW query".to_string(),
13127            ))
13128        } else if self.parse_one_of_keywords(&[Keyword::CREATE]).is_some() {
13129            Ok(self.parse_show_create()?)
13130        } else if self.parse_keyword(Keyword::COLLATION) {
13131            Ok(self.parse_show_collation()?)
13132        } else if self.parse_keyword(Keyword::VARIABLES)
13133            && dialect_of!(self is MySqlDialect | GenericDialect)
13134        {
13135            Ok(Statement::ShowVariables {
13136                filter: self.parse_show_statement_filter()?,
13137                session,
13138                global,
13139            })
13140        } else if self.parse_keyword(Keyword::STATUS)
13141            && dialect_of!(self is MySqlDialect | GenericDialect)
13142        {
13143            Ok(Statement::ShowStatus {
13144                filter: self.parse_show_statement_filter()?,
13145                session,
13146                global,
13147            })
13148        } else if self.parse_keyword(Keyword::DATABASES) {
13149            self.parse_show_databases(terse)
13150        } else if self.parse_keyword(Keyword::SCHEMAS) {
13151            self.parse_show_schemas(terse)
13152        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
13153            self.parse_show_charset(false)
13154        } else if self.parse_keyword(Keyword::CHARSET) {
13155            self.parse_show_charset(true)
13156        } else {
13157            Ok(Statement::ShowVariable {
13158                variable: self.parse_identifiers()?,
13159            })
13160        }
13161    }
13162
13163    fn parse_show_charset(&mut self, is_shorthand: bool) -> Result<Statement, ParserError> {
13164        // Parse the optional LIKE/ILIKE/WHERE filter that may follow `CHARACTER SET` / `CHARSET`
13165        Ok(Statement::ShowCharset(ShowCharset {
13166            is_shorthand,
13167            filter: self.parse_show_statement_filter()?,
13168        }))
13169    }
13170
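    /// Parses `SHOW DATABASES [HISTORY] [<show options>]`, assuming the keywords up
    /// to and including `DATABASES` were already consumed by the caller.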
13171    fn parse_show_databases(&mut self, terse: bool) -> Result<Statement, ParserError> {
13172        let history = self.parse_keyword(Keyword::HISTORY);
13173        let show_options = self.parse_show_stmt_options()?;
13174        Ok(Statement::ShowDatabases {
13175            terse,
13176            history,
13177            show_options,
13178        })
13179    }
13180
13181    fn parse_show_schemas(&mut self, terse: bool) -> Result<Statement, ParserError> {
13182        let history = self.parse_keyword(Keyword::HISTORY);
13183        let show_options = self.parse_show_stmt_options()?;
13184        Ok(Statement::ShowSchemas {
13185            terse,
13186            history,
13187            show_options,
13188        })
13189    }
13190
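    /// Parses `SHOW CREATE <object type> <name>`, e.g. `SHOW CREATE TABLE t` or
    /// `SHOW CREATE VIEW v`, assuming `SHOW CREATE` was already consumed.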
13191    pub fn parse_show_create(&mut self) -> Result<Statement, ParserError> {
13192        let obj_type = match self.expect_one_of_keywords(&[
13193            Keyword::TABLE,
13194            Keyword::TRIGGER,
13195            Keyword::FUNCTION,
13196            Keyword::PROCEDURE,
13197            Keyword::EVENT,
13198            Keyword::VIEW,
13199        ])? {
13200            Keyword::TABLE => Ok(ShowCreateObject::Table),
13201            Keyword::TRIGGER => Ok(ShowCreateObject::Trigger),
13202            Keyword::FUNCTION => Ok(ShowCreateObject::Function),
13203            Keyword::PROCEDURE => Ok(ShowCreateObject::Procedure),
13204            Keyword::EVENT => Ok(ShowCreateObject::Event),
13205            Keyword::VIEW => Ok(ShowCreateObject::View),
13206            keyword => Err(ParserError::ParserError(format!(
13207                "Unable to map keyword to ShowCreateObject: {keyword:?}"
13208            ))),
13209        }?;
13210
13211        let obj_name = self.parse_object_name(false)?;
13212
13213        Ok(Statement::ShowCreate { obj_type, obj_name })
13214    }
13215
13216    pub fn parse_show_columns(
13217        &mut self,
13218        extended: bool,
13219        full: bool,
13220    ) -> Result<Statement, ParserError> {
13221        let show_options = self.parse_show_stmt_options()?;
13222        Ok(Statement::ShowColumns {
13223            extended,
13224            full,
13225            show_options,
13226        })
13227    }
13228
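    /// Parses the tail of `SHOW [TERSE] [EXTENDED] [FULL] [EXTERNAL] TABLES
    /// [HISTORY] [<show options>]`; the leading modifier keywords and `TABLES`
    /// were consumed by the caller and are passed in as flags.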
13229    fn parse_show_tables(
13230        &mut self,
13231        terse: bool,
13232        extended: bool,
13233        full: bool,
13234        external: bool,
13235    ) -> Result<Statement, ParserError> {
13236        let history = !external && self.parse_keyword(Keyword::HISTORY);
13237        let show_options = self.parse_show_stmt_options()?;
13238        Ok(Statement::ShowTables {
13239            terse,
13240            history,
13241            extended,
13242            full,
13243            external,
13244            show_options,
13245        })
13246    }
13247
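    /// Parses the tail of `SHOW [TERSE] [MATERIALIZED] VIEWS [<show options>]`;
    /// the modifier keywords were consumed by the caller.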
13248    fn parse_show_views(
13249        &mut self,
13250        terse: bool,
13251        materialized: bool,
13252    ) -> Result<Statement, ParserError> {
13253        let show_options = self.parse_show_stmt_options()?;
13254        Ok(Statement::ShowViews {
13255            materialized,
13256            terse,
13257            show_options,
13258        })
13259    }
13260
13261    pub fn parse_show_functions(&mut self) -> Result<Statement, ParserError> {
13262        let filter = self.parse_show_statement_filter()?;
13263        Ok(Statement::ShowFunctions { filter })
13264    }
13265
13266    pub fn parse_show_collation(&mut self) -> Result<Statement, ParserError> {
13267        let filter = self.parse_show_statement_filter()?;
13268        Ok(Statement::ShowCollation { filter })
13269    }
13270
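    /// Parses an optional filter following a SHOW statement: `LIKE '<pattern>'`,
    /// `ILIKE '<pattern>'`, `WHERE <expr>`, or a bare string literal (no keyword).
    /// Returns `Ok(None)` when no filter is present.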
13271    pub fn parse_show_statement_filter(
13272        &mut self,
13273    ) -> Result<Option<ShowStatementFilter>, ParserError> {
13274        if self.parse_keyword(Keyword::LIKE) {
13275            Ok(Some(ShowStatementFilter::Like(
13276                self.parse_literal_string()?,
13277            )))
13278        } else if self.parse_keyword(Keyword::ILIKE) {
13279            Ok(Some(ShowStatementFilter::ILike(
13280                self.parse_literal_string()?,
13281            )))
13282        } else if self.parse_keyword(Keyword::WHERE) {
13283            Ok(Some(ShowStatementFilter::Where(self.parse_expr()?)))
13284        } else {
13285            self.maybe_parse(|parser| -> Result<String, ParserError> {
13286                parser.parse_literal_string()
13287            })?
13288            .map_or(Ok(None), |filter| {
13289                Ok(Some(ShowStatementFilter::NoKeyword(filter)))
13290            })
13291        }
13292    }
13293
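    /// Parses a `USE ...` statement, e.g. `USE my_db`, or with a dialect-specific
    /// target such as `USE SCHEMA s` (Databricks, Snowflake), `USE WAREHOUSE wh`
    /// (Snowflake), or `USE DEFAULT` (Hive).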
13294    pub fn parse_use(&mut self) -> Result<Statement, ParserError> {
13295        // Determine which keywords are recognized by the current dialect
13296        let parsed_keyword = if dialect_of!(self is HiveDialect) {
13297            // HiveDialect accepts `USE DEFAULT;`, i.e. a USE statement with no database specified
13298            if self.parse_keyword(Keyword::DEFAULT) {
13299                return Ok(Statement::Use(Use::Default));
13300            }
13301            None // HiveDialect doesn't expect any other specific keyword after `USE`
13302        } else if dialect_of!(self is DatabricksDialect) {
13303            self.parse_one_of_keywords(&[Keyword::CATALOG, Keyword::DATABASE, Keyword::SCHEMA])
13304        } else if dialect_of!(self is SnowflakeDialect) {
13305            self.parse_one_of_keywords(&[
13306                Keyword::DATABASE,
13307                Keyword::SCHEMA,
13308                Keyword::WAREHOUSE,
13309                Keyword::ROLE,
13310                Keyword::SECONDARY,
13311            ])
13312        } else {
13313            None // No specific keywords for other dialects, including GenericDialect
13314        };
13315
13316        let result = if matches!(parsed_keyword, Some(Keyword::SECONDARY)) {
13317            self.parse_secondary_roles()?
13318        } else {
13319            let obj_name = self.parse_object_name(false)?;
13320            match parsed_keyword {
13321                Some(Keyword::CATALOG) => Use::Catalog(obj_name),
13322                Some(Keyword::DATABASE) => Use::Database(obj_name),
13323                Some(Keyword::SCHEMA) => Use::Schema(obj_name),
13324                Some(Keyword::WAREHOUSE) => Use::Warehouse(obj_name),
13325                Some(Keyword::ROLE) => Use::Role(obj_name),
13326                _ => Use::Object(obj_name),
13327            }
13328        };
13329
13330        Ok(Statement::Use(result))
13331    }
13332
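    /// Parses the Snowflake-style `USE SECONDARY ROLES ALL | NONE | <role>, ...`
    /// form, assuming `USE SECONDARY` was already consumed.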
13333    fn parse_secondary_roles(&mut self) -> Result<Use, ParserError> {
13334        self.expect_one_of_keywords(&[Keyword::ROLES, Keyword::ROLE])?;
13335        if self.parse_keyword(Keyword::NONE) {
13336            Ok(Use::SecondaryRoles(SecondaryRoles::None))
13337        } else if self.parse_keyword(Keyword::ALL) {
13338            Ok(Use::SecondaryRoles(SecondaryRoles::All))
13339        } else {
13340            let roles = self.parse_comma_separated(|parser| parser.parse_identifier())?;
13341            Ok(Use::SecondaryRoles(SecondaryRoles::List(roles)))
13342        }
13343    }
13344
13345    pub fn parse_table_and_joins(&mut self) -> Result<TableWithJoins, ParserError> {
13346        let relation = self.parse_table_factor()?;
13347        // Note that for keywords to be properly handled here, they need to be
13348        // added to `RESERVED_FOR_TABLE_ALIAS`, otherwise they may be parsed as
13349        // a table alias.
13350        let joins = self.parse_joins()?;
13351        Ok(TableWithJoins { relation, joins })
13352    }
13353
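    /// Parses a sequence of zero or more joins following a table factor, e.g.
    /// `JOIN t2 ON t1.a = t2.a LEFT OUTER JOIN t3 USING (b)`, stopping at the
    /// first token that cannot start a join.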
13354    fn parse_joins(&mut self) -> Result<Vec<Join>, ParserError> {
13355        let mut joins = vec![];
13356        loop {
13357            let global = self.parse_keyword(Keyword::GLOBAL);
13358            let join = if self.parse_keyword(Keyword::CROSS) {
13359                let join_operator = if self.parse_keyword(Keyword::JOIN) {
13360                    JoinOperator::CrossJoin(JoinConstraint::None)
13361                } else if self.parse_keyword(Keyword::APPLY) {
13362                    // MSSQL extension, similar to CROSS JOIN LATERAL
13363                    JoinOperator::CrossApply
13364                } else {
13365                    return self.expected("JOIN or APPLY after CROSS", self.peek_token());
13366                };
13367                let relation = self.parse_table_factor()?;
13368                let join_operator = if matches!(join_operator, JoinOperator::CrossJoin(_))
13369                    && self.dialect.supports_cross_join_constraint()
13370                {
13371                    let constraint = self.parse_join_constraint(false)?;
13372                    JoinOperator::CrossJoin(constraint)
13373                } else {
13374                    join_operator
13375                };
13376                Join {
13377                    relation,
13378                    global,
13379                    join_operator,
13380                }
13381            } else if self.parse_keyword(Keyword::OUTER) {
13382                // MSSQL extension, similar to LEFT JOIN LATERAL .. ON 1=1
13383                self.expect_keyword_is(Keyword::APPLY)?;
13384                Join {
13385                    relation: self.parse_table_factor()?,
13386                    global,
13387                    join_operator: JoinOperator::OuterApply,
13388                }
13389            } else if self.parse_keyword(Keyword::ASOF) {
13390                self.expect_keyword_is(Keyword::JOIN)?;
13391                let relation = self.parse_table_factor()?;
13392                self.expect_keyword_is(Keyword::MATCH_CONDITION)?;
13393                let match_condition = self.parse_parenthesized(Self::parse_expr)?;
13394                Join {
13395                    relation,
13396                    global,
13397                    join_operator: JoinOperator::AsOf {
13398                        match_condition,
13399                        constraint: self.parse_join_constraint(false)?,
13400                    },
13401                }
13402            } else {
13403                let natural = self.parse_keyword(Keyword::NATURAL);
13404                let peek_keyword = if let Token::Word(w) = self.peek_token().token {
13405                    w.keyword
13406                } else {
13407                    Keyword::NoKeyword
13408                };
13409
13410                let join_operator_type = match peek_keyword {
13411                    Keyword::INNER | Keyword::JOIN => {
13412                        let inner = self.parse_keyword(Keyword::INNER); // [ INNER ]
13413                        self.expect_keyword_is(Keyword::JOIN)?;
13414                        if inner {
13415                            JoinOperator::Inner
13416                        } else {
13417                            JoinOperator::Join
13418                        }
13419                    }
13420                    kw @ Keyword::LEFT | kw @ Keyword::RIGHT => {
13421                        let _ = self.next_token(); // consume LEFT/RIGHT
13422                        let is_left = kw == Keyword::LEFT;
13423                        let join_type = self.parse_one_of_keywords(&[
13424                            Keyword::OUTER,
13425                            Keyword::SEMI,
13426                            Keyword::ANTI,
13427                            Keyword::JOIN,
13428                        ]);
13429                        match join_type {
13430                            Some(Keyword::OUTER) => {
13431                                self.expect_keyword_is(Keyword::JOIN)?;
13432                                if is_left {
13433                                    JoinOperator::LeftOuter
13434                                } else {
13435                                    JoinOperator::RightOuter
13436                                }
13437                            }
13438                            Some(Keyword::SEMI) => {
13439                                self.expect_keyword_is(Keyword::JOIN)?;
13440                                if is_left {
13441                                    JoinOperator::LeftSemi
13442                                } else {
13443                                    JoinOperator::RightSemi
13444                                }
13445                            }
13446                            Some(Keyword::ANTI) => {
13447                                self.expect_keyword_is(Keyword::JOIN)?;
13448                                if is_left {
13449                                    JoinOperator::LeftAnti
13450                                } else {
13451                                    JoinOperator::RightAnti
13452                                }
13453                            }
13454                            Some(Keyword::JOIN) => {
13455                                if is_left {
13456                                    JoinOperator::Left
13457                                } else {
13458                                    JoinOperator::Right
13459                                }
13460                            }
13461                            _ => {
13462                                return Err(ParserError::ParserError(format!(
13463                                    "expected OUTER, SEMI, ANTI or JOIN after {kw:?}"
13464                                )))
13465                            }
13466                        }
13467                    }
13468                    Keyword::ANTI => {
13469                        let _ = self.next_token(); // consume ANTI
13470                        self.expect_keyword_is(Keyword::JOIN)?;
13471                        JoinOperator::Anti
13472                    }
13473                    Keyword::SEMI => {
13474                        let _ = self.next_token(); // consume SEMI
13475                        self.expect_keyword_is(Keyword::JOIN)?;
13476                        JoinOperator::Semi
13477                    }
13478                    Keyword::FULL => {
13479                        let _ = self.next_token(); // consume FULL
13480                        let _ = self.parse_keyword(Keyword::OUTER); // [ OUTER ]
13481                        self.expect_keyword_is(Keyword::JOIN)?;
13482                        JoinOperator::FullOuter
13483                    }
13484                    Keyword::OUTER => {
13485                        return self.expected("LEFT, RIGHT, or FULL", self.peek_token());
13486                    }
13487                    Keyword::STRAIGHT_JOIN => {
13488                        let _ = self.next_token(); // consume STRAIGHT_JOIN
13489                        JoinOperator::StraightJoin
13490                    }
13491                    _ if natural => {
13492                        return self.expected("a join type after NATURAL", self.peek_token());
13493                    }
13494                    _ => break,
13495                };
13496                let mut relation = self.parse_table_factor()?;
13497
13498                if !self
13499                    .dialect
13500                    .supports_left_associative_joins_without_parens()
13501                    && self.peek_parens_less_nested_join()
13502                {
13503                    let joins = self.parse_joins()?;
13504                    relation = TableFactor::NestedJoin {
13505                        table_with_joins: Box::new(TableWithJoins { relation, joins }),
13506                        alias: None,
13507                    };
13508                }
13509
13510                let join_constraint = self.parse_join_constraint(natural)?;
13511                Join {
13512                    relation,
13513                    global,
13514                    join_operator: join_operator_type(join_constraint),
13515                }
13516            };
13517            joins.push(join);
13518        }
13519        Ok(joins)
13520    }
13521
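    /// Returns true if the next token begins another join (`JOIN`, `INNER`, `LEFT`,
    /// `RIGHT`, or `FULL`), used above to nest consecutive joins for dialects that
    /// do not treat parenthesis-less joins as left-associative.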
13522    fn peek_parens_less_nested_join(&self) -> bool {
13523        matches!(
13524            self.peek_token_ref().token,
13525            Token::Word(Word {
13526                keyword: Keyword::JOIN
13527                    | Keyword::INNER
13528                    | Keyword::LEFT
13529                    | Keyword::RIGHT
13530                    | Keyword::FULL,
13531                ..
13532            })
13533        )
13534    }
13535
13536    /// A table name or a parenthesized subquery, followed by optional `[AS] alias`
13537    pub fn parse_table_factor(&mut self) -> Result<TableFactor, ParserError> {
13538        if self.parse_keyword(Keyword::LATERAL) {
13539            // LATERAL must always be followed by a subquery or table function.
13540            if self.consume_token(&Token::LParen) {
13541                self.parse_derived_table_factor(Lateral)
13542            } else {
13543                let name = self.parse_object_name(false)?;
13544                self.expect_token(&Token::LParen)?;
13545                let args = self.parse_optional_args()?;
13546                let alias = self.maybe_parse_table_alias()?;
13547                Ok(TableFactor::Function {
13548                    lateral: true,
13549                    name,
13550                    args,
13551                    alias,
13552                })
13553            }
13554        } else if self.parse_keyword(Keyword::TABLE) {
13555            // parse table function (SELECT * FROM TABLE (<expr>) [ AS <alias> ])
13556            self.expect_token(&Token::LParen)?;
13557            let expr = self.parse_expr()?;
13558            self.expect_token(&Token::RParen)?;
13559            let alias = self.maybe_parse_table_alias()?;
13560            Ok(TableFactor::TableFunction { expr, alias })
13561        } else if self.consume_token(&Token::LParen) {
13562            // A left paren introduces either a derived table (i.e., a subquery)
13563            // or a nested join. It's nearly impossible to determine ahead of
13564            // time which it is... so we just try to parse both.
13565            //
13566            // Here's an example that demonstrates the complexity:
13567            //                     /-------------------------------------------------------\
13568            //                     | /-----------------------------------\                 |
13569            //     SELECT * FROM ( ( ( (SELECT 1) UNION (SELECT 2) ) AS t1 NATURAL JOIN t2 ) )
13570            //                   ^ ^ ^ ^
13571            //                   | | | |
13572            //                   | | | |
13573            //                   | | | (4) belongs to a SetExpr::Query inside the subquery
13574            //                   | | (3) starts a derived table (subquery)
13575            //                   | (2) starts a nested join
13576            //                   (1) an additional set of parens around a nested join
13577            //
13578
13579            // If the recently consumed '(' starts a derived table, the call to
13580            // `parse_derived_table_factor` below will return success after parsing the
13581            // subquery, followed by the closing ')', and the alias of the derived table.
13582            // In the example above this is case (3).
13583            if let Some(mut table) =
13584                self.maybe_parse(|parser| parser.parse_derived_table_factor(NotLateral))?
13585            {
13586                while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT])
13587                {
13588                    table = match kw {
13589                        Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
13590                        Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
13591                        _ => unreachable!(),
13592                    }
13593                }
13594                return Ok(table);
13595            }
13596
13597            // A parsing error from `parse_derived_table_factor` indicates that the '(' we've
13598            // recently consumed does not start a derived table (cases 1, 2, or 4).
13599            // `maybe_parse` will ignore such an error and rewind to be after the opening '('.
13600
13601            // Inside the parentheses we expect to find an (A) table factor
13602            // followed by some joins or (B) another level of nesting.
13603            let mut table_and_joins = self.parse_table_and_joins()?;
13604
13605            #[allow(clippy::if_same_then_else)]
13606            if !table_and_joins.joins.is_empty() {
13607                self.expect_token(&Token::RParen)?;
13608                let alias = self.maybe_parse_table_alias()?;
13609                Ok(TableFactor::NestedJoin {
13610                    table_with_joins: Box::new(table_and_joins),
13611                    alias,
13612                }) // (A)
13613            } else if let TableFactor::NestedJoin {
13614                table_with_joins: _,
13615                alias: _,
13616            } = &table_and_joins.relation
13617            {
13618                // (B): `table_and_joins` (what we found inside the parentheses)
13619                // is a nested join `(foo JOIN bar)`, not followed by other joins.
13620                self.expect_token(&Token::RParen)?;
13621                let alias = self.maybe_parse_table_alias()?;
13622                Ok(TableFactor::NestedJoin {
13623                    table_with_joins: Box::new(table_and_joins),
13624                    alias,
13625                })
13626            } else if dialect_of!(self is SnowflakeDialect | GenericDialect) {
13627                // Dialect-specific behavior: Snowflake diverges from the
13628                // standard and from most of the other implementations by
13629                // allowing extra parentheses not only around a join (B), but
13630                // around lone table names (e.g. `FROM (mytable [AS alias])`)
13631                // and around derived tables (e.g. `FROM ((SELECT ...)
13632                // [AS alias])`) as well.
13633                self.expect_token(&Token::RParen)?;
13634
13635                if let Some(outer_alias) = self.maybe_parse_table_alias()? {
13636                    // Snowflake also allows specifying an alias *after* parens
13637                    // e.g. `FROM (mytable) AS alias`
13638                    match &mut table_and_joins.relation {
13639                        TableFactor::Derived { alias, .. }
13640                        | TableFactor::Table { alias, .. }
13641                        | TableFactor::Function { alias, .. }
13642                        | TableFactor::UNNEST { alias, .. }
13643                        | TableFactor::JsonTable { alias, .. }
13644                        | TableFactor::XmlTable { alias, .. }
13645                        | TableFactor::OpenJsonTable { alias, .. }
13646                        | TableFactor::TableFunction { alias, .. }
13647                        | TableFactor::Pivot { alias, .. }
13648                        | TableFactor::Unpivot { alias, .. }
13649                        | TableFactor::MatchRecognize { alias, .. }
13650                        | TableFactor::SemanticView { alias, .. }
13651                        | TableFactor::NestedJoin { alias, .. } => {
13652                            // but not `FROM (mytable AS alias1) AS alias2`.
13653                            if let Some(inner_alias) = alias {
13654                                return Err(ParserError::ParserError(format!(
13655                                    "duplicate alias {inner_alias}"
13656                                )));
13657                            }
13658                            // Act as if the alias was specified normally next
13659                            // to the table name: `(mytable) AS alias` ->
13660                            // `(mytable AS alias)`
13661                            alias.replace(outer_alias);
13662                        }
13663                    };
13664                }
13665                // Do not store the extra set of parens in the AST
13666                Ok(table_and_joins.relation)
13667            } else {
13668                // The SQL spec prohibits derived tables and bare tables from
13669                // appearing alone in parentheses (e.g. `FROM (mytable)`)
13670                self.expected("joined table", self.peek_token())
13671            }
13672        } else if dialect_of!(self is SnowflakeDialect | DatabricksDialect | GenericDialect)
13673            && matches!(
13674                self.peek_tokens(),
13675                [
13676                    Token::Word(Word {
13677                        keyword: Keyword::VALUES,
13678                        ..
13679                    }),
13680                    Token::LParen
13681                ]
13682            )
13683        {
13684            self.expect_keyword_is(Keyword::VALUES)?;
13685
13686            // Snowflake and Databricks allow syntax like below:
13687            // SELECT * FROM VALUES (1, 'a'), (2, 'b') AS t (col1, col2)
13688            // where there are no parentheses around the VALUES clause.
13689            let values = SetExpr::Values(self.parse_values(false)?);
13690            let alias = self.maybe_parse_table_alias()?;
13691            Ok(TableFactor::Derived {
13692                lateral: false,
13693                subquery: Box::new(Query {
13694                    with: None,
13695                    body: Box::new(values),
13696                    order_by: None,
13697                    limit_clause: None,
13698                    fetch: None,
13699                    locks: vec![],
13700                    for_clause: None,
13701                    settings: None,
13702                    format_clause: None,
13703                    pipe_operators: vec![],
13704                }),
13705                alias,
13706            })
13707        } else if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
13708            && self.parse_keyword(Keyword::UNNEST)
13709        {
13710            self.expect_token(&Token::LParen)?;
13711            let array_exprs = self.parse_comma_separated(Parser::parse_expr)?;
13712            self.expect_token(&Token::RParen)?;
13713
13714            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
13715            let alias = self.maybe_parse_table_alias()?;
13720
13721            let with_offset = self.expect_keywords(&[Keyword::WITH, Keyword::OFFSET]).is_ok();
13725
13726            let with_offset_alias = if with_offset {
13727                self.parse_optional_alias(keywords::RESERVED_FOR_COLUMN_ALIAS)?
13732            } else {
13733                None
13734            };
13735
13736            Ok(TableFactor::UNNEST {
13737                alias,
13738                array_exprs,
13739                with_offset,
13740                with_offset_alias,
13741                with_ordinality,
13742            })
13743        } else if self.parse_keyword_with_tokens(Keyword::JSON_TABLE, &[Token::LParen]) {
13744            let json_expr = self.parse_expr()?;
13745            self.expect_token(&Token::Comma)?;
13746            let json_path = self.parse_value()?.value;
13747            self.expect_keyword_is(Keyword::COLUMNS)?;
13748            self.expect_token(&Token::LParen)?;
13749            let columns = self.parse_comma_separated(Parser::parse_json_table_column_def)?;
13750            self.expect_token(&Token::RParen)?;
13751            self.expect_token(&Token::RParen)?;
13752            let alias = self.maybe_parse_table_alias()?;
13753            Ok(TableFactor::JsonTable {
13754                json_expr,
13755                json_path,
13756                columns,
13757                alias,
13758            })
13759        } else if self.parse_keyword_with_tokens(Keyword::OPENJSON, &[Token::LParen]) {
13760            self.prev_token();
13761            self.parse_open_json_table_factor()
13762        } else if self.parse_keyword_with_tokens(Keyword::XMLTABLE, &[Token::LParen]) {
13763            self.prev_token();
13764            self.parse_xml_table_factor()
13765        } else if self.dialect.supports_semantic_view_table_factor()
13766            && self.peek_keyword_with_tokens(Keyword::SEMANTIC_VIEW, &[Token::LParen])
13767        {
13768            self.parse_semantic_view_table_factor()
13769        } else {
13770            let name = self.parse_object_name(true)?;
13771
13772            let json_path = match self.peek_token().token {
13773                Token::LBracket if self.dialect.supports_partiql() => Some(self.parse_json_path()?),
13774                _ => None,
13775            };
13776
13777            let partitions: Vec<Ident> = if dialect_of!(self is MySqlDialect | GenericDialect)
13778                && self.parse_keyword(Keyword::PARTITION)
13779            {
13780                self.parse_parenthesized_identifiers()?
13781            } else {
13782                vec![]
13783            };
13784
13785            // Parse potential version qualifier
13786            let version = self.maybe_parse_table_version()?;
13787
13788            // Postgres, MSSQL, ClickHouse: table-valued functions:
13789            let args = if self.consume_token(&Token::LParen) {
13790                Some(self.parse_table_function_args()?)
13791            } else {
13792                None
13793            };
13794
13795            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
13796
13797            let mut sample = None;
13798            if self.dialect.supports_table_sample_before_alias() {
13799                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
13800                    sample = Some(TableSampleKind::BeforeTableAlias(parsed_sample));
13801                }
13802            }
13803
13804            let alias = self.maybe_parse_table_alias()?;
13805
13806            // MySQL-specific table hints:
13807            let index_hints = if self.dialect.supports_table_hints() {
13808                self.maybe_parse(|p| p.parse_table_index_hints())?
13809                    .unwrap_or(vec![])
13810            } else {
13811                vec![]
13812            };
13813
13814            // MSSQL-specific table hints:
13815            let mut with_hints = vec![];
13816            if self.parse_keyword(Keyword::WITH) {
13817                if self.consume_token(&Token::LParen) {
13818                    with_hints = self.parse_comma_separated(Parser::parse_expr)?;
13819                    self.expect_token(&Token::RParen)?;
13820                } else {
13821                    // rewind, as WITH may belong to the next statement's CTE
13822                    self.prev_token();
13823                }
13824            };
13825
13826            if !self.dialect.supports_table_sample_before_alias() {
13827                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
13828                    sample = Some(TableSampleKind::AfterTableAlias(parsed_sample));
13829                }
13830            }
13831
13832            let mut table = TableFactor::Table {
13833                name,
13834                alias,
13835                args,
13836                with_hints,
13837                version,
13838                partitions,
13839                with_ordinality,
13840                json_path,
13841                sample,
13842                index_hints,
13843            };
13844
13845            while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT]) {
13846                table = match kw {
13847                    Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
13848                    Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
13849                    _ => unreachable!(),
13850                }
13851            }
13852
13853            if self.dialect.supports_match_recognize()
13854                && self.parse_keyword(Keyword::MATCH_RECOGNIZE)
13855            {
13856                table = self.parse_match_recognize(table)?;
13857            }
13858
13859            Ok(table)
13860        }
13861    }
13862
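    /// Parses an optional `TABLESAMPLE`/`SAMPLE` clause, returning `Ok(None)` when
    /// neither keyword is present.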
13863    fn maybe_parse_table_sample(&mut self) -> Result<Option<Box<TableSample>>, ParserError> {
13864        let modifier = if self.parse_keyword(Keyword::TABLESAMPLE) {
13865            TableSampleModifier::TableSample
13866        } else if self.parse_keyword(Keyword::SAMPLE) {
13867            TableSampleModifier::Sample
13868        } else {
13869            return Ok(None);
13870        };
13871        self.parse_table_sample(modifier).map(Some)
13872    }
13873
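    /// Parses the body of a `TABLESAMPLE`/`SAMPLE` clause whose leading keyword was
    /// already consumed, e.g. `TABLESAMPLE BERNOULLI (10)`, `SAMPLE (50 ROWS)`, or
    /// the Hive-style `TABLESAMPLE (BUCKET 4 OUT OF 16 ON id)`, with an optional
    /// `REPEATABLE(<seed>)`/`SEED(<seed>)` and `OFFSET <expr>` suffix.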
13874    fn parse_table_sample(
13875        &mut self,
13876        modifier: TableSampleModifier,
13877    ) -> Result<Box<TableSample>, ParserError> {
13878        let name = match self.parse_one_of_keywords(&[
13879            Keyword::BERNOULLI,
13880            Keyword::ROW,
13881            Keyword::SYSTEM,
13882            Keyword::BLOCK,
13883        ]) {
13884            Some(Keyword::BERNOULLI) => Some(TableSampleMethod::Bernoulli),
13885            Some(Keyword::ROW) => Some(TableSampleMethod::Row),
13886            Some(Keyword::SYSTEM) => Some(TableSampleMethod::System),
13887            Some(Keyword::BLOCK) => Some(TableSampleMethod::Block),
13888            _ => None,
13889        };
13890
13891        let parenthesized = self.consume_token(&Token::LParen);
13892
13893        let (quantity, bucket) = if parenthesized && self.parse_keyword(Keyword::BUCKET) {
13894            let selected_bucket = self.parse_number_value()?.value;
13895            self.expect_keywords(&[Keyword::OUT, Keyword::OF])?;
13896            let total = self.parse_number_value()?.value;
13897            let on = if self.parse_keyword(Keyword::ON) {
13898                Some(self.parse_expr()?)
13899            } else {
13900                None
13901            };
13902            (
13903                None,
13904                Some(TableSampleBucket {
13905                    bucket: selected_bucket,
13906                    total,
13907                    on,
13908                }),
13909            )
13910        } else {
13911            let value = match self.maybe_parse(|p| p.parse_expr())? {
13912                Some(num) => num,
13913                None => {
13914                    let next_token = self.next_token();
13915                    if let Token::Word(w) = next_token.token {
13916                        Expr::Value(Value::Placeholder(w.value).with_span(next_token.span))
13917                    } else {
13918                        return parser_err!(
13919                            "Expecting number or byte length e.g. 100M",
13920                            self.peek_token().span.start
13921                        );
13922                    }
13923                }
13924            };
13925            let unit = if self.parse_keyword(Keyword::ROWS) {
13926                Some(TableSampleUnit::Rows)
13927            } else if self.parse_keyword(Keyword::PERCENT) {
13928                Some(TableSampleUnit::Percent)
13929            } else {
13930                None
13931            };
13932            (
13933                Some(TableSampleQuantity {
13934                    parenthesized,
13935                    value,
13936                    unit,
13937                }),
13938                None,
13939            )
13940        };
13941        if parenthesized {
13942            self.expect_token(&Token::RParen)?;
13943        }
13944
13945        let seed = if self.parse_keyword(Keyword::REPEATABLE) {
13946            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Repeatable)?)
13947        } else if self.parse_keyword(Keyword::SEED) {
13948            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Seed)?)
13949        } else {
13950            None
13951        };
13952
13953        let offset = if self.parse_keyword(Keyword::OFFSET) {
13954            Some(self.parse_expr()?)
13955        } else {
13956            None
13957        };
13958
13959        Ok(Box::new(TableSample {
13960            modifier,
13961            name,
13962            quantity,
13963            seed,
13964            bucket,
13965            offset,
13966        }))
13967    }
13968
13969    fn parse_table_sample_seed(
13970        &mut self,
13971        modifier: TableSampleSeedModifier,
13972    ) -> Result<TableSampleSeed, ParserError> {
13973        self.expect_token(&Token::LParen)?;
13974        let value = self.parse_number_value()?.value;
13975        self.expect_token(&Token::RParen)?;
13976        Ok(TableSampleSeed { modifier, value })
13977    }
13978
13979    /// Parses `OPENJSON( jsonExpression [ , path ] )  [ <with_clause> ]` clause,
13980    /// assuming the `OPENJSON` keyword was already consumed.
13981    fn parse_open_json_table_factor(&mut self) -> Result<TableFactor, ParserError> {
13982        self.expect_token(&Token::LParen)?;
13983        let json_expr = self.parse_expr()?;
13984        let json_path = if self.consume_token(&Token::Comma) {
13985            Some(self.parse_value()?.value)
13986        } else {
13987            None
13988        };
13989        self.expect_token(&Token::RParen)?;
13990        let columns = if self.parse_keyword(Keyword::WITH) {
13991            self.expect_token(&Token::LParen)?;
13992            let columns = self.parse_comma_separated(Parser::parse_openjson_table_column_def)?;
13993            self.expect_token(&Token::RParen)?;
13994            columns
13995        } else {
13996            Vec::new()
13997        };
13998        let alias = self.maybe_parse_table_alias()?;
13999        Ok(TableFactor::OpenJsonTable {
14000            json_expr,
14001            json_path,
14002            columns,
14003            alias,
14004        })
14005    }
14006
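    /// Parses an `XMLTABLE( [XMLNAMESPACES(...),] <row expr> [PASSING ...] COLUMNS ... )`
    /// table factor, assuming the `XMLTABLE` keyword was already consumed.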
14007    fn parse_xml_table_factor(&mut self) -> Result<TableFactor, ParserError> {
14008        self.expect_token(&Token::LParen)?;
14009        let namespaces = if self.parse_keyword(Keyword::XMLNAMESPACES) {
14010            self.expect_token(&Token::LParen)?;
14011            let namespaces = self.parse_comma_separated(Parser::parse_xml_namespace_definition)?;
14012            self.expect_token(&Token::RParen)?;
14013            self.expect_token(&Token::Comma)?;
14014            namespaces
14015        } else {
14016            vec![]
14017        };
14018        let row_expression = self.parse_expr()?;
14019        let passing = self.parse_xml_passing_clause()?;
14020        self.expect_keyword_is(Keyword::COLUMNS)?;
14021        let columns = self.parse_comma_separated(Parser::parse_xml_table_column)?;
14022        self.expect_token(&Token::RParen)?;
14023        let alias = self.maybe_parse_table_alias()?;
14024        Ok(TableFactor::XmlTable {
14025            namespaces,
14026            row_expression,
14027            passing,
14028            columns,
14029            alias,
14030        })
14031    }
14032
14033    fn parse_xml_namespace_definition(&mut self) -> Result<XmlNamespaceDefinition, ParserError> {
14034        let uri = self.parse_expr()?;
14035        self.expect_keyword_is(Keyword::AS)?;
14036        let name = self.parse_identifier()?;
14037        Ok(XmlNamespaceDefinition { uri, name })
14038    }
14039
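    /// Parses one column definition in the `COLUMNS` clause of `XMLTABLE`: either
    /// `<name> FOR ORDINALITY` or `<name> <type> [PATH <expr>] [DEFAULT <expr>]
    /// [NOT NULL | NULL]`.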
14040    fn parse_xml_table_column(&mut self) -> Result<XmlTableColumn, ParserError> {
14041        let name = self.parse_identifier()?;
14042
14043        let option = if self.parse_keyword(Keyword::FOR) {
14044            self.expect_keyword(Keyword::ORDINALITY)?;
14045            XmlTableColumnOption::ForOrdinality
14046        } else {
14047            let r#type = self.parse_data_type()?;
14048            let mut path = None;
14049            let mut default = None;
14050
14051            if self.parse_keyword(Keyword::PATH) {
14052                path = Some(self.parse_expr()?);
14053            }
14054
14055            if self.parse_keyword(Keyword::DEFAULT) {
14056                default = Some(self.parse_expr()?);
14057            }
14058
14059            let not_null = self.parse_keywords(&[Keyword::NOT, Keyword::NULL]);
14060            if !not_null {
14061                // NULL is the default but can be specified explicitly
14062                let _ = self.parse_keyword(Keyword::NULL);
14063            }
14064
14065            XmlTableColumnOption::NamedInfo {
14066                r#type,
14067                path,
14068                default,
14069                nullable: !not_null,
14070            }
14071        };
14072        Ok(XmlTableColumn { name, option })
14073    }
14074
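    /// Parses the optional `PASSING [BY VALUE] <expr> [AS <alias>], ...` clause of
    /// `XMLTABLE`, returning an empty argument list when `PASSING` is absent.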
14075    fn parse_xml_passing_clause(&mut self) -> Result<XmlPassingClause, ParserError> {
14076        let mut arguments = vec![];
14077        if self.parse_keyword(Keyword::PASSING) {
14078            loop {
14079                let by_value =
14080                    self.parse_keyword(Keyword::BY) && self.expect_keyword(Keyword::VALUE).is_ok();
14081                let expr = self.parse_expr()?;
14082                let alias = if self.parse_keyword(Keyword::AS) {
14083                    Some(self.parse_identifier()?)
14084                } else {
14085                    None
14086                };
14087                arguments.push(XmlPassingArgument {
14088                    expr,
14089                    alias,
14090                    by_value,
14091                });
14092                if !self.consume_token(&Token::Comma) {
14093                    break;
14094                }
14095            }
14096        }
14097        Ok(XmlPassingClause { arguments })
14098    }
14099
14100    /// Parses a [`TableFactor::SemanticView`] table factor.
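    ///
    /// Illustrative input (Snowflake-style):
    /// `SEMANTIC_VIEW(sv DIMENSIONS d1, d2 METRICS m1 WHERE d1 > 0) AS t`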
14101    fn parse_semantic_view_table_factor(&mut self) -> Result<TableFactor, ParserError> {
14102        self.expect_keyword(Keyword::SEMANTIC_VIEW)?;
14103        self.expect_token(&Token::LParen)?;
14104
14105        let name = self.parse_object_name(true)?;
14106
14107        // Parse DIMENSIONS, METRICS, FACTS and WHERE clauses in flexible order
14108        let mut dimensions = Vec::new();
14109        let mut metrics = Vec::new();
14110        let mut facts = Vec::new();
14111        let mut where_clause = None;
14112
14113        while self.peek_token().token != Token::RParen {
14114            if self.parse_keyword(Keyword::DIMENSIONS) {
14115                if !dimensions.is_empty() {
14116                    return Err(ParserError::ParserError(
14117                        "DIMENSIONS clause can only be specified once".to_string(),
14118                    ));
14119                }
14120                dimensions = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
14121            } else if self.parse_keyword(Keyword::METRICS) {
14122                if !metrics.is_empty() {
14123                    return Err(ParserError::ParserError(
14124                        "METRICS clause can only be specified once".to_string(),
14125                    ));
14126                }
14127                metrics = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
14128            } else if self.parse_keyword(Keyword::FACTS) {
14129                if !facts.is_empty() {
14130                    return Err(ParserError::ParserError(
14131                        "FACTS clause can only be specified once".to_string(),
14132                    ));
14133                }
14134                facts = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
14135            } else if self.parse_keyword(Keyword::WHERE) {
14136                if where_clause.is_some() {
14137                    return Err(ParserError::ParserError(
14138                        "WHERE clause can only be specified once".to_string(),
14139                    ));
14140                }
14141                where_clause = Some(self.parse_expr()?);
14142            } else {
14143                return parser_err!(
14144                    format!(
14145                        "Expected one of DIMENSIONS, METRICS, FACTS or WHERE, got {}",
14146                        self.peek_token().token
14147                    ),
14148                    self.peek_token().span.start
14149                )?;
14150            }
14151        }
14152
14153        self.expect_token(&Token::RParen)?;
14154
14155        let alias = self.maybe_parse_table_alias()?;
14156
14157        Ok(TableFactor::SemanticView {
14158            name,
14159            dimensions,
14160            metrics,
14161            facts,
14162            where_clause,
14163            alias,
14164        })
14165    }
14166
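    /// Parses the parenthesized body of a `MATCH_RECOGNIZE (...)` clause applied to
    /// `table`, assuming the `MATCH_RECOGNIZE` keyword was already consumed:
    /// optional `PARTITION BY`, `ORDER BY`, `MEASURES`, rows-per-match and
    /// `AFTER MATCH SKIP` options, followed by the required `PATTERN (...)` and
    /// `DEFINE ...` clauses, and an optional trailing alias.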
14167    fn parse_match_recognize(&mut self, table: TableFactor) -> Result<TableFactor, ParserError> {
14168        self.expect_token(&Token::LParen)?;
14169
14170        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
14171            self.parse_comma_separated(Parser::parse_expr)?
14172        } else {
14173            vec![]
14174        };
14175
14176        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
14177            self.parse_comma_separated(Parser::parse_order_by_expr)?
14178        } else {
14179            vec![]
14180        };
14181
14182        let measures = if self.parse_keyword(Keyword::MEASURES) {
14183            self.parse_comma_separated(|p| {
14184                let expr = p.parse_expr()?;
14185                let _ = p.parse_keyword(Keyword::AS);
14186                let alias = p.parse_identifier()?;
14187                Ok(Measure { expr, alias })
14188            })?
14189        } else {
14190            vec![]
14191        };
14192
14193        let rows_per_match =
14194            if self.parse_keywords(&[Keyword::ONE, Keyword::ROW, Keyword::PER, Keyword::MATCH]) {
14195                Some(RowsPerMatch::OneRow)
14196            } else if self.parse_keywords(&[
14197                Keyword::ALL,
14198                Keyword::ROWS,
14199                Keyword::PER,
14200                Keyword::MATCH,
14201            ]) {
14202                Some(RowsPerMatch::AllRows(
14203                    if self.parse_keywords(&[Keyword::SHOW, Keyword::EMPTY, Keyword::MATCHES]) {
14204                        Some(EmptyMatchesMode::Show)
14205                    } else if self.parse_keywords(&[
14206                        Keyword::OMIT,
14207                        Keyword::EMPTY,
14208                        Keyword::MATCHES,
14209                    ]) {
14210                        Some(EmptyMatchesMode::Omit)
14211                    } else if self.parse_keywords(&[
14212                        Keyword::WITH,
14213                        Keyword::UNMATCHED,
14214                        Keyword::ROWS,
14215                    ]) {
14216                        Some(EmptyMatchesMode::WithUnmatched)
14217                    } else {
14218                        None
14219                    },
14220                ))
14221            } else {
14222                None
14223            };
14224
14225        let after_match_skip =
14226            if self.parse_keywords(&[Keyword::AFTER, Keyword::MATCH, Keyword::SKIP]) {
14227                if self.parse_keywords(&[Keyword::PAST, Keyword::LAST, Keyword::ROW]) {
14228                    Some(AfterMatchSkip::PastLastRow)
14229                } else if self.parse_keywords(&[Keyword::TO, Keyword::NEXT, Keyword::ROW]) {
14230                    Some(AfterMatchSkip::ToNextRow)
14231                } else if self.parse_keywords(&[Keyword::TO, Keyword::FIRST]) {
14232                    Some(AfterMatchSkip::ToFirst(self.parse_identifier()?))
14233                } else if self.parse_keywords(&[Keyword::TO, Keyword::LAST]) {
14234                    Some(AfterMatchSkip::ToLast(self.parse_identifier()?))
14235                } else {
14236                    let found = self.next_token();
14237                    return self.expected("after match skip option", found);
14238                }
14239            } else {
14240                None
14241            };
14242
14243        self.expect_keyword_is(Keyword::PATTERN)?;
14244        let pattern = self.parse_parenthesized(Self::parse_pattern)?;
14245
14246        self.expect_keyword_is(Keyword::DEFINE)?;
14247
14248        let symbols = self.parse_comma_separated(|p| {
14249            let symbol = p.parse_identifier()?;
14250            p.expect_keyword_is(Keyword::AS)?;
14251            let definition = p.parse_expr()?;
14252            Ok(SymbolDefinition { symbol, definition })
14253        })?;
14254
14255        self.expect_token(&Token::RParen)?;
14256
14257        let alias = self.maybe_parse_table_alias()?;
14258
14259        Ok(TableFactor::MatchRecognize {
14260            table: Box::new(table),
14261            partition_by,
14262            order_by,
14263            measures,
14264            rows_per_match,
14265            after_match_skip,
14266            pattern,
14267            symbols,
14268            alias,
14269        })
14270    }
14271
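    /// Parses a primary `MATCH_RECOGNIZE` pattern element: `^`, `$`, an exclusion
    /// `{- sym -}`, `PERMUTE(a, b, ...)`, a parenthesized group, or a bare symbol.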
14272    fn parse_base_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
14273        match self.next_token().token {
14274            Token::Caret => Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::Start)),
14275            Token::Placeholder(s) if s == "$" => {
14276                Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::End))
14277            }
14278            Token::LBrace => {
14279                self.expect_token(&Token::Minus)?;
14280                let symbol = self.parse_identifier().map(MatchRecognizeSymbol::Named)?;
14281                self.expect_token(&Token::Minus)?;
14282                self.expect_token(&Token::RBrace)?;
14283                Ok(MatchRecognizePattern::Exclude(symbol))
14284            }
14285            Token::Word(Word {
14286                value,
14287                quote_style: None,
14288                ..
14289            }) if value == "PERMUTE" => {
14290                self.expect_token(&Token::LParen)?;
14291                let symbols = self.parse_comma_separated(|p| {
14292                    p.parse_identifier().map(MatchRecognizeSymbol::Named)
14293                })?;
14294                self.expect_token(&Token::RParen)?;
14295                Ok(MatchRecognizePattern::Permute(symbols))
14296            }
14297            Token::LParen => {
14298                let pattern = self.parse_pattern()?;
14299                self.expect_token(&Token::RParen)?;
14300                Ok(MatchRecognizePattern::Group(Box::new(pattern)))
14301            }
14302            _ => {
14303                self.prev_token();
14304                self.parse_identifier()
14305                    .map(MatchRecognizeSymbol::Named)
14306                    .map(MatchRecognizePattern::Symbol)
14307            }
14308        }
14309    }
14310
14311    fn parse_repetition_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
14312        let mut pattern = self.parse_base_pattern()?;
14313        loop {
14314            let token = self.next_token();
14315            let quantifier = match token.token {
14316                Token::Mul => RepetitionQuantifier::ZeroOrMore,
14317                Token::Plus => RepetitionQuantifier::OneOrMore,
14318                Token::Placeholder(s) if s == "?" => RepetitionQuantifier::AtMostOne,
14319                Token::LBrace => {
14320                    // quantifier is a range like {n} or {n,} or {,m} or {n,m}
14321                    let token = self.next_token();
14322                    match token.token {
14323                        Token::Comma => {
14324                            let next_token = self.next_token();
14325                            let Token::Number(n, _) = next_token.token else {
14326                                return self.expected("literal number", next_token);
14327                            };
14328                            self.expect_token(&Token::RBrace)?;
14329                            RepetitionQuantifier::AtMost(Self::parse(n, token.span.start)?)
14330                        }
14331                        Token::Number(n, _) if self.consume_token(&Token::Comma) => {
14332                            let next_token = self.next_token();
14333                            match next_token.token {
14334                                Token::Number(m, _) => {
14335                                    self.expect_token(&Token::RBrace)?;
14336                                    RepetitionQuantifier::Range(
14337                                        Self::parse(n, token.span.start)?,
14338                                        Self::parse(m, token.span.start)?,
14339                                    )
14340                                }
14341                                Token::RBrace => {
14342                                    RepetitionQuantifier::AtLeast(Self::parse(n, token.span.start)?)
14343                                }
14344                                _ => {
14345                                    return self.expected("} or upper bound", next_token);
14346                                }
14347                            }
14348                        }
14349                        Token::Number(n, _) => {
14350                            self.expect_token(&Token::RBrace)?;
14351                            RepetitionQuantifier::Exactly(Self::parse(n, token.span.start)?)
14352                        }
14353                        _ => return self.expected("quantifier range", token),
14354                    }
14355                }
14356                _ => {
14357                    self.prev_token();
14358                    break;
14359                }
14360            };
14361            pattern = MatchRecognizePattern::Repetition(Box::new(pattern), quantifier);
14362        }
14363        Ok(pattern)
14364    }
14365
14366    fn parse_concat_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
14367        let mut patterns = vec![self.parse_repetition_pattern()?];
14368        while !matches!(self.peek_token().token, Token::RParen | Token::Pipe) {
14369            patterns.push(self.parse_repetition_pattern()?);
14370        }
14371        match <[MatchRecognizePattern; 1]>::try_from(patterns) {
14372            Ok([pattern]) => Ok(pattern),
14373            Err(patterns) => Ok(MatchRecognizePattern::Concat(patterns)),
14374        }
14375    }
14376
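    /// Parses the row pattern of a `MATCH_RECOGNIZE` clause, including
    /// alternation. A sketch of a pattern as it appears in context
    /// (illustrative symbol names):
    ///
    /// ```sql
    /// PATTERN (A B+ C* | D{2,4})
    /// ```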
14377    fn parse_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
14378        let pattern = self.parse_concat_pattern()?;
14379        if self.consume_token(&Token::Pipe) {
14380            match self.parse_pattern()? {
14381                // flatten nested alternations
14382                MatchRecognizePattern::Alternation(mut patterns) => {
14383                    patterns.insert(0, pattern);
14384                    Ok(MatchRecognizePattern::Alternation(patterns))
14385                }
14386                next => Ok(MatchRecognizePattern::Alternation(vec![pattern, next])),
14387            }
14388        } else {
14389            Ok(pattern)
14390        }
14391    }
14392
14393    /// Parses the timestamp version specifier (i.e. to query historical data)
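    ///
    /// A minimal sketch of the syntax recognized when the dialect supports
    /// timestamp versioning (illustrative table name and timestamp):
    ///
    /// ```sql
    /// SELECT * FROM orders FOR SYSTEM_TIME AS OF '2021-01-01 00:00:00'
    /// ```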
14394    pub fn maybe_parse_table_version(&mut self) -> Result<Option<TableVersion>, ParserError> {
14395        if self.dialect.supports_timestamp_versioning() {
14396            if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
14397            {
14398                let expr = self.parse_expr()?;
14399                return Ok(Some(TableVersion::ForSystemTimeAsOf(expr)));
14400            } else if self.peek_keyword(Keyword::AT) || self.peek_keyword(Keyword::BEFORE) {
14401                let func_name = self.parse_object_name(true)?;
14402                let func = self.parse_function(func_name)?;
14403                return Ok(Some(TableVersion::Function(func)));
14404            }
14405        }
14406        Ok(None)
14407    }
14408
14409    /// Parses MySQL's JSON_TABLE column definition.
14410    /// For example: `id INT EXISTS PATH '$' DEFAULT '0' ON EMPTY ERROR ON ERROR`
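    ///
    /// A `NESTED` column definition is also accepted, e.g. (illustrative path
    /// and column names):
    ///
    /// ```sql
    /// NESTED PATH '$.items[*]' COLUMNS (price DECIMAL(10,2) PATH '$.price')
    /// ```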
14411    pub fn parse_json_table_column_def(&mut self) -> Result<JsonTableColumn, ParserError> {
14412        if self.parse_keyword(Keyword::NESTED) {
14413            let _has_path_keyword = self.parse_keyword(Keyword::PATH);
14414            let path = self.parse_value()?.value;
14415            self.expect_keyword_is(Keyword::COLUMNS)?;
14416            let columns = self.parse_parenthesized(|p| {
14417                p.parse_comma_separated(Self::parse_json_table_column_def)
14418            })?;
14419            return Ok(JsonTableColumn::Nested(JsonTableNestedColumn {
14420                path,
14421                columns,
14422            }));
14423        }
14424        let name = self.parse_identifier()?;
14425        if self.parse_keyword(Keyword::FOR) {
14426            self.expect_keyword_is(Keyword::ORDINALITY)?;
14427            return Ok(JsonTableColumn::ForOrdinality(name));
14428        }
14429        let r#type = self.parse_data_type()?;
14430        let exists = self.parse_keyword(Keyword::EXISTS);
14431        self.expect_keyword_is(Keyword::PATH)?;
14432        let path = self.parse_value()?.value;
14433        let mut on_empty = None;
14434        let mut on_error = None;
14435        while let Some(error_handling) = self.parse_json_table_column_error_handling()? {
14436            if self.parse_keyword(Keyword::EMPTY) {
14437                on_empty = Some(error_handling);
14438            } else {
14439                self.expect_keyword_is(Keyword::ERROR)?;
14440                on_error = Some(error_handling);
14441            }
14442        }
14443        Ok(JsonTableColumn::Named(JsonTableNamedColumn {
14444            name,
14445            r#type,
14446            path,
14447            exists,
14448            on_empty,
14449            on_error,
14450        }))
14451    }
14452
14453    /// Parses MSSQL's `OPENJSON WITH` column definition.
14454    ///
14455    /// ```sql
14456    /// colName type [ column_path ] [ AS JSON ]
14457    /// ```
14458    ///
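    /// For example (illustrative; the column name and JSON path are placeholders):
    ///
    /// ```sql
    /// info NVARCHAR(MAX) '$.info' AS JSON
    /// ```
    ///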
14459    /// Reference: <https://learn.microsoft.com/en-us/sql/t-sql/functions/openjson-transact-sql?view=sql-server-ver16#syntax>
14460    pub fn parse_openjson_table_column_def(&mut self) -> Result<OpenJsonTableColumn, ParserError> {
14461        let name = self.parse_identifier()?;
14462        let r#type = self.parse_data_type()?;
14463        let path = if let Token::SingleQuotedString(path) = self.peek_token().token {
14464            self.next_token();
14465            Some(path)
14466        } else {
14467            None
14468        };
14469        let as_json = self.parse_keyword(Keyword::AS);
14470        if as_json {
14471            self.expect_keyword_is(Keyword::JSON)?;
14472        }
14473        Ok(OpenJsonTableColumn {
14474            name,
14475            r#type,
14476            path,
14477            as_json,
14478        })
14479    }
14480
14481    fn parse_json_table_column_error_handling(
14482        &mut self,
14483    ) -> Result<Option<JsonTableColumnErrorHandling>, ParserError> {
14484        let res = if self.parse_keyword(Keyword::NULL) {
14485            JsonTableColumnErrorHandling::Null
14486        } else if self.parse_keyword(Keyword::ERROR) {
14487            JsonTableColumnErrorHandling::Error
14488        } else if self.parse_keyword(Keyword::DEFAULT) {
14489            JsonTableColumnErrorHandling::Default(self.parse_value()?.value)
14490        } else {
14491            return Ok(None);
14492        };
14493        self.expect_keyword_is(Keyword::ON)?;
14494        Ok(Some(res))
14495    }
14496
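    /// Parses a derived table factor (a parenthesized subquery in `FROM`); the
    /// opening parenthesis and any `LATERAL` keyword have already been consumed
    /// by the caller. For example (illustrative names):
    ///
    /// ```sql
    /// SELECT * FROM (SELECT a, b FROM t) AS derived_alias
    /// ```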
14497    pub fn parse_derived_table_factor(
14498        &mut self,
14499        lateral: IsLateral,
14500    ) -> Result<TableFactor, ParserError> {
14501        let subquery = self.parse_query()?;
14502        self.expect_token(&Token::RParen)?;
14503        let alias = self.maybe_parse_table_alias()?;
14504        Ok(TableFactor::Derived {
14505            lateral: match lateral {
14506                Lateral => true,
14507                NotLateral => false,
14508            },
14509            subquery,
14510            alias,
14511        })
14512    }
14513
14514    fn parse_aliased_function_call(&mut self) -> Result<ExprWithAlias, ParserError> {
14515        let function_name = match self.next_token().token {
14516            Token::Word(w) => Ok(w.value),
14517            _ => self.expected("a function identifier", self.peek_token()),
14518        }?;
14519        let expr = self.parse_function(ObjectName::from(vec![Ident::new(function_name)]))?;
14520        let alias = if self.parse_keyword(Keyword::AS) {
14521            Some(self.parse_identifier()?)
14522        } else {
14523            None
14524        };
14525
14526        Ok(ExprWithAlias { expr, alias })
14527    }

14528    /// Parses an expression with an optional alias
14529    ///
14530    /// Examples:
14531    ///
14532    /// ```sql
14533    /// SUM(price) AS total_price
14534    /// ```
14535    /// ```sql
14536    /// SUM(price)
14537    /// ```
14538    ///
14539    /// Example
14540    /// ```
14541    /// # use sqlparser::parser::{Parser, ParserError};
14542    /// # use sqlparser::dialect::GenericDialect;
14543    /// # fn main() -> Result<(), ParserError> {
14544    /// let sql = r#"SUM("a") as "b""#;
14545    /// let mut parser = Parser::new(&GenericDialect).try_with_sql(sql)?;
14546    /// let expr_with_alias = parser.parse_expr_with_alias()?;
14547    /// assert_eq!(Some("b".to_string()), expr_with_alias.alias.map(|x|x.value));
14548    /// # Ok(())
14549    /// # }
    /// ```
14550    pub fn parse_expr_with_alias(&mut self) -> Result<ExprWithAlias, ParserError> {
14551        let expr = self.parse_expr()?;
14552        let alias = if self.parse_keyword(Keyword::AS) {
14553            Some(self.parse_identifier()?)
14554        } else {
14555            None
14556        };
14557
14558        Ok(ExprWithAlias { expr, alias })
14559    }
14560
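    /// Parses the parenthesized body of a `PIVOT` table factor; the `PIVOT`
    /// keyword and the preceding table factor have already been consumed by the
    /// caller. A sketch of the full syntax (illustrative names):
    ///
    /// ```sql
    /// SELECT * FROM monthly_sales
    ///     PIVOT(SUM(amount) FOR month IN ('JAN', 'FEB', 'MAR')) AS p
    /// ```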
14561    pub fn parse_pivot_table_factor(
14562        &mut self,
14563        table: TableFactor,
14564    ) -> Result<TableFactor, ParserError> {
14565        self.expect_token(&Token::LParen)?;
14566        let aggregate_functions = self.parse_comma_separated(Self::parse_aliased_function_call)?;
14567        self.expect_keyword_is(Keyword::FOR)?;
14568        let value_column = if self.peek_token_ref().token == Token::LParen {
14569            self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
14570                p.parse_subexpr(p.dialect.prec_value(Precedence::Between))
14571            })?
14572        } else {
14573            vec![self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?]
14574        };
14575        self.expect_keyword_is(Keyword::IN)?;
14576
14577        self.expect_token(&Token::LParen)?;
14578        let value_source = if self.parse_keyword(Keyword::ANY) {
14579            let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
14580                self.parse_comma_separated(Parser::parse_order_by_expr)?
14581            } else {
14582                vec![]
14583            };
14584            PivotValueSource::Any(order_by)
14585        } else if self.peek_sub_query() {
14586            PivotValueSource::Subquery(self.parse_query()?)
14587        } else {
14588            PivotValueSource::List(self.parse_comma_separated(Self::parse_expr_with_alias)?)
14589        };
14590        self.expect_token(&Token::RParen)?;
14591
14592        let default_on_null =
14593            if self.parse_keywords(&[Keyword::DEFAULT, Keyword::ON, Keyword::NULL]) {
14594                self.expect_token(&Token::LParen)?;
14595                let expr = self.parse_expr()?;
14596                self.expect_token(&Token::RParen)?;
14597                Some(expr)
14598            } else {
14599                None
14600            };
14601
14602        self.expect_token(&Token::RParen)?;
14603        let alias = self.maybe_parse_table_alias()?;
14604        Ok(TableFactor::Pivot {
14605            table: Box::new(table),
14606            aggregate_functions,
14607            value_column,
14608            value_source,
14609            default_on_null,
14610            alias,
14611        })
14612    }
14613
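    /// Parses the body of an `UNPIVOT` table factor; the `UNPIVOT` keyword and
    /// the preceding table factor have already been consumed by the caller. A
    /// sketch of the full syntax (illustrative names):
    ///
    /// ```sql
    /// SELECT * FROM sales
    ///     UNPIVOT(quantity FOR month IN (jan, feb, mar)) AS u
    /// ```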
14614    pub fn parse_unpivot_table_factor(
14615        &mut self,
14616        table: TableFactor,
14617    ) -> Result<TableFactor, ParserError> {
14618        let null_inclusion = if self.parse_keyword(Keyword::INCLUDE) {
14619            self.expect_keyword_is(Keyword::NULLS)?;
14620            Some(NullInclusion::IncludeNulls)
14621        } else if self.parse_keyword(Keyword::EXCLUDE) {
14622            self.expect_keyword_is(Keyword::NULLS)?;
14623            Some(NullInclusion::ExcludeNulls)
14624        } else {
14625            None
14626        };
14627        self.expect_token(&Token::LParen)?;
14628        let value = self.parse_expr()?;
14629        self.expect_keyword_is(Keyword::FOR)?;
14630        let name = self.parse_identifier()?;
14631        self.expect_keyword_is(Keyword::IN)?;
14632        let columns = self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
14633            p.parse_expr_with_alias()
14634        })?;
14635        self.expect_token(&Token::RParen)?;
14636        let alias = self.maybe_parse_table_alias()?;
14637        Ok(TableFactor::Unpivot {
14638            table: Box::new(table),
14639            value,
14640            null_inclusion,
14641            name,
14642            columns,
14643            alias,
14644        })
14645    }
14646
14647    pub fn parse_join_constraint(&mut self, natural: bool) -> Result<JoinConstraint, ParserError> {
14648        if natural {
14649            Ok(JoinConstraint::Natural)
14650        } else if self.parse_keyword(Keyword::ON) {
14651            let constraint = self.parse_expr()?;
14652            Ok(JoinConstraint::On(constraint))
14653        } else if self.parse_keyword(Keyword::USING) {
14654            let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
14655            Ok(JoinConstraint::Using(columns))
14656        } else {
14657            Ok(JoinConstraint::None)
14658            //self.expected("ON, or USING after JOIN", self.peek_token())
14659        }
14660    }
14661
14662    /// Parse a GRANT statement.
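    ///
    /// A minimal sketch of the accepted syntax (illustrative object and role
    /// names; the recognized clauses depend on the active dialect):
    ///
    /// ```sql
    /// GRANT SELECT, INSERT ON customers TO reporting_role WITH GRANT OPTION
    /// ```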
14663    pub fn parse_grant(&mut self) -> Result<Statement, ParserError> {
14664        let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
14665
14666        self.expect_keyword_is(Keyword::TO)?;
14667        let grantees = self.parse_grantees()?;
14668
14669        let with_grant_option =
14670            self.parse_keywords(&[Keyword::WITH, Keyword::GRANT, Keyword::OPTION]);
14671
14672        let current_grants =
14673            if self.parse_keywords(&[Keyword::COPY, Keyword::CURRENT, Keyword::GRANTS]) {
14674                Some(CurrentGrantsKind::CopyCurrentGrants)
14675            } else if self.parse_keywords(&[Keyword::REVOKE, Keyword::CURRENT, Keyword::GRANTS]) {
14676                Some(CurrentGrantsKind::RevokeCurrentGrants)
14677            } else {
14678                None
14679            };
14680
14681        let as_grantor = if self.parse_keywords(&[Keyword::AS]) {
14682            Some(self.parse_identifier()?)
14683        } else {
14684            None
14685        };
14686
14687        let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
14688            Some(self.parse_identifier()?)
14689        } else {
14690            None
14691        };
14692
14693        Ok(Statement::Grant {
14694            privileges,
14695            objects,
14696            grantees,
14697            with_grant_option,
14698            as_grantor,
14699            granted_by,
14700            current_grants,
14701        })
14702    }
14703
14704    fn parse_grantees(&mut self) -> Result<Vec<Grantee>, ParserError> {
14705        let mut values = vec![];
14706        let mut grantee_type = GranteesType::None;
14707        loop {
14708            let new_grantee_type = if self.parse_keyword(Keyword::ROLE) {
14709                GranteesType::Role
14710            } else if self.parse_keyword(Keyword::USER) {
14711                GranteesType::User
14712            } else if self.parse_keyword(Keyword::SHARE) {
14713                GranteesType::Share
14714            } else if self.parse_keyword(Keyword::GROUP) {
14715                GranteesType::Group
14716            } else if self.parse_keyword(Keyword::PUBLIC) {
14717                GranteesType::Public
14718            } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
14719                GranteesType::DatabaseRole
14720            } else if self.parse_keywords(&[Keyword::APPLICATION, Keyword::ROLE]) {
14721                GranteesType::ApplicationRole
14722            } else if self.parse_keyword(Keyword::APPLICATION) {
14723                GranteesType::Application
14724            } else {
14725                grantee_type.clone() // keep from previous iteration, if not specified
14726            };
14727
14728            if self
14729                .dialect
14730                .get_reserved_grantees_types()
14731                .contains(&new_grantee_type)
14732            {
14733                self.prev_token();
14734            } else {
14735                grantee_type = new_grantee_type;
14736            }
14737
14738            let grantee = if grantee_type == GranteesType::Public {
14739                Grantee {
14740                    grantee_type: grantee_type.clone(),
14741                    name: None,
14742                }
14743            } else {
14744                let mut name = self.parse_grantee_name()?;
14745                if self.consume_token(&Token::Colon) {
14746                    // Redshift supports namespace prefix for external users and groups:
14747                    // <Namespace>:<GroupName> or <Namespace>:<UserName>
14748                    // https://docs.aws.amazon.com/redshift/latest/mgmt/redshift-iam-access-control-native-idp.html
14749                    let ident = self.parse_identifier()?;
14750                    if let GranteeName::ObjectName(namespace) = name {
14751                        name = GranteeName::ObjectName(ObjectName::from(vec![Ident::new(
14752                            format!("{namespace}:{ident}"),
14753                        )]));
14754                    };
14755                }
14756                Grantee {
14757                    grantee_type: grantee_type.clone(),
14758                    name: Some(name),
14759                }
14760            };
14761
14762            values.push(grantee);
14763
14764            if !self.consume_token(&Token::Comma) {
14765                break;
14766            }
14767        }
14768
14769        Ok(values)
14770    }
14771
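    /// Parses the `<privileges> [ON <objects>]` portion shared by `GRANT`,
    /// `DENY` and `REVOKE`, e.g. (a sketch with illustrative schema names):
    ///
    /// ```sql
    /// ALL PRIVILEGES ON ALL TABLES IN SCHEMA public, analytics
    /// ```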
14772    pub fn parse_grant_deny_revoke_privileges_objects(
14773        &mut self,
14774    ) -> Result<(Privileges, Option<GrantObjects>), ParserError> {
14775        let privileges = if self.parse_keyword(Keyword::ALL) {
14776            Privileges::All {
14777                with_privileges_keyword: self.parse_keyword(Keyword::PRIVILEGES),
14778            }
14779        } else {
14780            let actions = self.parse_actions_list()?;
14781            Privileges::Actions(actions)
14782        };
14783
14784        let objects = if self.parse_keyword(Keyword::ON) {
14785            if self.parse_keywords(&[Keyword::ALL, Keyword::TABLES, Keyword::IN, Keyword::SCHEMA]) {
14786                Some(GrantObjects::AllTablesInSchema {
14787                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
14788                })
14789            } else if self.parse_keywords(&[
14790                Keyword::ALL,
14791                Keyword::EXTERNAL,
14792                Keyword::TABLES,
14793                Keyword::IN,
14794                Keyword::SCHEMA,
14795            ]) {
14796                Some(GrantObjects::AllExternalTablesInSchema {
14797                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
14798                })
14799            } else if self.parse_keywords(&[
14800                Keyword::ALL,
14801                Keyword::VIEWS,
14802                Keyword::IN,
14803                Keyword::SCHEMA,
14804            ]) {
14805                Some(GrantObjects::AllViewsInSchema {
14806                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
14807                })
14808            } else if self.parse_keywords(&[
14809                Keyword::ALL,
14810                Keyword::MATERIALIZED,
14811                Keyword::VIEWS,
14812                Keyword::IN,
14813                Keyword::SCHEMA,
14814            ]) {
14815                Some(GrantObjects::AllMaterializedViewsInSchema {
14816                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
14817                })
14818            } else if self.parse_keywords(&[
14819                Keyword::ALL,
14820                Keyword::FUNCTIONS,
14821                Keyword::IN,
14822                Keyword::SCHEMA,
14823            ]) {
14824                Some(GrantObjects::AllFunctionsInSchema {
14825                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
14826                })
14827            } else if self.parse_keywords(&[
14828                Keyword::FUTURE,
14829                Keyword::SCHEMAS,
14830                Keyword::IN,
14831                Keyword::DATABASE,
14832            ]) {
14833                Some(GrantObjects::FutureSchemasInDatabase {
14834                    databases: self.parse_comma_separated(|p| p.parse_object_name(false))?,
14835                })
14836            } else if self.parse_keywords(&[
14837                Keyword::FUTURE,
14838                Keyword::TABLES,
14839                Keyword::IN,
14840                Keyword::SCHEMA,
14841            ]) {
14842                Some(GrantObjects::FutureTablesInSchema {
14843                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
14844                })
14845            } else if self.parse_keywords(&[
14846                Keyword::FUTURE,
14847                Keyword::EXTERNAL,
14848                Keyword::TABLES,
14849                Keyword::IN,
14850                Keyword::SCHEMA,
14851            ]) {
14852                Some(GrantObjects::FutureExternalTablesInSchema {
14853                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
14854                })
14855            } else if self.parse_keywords(&[
14856                Keyword::FUTURE,
14857                Keyword::VIEWS,
14858                Keyword::IN,
14859                Keyword::SCHEMA,
14860            ]) {
14861                Some(GrantObjects::FutureViewsInSchema {
14862                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
14863                })
14864            } else if self.parse_keywords(&[
14865                Keyword::FUTURE,
14866                Keyword::MATERIALIZED,
14867                Keyword::VIEWS,
14868                Keyword::IN,
14869                Keyword::SCHEMA,
14870            ]) {
14871                Some(GrantObjects::FutureMaterializedViewsInSchema {
14872                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
14873                })
14874            } else if self.parse_keywords(&[
14875                Keyword::ALL,
14876                Keyword::SEQUENCES,
14877                Keyword::IN,
14878                Keyword::SCHEMA,
14879            ]) {
14880                Some(GrantObjects::AllSequencesInSchema {
14881                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
14882                })
14883            } else if self.parse_keywords(&[
14884                Keyword::FUTURE,
14885                Keyword::SEQUENCES,
14886                Keyword::IN,
14887                Keyword::SCHEMA,
14888            ]) {
14889                Some(GrantObjects::FutureSequencesInSchema {
14890                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
14891                })
14892            } else if self.parse_keywords(&[Keyword::RESOURCE, Keyword::MONITOR]) {
14893                Some(GrantObjects::ResourceMonitors(
14894                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
14895                ))
14896            } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
14897                Some(GrantObjects::ComputePools(
14898                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
14899                ))
14900            } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
14901                Some(GrantObjects::FailoverGroup(
14902                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
14903                ))
14904            } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
14905                Some(GrantObjects::ReplicationGroup(
14906                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
14907                ))
14908            } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
14909                Some(GrantObjects::ExternalVolumes(
14910                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
14911                ))
14912            } else {
14913                let object_type = self.parse_one_of_keywords(&[
14914                    Keyword::SEQUENCE,
14915                    Keyword::DATABASE,
14916                    Keyword::SCHEMA,
14917                    Keyword::TABLE,
14918                    Keyword::VIEW,
14919                    Keyword::WAREHOUSE,
14920                    Keyword::INTEGRATION,
14924                    Keyword::USER,
14925                    Keyword::CONNECTION,
14926                    Keyword::PROCEDURE,
14927                    Keyword::FUNCTION,
14928                ]);
14929                let objects =
14930                    self.parse_comma_separated(|p| p.parse_object_name_inner(false, true));
14931                match object_type {
14932                    Some(Keyword::DATABASE) => Some(GrantObjects::Databases(objects?)),
14933                    Some(Keyword::SCHEMA) => Some(GrantObjects::Schemas(objects?)),
14934                    Some(Keyword::SEQUENCE) => Some(GrantObjects::Sequences(objects?)),
14935                    Some(Keyword::WAREHOUSE) => Some(GrantObjects::Warehouses(objects?)),
14936                    Some(Keyword::INTEGRATION) => Some(GrantObjects::Integrations(objects?)),
14937                    Some(Keyword::VIEW) => Some(GrantObjects::Views(objects?)),
14938                    Some(Keyword::USER) => Some(GrantObjects::Users(objects?)),
14939                    Some(Keyword::CONNECTION) => Some(GrantObjects::Connections(objects?)),
14940                    kw @ (Some(Keyword::PROCEDURE) | Some(Keyword::FUNCTION)) => {
14941                        if let Some(name) = objects?.first() {
14942                            self.parse_grant_procedure_or_function(name, &kw)?
14943                        } else {
14944                            self.expected("procedure or function name", self.peek_token())?
14945                        }
14946                    }
14947                    Some(Keyword::TABLE) | None => Some(GrantObjects::Tables(objects?)),
14948                    _ => unreachable!(),
14949                }
14950            }
14951        } else {
14952            None
14953        };
14954
14955        Ok((privileges, objects))
14956    }
14957
14958    fn parse_grant_procedure_or_function(
14959        &mut self,
14960        name: &ObjectName,
14961        kw: &Option<Keyword>,
14962    ) -> Result<Option<GrantObjects>, ParserError> {
14963        let arg_types = if self.consume_token(&Token::LParen) {
14964            let list = self.parse_comma_separated0(Self::parse_data_type, Token::RParen)?;
14965            self.expect_token(&Token::RParen)?;
14966            list
14967        } else {
14968            vec![]
14969        };
14970        match kw {
14971            Some(Keyword::PROCEDURE) => Ok(Some(GrantObjects::Procedure {
14972                name: name.clone(),
14973                arg_types,
14974            })),
14975            Some(Keyword::FUNCTION) => Ok(Some(GrantObjects::Function {
14976                name: name.clone(),
14977                arg_types,
14978            })),
14979            _ => self.expected("procedure or function keywords", self.peek_token())?,
14980        }
14981    }
14982
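    /// Parses a single privilege in a `GRANT`/`DENY`/`REVOKE` privilege list,
    /// optionally with a column list, e.g. (illustrative column names):
    ///
    /// ```sql
    /// GRANT SELECT (id, name), UPDATE (name) ON customers TO some_role
    /// ```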
14983    pub fn parse_grant_permission(&mut self) -> Result<Action, ParserError> {
14984        fn parse_columns(parser: &mut Parser) -> Result<Option<Vec<Ident>>, ParserError> {
14985            let columns = parser.parse_parenthesized_column_list(Optional, false)?;
14986            if columns.is_empty() {
14987                Ok(None)
14988            } else {
14989                Ok(Some(columns))
14990            }
14991        }
14992
14993        // Multi-word privileges
14994        if self.parse_keywords(&[Keyword::IMPORTED, Keyword::PRIVILEGES]) {
14995            Ok(Action::ImportedPrivileges)
14996        } else if self.parse_keywords(&[Keyword::ADD, Keyword::SEARCH, Keyword::OPTIMIZATION]) {
14997            Ok(Action::AddSearchOptimization)
14998        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::LISTING]) {
14999            Ok(Action::AttachListing)
15000        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::POLICY]) {
15001            Ok(Action::AttachPolicy)
15002        } else if self.parse_keywords(&[Keyword::BIND, Keyword::SERVICE, Keyword::ENDPOINT]) {
15003            Ok(Action::BindServiceEndpoint)
15004        } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
15005            let role = self.parse_object_name(false)?;
15006            Ok(Action::DatabaseRole { role })
15007        } else if self.parse_keywords(&[Keyword::EVOLVE, Keyword::SCHEMA]) {
15008            Ok(Action::EvolveSchema)
15009        } else if self.parse_keywords(&[Keyword::IMPORT, Keyword::SHARE]) {
15010            Ok(Action::ImportShare)
15011        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::VERSIONS]) {
15012            Ok(Action::ManageVersions)
15013        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::RELEASES]) {
15014            Ok(Action::ManageReleases)
15015        } else if self.parse_keywords(&[Keyword::OVERRIDE, Keyword::SHARE, Keyword::RESTRICTIONS]) {
15016            Ok(Action::OverrideShareRestrictions)
15017        } else if self.parse_keywords(&[
15018            Keyword::PURCHASE,
15019            Keyword::DATA,
15020            Keyword::EXCHANGE,
15021            Keyword::LISTING,
15022        ]) {
15023            Ok(Action::PurchaseDataExchangeListing)
15024        } else if self.parse_keywords(&[Keyword::RESOLVE, Keyword::ALL]) {
15025            Ok(Action::ResolveAll)
15026        } else if self.parse_keywords(&[Keyword::READ, Keyword::SESSION]) {
15027            Ok(Action::ReadSession)
15028
15029        // Single-word privileges
15030        } else if self.parse_keyword(Keyword::APPLY) {
15031            let apply_type = self.parse_action_apply_type()?;
15032            Ok(Action::Apply { apply_type })
15033        } else if self.parse_keyword(Keyword::APPLYBUDGET) {
15034            Ok(Action::ApplyBudget)
15035        } else if self.parse_keyword(Keyword::AUDIT) {
15036            Ok(Action::Audit)
15037        } else if self.parse_keyword(Keyword::CONNECT) {
15038            Ok(Action::Connect)
15039        } else if self.parse_keyword(Keyword::CREATE) {
15040            let obj_type = self.maybe_parse_action_create_object_type();
15041            Ok(Action::Create { obj_type })
15042        } else if self.parse_keyword(Keyword::DELETE) {
15043            Ok(Action::Delete)
15044        } else if self.parse_keyword(Keyword::EXEC) {
15045            let obj_type = self.maybe_parse_action_execute_obj_type();
15046            Ok(Action::Exec { obj_type })
15047        } else if self.parse_keyword(Keyword::EXECUTE) {
15048            let obj_type = self.maybe_parse_action_execute_obj_type();
15049            Ok(Action::Execute { obj_type })
15050        } else if self.parse_keyword(Keyword::FAILOVER) {
15051            Ok(Action::Failover)
15052        } else if self.parse_keyword(Keyword::INSERT) {
15053            Ok(Action::Insert {
15054                columns: parse_columns(self)?,
15055            })
15056        } else if self.parse_keyword(Keyword::MANAGE) {
15057            let manage_type = self.parse_action_manage_type()?;
15058            Ok(Action::Manage { manage_type })
15059        } else if self.parse_keyword(Keyword::MODIFY) {
15060            let modify_type = self.parse_action_modify_type();
15061            Ok(Action::Modify { modify_type })
15062        } else if self.parse_keyword(Keyword::MONITOR) {
15063            let monitor_type = self.parse_action_monitor_type();
15064            Ok(Action::Monitor { monitor_type })
15065        } else if self.parse_keyword(Keyword::OPERATE) {
15066            Ok(Action::Operate)
15067        } else if self.parse_keyword(Keyword::REFERENCES) {
15068            Ok(Action::References {
15069                columns: parse_columns(self)?,
15070            })
15071        } else if self.parse_keyword(Keyword::READ) {
15072            Ok(Action::Read)
15073        } else if self.parse_keyword(Keyword::REPLICATE) {
15074            Ok(Action::Replicate)
15075        } else if self.parse_keyword(Keyword::ROLE) {
15076            let role = self.parse_object_name(false)?;
15077            Ok(Action::Role { role })
15078        } else if self.parse_keyword(Keyword::SELECT) {
15079            Ok(Action::Select {
15080                columns: parse_columns(self)?,
15081            })
15082        } else if self.parse_keyword(Keyword::TEMPORARY) {
15083            Ok(Action::Temporary)
15084        } else if self.parse_keyword(Keyword::TRIGGER) {
15085            Ok(Action::Trigger)
15086        } else if self.parse_keyword(Keyword::TRUNCATE) {
15087            Ok(Action::Truncate)
15088        } else if self.parse_keyword(Keyword::UPDATE) {
15089            Ok(Action::Update {
15090                columns: parse_columns(self)?,
15091            })
15092        } else if self.parse_keyword(Keyword::USAGE) {
15093            Ok(Action::Usage)
15094        } else if self.parse_keyword(Keyword::OWNERSHIP) {
15095            Ok(Action::Ownership)
15096        } else if self.parse_keyword(Keyword::DROP) {
15097            Ok(Action::Drop)
15098        } else {
15099            self.expected("a privilege keyword", self.peek_token())?
15100        }
15101    }
15102
15103    fn maybe_parse_action_create_object_type(&mut self) -> Option<ActionCreateObjectType> {
15104        // Multi-word object types
15105        if self.parse_keywords(&[Keyword::APPLICATION, Keyword::PACKAGE]) {
15106            Some(ActionCreateObjectType::ApplicationPackage)
15107        } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
15108            Some(ActionCreateObjectType::ComputePool)
15109        } else if self.parse_keywords(&[Keyword::DATA, Keyword::EXCHANGE, Keyword::LISTING]) {
15110            Some(ActionCreateObjectType::DataExchangeListing)
15111        } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
15112            Some(ActionCreateObjectType::ExternalVolume)
15113        } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
15114            Some(ActionCreateObjectType::FailoverGroup)
15115        } else if self.parse_keywords(&[Keyword::NETWORK, Keyword::POLICY]) {
15116            Some(ActionCreateObjectType::NetworkPolicy)
15117        } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::LISTING]) {
15118            Some(ActionCreateObjectType::OrganiationListing)
15119        } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
15120            Some(ActionCreateObjectType::ReplicationGroup)
15121        }
15122        // Single-word object types
15123        else if self.parse_keyword(Keyword::ACCOUNT) {
15124            Some(ActionCreateObjectType::Account)
15125        } else if self.parse_keyword(Keyword::APPLICATION) {
15126            Some(ActionCreateObjectType::Application)
15127        } else if self.parse_keyword(Keyword::DATABASE) {
15128            Some(ActionCreateObjectType::Database)
15129        } else if self.parse_keyword(Keyword::INTEGRATION) {
15130            Some(ActionCreateObjectType::Integration)
15131        } else if self.parse_keyword(Keyword::ROLE) {
15132            Some(ActionCreateObjectType::Role)
15133        } else if self.parse_keyword(Keyword::SCHEMA) {
15134            Some(ActionCreateObjectType::Schema)
15135        } else if self.parse_keyword(Keyword::SHARE) {
15136            Some(ActionCreateObjectType::Share)
15137        } else if self.parse_keyword(Keyword::USER) {
15138            Some(ActionCreateObjectType::User)
15139        } else if self.parse_keyword(Keyword::WAREHOUSE) {
15140            Some(ActionCreateObjectType::Warehouse)
15141        } else {
15142            None
15143        }
15144    }
15145
15146    fn parse_action_apply_type(&mut self) -> Result<ActionApplyType, ParserError> {
15147        if self.parse_keywords(&[Keyword::AGGREGATION, Keyword::POLICY]) {
15148            Ok(ActionApplyType::AggregationPolicy)
15149        } else if self.parse_keywords(&[Keyword::AUTHENTICATION, Keyword::POLICY]) {
15150            Ok(ActionApplyType::AuthenticationPolicy)
15151        } else if self.parse_keywords(&[Keyword::JOIN, Keyword::POLICY]) {
15152            Ok(ActionApplyType::JoinPolicy)
15153        } else if self.parse_keywords(&[Keyword::MASKING, Keyword::POLICY]) {
15154            Ok(ActionApplyType::MaskingPolicy)
15155        } else if self.parse_keywords(&[Keyword::PACKAGES, Keyword::POLICY]) {
15156            Ok(ActionApplyType::PackagesPolicy)
15157        } else if self.parse_keywords(&[Keyword::PASSWORD, Keyword::POLICY]) {
15158            Ok(ActionApplyType::PasswordPolicy)
15159        } else if self.parse_keywords(&[Keyword::PROJECTION, Keyword::POLICY]) {
15160            Ok(ActionApplyType::ProjectionPolicy)
15161        } else if self.parse_keywords(&[Keyword::ROW, Keyword::ACCESS, Keyword::POLICY]) {
15162            Ok(ActionApplyType::RowAccessPolicy)
15163        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::POLICY]) {
15164            Ok(ActionApplyType::SessionPolicy)
15165        } else if self.parse_keyword(Keyword::TAG) {
15166            Ok(ActionApplyType::Tag)
15167        } else {
15168            self.expected("GRANT APPLY type", self.peek_token())
15169        }
15170    }
15171
15172    fn maybe_parse_action_execute_obj_type(&mut self) -> Option<ActionExecuteObjectType> {
15173        if self.parse_keywords(&[Keyword::DATA, Keyword::METRIC, Keyword::FUNCTION]) {
15174            Some(ActionExecuteObjectType::DataMetricFunction)
15175        } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::ALERT]) {
15176            Some(ActionExecuteObjectType::ManagedAlert)
15177        } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::TASK]) {
15178            Some(ActionExecuteObjectType::ManagedTask)
15179        } else if self.parse_keyword(Keyword::ALERT) {
15180            Some(ActionExecuteObjectType::Alert)
15181        } else if self.parse_keyword(Keyword::TASK) {
15182            Some(ActionExecuteObjectType::Task)
15183        } else {
15184            None
15185        }
15186    }
15187
15188    fn parse_action_manage_type(&mut self) -> Result<ActionManageType, ParserError> {
15189        if self.parse_keywords(&[Keyword::ACCOUNT, Keyword::SUPPORT, Keyword::CASES]) {
15190            Ok(ActionManageType::AccountSupportCases)
15191        } else if self.parse_keywords(&[Keyword::EVENT, Keyword::SHARING]) {
15192            Ok(ActionManageType::EventSharing)
15193        } else if self.parse_keywords(&[Keyword::LISTING, Keyword::AUTO, Keyword::FULFILLMENT]) {
15194            Ok(ActionManageType::ListingAutoFulfillment)
15195        } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::SUPPORT, Keyword::CASES]) {
15196            Ok(ActionManageType::OrganizationSupportCases)
15197        } else if self.parse_keywords(&[Keyword::USER, Keyword::SUPPORT, Keyword::CASES]) {
15198            Ok(ActionManageType::UserSupportCases)
15199        } else if self.parse_keyword(Keyword::GRANTS) {
15200            Ok(ActionManageType::Grants)
15201        } else if self.parse_keyword(Keyword::WAREHOUSES) {
15202            Ok(ActionManageType::Warehouses)
15203        } else {
15204            self.expected("GRANT MANAGE type", self.peek_token())
15205        }
15206    }
15207
15208    fn parse_action_modify_type(&mut self) -> Option<ActionModifyType> {
15209        if self.parse_keywords(&[Keyword::LOG, Keyword::LEVEL]) {
15210            Some(ActionModifyType::LogLevel)
15211        } else if self.parse_keywords(&[Keyword::TRACE, Keyword::LEVEL]) {
15212            Some(ActionModifyType::TraceLevel)
15213        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::LOG, Keyword::LEVEL]) {
15214            Some(ActionModifyType::SessionLogLevel)
15215        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::TRACE, Keyword::LEVEL]) {
15216            Some(ActionModifyType::SessionTraceLevel)
15217        } else {
15218            None
15219        }
15220    }
15221
15222    fn parse_action_monitor_type(&mut self) -> Option<ActionMonitorType> {
15223        if self.parse_keyword(Keyword::EXECUTION) {
15224            Some(ActionMonitorType::Execution)
15225        } else if self.parse_keyword(Keyword::SECURITY) {
15226            Some(ActionMonitorType::Security)
15227        } else if self.parse_keyword(Keyword::USAGE) {
15228            Some(ActionMonitorType::Usage)
15229        } else {
15230            None
15231        }
15232    }
15233
15234    pub fn parse_grantee_name(&mut self) -> Result<GranteeName, ParserError> {
15235        let mut name = self.parse_object_name(false)?;
15236        if self.dialect.supports_user_host_grantee()
15237            && name.0.len() == 1
15238            && name.0[0].as_ident().is_some()
15239            && self.consume_token(&Token::AtSign)
15240        {
15241            let user = name.0.pop().unwrap().as_ident().unwrap().clone();
15242            let host = self.parse_identifier()?;
15243            Ok(GranteeName::UserHost { user, host })
15244        } else {
15245            Ok(GranteeName::ObjectName(name))
15246        }
15247    }
15248
15249    /// Parse [`Statement::Deny`]
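    ///
    /// For example (illustrative table and principal names):
    ///
    /// ```sql
    /// DENY SELECT ON customers TO analyst_role
    /// ```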
15250    pub fn parse_deny(&mut self) -> Result<Statement, ParserError> {
15251        self.expect_keyword(Keyword::DENY)?;
15252
15253        let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
15254        let objects = match objects {
15255            Some(o) => o,
15256            None => {
15257                return parser_err!(
15258                    "DENY statements must specify an object",
15259                    self.peek_token().span.start
15260                )
15261            }
15262        };
15263
15264        self.expect_keyword_is(Keyword::TO)?;
15265        let grantees = self.parse_grantees()?;
15266        let cascade = self.parse_cascade_option();
15267        let granted_by = if self.parse_keywords(&[Keyword::AS]) {
15268            Some(self.parse_identifier()?)
15269        } else {
15270            None
15271        };
15272
15273        Ok(Statement::Deny(DenyStatement {
15274            privileges,
15275            objects,
15276            grantees,
15277            cascade,
15278            granted_by,
15279        }))
15280    }
15281
15282    /// Parse a REVOKE statement
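    ///
    /// For example (illustrative table and role names):
    ///
    /// ```sql
    /// REVOKE SELECT, INSERT ON customers FROM reporting_role
    /// ```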
15283    pub fn parse_revoke(&mut self) -> Result<Statement, ParserError> {
15284        let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
15285
15286        self.expect_keyword_is(Keyword::FROM)?;
15287        let grantees = self.parse_grantees()?;
15288
15289        let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
15290            Some(self.parse_identifier()?)
15291        } else {
15292            None
15293        };
15294
15295        let cascade = self.parse_cascade_option();
15296
15297        Ok(Statement::Revoke {
15298            privileges,
15299            objects,
15300            grantees,
15301            granted_by,
15302            cascade,
15303        })
15304    }
15305
15306    /// Parse a REPLACE statement
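    ///
    /// `REPLACE` is parsed as a MySQL-style `INSERT` with the `replace_into`
    /// flag set, e.g. (illustrative table and values):
    ///
    /// ```sql
    /// REPLACE INTO customers (id, name) VALUES (1, 'Alice')
    /// ```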
15307    pub fn parse_replace(&mut self) -> Result<Statement, ParserError> {
15308        if !dialect_of!(self is MySqlDialect | GenericDialect) {
15309            return parser_err!(
15310                "Unsupported statement REPLACE",
15311                self.peek_token().span.start
15312            );
15313        }
15314
15315        let mut insert = self.parse_insert()?;
15316        if let Statement::Insert(Insert { replace_into, .. }) = &mut insert {
15317            *replace_into = true;
15318        }
15319
15320        Ok(insert)
15321    }
15322
15323    /// Parse an INSERT statement, returning a `Box`ed SetExpr
15324    ///
15325    /// This is used to reduce the size of the stack frames in debug builds
15326    fn parse_insert_setexpr_boxed(&mut self) -> Result<Box<SetExpr>, ParserError> {
15327        Ok(Box::new(SetExpr::Insert(self.parse_insert()?)))
15328    }
15329
15330    /// Parse an INSERT statement
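    ///
    /// For example (illustrative table and values; the clauses recognized
    /// around the core statement vary by dialect):
    ///
    /// ```sql
    /// INSERT INTO customers (id, name) VALUES (1, 'Alice')
    /// ```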
15331    pub fn parse_insert(&mut self) -> Result<Statement, ParserError> {
15332        let or = self.parse_conflict_clause();
15333        let priority = if !dialect_of!(self is MySqlDialect | GenericDialect) {
15334            None
15335        } else if self.parse_keyword(Keyword::LOW_PRIORITY) {
15336            Some(MysqlInsertPriority::LowPriority)
15337        } else if self.parse_keyword(Keyword::DELAYED) {
15338            Some(MysqlInsertPriority::Delayed)
15339        } else if self.parse_keyword(Keyword::HIGH_PRIORITY) {
15340            Some(MysqlInsertPriority::HighPriority)
15341        } else {
15342            None
15343        };
15344
15345        let ignore = dialect_of!(self is MySqlDialect | GenericDialect)
15346            && self.parse_keyword(Keyword::IGNORE);
15347
15348        let replace_into = false;
15349
15350        let overwrite = self.parse_keyword(Keyword::OVERWRITE);
15351        let into = self.parse_keyword(Keyword::INTO);
15352
15353        let local = self.parse_keyword(Keyword::LOCAL);
15354
15355        if self.parse_keyword(Keyword::DIRECTORY) {
15356            let path = self.parse_literal_string()?;
15357            let file_format = if self.parse_keywords(&[Keyword::STORED, Keyword::AS]) {
15358                Some(self.parse_file_format()?)
15359            } else {
15360                None
15361            };
15362            let source = self.parse_query()?;
15363            Ok(Statement::Directory {
15364                local,
15365                path,
15366                overwrite,
15367                file_format,
15368                source,
15369            })
15370        } else {
15371            // Hive lets you put table here regardless
15372            let table = self.parse_keyword(Keyword::TABLE);
15373            let table_object = self.parse_table_object()?;
15374
15375            let table_alias =
15376                if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::AS) {
15377                    Some(self.parse_identifier()?)
15378                } else {
15379                    None
15380                };
15381
15382            let is_mysql = dialect_of!(self is MySqlDialect);
15383
15384            let (columns, partitioned, after_columns, source, assignments) = if self
15385                .parse_keywords(&[Keyword::DEFAULT, Keyword::VALUES])
15386            {
15387                (vec![], None, vec![], None, vec![])
15388            } else {
15389                let (columns, partitioned, after_columns) = if !self.peek_subquery_start() {
15390                    let columns = self.parse_parenthesized_column_list(Optional, is_mysql)?;
15391
15392                    let partitioned = self.parse_insert_partition()?;
15393                    // Hive allows you to specify columns after partitions as well if you want.
15394                    let after_columns = if dialect_of!(self is HiveDialect) {
15395                        self.parse_parenthesized_column_list(Optional, false)?
15396                    } else {
15397                        vec![]
15398                    };
15399                    (columns, partitioned, after_columns)
15400                } else {
15401                    Default::default()
15402                };
15403
15404                let (source, assignments) = if self.peek_keyword(Keyword::FORMAT)
15405                    || self.peek_keyword(Keyword::SETTINGS)
15406                {
15407                    (None, vec![])
15408                } else if self.dialect.supports_insert_set() && self.parse_keyword(Keyword::SET) {
15409                    (None, self.parse_comma_separated(Parser::parse_assignment)?)
15410                } else {
15411                    (Some(self.parse_query()?), vec![])
15412                };
15413
15414                (columns, partitioned, after_columns, source, assignments)
15415            };
15416
15417            let (format_clause, settings) = if self.dialect.supports_insert_format() {
15418                // Settings always comes before `FORMAT` for ClickHouse:
15419                // <https://clickhouse.com/docs/en/sql-reference/statements/insert-into>
15420                let settings = self.parse_settings()?;
15421
15422                let format = if self.parse_keyword(Keyword::FORMAT) {
15423                    Some(self.parse_input_format_clause()?)
15424                } else {
15425                    None
15426                };
15427
15428                (format, settings)
15429            } else {
15430                Default::default()
15431            };
15432
15433            let insert_alias = if dialect_of!(self is MySqlDialect | GenericDialect)
15434                && self.parse_keyword(Keyword::AS)
15435            {
15436                let row_alias = self.parse_object_name(false)?;
15437                let col_aliases = Some(self.parse_parenthesized_column_list(Optional, false)?);
15438                Some(InsertAliases {
15439                    row_alias,
15440                    col_aliases,
15441                })
15442            } else {
15443                None
15444            };
15445
15446            let on = if self.parse_keyword(Keyword::ON) {
15447                if self.parse_keyword(Keyword::CONFLICT) {
15448                    let conflict_target =
15449                        if self.parse_keywords(&[Keyword::ON, Keyword::CONSTRAINT]) {
15450                            Some(ConflictTarget::OnConstraint(self.parse_object_name(false)?))
15451                        } else if self.peek_token() == Token::LParen {
15452                            Some(ConflictTarget::Columns(
15453                                self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
15454                            ))
15455                        } else {
15456                            None
15457                        };
15458
15459                    self.expect_keyword_is(Keyword::DO)?;
15460                    let action = if self.parse_keyword(Keyword::NOTHING) {
15461                        OnConflictAction::DoNothing
15462                    } else {
15463                        self.expect_keyword_is(Keyword::UPDATE)?;
15464                        self.expect_keyword_is(Keyword::SET)?;
15465                        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
15466                        let selection = if self.parse_keyword(Keyword::WHERE) {
15467                            Some(self.parse_expr()?)
15468                        } else {
15469                            None
15470                        };
15471                        OnConflictAction::DoUpdate(DoUpdate {
15472                            assignments,
15473                            selection,
15474                        })
15475                    };
15476
15477                    Some(OnInsert::OnConflict(OnConflict {
15478                        conflict_target,
15479                        action,
15480                    }))
15481                } else {
15482                    self.expect_keyword_is(Keyword::DUPLICATE)?;
15483                    self.expect_keyword_is(Keyword::KEY)?;
15484                    self.expect_keyword_is(Keyword::UPDATE)?;
15485                    let l = self.parse_comma_separated(Parser::parse_assignment)?;
15486
15487                    Some(OnInsert::DuplicateKeyUpdate(l))
15488                }
15489            } else {
15490                None
15491            };
15492
15493            let returning = if self.parse_keyword(Keyword::RETURNING) {
15494                Some(self.parse_comma_separated(Parser::parse_select_item)?)
15495            } else {
15496                None
15497            };
15498
15499            Ok(Statement::Insert(Insert {
15500                or,
15501                table: table_object,
15502                table_alias,
15503                ignore,
15504                into,
15505                overwrite,
15506                partitioned,
15507                columns,
15508                after_columns,
15509                source,
15510                assignments,
15511                has_table_keyword: table,
15512                on,
15513                returning,
15514                replace_into,
15515                priority,
15516                insert_alias,
15517                settings,
15518                format_clause,
15519            }))
15520        }
15521    }
15522
15523    /// Parses the input format clause used by ClickHouse.
15524    ///
15525    /// <https://clickhouse.com/docs/en/interfaces/formats>
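    ///
    /// For example, the format name at the tail of a ClickHouse insert
    /// (illustrative table and format):
    ///
    /// ```sql
    /// INSERT INTO t FORMAT CSV
    /// ```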
15526    pub fn parse_input_format_clause(&mut self) -> Result<InputFormatClause, ParserError> {
15527        let ident = self.parse_identifier()?;
15528        let values = self
15529            .maybe_parse(|p| p.parse_comma_separated(|p| p.parse_expr()))?
15530            .unwrap_or_default();
15531
15532        Ok(InputFormatClause { ident, values })
15533    }
15534
15535    /// Returns true if the immediate tokens look like the
15536    /// beginning of a subquery. `(SELECT ...`
15537    fn peek_subquery_start(&mut self) -> bool {
15538        let [maybe_lparen, maybe_select] = self.peek_tokens();
15539        Token::LParen == maybe_lparen
15540            && matches!(maybe_select, Token::Word(w) if w.keyword == Keyword::SELECT)
15541    }
15542
15543    fn parse_conflict_clause(&mut self) -> Option<SqliteOnConflict> {
15544        if self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]) {
15545            Some(SqliteOnConflict::Replace)
15546        } else if self.parse_keywords(&[Keyword::OR, Keyword::ROLLBACK]) {
15547            Some(SqliteOnConflict::Rollback)
15548        } else if self.parse_keywords(&[Keyword::OR, Keyword::ABORT]) {
15549            Some(SqliteOnConflict::Abort)
15550        } else if self.parse_keywords(&[Keyword::OR, Keyword::FAIL]) {
15551            Some(SqliteOnConflict::Fail)
15552        } else if self.parse_keywords(&[Keyword::OR, Keyword::IGNORE]) {
15553            Some(SqliteOnConflict::Ignore)
15554        } else if self.parse_keyword(Keyword::REPLACE) {
15555            Some(SqliteOnConflict::Replace)
15556        } else {
15557            None
15558        }
15559    }
15560
15561    pub fn parse_insert_partition(&mut self) -> Result<Option<Vec<Expr>>, ParserError> {
15562        if self.parse_keyword(Keyword::PARTITION) {
15563            self.expect_token(&Token::LParen)?;
15564            let partition_cols = Some(self.parse_comma_separated(Parser::parse_expr)?);
15565            self.expect_token(&Token::RParen)?;
15566            Ok(partition_cols)
15567        } else {
15568            Ok(None)
15569        }
15570    }
15571
15572    pub fn parse_load_data_table_format(
15573        &mut self,
15574    ) -> Result<Option<HiveLoadDataFormat>, ParserError> {
15575        if self.parse_keyword(Keyword::INPUTFORMAT) {
15576            let input_format = self.parse_expr()?;
15577            self.expect_keyword_is(Keyword::SERDE)?;
15578            let serde = self.parse_expr()?;
15579            Ok(Some(HiveLoadDataFormat {
15580                input_format,
15581                serde,
15582            }))
15583        } else {
15584            Ok(None)
15585        }
15586    }
15587
15588    /// Parse an UPDATE statement, returning a `Box`ed SetExpr
15589    ///
15590    /// This is used to reduce the size of the stack frames in debug builds
15591    fn parse_update_setexpr_boxed(&mut self) -> Result<Box<SetExpr>, ParserError> {
15592        Ok(Box::new(SetExpr::Update(self.parse_update()?)))
15593    }
15594
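    /// A sketch of the statement shape handled here (clauses other than `SET` are
    /// optional and dialect-dependent; table and column names are illustrative):
    /// ```sql
    /// UPDATE t SET a = 1, b = b + 1 FROM other WHERE t.id = other.id RETURNING a;
    /// ```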
15595    pub fn parse_update(&mut self) -> Result<Statement, ParserError> {
15596        let or = self.parse_conflict_clause();
15597        let table = self.parse_table_and_joins()?;
15598        let from_before_set = if self.parse_keyword(Keyword::FROM) {
15599            Some(UpdateTableFromKind::BeforeSet(
15600                self.parse_table_with_joins()?,
15601            ))
15602        } else {
15603            None
15604        };
15605        self.expect_keyword(Keyword::SET)?;
15606        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
15607        let from = if from_before_set.is_none() && self.parse_keyword(Keyword::FROM) {
15608            Some(UpdateTableFromKind::AfterSet(
15609                self.parse_table_with_joins()?,
15610            ))
15611        } else {
15612            from_before_set
15613        };
15614        let selection = if self.parse_keyword(Keyword::WHERE) {
15615            Some(self.parse_expr()?)
15616        } else {
15617            None
15618        };
15619        let returning = if self.parse_keyword(Keyword::RETURNING) {
15620            Some(self.parse_comma_separated(Parser::parse_select_item)?)
15621        } else {
15622            None
15623        };
15624        let limit = if self.parse_keyword(Keyword::LIMIT) {
15625            Some(self.parse_expr()?)
15626        } else {
15627            None
15628        };
15629        Ok(Statement::Update {
15630            table,
15631            assignments,
15632            from,
15633            selection,
15634            returning,
15635            or,
15636            limit,
15637        })
15638    }
15639
15640    /// Parse a `var = expr` assignment, used in an UPDATE statement
15641    pub fn parse_assignment(&mut self) -> Result<Assignment, ParserError> {
15642        let target = self.parse_assignment_target()?;
15643        self.expect_token(&Token::Eq)?;
15644        let value = self.parse_expr()?;
15645        Ok(Assignment { target, value })
15646    }
15647
15648    /// Parse the left-hand side of an assignment, used in an UPDATE statement
15649    pub fn parse_assignment_target(&mut self) -> Result<AssignmentTarget, ParserError> {
15650        if self.consume_token(&Token::LParen) {
15651            let columns = self.parse_comma_separated(|p| p.parse_object_name(false))?;
15652            self.expect_token(&Token::RParen)?;
15653            Ok(AssignmentTarget::Tuple(columns))
15654        } else {
15655            let column = self.parse_object_name(false)?;
15656            Ok(AssignmentTarget::ColumnName(column))
15657        }
15658    }
15659
15660    pub fn parse_function_args(&mut self) -> Result<FunctionArg, ParserError> {
15661        let arg = if self.dialect.supports_named_fn_args_with_expr_name() {
15662            self.maybe_parse(|p| {
15663                let name = p.parse_expr()?;
15664                let operator = p.parse_function_named_arg_operator()?;
15665                let arg = p.parse_wildcard_expr()?.into();
15666                Ok(FunctionArg::ExprNamed {
15667                    name,
15668                    arg,
15669                    operator,
15670                })
15671            })?
15672        } else {
15673            self.maybe_parse(|p| {
15674                let name = p.parse_identifier()?;
15675                let operator = p.parse_function_named_arg_operator()?;
15676                let arg = p.parse_wildcard_expr()?.into();
15677                Ok(FunctionArg::Named {
15678                    name,
15679                    arg,
15680                    operator,
15681                })
15682            })?
15683        };
15684        if let Some(arg) = arg {
15685            return Ok(arg);
15686        }
15687        Ok(FunctionArg::Unnamed(self.parse_wildcard_expr()?.into()))
15688    }
15689
15690    fn parse_function_named_arg_operator(&mut self) -> Result<FunctionArgOperator, ParserError> {
15691        if self.parse_keyword(Keyword::VALUE) {
15692            return Ok(FunctionArgOperator::Value);
15693        }
15694        let tok = self.next_token();
15695        match tok.token {
15696            Token::RArrow if self.dialect.supports_named_fn_args_with_rarrow_operator() => {
15697                Ok(FunctionArgOperator::RightArrow)
15698            }
15699            Token::Eq if self.dialect.supports_named_fn_args_with_eq_operator() => {
15700                Ok(FunctionArgOperator::Equals)
15701            }
15702            Token::Assignment
15703                if self
15704                    .dialect
15705                    .supports_named_fn_args_with_assignment_operator() =>
15706            {
15707                Ok(FunctionArgOperator::Assignment)
15708            }
15709            Token::Colon if self.dialect.supports_named_fn_args_with_colon_operator() => {
15710                Ok(FunctionArgOperator::Colon)
15711            }
15712            _ => {
15713                self.prev_token();
15714                self.expected("argument operator", tok)
15715            }
15716        }
15717    }
15718
15719    pub fn parse_optional_args(&mut self) -> Result<Vec<FunctionArg>, ParserError> {
15720        if self.consume_token(&Token::RParen) {
15721            Ok(vec![])
15722        } else {
15723            let args = self.parse_comma_separated(Parser::parse_function_args)?;
15724            self.expect_token(&Token::RParen)?;
15725            Ok(args)
15726        }
15727    }
15728
15729    fn parse_table_function_args(&mut self) -> Result<TableFunctionArgs, ParserError> {
15730        if self.consume_token(&Token::RParen) {
15731            return Ok(TableFunctionArgs {
15732                args: vec![],
15733                settings: None,
15734            });
15735        }
15736        let mut args = vec![];
15737        let settings = loop {
15738            if let Some(settings) = self.parse_settings()? {
15739                break Some(settings);
15740            }
15741            args.push(self.parse_function_args()?);
15742            if self.is_parse_comma_separated_end() {
15743                break None;
15744            }
15745        };
15746        self.expect_token(&Token::RParen)?;
15747        Ok(TableFunctionArgs { args, settings })
15748    }
15749
15750    /// Parses a potentially empty list of arguments to a function
15751    /// (including the closing parenthesis).
15752    ///
15753    /// Examples:
15754    /// ```sql
15755    /// FIRST_VALUE(x ORDER BY 1,2,3);
15756    /// FIRST_VALUE(x IGNORE NULLS);
15757    /// ```
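    ///
    /// Clause handling is dialect-dependent; a few more shapes this is intended to
    /// accept (an illustrative sketch, not an exhaustive list):
    /// ```sql
    /// GROUP_CONCAT(DISTINCT name ORDER BY name SEPARATOR ', ');
    /// ARRAY_AGG(x LIMIT 10);
    /// ```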
15758    fn parse_function_argument_list(&mut self) -> Result<FunctionArgumentList, ParserError> {
15759        let mut clauses = vec![];
15760
15761        // Handle clauses that may exist with an empty argument list
15762
15763        if let Some(null_clause) = self.parse_json_null_clause() {
15764            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
15765        }
15766
15767        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
15768            clauses.push(FunctionArgumentClause::JsonReturningClause(
15769                json_returning_clause,
15770            ));
15771        }
15772
15773        if self.consume_token(&Token::RParen) {
15774            return Ok(FunctionArgumentList {
15775                duplicate_treatment: None,
15776                args: vec![],
15777                clauses,
15778            });
15779        }
15780
15781        let duplicate_treatment = self.parse_duplicate_treatment()?;
15782        let args = self.parse_comma_separated(Parser::parse_function_args)?;
15783
15784        if self.dialect.supports_window_function_null_treatment_arg() {
15785            if let Some(null_treatment) = self.parse_null_treatment()? {
15786                clauses.push(FunctionArgumentClause::IgnoreOrRespectNulls(null_treatment));
15787            }
15788        }
15789
15790        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
15791            clauses.push(FunctionArgumentClause::OrderBy(
15792                self.parse_comma_separated(Parser::parse_order_by_expr)?,
15793            ));
15794        }
15795
15796        if self.parse_keyword(Keyword::LIMIT) {
15797            clauses.push(FunctionArgumentClause::Limit(self.parse_expr()?));
15798        }
15799
15800        if dialect_of!(self is GenericDialect | BigQueryDialect)
15801            && self.parse_keyword(Keyword::HAVING)
15802        {
15803            let kind = match self.expect_one_of_keywords(&[Keyword::MIN, Keyword::MAX])? {
15804                Keyword::MIN => HavingBoundKind::Min,
15805                Keyword::MAX => HavingBoundKind::Max,
15806                _ => unreachable!(),
15807            };
15808            clauses.push(FunctionArgumentClause::Having(HavingBound(
15809                kind,
15810                self.parse_expr()?,
15811            )))
15812        }
15813
15814        if dialect_of!(self is GenericDialect | MySqlDialect)
15815            && self.parse_keyword(Keyword::SEPARATOR)
15816        {
15817            clauses.push(FunctionArgumentClause::Separator(self.parse_value()?.value));
15818        }
15819
15820        if let Some(on_overflow) = self.parse_listagg_on_overflow()? {
15821            clauses.push(FunctionArgumentClause::OnOverflow(on_overflow));
15822        }
15823
15824        if let Some(null_clause) = self.parse_json_null_clause() {
15825            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
15826        }
15827
15828        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
15829            clauses.push(FunctionArgumentClause::JsonReturningClause(
15830                json_returning_clause,
15831            ));
15832        }
15833
15834        self.expect_token(&Token::RParen)?;
15835        Ok(FunctionArgumentList {
15836            duplicate_treatment,
15837            args,
15838            clauses,
15839        })
15840    }
15841
15842    fn parse_json_null_clause(&mut self) -> Option<JsonNullClause> {
15843        if self.parse_keywords(&[Keyword::ABSENT, Keyword::ON, Keyword::NULL]) {
15844            Some(JsonNullClause::AbsentOnNull)
15845        } else if self.parse_keywords(&[Keyword::NULL, Keyword::ON, Keyword::NULL]) {
15846            Some(JsonNullClause::NullOnNull)
15847        } else {
15848            None
15849        }
15850    }
15851
15852    fn maybe_parse_json_returning_clause(
15853        &mut self,
15854    ) -> Result<Option<JsonReturningClause>, ParserError> {
15855        if self.parse_keyword(Keyword::RETURNING) {
15856            let data_type = self.parse_data_type()?;
15857            Ok(Some(JsonReturningClause { data_type }))
15858        } else {
15859            Ok(None)
15860        }
15861    }
15862
15863    fn parse_duplicate_treatment(&mut self) -> Result<Option<DuplicateTreatment>, ParserError> {
15864        let loc = self.peek_token().span.start;
15865        match (
15866            self.parse_keyword(Keyword::ALL),
15867            self.parse_keyword(Keyword::DISTINCT),
15868        ) {
15869            (true, false) => Ok(Some(DuplicateTreatment::All)),
15870            (false, true) => Ok(Some(DuplicateTreatment::Distinct)),
15871            (false, false) => Ok(None),
15872            (true, true) => parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc),
15873        }
15874    }
15875
15876    /// Parse one projection from the comma-delimited list that follows SELECT
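    ///
    /// For example, each comma-separated element of the projection below is one
    /// select item (names are illustrative):
    /// ```sql
    /// SELECT id, t.*, 1 + 1 AS two FROM t;
    /// ```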
15877    pub fn parse_select_item(&mut self) -> Result<SelectItem, ParserError> {
15878        let prefix = self
15879            .parse_one_of_keywords(
15880                self.dialect
15881                    .get_reserved_keywords_for_select_item_operator(),
15882            )
15883            .map(|keyword| Ident::new(format!("{keyword:?}")));
15884
15885        match self.parse_wildcard_expr()? {
15886            Expr::QualifiedWildcard(prefix, token) => Ok(SelectItem::QualifiedWildcard(
15887                SelectItemQualifiedWildcardKind::ObjectName(prefix),
15888                self.parse_wildcard_additional_options(token.0)?,
15889            )),
15890            Expr::Wildcard(token) => Ok(SelectItem::Wildcard(
15891                self.parse_wildcard_additional_options(token.0)?,
15892            )),
15893            Expr::Identifier(v) if v.value.to_lowercase() == "from" && v.quote_style.is_none() => {
15894                parser_err!(
15895                    format!("Expected an expression, found: {}", v),
15896                    self.peek_token().span.start
15897                )
15898            }
15899            Expr::BinaryOp {
15900                left,
15901                op: BinaryOperator::Eq,
15902                right,
15903            } if self.dialect.supports_eq_alias_assignment()
15904                && matches!(left.as_ref(), Expr::Identifier(_)) =>
15905            {
15906                let Expr::Identifier(alias) = *left else {
15907                    return parser_err!(
15908                        "BUG: expected identifier expression as alias",
15909                        self.peek_token().span.start
15910                    );
15911                };
15912                Ok(SelectItem::ExprWithAlias {
15913                    expr: *right,
15914                    alias,
15915                })
15916            }
15917            expr if self.dialect.supports_select_expr_star()
15918                && self.consume_tokens(&[Token::Period, Token::Mul]) =>
15919            {
15920                let wildcard_token = self.get_previous_token().clone();
15921                Ok(SelectItem::QualifiedWildcard(
15922                    SelectItemQualifiedWildcardKind::Expr(expr),
15923                    self.parse_wildcard_additional_options(wildcard_token)?,
15924                ))
15925            }
15926            expr => self
15927                .maybe_parse_select_item_alias()
15928                .map(|alias| match alias {
15929                    Some(alias) => SelectItem::ExprWithAlias {
15930                        expr: maybe_prefixed_expr(expr, prefix),
15931                        alias,
15932                    },
15933                    None => SelectItem::UnnamedExpr(maybe_prefixed_expr(expr, prefix)),
15934                }),
15935        }
15936    }
15937
15938    /// Parse the [`WildcardAdditionalOptions`] attached to a wildcard select item.
15939    ///
15940    /// Each option is dialect-specific and is returned as `None` when not present.
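    ///
    /// For example, in a Snowflake-style projection (an illustrative sketch):
    /// ```sql
    /// SELECT * EXCLUDE (a, b) RENAME (c AS d) FROM t;
    /// ```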
15941    pub fn parse_wildcard_additional_options(
15942        &mut self,
15943        wildcard_token: TokenWithSpan,
15944    ) -> Result<WildcardAdditionalOptions, ParserError> {
15945        let opt_ilike = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
15946            self.parse_optional_select_item_ilike()?
15947        } else {
15948            None
15949        };
15950        let opt_exclude = if opt_ilike.is_none() && self.dialect.supports_select_wildcard_exclude()
15951        {
15952            self.parse_optional_select_item_exclude()?
15953        } else {
15954            None
15955        };
15956        let opt_except = if self.dialect.supports_select_wildcard_except() {
15957            self.parse_optional_select_item_except()?
15958        } else {
15959            None
15960        };
15961        let opt_replace = if dialect_of!(self is GenericDialect | BigQueryDialect | ClickHouseDialect | DuckDbDialect | SnowflakeDialect)
15962        {
15963            self.parse_optional_select_item_replace()?
15964        } else {
15965            None
15966        };
15967        let opt_rename = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
15968            self.parse_optional_select_item_rename()?
15969        } else {
15970            None
15971        };
15972
15973        Ok(WildcardAdditionalOptions {
15974            wildcard_token: wildcard_token.into(),
15975            opt_ilike,
15976            opt_exclude,
15977            opt_except,
15978            opt_rename,
15979            opt_replace,
15980        })
15981    }
15982
15983    /// Parse an optional [`Ilike`](IlikeSelectItem) clause for a wildcard select item.
15984    ///
15985    /// Returns `Ok(None)` if the clause is not present.
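    ///
    /// For example (Snowflake-style; illustrative):
    /// ```sql
    /// SELECT * ILIKE '%id%' FROM t;
    /// ```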
15986    pub fn parse_optional_select_item_ilike(
15987        &mut self,
15988    ) -> Result<Option<IlikeSelectItem>, ParserError> {
15989        let opt_ilike = if self.parse_keyword(Keyword::ILIKE) {
15990            let next_token = self.next_token();
15991            let pattern = match next_token.token {
15992                Token::SingleQuotedString(s) => s,
15993                _ => return self.expected("ilike pattern", next_token),
15994            };
15995            Some(IlikeSelectItem { pattern })
15996        } else {
15997            None
15998        };
15999        Ok(opt_ilike)
16000    }
16001
16002    /// Parse an optional [`Exclude`](ExcludeSelectItem) clause for a wildcard select item.
16003    ///
16004    /// Returns `Ok(None)` if the clause is not present.
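    ///
    /// For example (illustrative; both the single- and multi-column forms):
    /// ```sql
    /// SELECT * EXCLUDE department_id FROM employees;
    /// SELECT * EXCLUDE (a, b) FROM t;
    /// ```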
16005    pub fn parse_optional_select_item_exclude(
16006        &mut self,
16007    ) -> Result<Option<ExcludeSelectItem>, ParserError> {
16008        let opt_exclude = if self.parse_keyword(Keyword::EXCLUDE) {
16009            if self.consume_token(&Token::LParen) {
16010                let columns = self.parse_comma_separated(|parser| parser.parse_identifier())?;
16011                self.expect_token(&Token::RParen)?;
16012                Some(ExcludeSelectItem::Multiple(columns))
16013            } else {
16014                let column = self.parse_identifier()?;
16015                Some(ExcludeSelectItem::Single(column))
16016            }
16017        } else {
16018            None
16019        };
16020
16021        Ok(opt_exclude)
16022    }
16023
16024    /// Parse an optional [`Except`](ExceptSelectItem) clause for a wildcard select item.
16025    ///
16026    /// Returns `Ok(None)` if the clause is not present.
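    ///
    /// For example (illustrative; the unparenthesized form is ClickHouse-style):
    /// ```sql
    /// SELECT * EXCEPT (a, b) FROM t;
    /// SELECT * EXCEPT a FROM t;
    /// ```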
16027    pub fn parse_optional_select_item_except(
16028        &mut self,
16029    ) -> Result<Option<ExceptSelectItem>, ParserError> {
16030        let opt_except = if self.parse_keyword(Keyword::EXCEPT) {
16031            if self.peek_token().token == Token::LParen {
16032                let idents = self.parse_parenthesized_column_list(Mandatory, false)?;
16033                match &idents[..] {
16034                    [] => {
16035                        return self.expected(
16036                            "at least one column should be parsed by the EXCEPT clause",
16037                            self.peek_token(),
16038                        )?;
16039                    }
16040                    [first, idents @ ..] => Some(ExceptSelectItem {
16041                        first_element: first.clone(),
16042                        additional_elements: idents.to_vec(),
16043                    }),
16044                }
16045            } else {
16046                // ClickHouse allows a bare EXCEPT column_name (no parentheses)
16047                let ident = self.parse_identifier()?;
16048                Some(ExceptSelectItem {
16049                    first_element: ident,
16050                    additional_elements: vec![],
16051                })
16052            }
16053        } else {
16054            None
16055        };
16056
16057        Ok(opt_except)
16058    }
16059
16060    /// Parse an optional [`Rename`](RenameSelectItem) clause for a wildcard select item.
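    ///
    /// For example (Snowflake-style; illustrative):
    /// ```sql
    /// SELECT * RENAME (a AS x, b AS y) FROM t;
    /// ```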
16061    pub fn parse_optional_select_item_rename(
16062        &mut self,
16063    ) -> Result<Option<RenameSelectItem>, ParserError> {
16064        let opt_rename = if self.parse_keyword(Keyword::RENAME) {
16065            if self.consume_token(&Token::LParen) {
16066                let idents =
16067                    self.parse_comma_separated(|parser| parser.parse_identifier_with_alias())?;
16068                self.expect_token(&Token::RParen)?;
16069                Some(RenameSelectItem::Multiple(idents))
16070            } else {
16071                let ident = self.parse_identifier_with_alias()?;
16072                Some(RenameSelectItem::Single(ident))
16073            }
16074        } else {
16075            None
16076        };
16077
16078        Ok(opt_rename)
16079    }
16080
16081    /// Parse an optional [`Replace`](ReplaceSelectItem) clause for a wildcard select item.
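    ///
    /// For example (BigQuery/Snowflake-style; illustrative):
    /// ```sql
    /// SELECT * REPLACE (quantity / 2 AS quantity) FROM t;
    /// ```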
16082    pub fn parse_optional_select_item_replace(
16083        &mut self,
16084    ) -> Result<Option<ReplaceSelectItem>, ParserError> {
16085        let opt_replace = if self.parse_keyword(Keyword::REPLACE) {
16086            if self.consume_token(&Token::LParen) {
16087                let items = self.parse_comma_separated(|parser| {
16088                    Ok(Box::new(parser.parse_replace_elements()?))
16089                })?;
16090                self.expect_token(&Token::RParen)?;
16091                Some(ReplaceSelectItem { items })
16092            } else {
16093                let tok = self.next_token();
16094                return self.expected("( after REPLACE", tok);
16095            }
16096        } else {
16097            None
16098        };
16099
16100        Ok(opt_replace)
16101    }

16102    pub fn parse_replace_elements(&mut self) -> Result<ReplaceSelectElement, ParserError> {
16103        let expr = self.parse_expr()?;
16104        let as_keyword = self.parse_keyword(Keyword::AS);
16105        let ident = self.parse_identifier()?;
16106        Ok(ReplaceSelectElement {
16107            expr,
16108            column_name: ident,
16109            as_keyword,
16110        })
16111    }
16112
16113    /// Parse ASC or DESC, returning `Some(true)` for ASC, `Some(false)` for DESC, or `None` if
16114    /// neither keyword is present.
16115    pub fn parse_asc_desc(&mut self) -> Option<bool> {
16116        if self.parse_keyword(Keyword::ASC) {
16117            Some(true)
16118        } else if self.parse_keyword(Keyword::DESC) {
16119            Some(false)
16120        } else {
16121            None
16122        }
16123    }
16124
16125    /// Parse an [OrderByExpr] expression.
16126    pub fn parse_order_by_expr(&mut self) -> Result<OrderByExpr, ParserError> {
16127        self.parse_order_by_expr_inner(false)
16128            .map(|(order_by, _)| order_by)
16129    }
16130
16131    /// Parse an [IndexColumn].
16132    pub fn parse_create_index_expr(&mut self) -> Result<IndexColumn, ParserError> {
16133        self.parse_order_by_expr_inner(true)
16134            .map(|(column, operator_class)| IndexColumn {
16135                column,
16136                operator_class,
16137            })
16138    }
16139
16140    fn parse_order_by_expr_inner(
16141        &mut self,
16142        with_operator_class: bool,
16143    ) -> Result<(OrderByExpr, Option<Ident>), ParserError> {
16144        let expr = self.parse_expr()?;
16145
16146        let operator_class: Option<Ident> = if with_operator_class {
16147            // If none of the following keywords are present, parse an identifier
16148            // as the operator class.
16149            if self
16150                .peek_one_of_keywords(&[Keyword::ASC, Keyword::DESC, Keyword::NULLS, Keyword::WITH])
16151                .is_some()
16152            {
16153                None
16154            } else {
16155                self.maybe_parse(|parser| parser.parse_identifier())?
16156            }
16157        } else {
16158            None
16159        };
16160
16161        let options = self.parse_order_by_options()?;
16162
16163        let with_fill = if dialect_of!(self is ClickHouseDialect | GenericDialect)
16164            && self.parse_keywords(&[Keyword::WITH, Keyword::FILL])
16165        {
16166            Some(self.parse_with_fill()?)
16167        } else {
16168            None
16169        };
16170
16171        Ok((
16172            OrderByExpr {
16173                expr,
16174                options,
16175                with_fill,
16176            },
16177            operator_class,
16178        ))
16179    }
16180
16181    fn parse_order_by_options(&mut self) -> Result<OrderByOptions, ParserError> {
16182        let asc = self.parse_asc_desc();
16183
16184        let nulls_first = if self.parse_keywords(&[Keyword::NULLS, Keyword::FIRST]) {
16185            Some(true)
16186        } else if self.parse_keywords(&[Keyword::NULLS, Keyword::LAST]) {
16187            Some(false)
16188        } else {
16189            None
16190        };
16191
16192        Ok(OrderByOptions { asc, nulls_first })
16193    }
16194
16195    // Parse a WITH FILL clause (ClickHouse dialect),
16196    // which follows the WITH FILL keywords in an ORDER BY clause.
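    //
    // For example (ClickHouse-style; an illustrative sketch, names made up):
    //
    //   SELECT n FROM t ORDER BY n WITH FILL FROM 1 TO 10 STEP 2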
16197    pub fn parse_with_fill(&mut self) -> Result<WithFill, ParserError> {
16198        let from = if self.parse_keyword(Keyword::FROM) {
16199            Some(self.parse_expr()?)
16200        } else {
16201            None
16202        };
16203
16204        let to = if self.parse_keyword(Keyword::TO) {
16205            Some(self.parse_expr()?)
16206        } else {
16207            None
16208        };
16209
16210        let step = if self.parse_keyword(Keyword::STEP) {
16211            Some(self.parse_expr()?)
16212        } else {
16213            None
16214        };
16215
16216        Ok(WithFill { from, to, step })
16217    }
16218
16219    // Parse a set of comma-separated INTERPOLATE expressions (ClickHouse dialect)
16220    // that follow the INTERPOLATE keyword in an ORDER BY clause with the WITH FILL modifier.
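    //
    // For example (ClickHouse-style; an illustrative sketch, names made up):
    //
    //   SELECT n, x FROM t ORDER BY n WITH FILL INTERPOLATE (x AS x + 1)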
16221    pub fn parse_interpolations(&mut self) -> Result<Option<Interpolate>, ParserError> {
16222        if !self.parse_keyword(Keyword::INTERPOLATE) {
16223            return Ok(None);
16224        }
16225
16226        if self.consume_token(&Token::LParen) {
16227            let interpolations =
16228                self.parse_comma_separated0(|p| p.parse_interpolation(), Token::RParen)?;
16229            self.expect_token(&Token::RParen)?;
16230            // INTERPOLATE () and INTERPOLATE ( ... ) variants
16231            return Ok(Some(Interpolate {
16232                exprs: Some(interpolations),
16233            }));
16234        }
16235
16236        // INTERPOLATE
16237        Ok(Some(Interpolate { exprs: None }))
16238    }
16239
16240    // Parse an INTERPOLATE expression (ClickHouse dialect)
16241    pub fn parse_interpolation(&mut self) -> Result<InterpolateExpr, ParserError> {
16242        let column = self.parse_identifier()?;
16243        let expr = if self.parse_keyword(Keyword::AS) {
16244            Some(self.parse_expr()?)
16245        } else {
16246            None
16247        };
16248        Ok(InterpolateExpr { column, expr })
16249    }
16250
16251    /// Parse a TOP clause, the MSSQL equivalent of LIMIT,
16252    /// which follows `SELECT [DISTINCT]`.
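    ///
    /// For example (MSSQL-style; illustrative):
    /// ```sql
    /// SELECT TOP (10) PERCENT WITH TIES * FROM t ORDER BY score DESC;
    /// SELECT TOP 5 * FROM t;
    /// ```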
16253    pub fn parse_top(&mut self) -> Result<Top, ParserError> {
16254        let quantity = if self.consume_token(&Token::LParen) {
16255            let quantity = self.parse_expr()?;
16256            self.expect_token(&Token::RParen)?;
16257            Some(TopQuantity::Expr(quantity))
16258        } else {
16259            let next_token = self.next_token();
16260            let quantity = match next_token.token {
16261                Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start)?,
16262                _ => self.expected("literal int", next_token)?,
16263            };
16264            Some(TopQuantity::Constant(quantity))
16265        };
16266
16267        let percent = self.parse_keyword(Keyword::PERCENT);
16268
16269        let with_ties = self.parse_keywords(&[Keyword::WITH, Keyword::TIES]);
16270
16271        Ok(Top {
16272            with_ties,
16273            percent,
16274            quantity,
16275        })
16276    }
16277
16278    /// Parse a LIMIT clause
16279    pub fn parse_limit(&mut self) -> Result<Option<Expr>, ParserError> {
16280        if self.parse_keyword(Keyword::ALL) {
16281            Ok(None)
16282        } else {
16283            Ok(Some(self.parse_expr()?))
16284        }
16285    }
16286
16287    /// Parse an OFFSET clause
16288    pub fn parse_offset(&mut self) -> Result<Offset, ParserError> {
16289        let value = self.parse_expr()?;
16290        let rows = if self.parse_keyword(Keyword::ROW) {
16291            OffsetRows::Row
16292        } else if self.parse_keyword(Keyword::ROWS) {
16293            OffsetRows::Rows
16294        } else {
16295            OffsetRows::None
16296        };
16297        Ok(Offset { value, rows })
16298    }
16299
16300    /// Parse a FETCH clause
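    ///
    /// For example (illustrative):
    /// ```sql
    /// FETCH FIRST 10 ROWS ONLY
    /// ```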
16301    pub fn parse_fetch(&mut self) -> Result<Fetch, ParserError> {
16302        let _ = self.parse_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT]);
16303
16304        let (quantity, percent) = if self
16305            .parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])
16306            .is_some()
16307        {
16308            (None, false)
16309        } else {
16310            let quantity = Expr::Value(self.parse_value()?);
16311            let percent = self.parse_keyword(Keyword::PERCENT);
16312            let _ = self.parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS]);
16313            (Some(quantity), percent)
16314        };
16315
16316        let with_ties = if self.parse_keyword(Keyword::ONLY) {
16317            false
16318        } else {
16319            self.parse_keywords(&[Keyword::WITH, Keyword::TIES])
16320        };
16321
16322        Ok(Fetch {
16323            with_ties,
16324            percent,
16325            quantity,
16326        })
16327    }
16328
16329    /// Parse a FOR UPDATE/FOR SHARE clause
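    ///
    /// For example (illustrative; table name made up):
    /// ```sql
    /// FOR UPDATE OF t SKIP LOCKED
    /// ```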
16330    pub fn parse_lock(&mut self) -> Result<LockClause, ParserError> {
16331        let lock_type = match self.expect_one_of_keywords(&[Keyword::UPDATE, Keyword::SHARE])? {
16332            Keyword::UPDATE => LockType::Update,
16333            Keyword::SHARE => LockType::Share,
16334            _ => unreachable!(),
16335        };
16336        let of = if self.parse_keyword(Keyword::OF) {
16337            Some(self.parse_object_name(false)?)
16338        } else {
16339            None
16340        };
16341        let nonblock = if self.parse_keyword(Keyword::NOWAIT) {
16342            Some(NonBlock::Nowait)
16343        } else if self.parse_keywords(&[Keyword::SKIP, Keyword::LOCKED]) {
16344            Some(NonBlock::SkipLocked)
16345        } else {
16346            None
16347        };
16348        Ok(LockClause {
16349            lock_type,
16350            of,
16351            nonblock,
16352        })
16353    }
16354
16355    pub fn parse_values(&mut self, allow_empty: bool) -> Result<Values, ParserError> {
16356        let mut explicit_row = false;
16357
16358        let rows = self.parse_comma_separated(|parser| {
16359            if parser.parse_keyword(Keyword::ROW) {
16360                explicit_row = true;
16361            }
16362
16363            parser.expect_token(&Token::LParen)?;
16364            if allow_empty && parser.peek_token().token == Token::RParen {
16365                parser.next_token();
16366                Ok(vec![])
16367            } else {
16368                let exprs = parser.parse_comma_separated(Parser::parse_expr)?;
16369                parser.expect_token(&Token::RParen)?;
16370                Ok(exprs)
16371            }
16372        })?;
16373        Ok(Values { explicit_row, rows })
16374    }
16375
16376    pub fn parse_start_transaction(&mut self) -> Result<Statement, ParserError> {
16377        self.expect_keyword_is(Keyword::TRANSACTION)?;
16378        Ok(Statement::StartTransaction {
16379            modes: self.parse_transaction_modes()?,
16380            begin: false,
16381            transaction: Some(BeginTransactionKind::Transaction),
16382            modifier: None,
16383            statements: vec![],
16384            exception: None,
16385            has_end_keyword: false,
16386        })
16387    }
16388
16389    pub fn parse_begin(&mut self) -> Result<Statement, ParserError> {
16390        let modifier = if !self.dialect.supports_start_transaction_modifier() {
16391            None
16392        } else if self.parse_keyword(Keyword::DEFERRED) {
16393            Some(TransactionModifier::Deferred)
16394        } else if self.parse_keyword(Keyword::IMMEDIATE) {
16395            Some(TransactionModifier::Immediate)
16396        } else if self.parse_keyword(Keyword::EXCLUSIVE) {
16397            Some(TransactionModifier::Exclusive)
16398        } else if self.parse_keyword(Keyword::TRY) {
16399            Some(TransactionModifier::Try)
16400        } else if self.parse_keyword(Keyword::CATCH) {
16401            Some(TransactionModifier::Catch)
16402        } else {
16403            None
16404        };
16405        let transaction = match self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]) {
16406            Some(Keyword::TRANSACTION) => Some(BeginTransactionKind::Transaction),
16407            Some(Keyword::WORK) => Some(BeginTransactionKind::Work),
16408            _ => None,
16409        };
16410        Ok(Statement::StartTransaction {
16411            modes: self.parse_transaction_modes()?,
16412            begin: true,
16413            transaction,
16414            modifier,
16415            statements: vec![],
16416            exception: None,
16417            has_end_keyword: false,
16418        })
16419    }
16420
16421    pub fn parse_begin_exception_end(&mut self) -> Result<Statement, ParserError> {
16422        let statements = self.parse_statement_list(&[Keyword::EXCEPTION, Keyword::END])?;
16423
16424        let exception = if self.parse_keyword(Keyword::EXCEPTION) {
16425            let mut when = Vec::new();
16426
16427            // We can have multiple `WHEN` arms so we consume all cases until `END`
16428            while !self.peek_keyword(Keyword::END) {
16429                self.expect_keyword(Keyword::WHEN)?;
16430
16431                // Each `WHEN` case can have one or more conditions, e.g.
16432                // WHEN EXCEPTION_1 [OR EXCEPTION_2] THEN
16433                // So we parse identifiers until the `THEN` keyword.
16434                let mut idents = Vec::new();
16435
16436                while !self.parse_keyword(Keyword::THEN) {
16437                    let ident = self.parse_identifier()?;
16438                    idents.push(ident);
16439
16440                    self.maybe_parse(|p| p.expect_keyword(Keyword::OR))?;
16441                }
16442
16443                let statements = self.parse_statement_list(&[Keyword::WHEN, Keyword::END])?;
16444
16445                when.push(ExceptionWhen { idents, statements });
16446            }
16447
16448            Some(when)
16449        } else {
16450            None
16451        };
16452
16453        self.expect_keyword(Keyword::END)?;
16454
16455        Ok(Statement::StartTransaction {
16456            begin: true,
16457            statements,
16458            exception,
16459            has_end_keyword: true,
16460            transaction: None,
16461            modifier: None,
16462            modes: Default::default(),
16463        })
16464    }
16465
16466    pub fn parse_end(&mut self) -> Result<Statement, ParserError> {
16467        let modifier = if !self.dialect.supports_end_transaction_modifier() {
16468            None
16469        } else if self.parse_keyword(Keyword::TRY) {
16470            Some(TransactionModifier::Try)
16471        } else if self.parse_keyword(Keyword::CATCH) {
16472            Some(TransactionModifier::Catch)
16473        } else {
16474            None
16475        };
16476        Ok(Statement::Commit {
16477            chain: self.parse_commit_rollback_chain()?,
16478            end: true,
16479            modifier,
16480        })
16481    }
16482
16483    pub fn parse_transaction_modes(&mut self) -> Result<Vec<TransactionMode>, ParserError> {
16484        let mut modes = vec![];
16485        let mut required = false;
16486        loop {
16487            let mode = if self.parse_keywords(&[Keyword::ISOLATION, Keyword::LEVEL]) {
16488                let iso_level = if self.parse_keywords(&[Keyword::READ, Keyword::UNCOMMITTED]) {
16489                    TransactionIsolationLevel::ReadUncommitted
16490                } else if self.parse_keywords(&[Keyword::READ, Keyword::COMMITTED]) {
16491                    TransactionIsolationLevel::ReadCommitted
16492                } else if self.parse_keywords(&[Keyword::REPEATABLE, Keyword::READ]) {
16493                    TransactionIsolationLevel::RepeatableRead
16494                } else if self.parse_keyword(Keyword::SERIALIZABLE) {
16495                    TransactionIsolationLevel::Serializable
16496                } else if self.parse_keyword(Keyword::SNAPSHOT) {
16497                    TransactionIsolationLevel::Snapshot
16498                } else {
16499                    self.expected("isolation level", self.peek_token())?
16500                };
16501                TransactionMode::IsolationLevel(iso_level)
16502            } else if self.parse_keywords(&[Keyword::READ, Keyword::ONLY]) {
16503                TransactionMode::AccessMode(TransactionAccessMode::ReadOnly)
16504            } else if self.parse_keywords(&[Keyword::READ, Keyword::WRITE]) {
16505                TransactionMode::AccessMode(TransactionAccessMode::ReadWrite)
16506            } else if required {
16507                self.expected("transaction mode", self.peek_token())?
16508            } else {
16509                break;
16510            };
16511            modes.push(mode);
16512            // ANSI requires a comma after each transaction mode, but
16513            // PostgreSQL, for historical reasons, does not. We follow
16514            // PostgreSQL in making the comma optional, since that is strictly
16515            // more general.
16516            required = self.consume_token(&Token::Comma);
16517        }
16518        Ok(modes)
16519    }
16520
16521    pub fn parse_commit(&mut self) -> Result<Statement, ParserError> {
16522        Ok(Statement::Commit {
16523            chain: self.parse_commit_rollback_chain()?,
16524            end: false,
16525            modifier: None,
16526        })
16527    }
16528
16529    pub fn parse_rollback(&mut self) -> Result<Statement, ParserError> {
16530        let chain = self.parse_commit_rollback_chain()?;
16531        let savepoint = self.parse_rollback_savepoint()?;
16532
16533        Ok(Statement::Rollback { chain, savepoint })
16534    }
16535
16536    pub fn parse_commit_rollback_chain(&mut self) -> Result<bool, ParserError> {
16537        let _ = self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]);
16538        if self.parse_keyword(Keyword::AND) {
16539            let chain = !self.parse_keyword(Keyword::NO);
16540            self.expect_keyword_is(Keyword::CHAIN)?;
16541            Ok(chain)
16542        } else {
16543            Ok(false)
16544        }
16545    }
16546
16547    pub fn parse_rollback_savepoint(&mut self) -> Result<Option<Ident>, ParserError> {
16548        if self.parse_keyword(Keyword::TO) {
16549            let _ = self.parse_keyword(Keyword::SAVEPOINT);
16550            let savepoint = self.parse_identifier()?;
16551
16552            Ok(Some(savepoint))
16553        } else {
16554            Ok(None)
16555        }
16556    }
16557
16558    /// Parse a 'RAISERROR' statement
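    ///
    /// For example (T-SQL; an illustrative sketch):
    /// ```sql
    /// RAISERROR('The value is %d.', 16, 1, 42) WITH NOWAIT;
    /// ```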
16559    pub fn parse_raiserror(&mut self) -> Result<Statement, ParserError> {
16560        self.expect_token(&Token::LParen)?;
16561        let message = Box::new(self.parse_expr()?);
16562        self.expect_token(&Token::Comma)?;
16563        let severity = Box::new(self.parse_expr()?);
16564        self.expect_token(&Token::Comma)?;
16565        let state = Box::new(self.parse_expr()?);
16566        let arguments = if self.consume_token(&Token::Comma) {
16567            self.parse_comma_separated(Parser::parse_expr)?
16568        } else {
16569            vec![]
16570        };
16571        self.expect_token(&Token::RParen)?;
16572        let options = if self.parse_keyword(Keyword::WITH) {
16573            self.parse_comma_separated(Parser::parse_raiserror_option)?
16574        } else {
16575            vec![]
16576        };
16577        Ok(Statement::RaisError {
16578            message,
16579            severity,
16580            state,
16581            arguments,
16582            options,
16583        })
16584    }
16585
16586    pub fn parse_raiserror_option(&mut self) -> Result<RaisErrorOption, ParserError> {
16587        match self.expect_one_of_keywords(&[Keyword::LOG, Keyword::NOWAIT, Keyword::SETERROR])? {
16588            Keyword::LOG => Ok(RaisErrorOption::Log),
16589            Keyword::NOWAIT => Ok(RaisErrorOption::NoWait),
16590            Keyword::SETERROR => Ok(RaisErrorOption::SetError),
16591            _ => self.expected(
16592                "LOG, NOWAIT or SETERROR raiserror option",
16593                self.peek_token(),
16594            ),
16595        }
16596    }
16597
16598    pub fn parse_deallocate(&mut self) -> Result<Statement, ParserError> {
16599        let prepare = self.parse_keyword(Keyword::PREPARE);
16600        let name = self.parse_identifier()?;
16601        Ok(Statement::Deallocate { name, prepare })
16602    }
16603
16604    pub fn parse_execute(&mut self) -> Result<Statement, ParserError> {
16605        let name = if self.dialect.supports_execute_immediate()
16606            && self.parse_keyword(Keyword::IMMEDIATE)
16607        {
16608            None
16609        } else {
16610            let name = self.parse_object_name(false)?;
16611            Some(name)
16612        };
16613
16614        let has_parentheses = self.consume_token(&Token::LParen);
16615
16616        let end_kws = &[Keyword::USING, Keyword::OUTPUT, Keyword::DEFAULT];
16617        let end_token = match (has_parentheses, self.peek_token().token) {
16618            (true, _) => Token::RParen,
16619            (false, Token::EOF) => Token::EOF,
16620            (false, Token::Word(w)) if end_kws.contains(&w.keyword) => Token::Word(w),
16621            (false, _) => Token::SemiColon,
16622        };
16623
16624        let parameters = self.parse_comma_separated0(Parser::parse_expr, end_token)?;
16625
16626        if has_parentheses {
16627            self.expect_token(&Token::RParen)?;
16628        }
16629
16630        let into = if self.parse_keyword(Keyword::INTO) {
16631            self.parse_comma_separated(Self::parse_identifier)?
16632        } else {
16633            vec![]
16634        };
16635
16636        let using = if self.parse_keyword(Keyword::USING) {
16637            self.parse_comma_separated(Self::parse_expr_with_alias)?
16638        } else {
16639            vec![]
16640        };
16641
16642        let output = self.parse_keyword(Keyword::OUTPUT);
16643
16644        let default = self.parse_keyword(Keyword::DEFAULT);
16645
16646        Ok(Statement::Execute {
16647            immediate: name.is_none(),
16648            name,
16649            parameters,
16650            has_parentheses,
16651            into,
16652            using,
16653            output,
16654            default,
16655        })
16656    }
16657
16658    pub fn parse_prepare(&mut self) -> Result<Statement, ParserError> {
16659        let name = self.parse_identifier()?;
16660
16661        let mut data_types = vec![];
16662        if self.consume_token(&Token::LParen) {
16663            data_types = self.parse_comma_separated(Parser::parse_data_type)?;
16664            self.expect_token(&Token::RParen)?;
16665        }
16666
16667        self.expect_keyword_is(Keyword::AS)?;
16668        let statement = Box::new(self.parse_statement()?);
16669        Ok(Statement::Prepare {
16670            name,
16671            data_types,
16672            statement,
16673        })
16674    }
16675
16676    pub fn parse_unload(&mut self) -> Result<Statement, ParserError> {
16677        self.expect_keyword(Keyword::UNLOAD)?;
16678        self.expect_token(&Token::LParen)?;
16679        let (query, query_text) = if matches!(self.peek_token().token, Token::SingleQuotedString(_))
16680        {
16681            (None, Some(self.parse_literal_string()?))
16682        } else {
16683            (Some(self.parse_query()?), None)
16684        };
16685        self.expect_token(&Token::RParen)?;
16686
16687        self.expect_keyword_is(Keyword::TO)?;
16688        let to = self.parse_identifier()?;
16689        let auth = if self.parse_keyword(Keyword::IAM_ROLE) {
16690            Some(self.parse_iam_role_kind()?)
16691        } else {
16692            None
16693        };
16694        let with = self.parse_options(Keyword::WITH)?;
16695        let mut options = vec![];
16696        while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
16697            options.push(opt);
16698        }
16699        Ok(Statement::Unload {
16700            query,
16701            query_text,
16702            to,
16703            auth,
16704            with,
16705            options,
16706        })
16707    }
16708
16709    pub fn parse_merge_clauses(&mut self) -> Result<Vec<MergeClause>, ParserError> {
16710        let mut clauses = vec![];
16711        loop {
16712            if !(self.parse_keyword(Keyword::WHEN)) {
16713                break;
16714            }
16715
16716            let mut clause_kind = MergeClauseKind::Matched;
16717            if self.parse_keyword(Keyword::NOT) {
16718                clause_kind = MergeClauseKind::NotMatched;
16719            }
16720            self.expect_keyword_is(Keyword::MATCHED)?;
16721
16722            if matches!(clause_kind, MergeClauseKind::NotMatched)
16723                && self.parse_keywords(&[Keyword::BY, Keyword::SOURCE])
16724            {
16725                clause_kind = MergeClauseKind::NotMatchedBySource;
16726            } else if matches!(clause_kind, MergeClauseKind::NotMatched)
16727                && self.parse_keywords(&[Keyword::BY, Keyword::TARGET])
16728            {
16729                clause_kind = MergeClauseKind::NotMatchedByTarget;
16730            }
16731
16732            let predicate = if self.parse_keyword(Keyword::AND) {
16733                Some(self.parse_expr()?)
16734            } else {
16735                None
16736            };
16737
16738            self.expect_keyword_is(Keyword::THEN)?;
16739
16740            let merge_clause = match self.parse_one_of_keywords(&[
16741                Keyword::UPDATE,
16742                Keyword::INSERT,
16743                Keyword::DELETE,
16744            ]) {
16745                Some(Keyword::UPDATE) => {
16746                    if matches!(
16747                        clause_kind,
16748                        MergeClauseKind::NotMatched | MergeClauseKind::NotMatchedByTarget
16749                    ) {
16750                        return Err(ParserError::ParserError(format!(
16751                            "UPDATE is not allowed in a {clause_kind} merge clause"
16752                        )));
16753                    }
16754                    self.expect_keyword_is(Keyword::SET)?;
16755                    MergeAction::Update {
16756                        assignments: self.parse_comma_separated(Parser::parse_assignment)?,
16757                    }
16758                }
16759                Some(Keyword::DELETE) => {
16760                    if matches!(
16761                        clause_kind,
16762                        MergeClauseKind::NotMatched | MergeClauseKind::NotMatchedByTarget
16763                    ) {
16764                        return Err(ParserError::ParserError(format!(
16765                            "DELETE is not allowed in a {clause_kind} merge clause"
16766                        )));
16767                    }
16768                    MergeAction::Delete
16769                }
16770                Some(Keyword::INSERT) => {
16771                    if !matches!(
16772                        clause_kind,
16773                        MergeClauseKind::NotMatched | MergeClauseKind::NotMatchedByTarget
16774                    ) {
16775                        return Err(ParserError::ParserError(format!(
16776                            "INSERT is not allowed in a {clause_kind} merge clause"
16777                        )));
16778                    }
16779                    let is_mysql = dialect_of!(self is MySqlDialect);
16780
16781                    let columns = self.parse_parenthesized_column_list(Optional, is_mysql)?;
16782                    let kind = if dialect_of!(self is BigQueryDialect | GenericDialect)
16783                        && self.parse_keyword(Keyword::ROW)
16784                    {
16785                        MergeInsertKind::Row
16786                    } else {
16787                        self.expect_keyword_is(Keyword::VALUES)?;
16788                        let values = self.parse_values(is_mysql)?;
16789                        MergeInsertKind::Values(values)
16790                    };
16791                    MergeAction::Insert(MergeInsertExpr { columns, kind })
16792                }
16793                _ => {
16794                    return Err(ParserError::ParserError(
16795                        "expected UPDATE, DELETE or INSERT in merge clause".to_string(),
16796                    ));
16797                }
16798            };
16799            clauses.push(MergeClause {
16800                clause_kind,
16801                predicate,
16802                action: merge_clause,
16803            });
16804        }
16805        Ok(clauses)
16806    }
16807
16808    fn parse_output(&mut self, start_keyword: Keyword) -> Result<OutputClause, ParserError> {
16809        let select_items = self.parse_projection()?;
16810        let into_table = if start_keyword == Keyword::OUTPUT && self.peek_keyword(Keyword::INTO) {
16811            self.expect_keyword_is(Keyword::INTO)?;
16812            Some(self.parse_select_into()?)
16813        } else {
16814            None
16815        };
16816
16817        Ok(if start_keyword == Keyword::OUTPUT {
16818            OutputClause::Output {
16819                select_items,
16820                into_table,
16821            }
16822        } else {
16823            OutputClause::Returning { select_items }
16824        })
16825    }
16826
16827    fn parse_select_into(&mut self) -> Result<SelectInto, ParserError> {
16828        let temporary = self
16829            .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
16830            .is_some();
16831        let unlogged = self.parse_keyword(Keyword::UNLOGGED);
16832        let table = self.parse_keyword(Keyword::TABLE);
16833        let name = self.parse_object_name(false)?;
16834
16835        Ok(SelectInto {
16836            temporary,
16837            unlogged,
16838            table,
16839            name,
16840        })
16841    }
16842
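    /// A sketch of a MERGE statement this is intended to accept (table and column
    /// names are illustrative):
    /// ```sql
    /// MERGE INTO t USING s ON t.id = s.id
    ///     WHEN MATCHED THEN UPDATE SET t.v = s.v
    ///     WHEN NOT MATCHED THEN INSERT (id, v) VALUES (s.id, s.v);
    /// ```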
16843    pub fn parse_merge(&mut self) -> Result<Statement, ParserError> {
16844        let into = self.parse_keyword(Keyword::INTO);
16845
16846        let table = self.parse_table_factor()?;
16847
16848        self.expect_keyword_is(Keyword::USING)?;
16849        let source = self.parse_table_factor()?;
16850        self.expect_keyword_is(Keyword::ON)?;
16851        let on = self.parse_expr()?;
16852        let clauses = self.parse_merge_clauses()?;
16853        let output = match self.parse_one_of_keywords(&[Keyword::OUTPUT, Keyword::RETURNING]) {
16854            Some(start_keyword) => Some(self.parse_output(start_keyword)?),
16855            None => None,
16856        };
16857
16858        Ok(Statement::Merge {
16859            into,
16860            table,
16861            source,
16862            on: Box::new(on),
16863            clauses,
16864            output,
16865        })
16866    }
16867
16868    fn parse_pragma_value(&mut self) -> Result<Value, ParserError> {
16869        match self.parse_value()?.value {
16870            v @ Value::SingleQuotedString(_) => Ok(v),
16871            v @ Value::DoubleQuotedString(_) => Ok(v),
16872            v @ Value::Number(_, _) => Ok(v),
16873            v @ Value::Placeholder(_) => Ok(v),
16874            _ => {
16875                self.prev_token();
16876                self.expected("number or string or ? placeholder", self.peek_token())
16877            }
16878        }
16879    }
16880
16881    // PRAGMA [schema-name '.'] pragma-name [('=' pragma-value) | '(' pragma-value ')']
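    //
    // For example (SQLite-style; illustrative):
    //
    //   PRAGMA cache_size = 1000
    //   PRAGMA schema.journal_mode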
16882    pub fn parse_pragma(&mut self) -> Result<Statement, ParserError> {
16883        let name = self.parse_object_name(false)?;
16884        if self.consume_token(&Token::LParen) {
16885            let value = self.parse_pragma_value()?;
16886            self.expect_token(&Token::RParen)?;
16887            Ok(Statement::Pragma {
16888                name,
16889                value: Some(value),
16890                is_eq: false,
16891            })
16892        } else if self.consume_token(&Token::Eq) {
16893            Ok(Statement::Pragma {
16894                name,
16895                value: Some(self.parse_pragma_value()?),
16896                is_eq: true,
16897            })
16898        } else {
16899            Ok(Statement::Pragma {
16900                name,
16901                value: None,
16902                is_eq: false,
16903            })
16904        }
16905    }
16906
16907    /// `INSTALL [extension_name]`
16908    pub fn parse_install(&mut self) -> Result<Statement, ParserError> {
16909        let extension_name = self.parse_identifier()?;
16910
16911        Ok(Statement::Install { extension_name })
16912    }
16913
16914    /// Parse a SQL LOAD statement
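    ///
    /// Depending on the dialect this is either `LOAD extension_name` or a Hive-style
    /// `LOAD DATA`, e.g. (an illustrative sketch, paths and names made up):
    /// ```sql
    /// LOAD DATA LOCAL INPATH '/tmp/input.tsv' OVERWRITE INTO TABLE logs PARTITION (year = 2024);
    /// ```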
16915    pub fn parse_load(&mut self) -> Result<Statement, ParserError> {
16916        if self.dialect.supports_load_extension() {
16917            let extension_name = self.parse_identifier()?;
16918            Ok(Statement::Load { extension_name })
16919        } else if self.parse_keyword(Keyword::DATA) && self.dialect.supports_load_data() {
16920            let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
16921            self.expect_keyword_is(Keyword::INPATH)?;
16922            let inpath = self.parse_literal_string()?;
16923            let overwrite = self.parse_one_of_keywords(&[Keyword::OVERWRITE]).is_some();
16924            self.expect_keyword_is(Keyword::INTO)?;
16925            self.expect_keyword_is(Keyword::TABLE)?;
16926            let table_name = self.parse_object_name(false)?;
16927            let partitioned = self.parse_insert_partition()?;
16928            let table_format = self.parse_load_data_table_format()?;
16929            Ok(Statement::LoadData {
16930                local,
16931                inpath,
16932                overwrite,
16933                table_name,
16934                partitioned,
16935                table_format,
16936            })
16937        } else {
16938            self.expected(
16939                "`DATA` or an extension name after `LOAD`",
16940                self.peek_token(),
16941            )
16942        }
16943    }
16944
16945    /// ```sql
16946    /// OPTIMIZE TABLE [db.]name [ON CLUSTER cluster] [PARTITION partition | PARTITION ID 'partition_id'] [FINAL] [DEDUPLICATE [BY expression]]
16947    /// ```
16948    /// [ClickHouse](https://clickhouse.com/docs/en/sql-reference/statements/optimize)
16949    pub fn parse_optimize_table(&mut self) -> Result<Statement, ParserError> {
16950        self.expect_keyword_is(Keyword::TABLE)?;
16951        let name = self.parse_object_name(false)?;
16952        let on_cluster = self.parse_optional_on_cluster()?;
16953
16954        let partition = if self.parse_keyword(Keyword::PARTITION) {
16955            if self.parse_keyword(Keyword::ID) {
16956                Some(Partition::Identifier(self.parse_identifier()?))
16957            } else {
16958                Some(Partition::Expr(self.parse_expr()?))
16959            }
16960        } else {
16961            None
16962        };
16963
16964        let include_final = self.parse_keyword(Keyword::FINAL);
16965        let deduplicate = if self.parse_keyword(Keyword::DEDUPLICATE) {
16966            if self.parse_keyword(Keyword::BY) {
16967                Some(Deduplicate::ByExpression(self.parse_expr()?))
16968            } else {
16969                Some(Deduplicate::All)
16970            }
16971        } else {
16972            None
16973        };
16974
16975        Ok(Statement::OptimizeTable {
16976            name,
16977            on_cluster,
16978            partition,
16979            include_final,
16980            deduplicate,
16981        })
16982    }
16983
16984    /// ```sql
16985    /// CREATE [ { TEMPORARY | TEMP } ] SEQUENCE [ IF NOT EXISTS ] <sequence_name>
16986    /// ```
16987    ///
16988    /// See [Postgres docs](https://www.postgresql.org/docs/current/sql-createsequence.html) for more details.
16989    pub fn parse_create_sequence(&mut self, temporary: bool) -> Result<Statement, ParserError> {
16990        //[ IF NOT EXISTS ]
16991        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
16992        //name
16993        let name = self.parse_object_name(false)?;
16994        //[ AS data_type ]
16995        let mut data_type: Option<DataType> = None;
16996        if self.parse_keywords(&[Keyword::AS]) {
16997            data_type = Some(self.parse_data_type()?)
16998        }
16999        let sequence_options = self.parse_create_sequence_options()?;
17000        // [ OWNED BY { table_name.column_name | NONE } ]
17001        let owned_by = if self.parse_keywords(&[Keyword::OWNED, Keyword::BY]) {
17002            if self.parse_keywords(&[Keyword::NONE]) {
17003                Some(ObjectName::from(vec![Ident::new("NONE")]))
17004            } else {
17005                Some(self.parse_object_name(false)?)
17006            }
17007        } else {
17008            None
17009        };
17010        Ok(Statement::CreateSequence {
17011            temporary,
17012            if_not_exists,
17013            name,
17014            data_type,
17015            sequence_options,
17016            owned_by,
17017        })
17018    }
17019
17020    fn parse_create_sequence_options(&mut self) -> Result<Vec<SequenceOptions>, ParserError> {
17021        let mut sequence_options = vec![];
17022        //[ INCREMENT [ BY ] increment ]
17023        if self.parse_keywords(&[Keyword::INCREMENT]) {
17024            if self.parse_keywords(&[Keyword::BY]) {
17025                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, true));
17026            } else {
17027                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, false));
17028            }
17029        }
17030        //[ MINVALUE minvalue | NO MINVALUE ]
17031        if self.parse_keyword(Keyword::MINVALUE) {
17032            sequence_options.push(SequenceOptions::MinValue(Some(self.parse_number()?)));
17033        } else if self.parse_keywords(&[Keyword::NO, Keyword::MINVALUE]) {
17034            sequence_options.push(SequenceOptions::MinValue(None));
17035        }
17036        //[ MAXVALUE maxvalue | NO MAXVALUE ]
17037        if self.parse_keywords(&[Keyword::MAXVALUE]) {
17038            sequence_options.push(SequenceOptions::MaxValue(Some(self.parse_number()?)));
17039        } else if self.parse_keywords(&[Keyword::NO, Keyword::MAXVALUE]) {
17040            sequence_options.push(SequenceOptions::MaxValue(None));
17041        }
17042
17043        //[ START [ WITH ] start ]
17044        if self.parse_keywords(&[Keyword::START]) {
17045            if self.parse_keywords(&[Keyword::WITH]) {
17046                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, true));
17047            } else {
17048                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, false));
17049            }
17050        }
17051        //[ CACHE cache ]
17052        if self.parse_keywords(&[Keyword::CACHE]) {
17053            sequence_options.push(SequenceOptions::Cache(self.parse_number()?));
17054        }
17055        // [ [ NO ] CYCLE ]
17056        if self.parse_keywords(&[Keyword::NO, Keyword::CYCLE]) {
17057            sequence_options.push(SequenceOptions::Cycle(true));
17058        } else if self.parse_keywords(&[Keyword::CYCLE]) {
17059            sequence_options.push(SequenceOptions::Cycle(false));
17060        }
17061
17062        Ok(sequence_options)
17063    }
17064
17065    /// Parse a `CREATE SERVER` statement.
17066    ///
17067    /// See [Statement::CreateServer]
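    ///
    /// A sketch modeled on the PostgreSQL grammar (names and option values
    /// are illustrative):
    ///
    /// ```sql
    /// CREATE SERVER IF NOT EXISTS myserver
    ///     TYPE 'server_type'
    ///     VERSION 'server_version'
    ///     FOREIGN DATA WRAPPER postgres_fdw
    ///     OPTIONS (host 'foo', dbname 'foodb', port '5432')
    /// ```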
17068    pub fn parse_pg_create_server(&mut self) -> Result<Statement, ParserError> {
17069        let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
17070        let name = self.parse_object_name(false)?;
17071
17072        let server_type = if self.parse_keyword(Keyword::TYPE) {
17073            Some(self.parse_identifier()?)
17074        } else {
17075            None
17076        };
17077
17078        let version = if self.parse_keyword(Keyword::VERSION) {
17079            Some(self.parse_identifier()?)
17080        } else {
17081            None
17082        };
17083
17084        self.expect_keywords(&[Keyword::FOREIGN, Keyword::DATA, Keyword::WRAPPER])?;
17085        let foreign_data_wrapper = self.parse_object_name(false)?;
17086
17087        let mut options = None;
17088        if self.parse_keyword(Keyword::OPTIONS) {
17089            self.expect_token(&Token::LParen)?;
17090            options = Some(self.parse_comma_separated(|p| {
17091                let key = p.parse_identifier()?;
17092                let value = p.parse_identifier()?;
17093                Ok(CreateServerOption { key, value })
17094            })?);
17095            self.expect_token(&Token::RParen)?;
17096        }
17097
17098        Ok(Statement::CreateServer(CreateServerStatement {
17099            name,
17100            if_not_exists: ine,
17101            server_type,
17102            version,
17103            foreign_data_wrapper,
17104            options,
17105        }))
17106    }
17107
17108    /// The index of the first unprocessed token.
17109    pub fn index(&self) -> usize {
17110        self.index
17111    }
17112
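    /// Parse a single named window definition from a `WINDOW` clause,
    /// e.g. the `w AS (PARTITION BY y ORDER BY z)` part of (illustrative):
    ///
    /// ```sql
    /// SELECT SUM(x) OVER w FROM t WINDOW w AS (PARTITION BY y ORDER BY z)
    /// ```
    ///
    /// Some dialects also allow `w2 AS w1`, referencing another named window.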
17113    pub fn parse_named_window(&mut self) -> Result<NamedWindowDefinition, ParserError> {
17114        let ident = self.parse_identifier()?;
17115        self.expect_keyword_is(Keyword::AS)?;
17116
17117        let window_expr = if self.consume_token(&Token::LParen) {
17118            NamedWindowExpr::WindowSpec(self.parse_window_spec()?)
17119        } else if self.dialect.supports_window_clause_named_window_reference() {
17120            NamedWindowExpr::NamedWindow(self.parse_identifier()?)
17121        } else {
17122            return self.expected("(", self.peek_token());
17123        };
17124
17125        Ok(NamedWindowDefinition(ident, window_expr))
17126    }
17127
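    /// Parse the remainder of a `CREATE [OR ALTER] PROCEDURE` statement
    /// (the leading keywords are expected to have been consumed already).
    ///
    /// A rough, T-SQL-flavored sketch (names are illustrative):
    ///
    /// ```sql
    /// CREATE OR ALTER PROCEDURE my_proc (@a INT, @b INT)
    /// AS BEGIN
    ///     SELECT @a + @b;
    /// END
    /// ```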
17128    pub fn parse_create_procedure(&mut self, or_alter: bool) -> Result<Statement, ParserError> {
17129        let name = self.parse_object_name(false)?;
17130        let params = self.parse_optional_procedure_parameters()?;
17131
17132        let language = if self.parse_keyword(Keyword::LANGUAGE) {
17133            Some(self.parse_identifier()?)
17134        } else {
17135            None
17136        };
17137
17138        self.expect_keyword_is(Keyword::AS)?;
17139
17140        let body = self.parse_conditional_statements(&[Keyword::END])?;
17141
17142        Ok(Statement::CreateProcedure {
17143            name,
17144            or_alter,
17145            params,
17146            language,
17147            body,
17148        })
17149    }
17150
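    /// Parse a window specification, i.e. the body of an `OVER (...)` or of a
    /// named window definition. The opening parenthesis is expected to have
    /// been consumed by the caller; this method consumes the closing one.
    ///
    /// Roughly (illustrative):
    ///
    /// ```sql
    /// (PARTITION BY dept ORDER BY salary ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)
    /// ```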
17151    pub fn parse_window_spec(&mut self) -> Result<WindowSpec, ParserError> {
17152        let window_name = match self.peek_token().token {
17153            Token::Word(word) if word.keyword == Keyword::NoKeyword => {
17154                self.parse_optional_ident()?
17155            }
17156            _ => None,
17157        };
17158
17159        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
17160            self.parse_comma_separated(Parser::parse_expr)?
17161        } else {
17162            vec![]
17163        };
17164        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
17165            self.parse_comma_separated(Parser::parse_order_by_expr)?
17166        } else {
17167            vec![]
17168        };
17169
17170        let window_frame = if !self.consume_token(&Token::RParen) {
17171            let window_frame = self.parse_window_frame()?;
17172            self.expect_token(&Token::RParen)?;
17173            Some(window_frame)
17174        } else {
17175            None
17176        };
17177        Ok(WindowSpec {
17178            window_name,
17179            partition_by,
17180            order_by,
17181            window_frame,
17182        })
17183    }
17184
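    /// Parse the remainder of a `CREATE TYPE ... AS ...` statement
    /// (the leading `CREATE TYPE` keywords are expected to have been consumed).
    ///
    /// Roughly, for the composite and enum representations (illustrative):
    ///
    /// ```sql
    /// CREATE TYPE inventory_item AS (name TEXT, supplier_id INT, price NUMERIC)
    /// CREATE TYPE mood AS ENUM ('sad', 'ok', 'happy')
    /// ```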
17185    pub fn parse_create_type(&mut self) -> Result<Statement, ParserError> {
17186        let name = self.parse_object_name(false)?;
17187        self.expect_keyword_is(Keyword::AS)?;
17188
17189        if self.parse_keyword(Keyword::ENUM) {
17190            return self.parse_create_type_enum(name);
17191        }
17192
17193        let mut attributes = vec![];
17194        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
17195            return Ok(Statement::CreateType {
17196                name,
17197                representation: UserDefinedTypeRepresentation::Composite { attributes },
17198            });
17199        }
17200
17201        loop {
17202            let attr_name = self.parse_identifier()?;
17203            let attr_data_type = self.parse_data_type()?;
17204            let attr_collation = if self.parse_keyword(Keyword::COLLATE) {
17205                Some(self.parse_object_name(false)?)
17206            } else {
17207                None
17208            };
17209            attributes.push(UserDefinedTypeCompositeAttributeDef {
17210                name: attr_name,
17211                data_type: attr_data_type,
17212                collation: attr_collation,
17213            });
17214            let comma = self.consume_token(&Token::Comma);
17215            if self.consume_token(&Token::RParen) {
17216                // allow a trailing comma
17217                break;
17218            } else if !comma {
17219                return self.expected("',' or ')' after attribute definition", self.peek_token());
17220            }
17221        }
17222
17223        Ok(Statement::CreateType {
17224            name,
17225            representation: UserDefinedTypeRepresentation::Composite { attributes },
17226        })
17227    }
17228
17229    /// Parse remainder of `CREATE TYPE AS ENUM` statement (see [Statement::CreateType] and [Self::parse_create_type])
17230    ///
17231    /// See [PostgreSQL](https://www.postgresql.org/docs/current/sql-createtype.html)
17232    pub fn parse_create_type_enum(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
17233        self.expect_token(&Token::LParen)?;
17234        let labels = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
17235        self.expect_token(&Token::RParen)?;
17236
17237        Ok(Statement::CreateType {
17238            name,
17239            representation: UserDefinedTypeRepresentation::Enum { labels },
17240        })
17241    }
17242
17243    fn parse_parenthesized_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
17244        self.expect_token(&Token::LParen)?;
17245        let idents = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
17246        self.expect_token(&Token::RParen)?;
17247        Ok(idents)
17248    }
17249
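    /// Parse an optional MySQL-style column position (`FIRST` or `AFTER <column>`),
    /// as used at the end of column clauses such as (illustrative):
    ///
    /// ```sql
    /// ALTER TABLE t ADD COLUMN c INT AFTER b
    /// ```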
17250    fn parse_column_position(&mut self) -> Result<Option<MySQLColumnPosition>, ParserError> {
17251        if dialect_of!(self is MySqlDialect | GenericDialect) {
17252            if self.parse_keyword(Keyword::FIRST) {
17253                Ok(Some(MySQLColumnPosition::First))
17254            } else if self.parse_keyword(Keyword::AFTER) {
17255                let ident = self.parse_identifier()?;
17256                Ok(Some(MySQLColumnPosition::After(ident)))
17257            } else {
17258                Ok(None)
17259            }
17260        } else {
17261            Ok(None)
17262        }
17263    }
17264
17265    /// Parse [Statement::Print]
17266    fn parse_print(&mut self) -> Result<Statement, ParserError> {
17267        Ok(Statement::Print(PrintStatement {
17268            message: Box::new(self.parse_expr()?),
17269        }))
17270    }
17271
17272    /// Parse [Statement::Return]
17273    fn parse_return(&mut self) -> Result<Statement, ParserError> {
17274        match self.maybe_parse(|p| p.parse_expr())? {
17275            Some(expr) => Ok(Statement::Return(ReturnStatement {
17276                value: Some(ReturnStatementValue::Expr(expr)),
17277            })),
17278            None => Ok(Statement::Return(ReturnStatement { value: None })),
17279        }
17280    }
17281
17282    /// Parse an `EXPORT DATA` statement.
17283    ///
17284    /// See [Statement::ExportData]
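    ///
    /// A BigQuery-flavored sketch (connection, options, and query are illustrative):
    ///
    /// ```sql
    /// EXPORT DATA WITH CONNECTION myproject.us.myconnection
    /// OPTIONS (uri = 'gs://bucket/path/*.csv', format = 'CSV', overwrite = true)
    /// AS SELECT field1, field2 FROM mydataset.mytable
    /// ```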
17285    fn parse_export_data(&mut self) -> Result<Statement, ParserError> {
17286        self.expect_keywords(&[Keyword::EXPORT, Keyword::DATA])?;
17287
17288        let connection = if self.parse_keywords(&[Keyword::WITH, Keyword::CONNECTION]) {
17289            Some(self.parse_object_name(false)?)
17290        } else {
17291            None
17292        };
17293        self.expect_keyword(Keyword::OPTIONS)?;
17294        self.expect_token(&Token::LParen)?;
17295        let options = self.parse_comma_separated(|p| p.parse_sql_option())?;
17296        self.expect_token(&Token::RParen)?;
17297        self.expect_keyword(Keyword::AS)?;
17298        let query = self.parse_query()?;
17299        Ok(Statement::ExportData(ExportData {
17300            options,
17301            query,
17302            connection,
17303        }))
17304    }
17305
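    /// Parse a `VACUUM` statement.
    ///
    /// A Redshift-flavored sketch of the accepted options (table names and
    /// thresholds are illustrative):
    ///
    /// ```sql
    /// VACUUM FULL
    /// VACUUM SORT ONLY sales TO 75 PERCENT
    /// VACUUM DELETE ONLY sales BOOST
    /// ```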
17306    fn parse_vacuum(&mut self) -> Result<Statement, ParserError> {
17307        self.expect_keyword(Keyword::VACUUM)?;
17308        let full = self.parse_keyword(Keyword::FULL);
17309        let sort_only = self.parse_keywords(&[Keyword::SORT, Keyword::ONLY]);
17310        let delete_only = self.parse_keywords(&[Keyword::DELETE, Keyword::ONLY]);
17311        let reindex = self.parse_keyword(Keyword::REINDEX);
17312        let recluster = self.parse_keyword(Keyword::RECLUSTER);
17313        let (table_name, threshold, boost) =
17314            match self.maybe_parse(|p| p.parse_object_name(false))? {
17315                Some(table_name) => {
17316                    let threshold = if self.parse_keyword(Keyword::TO) {
17317                        let value = self.parse_value()?;
17318                        self.expect_keyword(Keyword::PERCENT)?;
17319                        Some(value.value)
17320                    } else {
17321                        None
17322                    };
17323                    let boost = self.parse_keyword(Keyword::BOOST);
17324                    (Some(table_name), threshold, boost)
17325                }
17326                _ => (None, None, false),
17327            };
17328        Ok(Statement::Vacuum(VacuumStatement {
17329            full,
17330            sort_only,
17331            delete_only,
17332            reindex,
17333            recluster,
17334            table_name,
17335            threshold,
17336            boost,
17337        }))
17338    }
17339
17340    /// Consume the parser and return its underlying token buffer
17341    pub fn into_tokens(self) -> Vec<TokenWithSpan> {
17342        self.tokens
17343    }
17344
17345    /// Returns true if the next keyword indicates a subquery, i.e. `SELECT` or `WITH`
17346    fn peek_sub_query(&mut self) -> bool {
17347        if self
17348            .parse_one_of_keywords(&[Keyword::SELECT, Keyword::WITH])
17349            .is_some()
17350        {
17351            self.prev_token();
17352            return true;
17353        }
17354        false
17355    }
17356
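    /// Parse the trailing options of a `SHOW` statement, e.g. in a
    /// Snowflake-flavored form such as (names and values are illustrative):
    ///
    /// ```sql
    /// SHOW TABLES LIKE '%ORDERS%' IN DATABASE my_db STARTS WITH 'T' LIMIT 10 FROM 'LAST_TABLE'
    /// ```
    ///
    /// Whether the `LIKE` filter may appear before the `IN`/`FROM` clause is
    /// dialect-dependent.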
17357    pub(crate) fn parse_show_stmt_options(&mut self) -> Result<ShowStatementOptions, ParserError> {
17358        let show_in;
17359        let mut filter_position = None;
17360        if self.dialect.supports_show_like_before_in() {
17361            if let Some(filter) = self.parse_show_statement_filter()? {
17362                filter_position = Some(ShowStatementFilterPosition::Infix(filter));
17363            }
17364            show_in = self.maybe_parse_show_stmt_in()?;
17365        } else {
17366            show_in = self.maybe_parse_show_stmt_in()?;
17367            if let Some(filter) = self.parse_show_statement_filter()? {
17368                filter_position = Some(ShowStatementFilterPosition::Suffix(filter));
17369            }
17370        }
17371        let starts_with = self.maybe_parse_show_stmt_starts_with()?;
17372        let limit = self.maybe_parse_show_stmt_limit()?;
17373        let from = self.maybe_parse_show_stmt_from()?;
17374        Ok(ShowStatementOptions {
17375            filter_position,
17376            show_in,
17377            starts_with,
17378            limit,
17379            limit_from: from,
17380        })
17381    }
17382
17383    fn maybe_parse_show_stmt_in(&mut self) -> Result<Option<ShowStatementIn>, ParserError> {
17384        let clause = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) {
17385            Some(Keyword::FROM) => ShowStatementInClause::FROM,
17386            Some(Keyword::IN) => ShowStatementInClause::IN,
17387            None => return Ok(None),
17388            _ => return self.expected("FROM or IN", self.peek_token()),
17389        };
17390
17391        let (parent_type, parent_name) = match self.parse_one_of_keywords(&[
17392            Keyword::ACCOUNT,
17393            Keyword::DATABASE,
17394            Keyword::SCHEMA,
17395            Keyword::TABLE,
17396            Keyword::VIEW,
17397        ]) {
17398            // If we see these next keywords it means we don't have a parent name
17399            Some(Keyword::DATABASE)
17400                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
17401                    || self.peek_keyword(Keyword::LIMIT) =>
17402            {
17403                (Some(ShowStatementInParentType::Database), None)
17404            }
17405            Some(Keyword::SCHEMA)
17406                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
17407                    || self.peek_keyword(Keyword::LIMIT) =>
17408            {
17409                (Some(ShowStatementInParentType::Schema), None)
17410            }
17411            Some(parent_kw) => {
17412                // The parent name here is still optional, for example:
17413                // SHOW TABLES IN ACCOUNT, so parsing the object name
17414                // may fail because the statement ends.
17415                let parent_name = self.maybe_parse(|p| p.parse_object_name(false))?;
17416                match parent_kw {
17417                    Keyword::ACCOUNT => (Some(ShowStatementInParentType::Account), parent_name),
17418                    Keyword::DATABASE => (Some(ShowStatementInParentType::Database), parent_name),
17419                    Keyword::SCHEMA => (Some(ShowStatementInParentType::Schema), parent_name),
17420                    Keyword::TABLE => (Some(ShowStatementInParentType::Table), parent_name),
17421                    Keyword::VIEW => (Some(ShowStatementInParentType::View), parent_name),
17422                    _ => {
17423                        return self.expected(
17424                            "one of ACCOUNT, DATABASE, SCHEMA, TABLE or VIEW",
17425                            self.peek_token(),
17426                        )
17427                    }
17428                }
17429            }
17430            None => {
17431                // Parse the MySQL-style `FROM tbl_name FROM db_name`,
17432                // which is equivalent to `FROM db_name.tbl_name`
17433                let mut parent_name = self.parse_object_name(false)?;
17434                if self
17435                    .parse_one_of_keywords(&[Keyword::FROM, Keyword::IN])
17436                    .is_some()
17437                {
17438                    parent_name
17439                        .0
17440                        .insert(0, ObjectNamePart::Identifier(self.parse_identifier()?));
17441                }
17442                (None, Some(parent_name))
17443            }
17444        };
17445
17446        Ok(Some(ShowStatementIn {
17447            clause,
17448            parent_type,
17449            parent_name,
17450        }))
17451    }
17452
17453    fn maybe_parse_show_stmt_starts_with(&mut self) -> Result<Option<Value>, ParserError> {
17454        if self.parse_keywords(&[Keyword::STARTS, Keyword::WITH]) {
17455            Ok(Some(self.parse_value()?.value))
17456        } else {
17457            Ok(None)
17458        }
17459    }
17460
17461    fn maybe_parse_show_stmt_limit(&mut self) -> Result<Option<Expr>, ParserError> {
17462        if self.parse_keyword(Keyword::LIMIT) {
17463            Ok(self.parse_limit()?)
17464        } else {
17465            Ok(None)
17466        }
17467    }
17468
17469    fn maybe_parse_show_stmt_from(&mut self) -> Result<Option<Value>, ParserError> {
17470        if self.parse_keyword(Keyword::FROM) {
17471            Ok(Some(self.parse_value()?.value))
17472        } else {
17473            Ok(None)
17474        }
17475    }
17476
17477    pub(crate) fn in_column_definition_state(&self) -> bool {
17478        matches!(self.state, ColumnDefinition)
17479    }
17480
17481    /// Parses options provided in key-value format.
17482    ///
17483    /// * `parenthesized` - true if the options are enclosed in parentheses
17484    /// * `end_words` - a list of keywords, any of which indicates the end of the options section
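    ///
    /// For example, a parenthesized, Snowflake-flavored options list might look
    /// like (keys and values are illustrative; values may be strings, numbers,
    /// booleans, or bare words, and commas between options are optional):
    ///
    /// ```sql
    /// (TYPE = CSV FIELD_DELIMITER = '|' SKIP_HEADER = 1, TRIM_SPACE = TRUE)
    /// ```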
17485    pub(crate) fn parse_key_value_options(
17486        &mut self,
17487        parenthesized: bool,
17488        end_words: &[Keyword],
17489    ) -> Result<Vec<KeyValueOption>, ParserError> {
17490        let mut options: Vec<KeyValueOption> = Vec::new();
17491        if parenthesized {
17492            self.expect_token(&Token::LParen)?;
17493        }
17494        loop {
17495            match self.next_token().token {
17496                Token::RParen => {
17497                    if parenthesized {
17498                        break;
17499                    } else {
17500                        return self.expected("another option or EOF", self.peek_token());
17501                    }
17502                }
17503                Token::EOF => break,
17504                Token::Comma => continue,
17505                Token::Word(w) if !end_words.contains(&w.keyword) => {
17506                    options.push(self.parse_key_value_option(w)?)
17507                }
17508                Token::Word(w) if end_words.contains(&w.keyword) => {
17509                    self.prev_token();
17510                    break;
17511                }
17512                _ => return self.expected("another option, EOF, ',' or ')'", self.peek_token()),
17513            };
17514        }
17515        Ok(options)
17516    }
17517
17518    /// Parses a `KEY = VALUE` construct based on the specified key
17519    pub(crate) fn parse_key_value_option(
17520        &mut self,
17521        key: Word,
17522    ) -> Result<KeyValueOption, ParserError> {
17523        self.expect_token(&Token::Eq)?;
17524        match self.next_token().token {
17525            Token::SingleQuotedString(value) => Ok(KeyValueOption {
17526                option_name: key.value,
17527                option_type: KeyValueOptionType::STRING,
17528                value,
17529            }),
17530            Token::Word(word)
17531                if word.keyword == Keyword::TRUE || word.keyword == Keyword::FALSE =>
17532            {
17533                Ok(KeyValueOption {
17534                    option_name: key.value,
17535                    option_type: KeyValueOptionType::BOOLEAN,
17536                    value: word.value.to_uppercase(),
17537                })
17538            }
17539            Token::Word(word) => Ok(KeyValueOption {
17540                option_name: key.value,
17541                option_type: KeyValueOptionType::ENUM,
17542                value: word.value,
17543            }),
17544            Token::Number(n, _) => Ok(KeyValueOption {
17545                option_name: key.value,
17546                option_type: KeyValueOptionType::NUMBER,
17547                value: n,
17548            }),
17549            _ => self.expected("option value", self.peek_token()),
17550        }
17551    }
17552}
17553
17554fn maybe_prefixed_expr(expr: Expr, prefix: Option<Ident>) -> Expr {
17555    if let Some(prefix) = prefix {
17556        Expr::Prefixed {
17557            prefix,
17558            value: Box::new(expr),
17559        }
17560    } else {
17561        expr
17562    }
17563}
17564
17565impl Word {
17566    #[deprecated(since = "0.54.0", note = "please use `into_ident` instead")]
17567    pub fn to_ident(&self, span: Span) -> Ident {
17568        Ident {
17569            value: self.value.clone(),
17570            quote_style: self.quote_style,
17571            span,
17572        }
17573    }
17574
17575    /// Convert this word into an [`Ident`] identifier
17576    pub fn into_ident(self, span: Span) -> Ident {
17577        Ident {
17578            value: self.value,
17579            quote_style: self.quote_style,
17580            span,
17581        }
17582    }
17583}
17584
17585#[cfg(test)]
17586mod tests {
17587    use crate::test_utils::{all_dialects, TestedDialects};
17588
17589    use super::*;
17590
17591    #[test]
17592    fn test_prev_index() {
17593        let sql = "SELECT version";
17594        all_dialects().run_parser_method(sql, |parser| {
17595            assert_eq!(parser.peek_token(), Token::make_keyword("SELECT"));
17596            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
17597            parser.prev_token();
17598            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
17599            assert_eq!(parser.next_token(), Token::make_word("version", None));
17600            parser.prev_token();
17601            assert_eq!(parser.peek_token(), Token::make_word("version", None));
17602            assert_eq!(parser.next_token(), Token::make_word("version", None));
17603            assert_eq!(parser.peek_token(), Token::EOF);
17604            parser.prev_token();
17605            assert_eq!(parser.next_token(), Token::make_word("version", None));
17606            assert_eq!(parser.next_token(), Token::EOF);
17607            assert_eq!(parser.next_token(), Token::EOF);
17608            parser.prev_token();
17609        });
17610    }
17611
17612    #[test]
17613    fn test_peek_tokens() {
17614        all_dialects().run_parser_method("SELECT foo AS bar FROM baz", |parser| {
17615            assert!(matches!(
17616                parser.peek_tokens(),
17617                [Token::Word(Word {
17618                    keyword: Keyword::SELECT,
17619                    ..
17620                })]
17621            ));
17622
17623            assert!(matches!(
17624                parser.peek_tokens(),
17625                [
17626                    Token::Word(Word {
17627                        keyword: Keyword::SELECT,
17628                        ..
17629                    }),
17630                    Token::Word(_),
17631                    Token::Word(Word {
17632                        keyword: Keyword::AS,
17633                        ..
17634                    }),
17635                ]
17636            ));
17637
17638            for _ in 0..4 {
17639                parser.next_token();
17640            }
17641
17642            assert!(matches!(
17643                parser.peek_tokens(),
17644                [
17645                    Token::Word(Word {
17646                        keyword: Keyword::FROM,
17647                        ..
17648                    }),
17649                    Token::Word(_),
17650                    Token::EOF,
17651                    Token::EOF,
17652                ]
17653            ))
17654        })
17655    }
17656
17657    #[cfg(test)]
17658    mod test_parse_data_type {
17659        use crate::ast::{
17660            CharLengthUnits, CharacterLength, DataType, ExactNumberInfo, ObjectName, TimezoneInfo,
17661        };
17662        use crate::dialect::{AnsiDialect, GenericDialect, PostgreSqlDialect};
17663        use crate::test_utils::TestedDialects;
17664
17665        macro_rules! test_parse_data_type {
17666            ($dialect:expr, $input:expr, $expected_type:expr $(,)?) => {{
17667                $dialect.run_parser_method(&*$input, |parser| {
17668                    let data_type = parser.parse_data_type().unwrap();
17669                    assert_eq!($expected_type, data_type);
17670                    assert_eq!($input.to_string(), data_type.to_string());
17671                });
17672            }};
17673        }
17674
17675        #[test]
17676        fn test_ansii_character_string_types() {
17677            // Character string types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#character-string-type>
17678            let dialect =
17679                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
17680
17681            test_parse_data_type!(dialect, "CHARACTER", DataType::Character(None));
17682
17683            test_parse_data_type!(
17684                dialect,
17685                "CHARACTER(20)",
17686                DataType::Character(Some(CharacterLength::IntegerLength {
17687                    length: 20,
17688                    unit: None
17689                }))
17690            );
17691
17692            test_parse_data_type!(
17693                dialect,
17694                "CHARACTER(20 CHARACTERS)",
17695                DataType::Character(Some(CharacterLength::IntegerLength {
17696                    length: 20,
17697                    unit: Some(CharLengthUnits::Characters)
17698                }))
17699            );
17700
17701            test_parse_data_type!(
17702                dialect,
17703                "CHARACTER(20 OCTETS)",
17704                DataType::Character(Some(CharacterLength::IntegerLength {
17705                    length: 20,
17706                    unit: Some(CharLengthUnits::Octets)
17707                }))
17708            );
17709
17710            test_parse_data_type!(dialect, "CHAR", DataType::Char(None));
17711
17712            test_parse_data_type!(
17713                dialect,
17714                "CHAR(20)",
17715                DataType::Char(Some(CharacterLength::IntegerLength {
17716                    length: 20,
17717                    unit: None
17718                }))
17719            );
17720
17721            test_parse_data_type!(
17722                dialect,
17723                "CHAR(20 CHARACTERS)",
17724                DataType::Char(Some(CharacterLength::IntegerLength {
17725                    length: 20,
17726                    unit: Some(CharLengthUnits::Characters)
17727                }))
17728            );
17729
17730            test_parse_data_type!(
17731                dialect,
17732                "CHAR(20 OCTETS)",
17733                DataType::Char(Some(CharacterLength::IntegerLength {
17734                    length: 20,
17735                    unit: Some(CharLengthUnits::Octets)
17736                }))
17737            );
17738
17739            test_parse_data_type!(
17740                dialect,
17741                "CHARACTER VARYING(20)",
17742                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
17743                    length: 20,
17744                    unit: None
17745                }))
17746            );
17747
17748            test_parse_data_type!(
17749                dialect,
17750                "CHARACTER VARYING(20 CHARACTERS)",
17751                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
17752                    length: 20,
17753                    unit: Some(CharLengthUnits::Characters)
17754                }))
17755            );
17756
17757            test_parse_data_type!(
17758                dialect,
17759                "CHARACTER VARYING(20 OCTETS)",
17760                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
17761                    length: 20,
17762                    unit: Some(CharLengthUnits::Octets)
17763                }))
17764            );
17765
17766            test_parse_data_type!(
17767                dialect,
17768                "CHAR VARYING(20)",
17769                DataType::CharVarying(Some(CharacterLength::IntegerLength {
17770                    length: 20,
17771                    unit: None
17772                }))
17773            );
17774
17775            test_parse_data_type!(
17776                dialect,
17777                "CHAR VARYING(20 CHARACTERS)",
17778                DataType::CharVarying(Some(CharacterLength::IntegerLength {
17779                    length: 20,
17780                    unit: Some(CharLengthUnits::Characters)
17781                }))
17782            );
17783
17784            test_parse_data_type!(
17785                dialect,
17786                "CHAR VARYING(20 OCTETS)",
17787                DataType::CharVarying(Some(CharacterLength::IntegerLength {
17788                    length: 20,
17789                    unit: Some(CharLengthUnits::Octets)
17790                }))
17791            );
17792
17793            test_parse_data_type!(
17794                dialect,
17795                "VARCHAR(20)",
17796                DataType::Varchar(Some(CharacterLength::IntegerLength {
17797                    length: 20,
17798                    unit: None
17799                }))
17800            );
17801        }
17802
17803        #[test]
17804        fn test_ansii_character_large_object_types() {
17805            // Character large object types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#character-large-object-length>
17806            let dialect =
17807                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
17808
17809            test_parse_data_type!(
17810                dialect,
17811                "CHARACTER LARGE OBJECT",
17812                DataType::CharacterLargeObject(None)
17813            );
17814            test_parse_data_type!(
17815                dialect,
17816                "CHARACTER LARGE OBJECT(20)",
17817                DataType::CharacterLargeObject(Some(20))
17818            );
17819
17820            test_parse_data_type!(
17821                dialect,
17822                "CHAR LARGE OBJECT",
17823                DataType::CharLargeObject(None)
17824            );
17825            test_parse_data_type!(
17826                dialect,
17827                "CHAR LARGE OBJECT(20)",
17828                DataType::CharLargeObject(Some(20))
17829            );
17830
17831            test_parse_data_type!(dialect, "CLOB", DataType::Clob(None));
17832            test_parse_data_type!(dialect, "CLOB(20)", DataType::Clob(Some(20)));
17833        }
17834
17835        #[test]
17836        fn test_parse_custom_types() {
17837            let dialect =
17838                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
17839
17840            test_parse_data_type!(
17841                dialect,
17842                "GEOMETRY",
17843                DataType::Custom(ObjectName::from(vec!["GEOMETRY".into()]), vec![])
17844            );
17845
17846            test_parse_data_type!(
17847                dialect,
17848                "GEOMETRY(POINT)",
17849                DataType::Custom(
17850                    ObjectName::from(vec!["GEOMETRY".into()]),
17851                    vec!["POINT".to_string()]
17852                )
17853            );
17854
17855            test_parse_data_type!(
17856                dialect,
17857                "GEOMETRY(POINT, 4326)",
17858                DataType::Custom(
17859                    ObjectName::from(vec!["GEOMETRY".into()]),
17860                    vec!["POINT".to_string(), "4326".to_string()]
17861                )
17862            );
17863        }
17864
17865        #[test]
17866        fn test_ansii_exact_numeric_types() {
17867            // Exact numeric types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#exact-numeric-type>
17868            let dialect = TestedDialects::new(vec![
17869                Box::new(GenericDialect {}),
17870                Box::new(AnsiDialect {}),
17871                Box::new(PostgreSqlDialect {}),
17872            ]);
17873
17874            test_parse_data_type!(dialect, "NUMERIC", DataType::Numeric(ExactNumberInfo::None));
17875
17876            test_parse_data_type!(
17877                dialect,
17878                "NUMERIC(2)",
17879                DataType::Numeric(ExactNumberInfo::Precision(2))
17880            );
17881
17882            test_parse_data_type!(
17883                dialect,
17884                "NUMERIC(2,10)",
17885                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(2, 10))
17886            );
17887
17888            test_parse_data_type!(dialect, "DECIMAL", DataType::Decimal(ExactNumberInfo::None));
17889
17890            test_parse_data_type!(
17891                dialect,
17892                "DECIMAL(2)",
17893                DataType::Decimal(ExactNumberInfo::Precision(2))
17894            );
17895
17896            test_parse_data_type!(
17897                dialect,
17898                "DECIMAL(2,10)",
17899                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(2, 10))
17900            );
17901
17902            test_parse_data_type!(dialect, "DEC", DataType::Dec(ExactNumberInfo::None));
17903
17904            test_parse_data_type!(
17905                dialect,
17906                "DEC(2)",
17907                DataType::Dec(ExactNumberInfo::Precision(2))
17908            );
17909
17910            test_parse_data_type!(
17911                dialect,
17912                "DEC(2,10)",
17913                DataType::Dec(ExactNumberInfo::PrecisionAndScale(2, 10))
17914            );
17915
17916            // Test negative scale values.
17917            test_parse_data_type!(
17918                dialect,
17919                "NUMERIC(10,-2)",
17920                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -2))
17921            );
17922
17923            test_parse_data_type!(
17924                dialect,
17925                "DECIMAL(1000,-10)",
17926                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(1000, -10))
17927            );
17928
17929            test_parse_data_type!(
17930                dialect,
17931                "DEC(5,-1000)",
17932                DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -1000))
17933            );
17934
17935            test_parse_data_type!(
17936                dialect,
17937                "NUMERIC(10,-5)",
17938                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -5))
17939            );
17940
17941            test_parse_data_type!(
17942                dialect,
17943                "DECIMAL(20,-10)",
17944                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(20, -10))
17945            );
17946
17947            test_parse_data_type!(
17948                dialect,
17949                "DEC(5,-2)",
17950                DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -2))
17951            );
17952
17953            dialect.run_parser_method("NUMERIC(10,+5)", |parser| {
17954                let data_type = parser.parse_data_type().unwrap();
17955                assert_eq!(
17956                    DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, 5)),
17957                    data_type
17958                );
17959                // Note: Explicit '+' sign is not preserved in output, which is correct
17960                assert_eq!("NUMERIC(10,5)", data_type.to_string());
17961            });
17962        }
17963
17964        #[test]
17965        fn test_ansii_date_type() {
17966            // Datetime types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#datetime-type>
17967            let dialect =
17968                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
17969
17970            test_parse_data_type!(dialect, "DATE", DataType::Date);
17971
17972            test_parse_data_type!(dialect, "TIME", DataType::Time(None, TimezoneInfo::None));
17973
17974            test_parse_data_type!(
17975                dialect,
17976                "TIME(6)",
17977                DataType::Time(Some(6), TimezoneInfo::None)
17978            );
17979
17980            test_parse_data_type!(
17981                dialect,
17982                "TIME WITH TIME ZONE",
17983                DataType::Time(None, TimezoneInfo::WithTimeZone)
17984            );
17985
17986            test_parse_data_type!(
17987                dialect,
17988                "TIME(6) WITH TIME ZONE",
17989                DataType::Time(Some(6), TimezoneInfo::WithTimeZone)
17990            );
17991
17992            test_parse_data_type!(
17993                dialect,
17994                "TIME WITHOUT TIME ZONE",
17995                DataType::Time(None, TimezoneInfo::WithoutTimeZone)
17996            );
17997
17998            test_parse_data_type!(
17999                dialect,
18000                "TIME(6) WITHOUT TIME ZONE",
18001                DataType::Time(Some(6), TimezoneInfo::WithoutTimeZone)
18002            );
18003
18004            test_parse_data_type!(
18005                dialect,
18006                "TIMESTAMP",
18007                DataType::Timestamp(None, TimezoneInfo::None)
18008            );
18009
18010            test_parse_data_type!(
18011                dialect,
18012                "TIMESTAMP(22)",
18013                DataType::Timestamp(Some(22), TimezoneInfo::None)
18014            );
18015
18016            test_parse_data_type!(
18017                dialect,
18018                "TIMESTAMP(22) WITH TIME ZONE",
18019                DataType::Timestamp(Some(22), TimezoneInfo::WithTimeZone)
18020            );
18021
18022            test_parse_data_type!(
18023                dialect,
18024                "TIMESTAMP(33) WITHOUT TIME ZONE",
18025                DataType::Timestamp(Some(33), TimezoneInfo::WithoutTimeZone)
18026            );
18027        }
18028    }
18029
18030    #[test]
18031    fn test_parse_schema_name() {
18032        // The expected name should be identical to the input name; that's why the macro doesn't take a separate expected string
18033        macro_rules! test_parse_schema_name {
18034            ($input:expr, $expected_name:expr $(,)?) => {{
18035                all_dialects().run_parser_method(&*$input, |parser| {
18036                    let schema_name = parser.parse_schema_name().unwrap();
18037                    // Validate that the structure is the same as expected
18038                    assert_eq!(schema_name, $expected_name);
18039                    // Validate that the input and the expected structure serialization are the same
18040                    assert_eq!(schema_name.to_string(), $input.to_string());
18041                });
18042            }};
18043        }
18044
18045        let dummy_name = ObjectName::from(vec![Ident::new("dummy_name")]);
18046        let dummy_authorization = Ident::new("dummy_authorization");
18047
18048        test_parse_schema_name!(
18049            format!("{dummy_name}"),
18050            SchemaName::Simple(dummy_name.clone())
18051        );
18052
18053        test_parse_schema_name!(
18054            format!("AUTHORIZATION {dummy_authorization}"),
18055            SchemaName::UnnamedAuthorization(dummy_authorization.clone()),
18056        );
18057        test_parse_schema_name!(
18058            format!("{dummy_name} AUTHORIZATION {dummy_authorization}"),
18059            SchemaName::NamedAuthorization(dummy_name.clone(), dummy_authorization.clone()),
18060        );
18061    }
18062
18063    #[test]
18064    fn mysql_parse_index_table_constraint() {
18065        macro_rules! test_parse_table_constraint {
18066            ($dialect:expr, $input:expr, $expected:expr $(,)?) => {{
18067                $dialect.run_parser_method(&*$input, |parser| {
18068                    let constraint = parser.parse_optional_table_constraint().unwrap().unwrap();
18069                    // Validate that the structure is the same as expected
18070                    assert_eq!(constraint, $expected);
18071                    // Validate that the input and the expected structure serialization are the same
18072                    assert_eq!(constraint.to_string(), $input.to_string());
18073                });
18074            }};
18075        }
18076
18077        fn mk_expected_col(name: &str) -> IndexColumn {
18078            IndexColumn {
18079                column: OrderByExpr {
18080                    expr: Expr::Identifier(name.into()),
18081                    options: OrderByOptions {
18082                        asc: None,
18083                        nulls_first: None,
18084                    },
18085                    with_fill: None,
18086                },
18087                operator_class: None,
18088            }
18089        }
18090
18091        let dialect =
18092            TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(MySqlDialect {})]);
18093
18094        test_parse_table_constraint!(
18095            dialect,
18096            "INDEX (c1)",
18097            TableConstraint::Index {
18098                display_as_key: false,
18099                name: None,
18100                index_type: None,
18101                columns: vec![mk_expected_col("c1")],
18102                index_options: vec![],
18103            }
18104        );
18105
18106        test_parse_table_constraint!(
18107            dialect,
18108            "KEY (c1)",
18109            TableConstraint::Index {
18110                display_as_key: true,
18111                name: None,
18112                index_type: None,
18113                columns: vec![mk_expected_col("c1")],
18114                index_options: vec![],
18115            }
18116        );
18117
18118        test_parse_table_constraint!(
18119            dialect,
18120            "INDEX 'index' (c1, c2)",
18121            TableConstraint::Index {
18122                display_as_key: false,
18123                name: Some(Ident::with_quote('\'', "index")),
18124                index_type: None,
18125                columns: vec![mk_expected_col("c1"), mk_expected_col("c2")],
18126                index_options: vec![],
18127            }
18128        );
18129
18130        test_parse_table_constraint!(
18131            dialect,
18132            "INDEX USING BTREE (c1)",
18133            TableConstraint::Index {
18134                display_as_key: false,
18135                name: None,
18136                index_type: Some(IndexType::BTree),
18137                columns: vec![mk_expected_col("c1")],
18138                index_options: vec![],
18139            }
18140        );
18141
18142        test_parse_table_constraint!(
18143            dialect,
18144            "INDEX USING HASH (c1)",
18145            TableConstraint::Index {
18146                display_as_key: false,
18147                name: None,
18148                index_type: Some(IndexType::Hash),
18149                columns: vec![mk_expected_col("c1")],
18150                index_options: vec![],
18151            }
18152        );
18153
18154        test_parse_table_constraint!(
18155            dialect,
18156            "INDEX idx_name USING BTREE (c1)",
18157            TableConstraint::Index {
18158                display_as_key: false,
18159                name: Some(Ident::new("idx_name")),
18160                index_type: Some(IndexType::BTree),
18161                columns: vec![mk_expected_col("c1")],
18162                index_options: vec![],
18163            }
18164        );
18165
18166        test_parse_table_constraint!(
18167            dialect,
18168            "INDEX idx_name USING HASH (c1)",
18169            TableConstraint::Index {
18170                display_as_key: false,
18171                name: Some(Ident::new("idx_name")),
18172                index_type: Some(IndexType::Hash),
18173                columns: vec![mk_expected_col("c1")],
18174                index_options: vec![],
18175            }
18176        );
18177    }
18178
18179    #[test]
18180    fn test_tokenizer_error_loc() {
18181        let sql = "foo '";
18182        let ast = Parser::parse_sql(&GenericDialect, sql);
18183        assert_eq!(
18184            ast,
18185            Err(ParserError::TokenizerError(
18186                "Unterminated string literal at Line: 1, Column: 5".to_string()
18187            ))
18188        );
18189    }
18190
18191    #[test]
18192    fn test_parser_error_loc() {
18193        let sql = "SELECT this is a syntax error";
18194        let ast = Parser::parse_sql(&GenericDialect, sql);
18195        assert_eq!(
18196            ast,
18197            Err(ParserError::ParserError(
18198                "Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS, found: a at Line: 1, Column: 16"
18199                    .to_string()
18200            ))
18201        );
18202    }
18203
18204    #[test]
18205    fn test_nested_explain_error() {
18206        let sql = "EXPLAIN EXPLAIN SELECT 1";
18207        let ast = Parser::parse_sql(&GenericDialect, sql);
18208        assert_eq!(
18209            ast,
18210            Err(ParserError::ParserError(
18211                "Explain must be root of the plan".to_string()
18212            ))
18213        );
18214    }
18215
18216    #[test]
18217    fn test_parse_multipart_identifier_positive() {
18218        let dialect = TestedDialects::new(vec![Box::new(GenericDialect {})]);
18219
18220        // parse multipart with quotes
18221        let expected = vec![
18222            Ident {
18223                value: "CATALOG".to_string(),
18224                quote_style: None,
18225                span: Span::empty(),
18226            },
18227            Ident {
18228                value: "F(o)o. \"bar".to_string(),
18229                quote_style: Some('"'),
18230                span: Span::empty(),
18231            },
18232            Ident {
18233                value: "table".to_string(),
18234                quote_style: None,
18235                span: Span::empty(),
18236            },
18237        ];
18238        dialect.run_parser_method(r#"CATALOG."F(o)o. ""bar".table"#, |parser| {
18239            let actual = parser.parse_multipart_identifier().unwrap();
18240            assert_eq!(expected, actual);
18241        });
18242
18243        // allow whitespace between ident parts
18244        let expected = vec![
18245            Ident {
18246                value: "CATALOG".to_string(),
18247                quote_style: None,
18248                span: Span::empty(),
18249            },
18250            Ident {
18251                value: "table".to_string(),
18252                quote_style: None,
18253                span: Span::empty(),
18254            },
18255        ];
18256        dialect.run_parser_method("CATALOG . table", |parser| {
18257            let actual = parser.parse_multipart_identifier().unwrap();
18258            assert_eq!(expected, actual);
18259        });
18260    }
18261
18262    #[test]
18263    fn test_parse_multipart_identifier_negative() {
18264        macro_rules! test_parse_multipart_identifier_error {
18265            ($input:expr, $expected_err:expr $(,)?) => {{
18266                all_dialects().run_parser_method(&*$input, |parser| {
18267                    let actual_err = parser.parse_multipart_identifier().unwrap_err();
18268                    assert_eq!(actual_err.to_string(), $expected_err);
18269                });
18270            }};
18271        }
18272
18273        test_parse_multipart_identifier_error!(
18274            "",
18275            "sql parser error: Empty input when parsing identifier",
18276        );
18277
18278        test_parse_multipart_identifier_error!(
18279            "*schema.table",
18280            "sql parser error: Unexpected token in identifier: *",
18281        );
18282
18283        test_parse_multipart_identifier_error!(
18284            "schema.table*",
18285            "sql parser error: Unexpected token in identifier: *",
18286        );
18287
18288        test_parse_multipart_identifier_error!(
18289            "schema.table.",
18290            "sql parser error: Trailing period in identifier",
18291        );
18292
18293        test_parse_multipart_identifier_error!(
18294            "schema.*",
18295            "sql parser error: Unexpected token following period in identifier: *",
18296        );
18297    }
18298
18299    #[test]
18300    fn test_mysql_partition_selection() {
18301        let sql = "SELECT * FROM employees PARTITION (p0, p2)";
18302        let expected = vec!["p0", "p2"];
18303
18304        let ast: Vec<Statement> = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
18305        assert_eq!(ast.len(), 1);
18306        if let Statement::Query(v) = &ast[0] {
18307            if let SetExpr::Select(select) = &*v.body {
18308                assert_eq!(select.from.len(), 1);
18309                let from: &TableWithJoins = &select.from[0];
18310                let table_factor = &from.relation;
18311                if let TableFactor::Table { partitions, .. } = table_factor {
18312                    let actual: Vec<&str> = partitions
18313                        .iter()
18314                        .map(|ident| ident.value.as_str())
18315                        .collect();
18316                    assert_eq!(expected, actual);
18317                }
18318            }
18319        } else {
18320            panic!("fail to parse mysql partition selection");
18321        }
18322    }
18323
18324    #[test]
18325    fn test_replace_into_placeholders() {
18326        let sql = "REPLACE INTO t (a) VALUES (&a)";
18327
18328        assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
18329    }
18330
18331    #[test]
18332    fn test_replace_into_set_placeholder() {
18333        let sql = "REPLACE INTO t SET ?";
18334
18335        assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
18336    }
18337
18338    #[test]
18339    fn test_replace_incomplete() {
18340        let sql = r#"REPLACE"#;
18341
18342        assert!(Parser::parse_sql(&MySqlDialect {}, sql).is_err());
18343    }
18344
18345    #[test]
18346    fn test_placeholder_invalid_whitespace() {
18347        for w in ["  ", "/*invalid*/"] {
18348            let sql = format!("\nSELECT\n  :{w}fooBar");
18349            assert!(Parser::parse_sql(&GenericDialect, &sql).is_err());
18350        }
18351    }
18352}