sqlparser/parser/mod.rs

1// Licensed under the Apache License, Version 2.0 (the "License");
2// you may not use this file except in compliance with the License.
3// You may obtain a copy of the License at
4//
5// http://www.apache.org/licenses/LICENSE-2.0
6//
7// Unless required by applicable law or agreed to in writing, software
8// distributed under the License is distributed on an "AS IS" BASIS,
9// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
10// See the License for the specific language governing permissions and
11// limitations under the License.
12
13//! SQL Parser
14
15#[cfg(not(feature = "std"))]
16use alloc::{
17    boxed::Box,
18    format,
19    string::{String, ToString},
20    vec,
21    vec::Vec,
22};
23use core::{
24    fmt::{self, Display},
25    str::FromStr,
26};
27use helpers::attached_token::AttachedToken;
28
29use log::debug;
30
31use recursion::RecursionCounter;
32use IsLateral::*;
33use IsOptional::*;
34
35use crate::ast::helpers::{
36    key_value_options::{
37        KeyValueOption, KeyValueOptionKind, KeyValueOptions, KeyValueOptionsDelimiter,
38    },
39    stmt_create_table::{CreateTableBuilder, CreateTableConfiguration},
40};
41use crate::ast::Statement::CreatePolicy;
42use crate::ast::*;
43use crate::dialect::*;
44use crate::keywords::{Keyword, ALL_KEYWORDS};
45use crate::tokenizer::*;
use crate::parser::ParserState::ColumnDefinition;
47
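/// Errors produced while parsing SQL text.
///
/// An illustrative example of handling a parse failure (assumes the
/// `GenericDialect`; the exact error message is not part of the example):
/// ```
/// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
/// let dialect = GenericDialect{};
/// let result = Parser::parse_sql(&dialect, "SELEC 1");
/// assert!(matches!(result, Err(ParserError::ParserError(_))));
/// ```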
48#[derive(Debug, Clone, PartialEq, Eq)]
49pub enum ParserError {
50    TokenizerError(String),
51    ParserError(String),
52    RecursionLimitExceeded,
53}
54
55// Use `Parser::expected` instead, if possible
56macro_rules! parser_err {
57    ($MSG:expr, $loc:expr) => {
58        Err(ParserError::ParserError(format!("{}{}", $MSG, $loc)))
59    };
60}
61
62mod alter;
63mod merge;
64
65#[cfg(feature = "std")]
/// Implementation of [`RecursionCounter`] if std is available
67mod recursion {
68    use std::cell::Cell;
69    use std::rc::Rc;
70
71    use super::ParserError;
72
    /// Tracks the remaining recursion depth. This value is decremented on
    /// each call to [`RecursionCounter::try_decrease()`]; when it reaches 0 an
    /// error is returned.
    ///
    /// Note: uses an [`std::rc::Rc`] and [`std::cell::Cell`] in order to satisfy the Rust
    /// borrow checker, so the automatic [`DepthGuard`] decrement can share a
    /// reference to the counter.
    ///
    /// Note: when the "recursive-protection" feature is enabled, this crate uses additional stack overflow protection
    /// for some of its recursive methods. See [`recursive::recursive`] for more information.
83    pub(crate) struct RecursionCounter {
84        remaining_depth: Rc<Cell<usize>>,
85    }
86
87    impl RecursionCounter {
88        /// Creates a [`RecursionCounter`] with the specified maximum
89        /// depth
90        pub fn new(remaining_depth: usize) -> Self {
91            Self {
92                remaining_depth: Rc::new(remaining_depth.into()),
93            }
94        }
95
        /// Decreases the remaining depth by 1.
        ///
        /// Returns [`Err`] if the remaining depth falls to 0.
        ///
        /// Otherwise returns a [`DepthGuard`] which adds 1 back to the
        /// remaining depth when it is dropped.
102        pub fn try_decrease(&self) -> Result<DepthGuard, ParserError> {
103            let old_value = self.remaining_depth.get();
104            // ran out of space
105            if old_value == 0 {
106                Err(ParserError::RecursionLimitExceeded)
107            } else {
108                self.remaining_depth.set(old_value - 1);
109                Ok(DepthGuard::new(Rc::clone(&self.remaining_depth)))
110            }
111        }
112    }
113
114    /// Guard that increases the remaining depth by 1 on drop
115    pub struct DepthGuard {
116        remaining_depth: Rc<Cell<usize>>,
117    }
118
119    impl DepthGuard {
120        fn new(remaining_depth: Rc<Cell<usize>>) -> Self {
121            Self { remaining_depth }
122        }
123    }
124    impl Drop for DepthGuard {
125        fn drop(&mut self) {
126            let old_value = self.remaining_depth.get();
127            self.remaining_depth.set(old_value + 1);
128        }
129    }
130}
131
132#[cfg(not(feature = "std"))]
133mod recursion {
    /// Implementation of [`RecursionCounter`] used when std is NOT available
    /// (it does not guard against stack overflow).
    ///
    /// Has the same API as the std [`RecursionCounter`] implementation
    /// but does not actually limit stack depth.
139    pub(crate) struct RecursionCounter {}
140
141    impl RecursionCounter {
142        pub fn new(_remaining_depth: usize) -> Self {
143            Self {}
144        }
145        pub fn try_decrease(&self) -> Result<DepthGuard, super::ParserError> {
146            Ok(DepthGuard {})
147        }
148    }
149
150    pub struct DepthGuard {}
151}
152
153#[derive(PartialEq, Eq)]
154pub enum IsOptional {
155    Optional,
156    Mandatory,
157}
158
159pub enum IsLateral {
160    Lateral,
161    NotLateral,
162}
163
164pub enum WildcardExpr {
165    Expr(Expr),
166    QualifiedWildcard(ObjectName),
167    Wildcard,
168}
169
170impl From<TokenizerError> for ParserError {
171    fn from(e: TokenizerError) -> Self {
172        ParserError::TokenizerError(e.to_string())
173    }
174}
175
176impl fmt::Display for ParserError {
177    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
178        write!(
179            f,
180            "sql parser error: {}",
181            match self {
182                ParserError::TokenizerError(s) => s,
183                ParserError::ParserError(s) => s,
184                ParserError::RecursionLimitExceeded => "recursion limit exceeded",
185            }
186        )
187    }
188}
189
190#[cfg(feature = "std")]
191impl std::error::Error for ParserError {}
192
// By default, allow expressions up to this depth before erroring
194const DEFAULT_REMAINING_DEPTH: usize = 50;
195
196// A constant EOF token that can be referenced.
197const EOF_TOKEN: TokenWithSpan = TokenWithSpan {
198    token: Token::EOF,
199    span: Span {
200        start: Location { line: 0, column: 0 },
201        end: Location { line: 0, column: 0 },
202    },
203};
204
/// Composite type declarations using angle-bracket syntax can be arbitrarily
/// nested, such that the following declaration is possible:
///      `ARRAY<ARRAY<INT>>`
/// But the tokenizer recognizes the `>>` as a ShiftRight token.
/// We work around that limitation when parsing a data type by accepting
/// either a `>` or `>>` token in such cases, remembering which variant we
/// matched.
/// In the latter case, having matched a `>>`, the parent type does not look
/// for its own closing `>`, since that token was already consumed while
/// parsing the child type.
///
/// See [Parser::parse_data_type] for details
217struct MatchedTrailingBracket(bool);
218
219impl From<bool> for MatchedTrailingBracket {
220    fn from(value: bool) -> Self {
221        Self(value)
222    }
223}
224
225/// Options that control how the [`Parser`] parses SQL text
226#[derive(Debug, Clone, PartialEq, Eq)]
227pub struct ParserOptions {
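    /// Controls whether trailing commas are allowed, e.g. in a `SELECT`
    /// projection list. See [`ParserOptions::with_trailing_commas`].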
228    pub trailing_commas: bool,
229    /// Controls how literal values are unescaped. See
230    /// [`Tokenizer::with_unescape`] for more details.
231    pub unescape: bool,
232    /// Controls if the parser expects a semi-colon token
233    /// between statements. Default is `true`.
234    pub require_semicolon_stmt_delimiter: bool,
235}
236
237impl Default for ParserOptions {
238    fn default() -> Self {
239        Self {
240            trailing_commas: false,
241            unescape: true,
242            require_semicolon_stmt_delimiter: true,
243        }
244    }
245}
246
247impl ParserOptions {
248    /// Create a new [`ParserOptions`]
249    pub fn new() -> Self {
250        Default::default()
251    }
252
253    /// Set if trailing commas are allowed.
254    ///
255    /// If this option is `false` (the default), the following SQL will
256    /// not parse. If the option is `true`, the SQL will parse.
257    ///
258    /// ```sql
259    ///  SELECT
260    ///   foo,
261    ///   bar,
262    ///  FROM baz
263    /// ```
264    pub fn with_trailing_commas(mut self, trailing_commas: bool) -> Self {
265        self.trailing_commas = trailing_commas;
266        self
267    }
268
269    /// Set if literal values are unescaped. Defaults to true. See
270    /// [`Tokenizer::with_unescape`] for more details.
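    ///
    /// A minimal usage sketch (assumes the `GenericDialect`):
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError, ParserOptions}, dialect::GenericDialect};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let statements = Parser::new(&dialect)
    ///   .with_options(ParserOptions::new().with_unescape(false))
    ///   .try_with_sql(r"SELECT 'a\nb'")?
    ///   .parse_statements()?;
    /// assert_eq!(statements.len(), 1);
    /// # Ok(())
    /// # }
    /// ```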
271    pub fn with_unescape(mut self, unescape: bool) -> Self {
272        self.unescape = unescape;
273        self
274    }
275}
276
277#[derive(Copy, Clone)]
278enum ParserState {
279    /// The default state of the parser.
280    Normal,
281    /// The state when parsing a CONNECT BY expression. This allows parsing
282    /// PRIOR expressions while still allowing prior as an identifier name
283    /// in other contexts.
284    ConnectBy,
285    /// The state when parsing column definitions.  This state prohibits
286    /// NOT NULL as an alias for IS NOT NULL.  For example:
287    /// ```sql
288    /// CREATE TABLE foo (abc BIGINT NOT NULL);
289    /// ```
290    ColumnDefinition,
291}
292
293/// A SQL Parser
294///
295/// This struct is the main entry point for parsing SQL queries.
296///
297/// # Functionality:
298/// * Parsing SQL: see examples on [`Parser::new`] and [`Parser::parse_sql`]
299/// * Controlling recursion: See [`Parser::with_recursion_limit`]
300/// * Controlling parser options: See [`Parser::with_options`]
301/// * Providing your own tokens: See [`Parser::with_tokens`]
302///
303/// # Internals
304///
305/// The parser uses a [`Tokenizer`] to tokenize the input SQL string into a
306/// `Vec` of [`TokenWithSpan`]s and maintains an `index` to the current token
307/// being processed. The token vec may contain multiple SQL statements.
308///
309/// * The "current" token is the token at `index - 1`
310/// * The "next" token is the token at `index`
311/// * The "previous" token is the token at `index - 2`
312///
313/// If `index` is equal to the length of the token stream, the 'next' token is
314/// [`Token::EOF`].
315///
/// For example, the SQL string "SELECT * FROM foo" will be tokenized into the
/// following tokens:
318/// ```text
319///  [
320///    "SELECT", // token index 0
321///    " ",      // whitespace
322///    "*",
323///    " ",
324///    "FROM",
325///    " ",
326///    "foo"
327///   ]
328/// ```
331pub struct Parser<'a> {
332    /// The tokens
333    tokens: Vec<TokenWithSpan>,
334    /// The index of the first unprocessed token in [`Parser::tokens`].
335    index: usize,
336    /// The current state of the parser.
337    state: ParserState,
338    /// The SQL dialect to use.
339    dialect: &'a dyn Dialect,
    /// Additional options that allow you to mix & match behavior
    /// otherwise constrained to certain dialects (e.g. trailing
    /// commas) and/or control how input is parsed (e.g. unescaping).
343    options: ParserOptions,
344    /// Ensures the stack does not overflow by limiting recursion depth.
345    recursion_counter: RecursionCounter,
346}
347
348impl<'a> Parser<'a> {
349    /// Create a parser for a [`Dialect`]
350    ///
351    /// See also [`Parser::parse_sql`]
352    ///
353    /// Example:
354    /// ```
355    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
356    /// # fn main() -> Result<(), ParserError> {
357    /// let dialect = GenericDialect{};
358    /// let statements = Parser::new(&dialect)
359    ///   .try_with_sql("SELECT * FROM foo")?
360    ///   .parse_statements()?;
361    /// # Ok(())
362    /// # }
363    /// ```
364    pub fn new(dialect: &'a dyn Dialect) -> Self {
365        Self {
366            tokens: vec![],
367            index: 0,
368            state: ParserState::Normal,
369            dialect,
370            recursion_counter: RecursionCounter::new(DEFAULT_REMAINING_DEPTH),
371            options: ParserOptions::new().with_trailing_commas(dialect.supports_trailing_commas()),
372        }
373    }
374
375    /// Specify the maximum recursion limit while parsing.
376    ///
377    /// [`Parser`] prevents stack overflows by returning
378    /// [`ParserError::RecursionLimitExceeded`] if the parser exceeds
379    /// this depth while processing the query.
380    ///
381    /// Example:
382    /// ```
383    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
384    /// # fn main() -> Result<(), ParserError> {
385    /// let dialect = GenericDialect{};
386    /// let result = Parser::new(&dialect)
387    ///   .with_recursion_limit(1)
388    ///   .try_with_sql("SELECT * FROM foo WHERE (a OR (b OR (c OR d)))")?
389    ///   .parse_statements();
390    ///   assert_eq!(result, Err(ParserError::RecursionLimitExceeded));
391    /// # Ok(())
392    /// # }
393    /// ```
394    ///
    /// Note: when the "recursive-protection" feature is enabled, this crate uses additional stack overflow protection
    /// for some of its recursive methods. See [`recursive::recursive`] for more information.
397    pub fn with_recursion_limit(mut self, recursion_limit: usize) -> Self {
398        self.recursion_counter = RecursionCounter::new(recursion_limit);
399        self
400    }
401
402    /// Specify additional parser options
403    ///
404    /// [`Parser`] supports additional options ([`ParserOptions`])
405    /// that allow you to mix & match behavior otherwise constrained
406    /// to certain dialects (e.g. trailing commas).
407    ///
408    /// Example:
409    /// ```
410    /// # use sqlparser::{parser::{Parser, ParserError, ParserOptions}, dialect::GenericDialect};
411    /// # fn main() -> Result<(), ParserError> {
412    /// let dialect = GenericDialect{};
413    /// let options = ParserOptions::new()
414    ///    .with_trailing_commas(true)
415    ///    .with_unescape(false);
416    /// let result = Parser::new(&dialect)
417    ///   .with_options(options)
418    ///   .try_with_sql("SELECT a, b, COUNT(*), FROM foo GROUP BY a, b,")?
419    ///   .parse_statements();
420    ///   assert!(matches!(result, Ok(_)));
421    /// # Ok(())
422    /// # }
423    /// ```
424    pub fn with_options(mut self, options: ParserOptions) -> Self {
425        self.options = options;
426        self
427    }
428
429    /// Reset this parser to parse the specified token stream
430    pub fn with_tokens_with_locations(mut self, tokens: Vec<TokenWithSpan>) -> Self {
431        self.tokens = tokens;
432        self.index = 0;
433        self
434    }
435
436    /// Reset this parser state to parse the specified tokens
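    ///
    /// A minimal sketch (assumes the `GenericDialect`): tokenize the SQL
    /// separately, then hand the resulting tokens to the parser:
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect, tokenizer::Tokenizer};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let tokens = Tokenizer::new(&dialect, "SELECT * FROM foo").tokenize()?;
    /// let statements = Parser::new(&dialect)
    ///   .with_tokens(tokens)
    ///   .parse_statements()?;
    /// assert_eq!(statements.len(), 1);
    /// # Ok(())
    /// # }
    /// ```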
437    pub fn with_tokens(self, tokens: Vec<Token>) -> Self {
438        // Put in dummy locations
439        let tokens_with_locations: Vec<TokenWithSpan> = tokens
440            .into_iter()
441            .map(|token| TokenWithSpan {
442                token,
443                span: Span::empty(),
444            })
445            .collect();
446        self.with_tokens_with_locations(tokens_with_locations)
447    }
448
    /// Tokenize the SQL string and set this [`Parser`]'s state to
    /// parse the resulting tokens.
451    ///
452    /// Returns an error if there was an error tokenizing the SQL string.
453    ///
454    /// See example on [`Parser::new()`] for an example
455    pub fn try_with_sql(self, sql: &str) -> Result<Self, ParserError> {
456        debug!("Parsing sql '{sql}'...");
457        let tokens = Tokenizer::new(self.dialect, sql)
458            .with_unescape(self.options.unescape)
459            .tokenize_with_location()?;
460        Ok(self.with_tokens_with_locations(tokens))
461    }
462
463    /// Parse potentially multiple statements
464    ///
465    /// Example
466    /// ```
467    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
468    /// # fn main() -> Result<(), ParserError> {
469    /// let dialect = GenericDialect{};
470    /// let statements = Parser::new(&dialect)
471    ///   // Parse a SQL string with 2 separate statements
472    ///   .try_with_sql("SELECT * FROM foo; SELECT * FROM bar;")?
473    ///   .parse_statements()?;
474    /// assert_eq!(statements.len(), 2);
475    /// # Ok(())
476    /// # }
477    /// ```
478    pub fn parse_statements(&mut self) -> Result<Vec<Statement>, ParserError> {
479        let mut stmts = Vec::new();
480        let mut expecting_statement_delimiter = false;
481        loop {
482            // ignore empty statements (between successive statement delimiters)
483            while self.consume_token(&Token::SemiColon) {
484                expecting_statement_delimiter = false;
485            }
486
487            if !self.options.require_semicolon_stmt_delimiter {
488                expecting_statement_delimiter = false;
489            }
490
491            match self.peek_token().token {
492                Token::EOF => break,
493
494                // end of statement
495                Token::Word(word) => {
496                    if expecting_statement_delimiter && word.keyword == Keyword::END {
497                        break;
498                    }
499                }
500                _ => {}
501            }
502
503            if expecting_statement_delimiter {
504                return self.expected("end of statement", self.peek_token());
505            }
506
507            let statement = self.parse_statement()?;
508            stmts.push(statement);
509            expecting_statement_delimiter = true;
510        }
511        Ok(stmts)
512    }
513
    /// Convenience method to parse a string with one or more SQL
    /// statements and produce an Abstract Syntax Tree (AST).
516    ///
517    /// Example
518    /// ```
519    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
520    /// # fn main() -> Result<(), ParserError> {
521    /// let dialect = GenericDialect{};
522    /// let statements = Parser::parse_sql(
523    ///   &dialect, "SELECT * FROM foo"
524    /// )?;
525    /// assert_eq!(statements.len(), 1);
526    /// # Ok(())
527    /// # }
528    /// ```
529    pub fn parse_sql(dialect: &dyn Dialect, sql: &str) -> Result<Vec<Statement>, ParserError> {
530        Parser::new(dialect).try_with_sql(sql)?.parse_statements()
531    }
532
533    /// Parse a single top-level statement (such as SELECT, INSERT, CREATE, etc.),
534    /// stopping before the statement separator, if any.
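    ///
    /// Example (assumes the `GenericDialect`):
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let statement = Parser::new(&dialect)
    ///   .try_with_sql("SELECT * FROM foo")?
    ///   .parse_statement()?;
    /// assert_eq!(statement.to_string(), "SELECT * FROM foo");
    /// # Ok(())
    /// # }
    /// ```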
535    pub fn parse_statement(&mut self) -> Result<Statement, ParserError> {
536        let _guard = self.recursion_counter.try_decrease()?;
537
538        // allow the dialect to override statement parsing
539        if let Some(statement) = self.dialect.parse_statement(self) {
540            return statement;
541        }
542
543        let next_token = self.next_token();
544        match &next_token.token {
545            Token::Word(w) => match w.keyword {
546                Keyword::KILL => self.parse_kill(),
547                Keyword::FLUSH => self.parse_flush(),
548                Keyword::DESC => self.parse_explain(DescribeAlias::Desc),
549                Keyword::DESCRIBE => self.parse_explain(DescribeAlias::Describe),
550                Keyword::EXPLAIN => self.parse_explain(DescribeAlias::Explain),
551                Keyword::ANALYZE => self.parse_analyze(),
552                Keyword::CASE => {
553                    self.prev_token();
554                    self.parse_case_stmt()
555                }
556                Keyword::IF => {
557                    self.prev_token();
558                    self.parse_if_stmt()
559                }
560                Keyword::WHILE => {
561                    self.prev_token();
562                    self.parse_while()
563                }
564                Keyword::RAISE => {
565                    self.prev_token();
566                    self.parse_raise_stmt()
567                }
568                Keyword::SELECT | Keyword::WITH | Keyword::VALUES | Keyword::FROM => {
569                    self.prev_token();
570                    self.parse_query().map(Statement::Query)
571                }
572                Keyword::TRUNCATE => self.parse_truncate(),
573                Keyword::ATTACH => {
574                    if dialect_of!(self is DuckDbDialect) {
575                        self.parse_attach_duckdb_database()
576                    } else {
577                        self.parse_attach_database()
578                    }
579                }
580                Keyword::DETACH if dialect_of!(self is DuckDbDialect | GenericDialect) => {
581                    self.parse_detach_duckdb_database()
582                }
583                Keyword::MSCK => self.parse_msck(),
584                Keyword::CREATE => self.parse_create(),
585                Keyword::CACHE => self.parse_cache_table(),
586                Keyword::DROP => self.parse_drop(),
587                Keyword::DISCARD => self.parse_discard(),
588                Keyword::DECLARE => self.parse_declare(),
589                Keyword::FETCH => self.parse_fetch_statement(),
590                Keyword::DELETE => self.parse_delete(next_token),
591                Keyword::INSERT => self.parse_insert(next_token),
592                Keyword::REPLACE => self.parse_replace(next_token),
593                Keyword::UNCACHE => self.parse_uncache_table(),
594                Keyword::UPDATE => self.parse_update(next_token),
595                Keyword::ALTER => self.parse_alter(),
596                Keyword::CALL => self.parse_call(),
597                Keyword::COPY => self.parse_copy(),
598                Keyword::OPEN => {
599                    self.prev_token();
600                    self.parse_open()
601                }
602                Keyword::CLOSE => self.parse_close(),
603                Keyword::SET => self.parse_set(),
604                Keyword::SHOW => self.parse_show(),
605                Keyword::USE => self.parse_use(),
606                Keyword::GRANT => self.parse_grant(),
607                Keyword::DENY => {
608                    self.prev_token();
609                    self.parse_deny()
610                }
611                Keyword::REVOKE => self.parse_revoke(),
612                Keyword::START => self.parse_start_transaction(),
613                Keyword::BEGIN => self.parse_begin(),
614                Keyword::END => self.parse_end(),
615                Keyword::SAVEPOINT => self.parse_savepoint(),
616                Keyword::RELEASE => self.parse_release(),
617                Keyword::COMMIT => self.parse_commit(),
618                Keyword::RAISERROR => Ok(self.parse_raiserror()?),
619                Keyword::ROLLBACK => self.parse_rollback(),
620                Keyword::ASSERT => self.parse_assert(),
                // `PREPARE`, `EXECUTE` and `DEALLOCATE` are Postgres-specific
                // syntaxes. They are used for Postgres prepared statements.
623                Keyword::DEALLOCATE => self.parse_deallocate(),
624                Keyword::EXECUTE | Keyword::EXEC => self.parse_execute(),
625                Keyword::PREPARE => self.parse_prepare(),
626                Keyword::MERGE => self.parse_merge(next_token),
                // `LISTEN`, `UNLISTEN` and `NOTIFY` are Postgres-specific
                // syntaxes. They are used for Postgres asynchronous notifications.
629                Keyword::LISTEN if self.dialect.supports_listen_notify() => self.parse_listen(),
630                Keyword::UNLISTEN if self.dialect.supports_listen_notify() => self.parse_unlisten(),
631                Keyword::NOTIFY if self.dialect.supports_listen_notify() => self.parse_notify(),
632                // `PRAGMA` is sqlite specific https://www.sqlite.org/pragma.html
633                Keyword::PRAGMA => self.parse_pragma(),
634                Keyword::UNLOAD => {
635                    self.prev_token();
636                    self.parse_unload()
637                }
638                Keyword::RENAME => self.parse_rename(),
639                // `INSTALL` is duckdb specific https://duckdb.org/docs/extensions/overview
640                Keyword::INSTALL if dialect_of!(self is DuckDbDialect | GenericDialect) => {
641                    self.parse_install()
642                }
643                Keyword::LOAD => self.parse_load(),
644                // `OPTIMIZE` is clickhouse specific https://clickhouse.tech/docs/en/sql-reference/statements/optimize/
645                Keyword::OPTIMIZE if dialect_of!(self is ClickHouseDialect | GenericDialect) => {
646                    self.parse_optimize_table()
647                }
648                // `COMMENT` is snowflake specific https://docs.snowflake.com/en/sql-reference/sql/comment
649                Keyword::COMMENT if self.dialect.supports_comment_on() => self.parse_comment(),
650                Keyword::PRINT => self.parse_print(),
651                Keyword::RETURN => self.parse_return(),
652                Keyword::EXPORT => {
653                    self.prev_token();
654                    self.parse_export_data()
655                }
656                Keyword::VACUUM => {
657                    self.prev_token();
658                    self.parse_vacuum()
659                }
660                Keyword::RESET => self.parse_reset(),
661                _ => self.expected("an SQL statement", next_token),
662            },
663            Token::LParen => {
664                self.prev_token();
665                self.parse_query().map(Statement::Query)
666            }
667            _ => self.expected("an SQL statement", next_token),
668        }
669    }
670
671    /// Parse a `CASE` statement.
672    ///
673    /// See [Statement::Case]
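    ///
    /// An illustrative example of the syntax this method accepts:
    /// ```sql
    /// CASE
    ///     WHEN x = 1 THEN SELECT 1;
    ///     ELSE SELECT 0;
    /// END CASE;
    /// ```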
674    pub fn parse_case_stmt(&mut self) -> Result<Statement, ParserError> {
675        let case_token = self.expect_keyword(Keyword::CASE)?;
676
677        let match_expr = if self.peek_keyword(Keyword::WHEN) {
678            None
679        } else {
680            Some(self.parse_expr()?)
681        };
682
683        self.expect_keyword_is(Keyword::WHEN)?;
684        let when_blocks = self.parse_keyword_separated(Keyword::WHEN, |parser| {
685            parser.parse_conditional_statement_block(&[Keyword::WHEN, Keyword::ELSE, Keyword::END])
686        })?;
687
688        let else_block = if self.parse_keyword(Keyword::ELSE) {
689            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
690        } else {
691            None
692        };
693
694        let mut end_case_token = self.expect_keyword(Keyword::END)?;
695        if self.peek_keyword(Keyword::CASE) {
696            end_case_token = self.expect_keyword(Keyword::CASE)?;
697        }
698
699        Ok(Statement::Case(CaseStatement {
700            case_token: AttachedToken(case_token),
701            match_expr,
702            when_blocks,
703            else_block,
704            end_case_token: AttachedToken(end_case_token),
705        }))
706    }
707
708    /// Parse an `IF` statement.
709    ///
710    /// See [Statement::If]
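    ///
    /// An illustrative example of the syntax this method accepts:
    /// ```sql
    /// IF x > 0 THEN
    ///     SELECT 1;
    /// ELSEIF x = 0 THEN
    ///     SELECT 0;
    /// ELSE
    ///     SELECT -1;
    /// END IF;
    /// ```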
711    pub fn parse_if_stmt(&mut self) -> Result<Statement, ParserError> {
712        self.expect_keyword_is(Keyword::IF)?;
713        let if_block = self.parse_conditional_statement_block(&[
714            Keyword::ELSE,
715            Keyword::ELSEIF,
716            Keyword::END,
717        ])?;
718
719        let elseif_blocks = if self.parse_keyword(Keyword::ELSEIF) {
720            self.parse_keyword_separated(Keyword::ELSEIF, |parser| {
721                parser.parse_conditional_statement_block(&[
722                    Keyword::ELSEIF,
723                    Keyword::ELSE,
724                    Keyword::END,
725                ])
726            })?
727        } else {
728            vec![]
729        };
730
731        let else_block = if self.parse_keyword(Keyword::ELSE) {
732            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
733        } else {
734            None
735        };
736
737        self.expect_keyword_is(Keyword::END)?;
738        let end_token = self.expect_keyword(Keyword::IF)?;
739
740        Ok(Statement::If(IfStatement {
741            if_block,
742            elseif_blocks,
743            else_block,
744            end_token: Some(AttachedToken(end_token)),
745        }))
746    }
747
748    /// Parse a `WHILE` statement.
749    ///
750    /// See [Statement::While]
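    ///
    /// An illustrative example of the syntax this method accepts
    /// (T-SQL style, with a BEGIN/END body):
    /// ```sql
    /// WHILE x < 10
    /// BEGIN
    ///     SELECT x;
    /// END
    /// ```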
751    fn parse_while(&mut self) -> Result<Statement, ParserError> {
752        self.expect_keyword_is(Keyword::WHILE)?;
753        let while_block = self.parse_conditional_statement_block(&[Keyword::END])?;
754
755        Ok(Statement::While(WhileStatement { while_block }))
756    }
757
758    /// Parses an expression and associated list of statements
759    /// belonging to a conditional statement like `IF` or `WHEN` or `WHILE`.
760    ///
761    /// Example:
762    /// ```sql
763    /// IF condition THEN statement1; statement2;
764    /// ```
765    fn parse_conditional_statement_block(
766        &mut self,
767        terminal_keywords: &[Keyword],
768    ) -> Result<ConditionalStatementBlock, ParserError> {
769        let start_token = self.get_current_token().clone(); // self.expect_keyword(keyword)?;
770        let mut then_token = None;
771
772        let condition = match &start_token.token {
773            Token::Word(w) if w.keyword == Keyword::ELSE => None,
774            Token::Word(w) if w.keyword == Keyword::WHILE => {
775                let expr = self.parse_expr()?;
776                Some(expr)
777            }
778            _ => {
779                let expr = self.parse_expr()?;
780                then_token = Some(AttachedToken(self.expect_keyword(Keyword::THEN)?));
781                Some(expr)
782            }
783        };
784
785        let conditional_statements = self.parse_conditional_statements(terminal_keywords)?;
786
787        Ok(ConditionalStatementBlock {
788            start_token: AttachedToken(start_token),
789            condition,
790            then_token,
791            conditional_statements,
792        })
793    }
794
    /// Parse a BEGIN/END block or a sequence of statements.
    /// This could be inside a conditional (IF, CASE, WHILE, etc.) or an object
    /// body consisting of one or more statements, optionally wrapped in BEGIN/END.
797    pub(crate) fn parse_conditional_statements(
798        &mut self,
799        terminal_keywords: &[Keyword],
800    ) -> Result<ConditionalStatements, ParserError> {
801        let conditional_statements = if self.peek_keyword(Keyword::BEGIN) {
802            let begin_token = self.expect_keyword(Keyword::BEGIN)?;
803            let statements = self.parse_statement_list(terminal_keywords)?;
804            let end_token = self.expect_keyword(Keyword::END)?;
805
806            ConditionalStatements::BeginEnd(BeginEndStatements {
807                begin_token: AttachedToken(begin_token),
808                statements,
809                end_token: AttachedToken(end_token),
810            })
811        } else {
812            ConditionalStatements::Sequence {
813                statements: self.parse_statement_list(terminal_keywords)?,
814            }
815        };
816        Ok(conditional_statements)
817    }
818
819    /// Parse a `RAISE` statement.
820    ///
821    /// See [Statement::Raise]
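    ///
    /// An illustrative example of the syntax this method accepts:
    /// ```sql
    /// RAISE USING MESSAGE = 'Something went wrong';
    /// ```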
822    pub fn parse_raise_stmt(&mut self) -> Result<Statement, ParserError> {
823        self.expect_keyword_is(Keyword::RAISE)?;
824
825        let value = if self.parse_keywords(&[Keyword::USING, Keyword::MESSAGE]) {
826            self.expect_token(&Token::Eq)?;
827            Some(RaiseStatementValue::UsingMessage(self.parse_expr()?))
828        } else {
829            self.maybe_parse(|parser| parser.parse_expr().map(RaiseStatementValue::Expr))?
830        };
831
832        Ok(Statement::Raise(RaiseStatement { value }))
833    }
834
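    /// Parse a `COMMENT ON ...` statement, e.g. as supported by PostgreSQL
    /// and Snowflake. The `COMMENT` keyword is expected to have already been
    /// consumed by the caller.
    ///
    /// An illustrative example of the syntax this method accepts:
    /// ```sql
    /// COMMENT ON TABLE foo IS 'a table comment';
    /// ```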
835    pub fn parse_comment(&mut self) -> Result<Statement, ParserError> {
836        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
837
838        self.expect_keyword_is(Keyword::ON)?;
839        let token = self.next_token();
840
841        let (object_type, object_name) = match token.token {
842            Token::Word(w) if w.keyword == Keyword::COLUMN => {
843                (CommentObject::Column, self.parse_object_name(false)?)
844            }
845            Token::Word(w) if w.keyword == Keyword::TABLE => {
846                (CommentObject::Table, self.parse_object_name(false)?)
847            }
848            Token::Word(w) if w.keyword == Keyword::EXTENSION => {
849                (CommentObject::Extension, self.parse_object_name(false)?)
850            }
851            Token::Word(w) if w.keyword == Keyword::SCHEMA => {
852                (CommentObject::Schema, self.parse_object_name(false)?)
853            }
854            Token::Word(w) if w.keyword == Keyword::DATABASE => {
855                (CommentObject::Database, self.parse_object_name(false)?)
856            }
857            Token::Word(w) if w.keyword == Keyword::USER => {
858                (CommentObject::User, self.parse_object_name(false)?)
859            }
860            Token::Word(w) if w.keyword == Keyword::ROLE => {
861                (CommentObject::Role, self.parse_object_name(false)?)
862            }
863            _ => self.expected("comment object_type", token)?,
864        };
865
866        self.expect_keyword_is(Keyword::IS)?;
867        let comment = if self.parse_keyword(Keyword::NULL) {
868            None
869        } else {
870            Some(self.parse_literal_string()?)
871        };
872        Ok(Statement::Comment {
873            object_type,
874            object_name,
875            comment,
876            if_exists,
877        })
878    }
879
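    /// Parse a MySQL `FLUSH` statement. The `FLUSH` keyword is expected to
    /// have already been consumed by the caller; dialects other than MySQL
    /// (or the generic dialect) are rejected.
    ///
    /// Illustrative examples of the syntax this method accepts:
    /// ```sql
    /// FLUSH BINARY LOGS;
    /// FLUSH TABLES t1, t2 WITH READ LOCK;
    /// ```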
880    pub fn parse_flush(&mut self) -> Result<Statement, ParserError> {
881        let mut channel = None;
882        let mut tables: Vec<ObjectName> = vec![];
883        let mut read_lock = false;
884        let mut export = false;
885
886        if !dialect_of!(self is MySqlDialect | GenericDialect) {
887            return parser_err!("Unsupported statement FLUSH", self.peek_token().span.start);
888        }
889
890        let location = if self.parse_keyword(Keyword::NO_WRITE_TO_BINLOG) {
891            Some(FlushLocation::NoWriteToBinlog)
892        } else if self.parse_keyword(Keyword::LOCAL) {
893            Some(FlushLocation::Local)
894        } else {
895            None
896        };
897
898        let object_type = if self.parse_keywords(&[Keyword::BINARY, Keyword::LOGS]) {
899            FlushType::BinaryLogs
900        } else if self.parse_keywords(&[Keyword::ENGINE, Keyword::LOGS]) {
901            FlushType::EngineLogs
902        } else if self.parse_keywords(&[Keyword::ERROR, Keyword::LOGS]) {
903            FlushType::ErrorLogs
904        } else if self.parse_keywords(&[Keyword::GENERAL, Keyword::LOGS]) {
905            FlushType::GeneralLogs
906        } else if self.parse_keywords(&[Keyword::HOSTS]) {
907            FlushType::Hosts
908        } else if self.parse_keyword(Keyword::PRIVILEGES) {
909            FlushType::Privileges
910        } else if self.parse_keyword(Keyword::OPTIMIZER_COSTS) {
911            FlushType::OptimizerCosts
912        } else if self.parse_keywords(&[Keyword::RELAY, Keyword::LOGS]) {
913            if self.parse_keywords(&[Keyword::FOR, Keyword::CHANNEL]) {
914                channel = Some(self.parse_object_name(false).unwrap().to_string());
915            }
916            FlushType::RelayLogs
917        } else if self.parse_keywords(&[Keyword::SLOW, Keyword::LOGS]) {
918            FlushType::SlowLogs
919        } else if self.parse_keyword(Keyword::STATUS) {
920            FlushType::Status
921        } else if self.parse_keyword(Keyword::USER_RESOURCES) {
922            FlushType::UserResources
923        } else if self.parse_keywords(&[Keyword::LOGS]) {
924            FlushType::Logs
925        } else if self.parse_keywords(&[Keyword::TABLES]) {
926            loop {
927                let next_token = self.next_token();
928                match &next_token.token {
929                    Token::Word(w) => match w.keyword {
930                        Keyword::WITH => {
931                            read_lock = self.parse_keywords(&[Keyword::READ, Keyword::LOCK]);
932                        }
933                        Keyword::FOR => {
934                            export = self.parse_keyword(Keyword::EXPORT);
935                        }
936                        Keyword::NoKeyword => {
937                            self.prev_token();
938                            tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
939                        }
940                        _ => {}
941                    },
942                    _ => {
943                        break;
944                    }
945                }
946            }
947
948            FlushType::Tables
949        } else {
950            return self.expected(
951                "BINARY LOGS, ENGINE LOGS, ERROR LOGS, GENERAL LOGS, HOSTS, LOGS, PRIVILEGES, OPTIMIZER_COSTS,\
952                 RELAY LOGS [FOR CHANNEL channel], SLOW LOGS, STATUS, USER_RESOURCES",
953                self.peek_token(),
954            );
955        };
956
957        Ok(Statement::Flush {
958            object_type,
959            location,
960            channel,
961            read_lock,
962            export,
963            tables,
964        })
965    }
966
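    /// Parse a Hive-style `MSCK [REPAIR] TABLE` statement.
    ///
    /// An illustrative example of the syntax this method accepts:
    /// ```sql
    /// MSCK REPAIR TABLE my_table SYNC PARTITIONS;
    /// ```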
967    pub fn parse_msck(&mut self) -> Result<Statement, ParserError> {
968        let repair = self.parse_keyword(Keyword::REPAIR);
969        self.expect_keyword_is(Keyword::TABLE)?;
970        let table_name = self.parse_object_name(false)?;
971        let partition_action = self
972            .maybe_parse(|parser| {
973                let pa = match parser.parse_one_of_keywords(&[
974                    Keyword::ADD,
975                    Keyword::DROP,
976                    Keyword::SYNC,
977                ]) {
978                    Some(Keyword::ADD) => Some(AddDropSync::ADD),
979                    Some(Keyword::DROP) => Some(AddDropSync::DROP),
980                    Some(Keyword::SYNC) => Some(AddDropSync::SYNC),
981                    _ => None,
982                };
983                parser.expect_keyword_is(Keyword::PARTITIONS)?;
984                Ok(pa)
985            })?
986            .unwrap_or_default();
987        Ok(Msck {
988            repair,
989            table_name,
990            partition_action,
991        }
992        .into())
993    }
994
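    /// Parse a `TRUNCATE [TABLE]` statement.
    ///
    /// An illustrative example of the syntax this method accepts
    /// (the `RESTART IDENTITY` / `CASCADE` options are Postgres-specific):
    /// ```sql
    /// TRUNCATE TABLE foo, bar RESTART IDENTITY CASCADE;
    /// ```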
995    pub fn parse_truncate(&mut self) -> Result<Statement, ParserError> {
996        let table = self.parse_keyword(Keyword::TABLE);
997
998        let table_names = self
999            .parse_comma_separated(|p| {
1000                Ok((p.parse_keyword(Keyword::ONLY), p.parse_object_name(false)?))
1001            })?
1002            .into_iter()
1003            .map(|(only, name)| TruncateTableTarget { name, only })
1004            .collect();
1005
1006        let mut partitions = None;
1007        if self.parse_keyword(Keyword::PARTITION) {
1008            self.expect_token(&Token::LParen)?;
1009            partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
1010            self.expect_token(&Token::RParen)?;
1011        }
1012
1013        let mut identity = None;
1014        let mut cascade = None;
1015
1016        if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
1017            identity = if self.parse_keywords(&[Keyword::RESTART, Keyword::IDENTITY]) {
1018                Some(TruncateIdentityOption::Restart)
1019            } else if self.parse_keywords(&[Keyword::CONTINUE, Keyword::IDENTITY]) {
1020                Some(TruncateIdentityOption::Continue)
1021            } else {
1022                None
1023            };
1024
1025            cascade = self.parse_cascade_option();
1026        };
1027
1028        let on_cluster = self.parse_optional_on_cluster()?;
1029
1030        Ok(Truncate {
1031            table_names,
1032            partitions,
1033            table,
1034            identity,
1035            cascade,
1036            on_cluster,
1037        }
1038        .into())
1039    }
1040
1041    fn parse_cascade_option(&mut self) -> Option<CascadeOption> {
1042        if self.parse_keyword(Keyword::CASCADE) {
1043            Some(CascadeOption::Cascade)
1044        } else if self.parse_keyword(Keyword::RESTRICT) {
1045            Some(CascadeOption::Restrict)
1046        } else {
1047            None
1048        }
1049    }
1050
1051    pub fn parse_attach_duckdb_database_options(
1052        &mut self,
1053    ) -> Result<Vec<AttachDuckDBDatabaseOption>, ParserError> {
1054        if !self.consume_token(&Token::LParen) {
1055            return Ok(vec![]);
1056        }
1057
1058        let mut options = vec![];
1059        loop {
1060            if self.parse_keyword(Keyword::READ_ONLY) {
1061                let boolean = if self.parse_keyword(Keyword::TRUE) {
1062                    Some(true)
1063                } else if self.parse_keyword(Keyword::FALSE) {
1064                    Some(false)
1065                } else {
1066                    None
1067                };
1068                options.push(AttachDuckDBDatabaseOption::ReadOnly(boolean));
1069            } else if self.parse_keyword(Keyword::TYPE) {
1070                let ident = self.parse_identifier()?;
1071                options.push(AttachDuckDBDatabaseOption::Type(ident));
1072            } else {
1073                return self.expected("expected one of: ), READ_ONLY, TYPE", self.peek_token());
1074            };
1075
1076            if self.consume_token(&Token::RParen) {
1077                return Ok(options);
1078            } else if self.consume_token(&Token::Comma) {
1079                continue;
1080            } else {
1081                return self.expected("expected one of: ')', ','", self.peek_token());
1082            }
1083        }
1084    }
1085
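    /// Parse a DuckDB `ATTACH [DATABASE]` statement.
    ///
    /// An illustrative example of the syntax this method accepts:
    /// ```sql
    /// ATTACH DATABASE 'file.db' AS file_db (READ_ONLY, TYPE SQLITE);
    /// ```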
1086    pub fn parse_attach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
1087        let database = self.parse_keyword(Keyword::DATABASE);
1088        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
1089        let database_path = self.parse_identifier()?;
1090        let database_alias = if self.parse_keyword(Keyword::AS) {
1091            Some(self.parse_identifier()?)
1092        } else {
1093            None
1094        };
1095
1096        let attach_options = self.parse_attach_duckdb_database_options()?;
1097        Ok(Statement::AttachDuckDBDatabase {
1098            if_not_exists,
1099            database,
1100            database_path,
1101            database_alias,
1102            attach_options,
1103        })
1104    }
1105
1106    pub fn parse_detach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
1107        let database = self.parse_keyword(Keyword::DATABASE);
1108        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
1109        let database_alias = self.parse_identifier()?;
1110        Ok(Statement::DetachDuckDBDatabase {
1111            if_exists,
1112            database,
1113            database_alias,
1114        })
1115    }
1116
1117    pub fn parse_attach_database(&mut self) -> Result<Statement, ParserError> {
1118        let database = self.parse_keyword(Keyword::DATABASE);
1119        let database_file_name = self.parse_expr()?;
1120        self.expect_keyword_is(Keyword::AS)?;
1121        let schema_name = self.parse_identifier()?;
1122        Ok(Statement::AttachDatabase {
1123            database,
1124            schema_name,
1125            database_file_name,
1126        })
1127    }
1128
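    /// Parse an `ANALYZE [TABLE]` statement.
    ///
    /// An illustrative example of the syntax this method accepts (Hive-style):
    /// ```sql
    /// ANALYZE TABLE t PARTITION (year = 2024) COMPUTE STATISTICS NOSCAN;
    /// ```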
1129    pub fn parse_analyze(&mut self) -> Result<Statement, ParserError> {
1130        let has_table_keyword = self.parse_keyword(Keyword::TABLE);
1131        let table_name = self.parse_object_name(false)?;
1132        let mut for_columns = false;
1133        let mut cache_metadata = false;
1134        let mut noscan = false;
1135        let mut partitions = None;
1136        let mut compute_statistics = false;
1137        let mut columns = vec![];
1138        loop {
1139            match self.parse_one_of_keywords(&[
1140                Keyword::PARTITION,
1141                Keyword::FOR,
1142                Keyword::CACHE,
1143                Keyword::NOSCAN,
1144                Keyword::COMPUTE,
1145            ]) {
1146                Some(Keyword::PARTITION) => {
1147                    self.expect_token(&Token::LParen)?;
1148                    partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
1149                    self.expect_token(&Token::RParen)?;
1150                }
1151                Some(Keyword::NOSCAN) => noscan = true,
1152                Some(Keyword::FOR) => {
1153                    self.expect_keyword_is(Keyword::COLUMNS)?;
1154
1155                    columns = self
1156                        .maybe_parse(|parser| {
1157                            parser.parse_comma_separated(|p| p.parse_identifier())
1158                        })?
1159                        .unwrap_or_default();
1160                    for_columns = true
1161                }
1162                Some(Keyword::CACHE) => {
1163                    self.expect_keyword_is(Keyword::METADATA)?;
1164                    cache_metadata = true
1165                }
1166                Some(Keyword::COMPUTE) => {
1167                    self.expect_keyword_is(Keyword::STATISTICS)?;
1168                    compute_statistics = true
1169                }
1170                _ => break,
1171            }
1172        }
1173
1174        Ok(Analyze {
1175            has_table_keyword,
1176            table_name,
1177            for_columns,
1178            columns,
1179            partitions,
1180            cache_metadata,
1181            noscan,
1182            compute_statistics,
1183        }
1184        .into())
1185    }
1186
1187    /// Parse a new expression including wildcard & qualified wildcard.
1188    pub fn parse_wildcard_expr(&mut self) -> Result<Expr, ParserError> {
1189        let index = self.index;
1190
1191        let next_token = self.next_token();
1192        match next_token.token {
1193            t @ (Token::Word(_) | Token::SingleQuotedString(_)) => {
1194                if self.peek_token().token == Token::Period {
1195                    let mut id_parts: Vec<Ident> = vec![match t {
1196                        Token::Word(w) => w.into_ident(next_token.span),
1197                        Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
1198                        _ => {
1199                            return Err(ParserError::ParserError(
1200                                "Internal parser error: unexpected token type".to_string(),
1201                            ))
1202                        }
1203                    }];
1204
1205                    while self.consume_token(&Token::Period) {
1206                        let next_token = self.next_token();
1207                        match next_token.token {
1208                            Token::Word(w) => id_parts.push(w.into_ident(next_token.span)),
1209                            Token::SingleQuotedString(s) => {
1210                                // SQLite has single-quoted identifiers
1211                                id_parts.push(Ident::with_quote('\'', s))
1212                            }
1213                            Token::Mul => {
1214                                return Ok(Expr::QualifiedWildcard(
1215                                    ObjectName::from(id_parts),
1216                                    AttachedToken(next_token),
1217                                ));
1218                            }
1219                            _ => {
1220                                return self
1221                                    .expected("an identifier or a '*' after '.'", next_token);
1222                            }
1223                        }
1224                    }
1225                }
1226            }
1227            Token::Mul => {
1228                return Ok(Expr::Wildcard(AttachedToken(next_token)));
1229            }
1230            _ => (),
1231        };
1232
1233        self.index = index;
1234        self.parse_expr()
1235    }
1236
1237    /// Parse a new expression.
1238    pub fn parse_expr(&mut self) -> Result<Expr, ParserError> {
1239        self.parse_subexpr(self.dialect.prec_unknown())
1240    }
1241
1242    pub fn parse_expr_with_alias_and_order_by(
1243        &mut self,
1244    ) -> Result<ExprWithAliasAndOrderBy, ParserError> {
1245        let expr = self.parse_expr()?;
1246
1247        fn validator(explicit: bool, kw: &Keyword, _parser: &mut Parser) -> bool {
1248            explicit || !&[Keyword::ASC, Keyword::DESC, Keyword::GROUP].contains(kw)
1249        }
1250        let alias = self.parse_optional_alias_inner(None, validator)?;
1251        let order_by = OrderByOptions {
1252            asc: self.parse_asc_desc(),
1253            nulls_first: None,
1254        };
1255        Ok(ExprWithAliasAndOrderBy {
1256            expr: ExprWithAlias { expr, alias },
1257            order_by,
1258        })
1259    }
1260
1261    /// Parse tokens until the precedence changes.
1262    pub fn parse_subexpr(&mut self, precedence: u8) -> Result<Expr, ParserError> {
1263        let _guard = self.recursion_counter.try_decrease()?;
1264        debug!("parsing expr");
1265        let mut expr = self.parse_prefix()?;
1266
1267        expr = self.parse_compound_expr(expr, vec![])?;
1268
1269        debug!("prefix: {expr:?}");
1270        loop {
1271            let next_precedence = self.get_next_precedence()?;
1272            debug!("next precedence: {next_precedence:?}");
1273
1274            if precedence >= next_precedence {
1275                break;
1276            }
1277
1278            // The period operator is handled exclusively by the
1279            // compound field access parsing.
1280            if Token::Period == self.peek_token_ref().token {
1281                break;
1282            }
1283
1284            expr = self.parse_infix(expr, next_precedence)?;
1285        }
1286        Ok(expr)
1287    }
1288
1289    pub fn parse_assert(&mut self) -> Result<Statement, ParserError> {
1290        let condition = self.parse_expr()?;
1291        let message = if self.parse_keyword(Keyword::AS) {
1292            Some(self.parse_expr()?)
1293        } else {
1294            None
1295        };
1296
1297        Ok(Statement::Assert { condition, message })
1298    }
1299
1300    pub fn parse_savepoint(&mut self) -> Result<Statement, ParserError> {
1301        let name = self.parse_identifier()?;
1302        Ok(Statement::Savepoint { name })
1303    }
1304
1305    pub fn parse_release(&mut self) -> Result<Statement, ParserError> {
1306        let _ = self.parse_keyword(Keyword::SAVEPOINT);
1307        let name = self.parse_identifier()?;
1308
1309        Ok(Statement::ReleaseSavepoint { name })
1310    }
1311
1312    pub fn parse_listen(&mut self) -> Result<Statement, ParserError> {
1313        let channel = self.parse_identifier()?;
1314        Ok(Statement::LISTEN { channel })
1315    }
1316
1317    pub fn parse_unlisten(&mut self) -> Result<Statement, ParserError> {
1318        let channel = if self.consume_token(&Token::Mul) {
1319            Ident::new(Expr::Wildcard(AttachedToken::empty()).to_string())
1320        } else {
1321            match self.parse_identifier() {
1322                Ok(expr) => expr,
1323                _ => {
1324                    self.prev_token();
1325                    return self.expected("wildcard or identifier", self.peek_token());
1326                }
1327            }
1328        };
1329        Ok(Statement::UNLISTEN { channel })
1330    }
1331
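    /// Parse a Postgres `NOTIFY` statement.
    ///
    /// An illustrative example of the syntax this method accepts:
    /// ```sql
    /// NOTIFY my_channel, 'payload';
    /// ```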
1332    pub fn parse_notify(&mut self) -> Result<Statement, ParserError> {
1333        let channel = self.parse_identifier()?;
1334        let payload = if self.consume_token(&Token::Comma) {
1335            Some(self.parse_literal_string()?)
1336        } else {
1337            None
1338        };
1339        Ok(Statement::NOTIFY { channel, payload })
1340    }
1341
1342    /// Parses a `RENAME TABLE` statement. See [Statement::RenameTable]
1343    pub fn parse_rename(&mut self) -> Result<Statement, ParserError> {
1344        if self.peek_keyword(Keyword::TABLE) {
1345            self.expect_keyword(Keyword::TABLE)?;
1346            let rename_tables = self.parse_comma_separated(|parser| {
1347                let old_name = parser.parse_object_name(false)?;
1348                parser.expect_keyword(Keyword::TO)?;
1349                let new_name = parser.parse_object_name(false)?;
1350
1351                Ok(RenameTable { old_name, new_name })
1352            })?;
1353            Ok(Statement::RenameTable(rename_tables))
1354        } else {
1355            self.expected("KEYWORD `TABLE` after RENAME", self.peek_token())
1356        }
1357    }
1358
1359    /// Tries to parse an expression by matching the specified word to known keywords that have a special meaning in the dialect.
    /// Returns `None` if no match is found.
1361    fn parse_expr_prefix_by_reserved_word(
1362        &mut self,
1363        w: &Word,
1364        w_span: Span,
1365    ) -> Result<Option<Expr>, ParserError> {
1366        match w.keyword {
1367            Keyword::TRUE | Keyword::FALSE if self.dialect.supports_boolean_literals() => {
1368                self.prev_token();
1369                Ok(Some(Expr::Value(self.parse_value()?)))
1370            }
1371            Keyword::NULL => {
1372                self.prev_token();
1373                Ok(Some(Expr::Value(self.parse_value()?)))
1374            }
1375            Keyword::CURRENT_CATALOG
1376            | Keyword::CURRENT_USER
1377            | Keyword::SESSION_USER
1378            | Keyword::USER
1379            if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
1380                {
1381                    Ok(Some(Expr::Function(Function {
1382                        name: ObjectName::from(vec![w.clone().into_ident(w_span)]),
1383                        uses_odbc_syntax: false,
1384                        parameters: FunctionArguments::None,
1385                        args: FunctionArguments::None,
1386                        null_treatment: None,
1387                        filter: None,
1388                        over: None,
1389                        within_group: vec![],
1390                    })))
1391                }
1392            Keyword::CURRENT_TIMESTAMP
1393            | Keyword::CURRENT_TIME
1394            | Keyword::CURRENT_DATE
1395            | Keyword::LOCALTIME
1396            | Keyword::LOCALTIMESTAMP => {
1397                Ok(Some(self.parse_time_functions(ObjectName::from(vec![w.clone().into_ident(w_span)]))?))
1398            }
1399            Keyword::CASE => Ok(Some(self.parse_case_expr()?)),
1400            Keyword::CONVERT => Ok(Some(self.parse_convert_expr(false)?)),
1401            Keyword::TRY_CONVERT if self.dialect.supports_try_convert() => Ok(Some(self.parse_convert_expr(true)?)),
1402            Keyword::CAST => Ok(Some(self.parse_cast_expr(CastKind::Cast)?)),
1403            Keyword::TRY_CAST => Ok(Some(self.parse_cast_expr(CastKind::TryCast)?)),
1404            Keyword::SAFE_CAST => Ok(Some(self.parse_cast_expr(CastKind::SafeCast)?)),
1405            Keyword::EXISTS
            // Databricks has a function named `exists`, so only parse EXISTS
            // as a subquery expression when it is followed by SELECT or WITH.
1407            if !dialect_of!(self is DatabricksDialect)
1408                || matches!(
1409                        self.peek_nth_token_ref(1).token,
1410                        Token::Word(Word {
1411                            keyword: Keyword::SELECT | Keyword::WITH,
1412                            ..
1413                        })
1414                    ) =>
1415                {
1416                    Ok(Some(self.parse_exists_expr(false)?))
1417                }
1418            Keyword::EXTRACT => Ok(Some(self.parse_extract_expr()?)),
1419            Keyword::CEIL => Ok(Some(self.parse_ceil_floor_expr(true)?)),
1420            Keyword::FLOOR => Ok(Some(self.parse_ceil_floor_expr(false)?)),
1421            Keyword::POSITION if self.peek_token_ref().token == Token::LParen => {
1422                Ok(Some(self.parse_position_expr(w.clone().into_ident(w_span))?))
1423            }
1424            Keyword::SUBSTR | Keyword::SUBSTRING => {
1425                self.prev_token();
1426                Ok(Some(self.parse_substring()?))
1427            }
1428            Keyword::OVERLAY => Ok(Some(self.parse_overlay_expr()?)),
1429            Keyword::TRIM => Ok(Some(self.parse_trim_expr()?)),
1430            Keyword::INTERVAL => Ok(Some(self.parse_interval()?)),
1431            // Treat ARRAY[1,2,3] as an array [1,2,3], otherwise try as subquery or a function call
1432            Keyword::ARRAY if *self.peek_token_ref() == Token::LBracket => {
1433                self.expect_token(&Token::LBracket)?;
1434                Ok(Some(self.parse_array_expr(true)?))
1435            }
1436            Keyword::ARRAY
1437            if self.peek_token() == Token::LParen
1438                && !dialect_of!(self is ClickHouseDialect | DatabricksDialect) =>
1439                {
1440                    self.expect_token(&Token::LParen)?;
1441                    let query = self.parse_query()?;
1442                    self.expect_token(&Token::RParen)?;
1443                    Ok(Some(Expr::Function(Function {
1444                        name: ObjectName::from(vec![w.clone().into_ident(w_span)]),
1445                        uses_odbc_syntax: false,
1446                        parameters: FunctionArguments::None,
1447                        args: FunctionArguments::Subquery(query),
1448                        filter: None,
1449                        null_treatment: None,
1450                        over: None,
1451                        within_group: vec![],
1452                    })))
1453                }
1454            Keyword::NOT => Ok(Some(self.parse_not()?)),
1455            Keyword::MATCH if self.dialect.supports_match_against() => {
1456                Ok(Some(self.parse_match_against()?))
1457            }
1458            Keyword::STRUCT if self.dialect.supports_struct_literal() => {
1459                let struct_expr = self.parse_struct_literal()?;
1460                Ok(Some(struct_expr))
1461            }
1462            Keyword::PRIOR if matches!(self.state, ParserState::ConnectBy) => {
1463                let expr = self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?;
1464                Ok(Some(Expr::Prior(Box::new(expr))))
1465            }
1466            Keyword::MAP if *self.peek_token_ref() == Token::LBrace && self.dialect.support_map_literal_syntax() => {
1467                Ok(Some(self.parse_duckdb_map_literal()?))
1468            }
1469            _ if self.dialect.supports_geometric_types() => match w.keyword {
1470                Keyword::CIRCLE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Circle)?)),
1471                Keyword::BOX => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricBox)?)),
1472                Keyword::PATH => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricPath)?)),
1473                Keyword::LINE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Line)?)),
1474                Keyword::LSEG => Ok(Some(self.parse_geometric_type(GeometricTypeKind::LineSegment)?)),
1475                Keyword::POINT => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Point)?)),
1476                Keyword::POLYGON => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Polygon)?)),
1477                _ => Ok(None),
1478            },
1479            _ => Ok(None),
1480        }
1481    }
1482
1483    /// Tries to parse an expression prefix from a word that is not known to have a special meaning in the dialect.
1484    fn parse_expr_prefix_by_unreserved_word(
1485        &mut self,
1486        w: &Word,
1487        w_span: Span,
1488    ) -> Result<Expr, ParserError> {
1489        match self.peek_token().token {
1490            Token::LParen if !self.peek_outer_join_operator() => {
1491                let id_parts = vec![w.clone().into_ident(w_span)];
1492                self.parse_function(ObjectName::from(id_parts))
1493            }
1494            // string introducer https://dev.mysql.com/doc/refman/8.0/en/charset-introducer.html
1495            Token::SingleQuotedString(_)
1496            | Token::DoubleQuotedString(_)
1497            | Token::HexStringLiteral(_)
1498                if w.value.starts_with('_') =>
1499            {
1500                Ok(Expr::Prefixed {
1501                    prefix: w.clone().into_ident(w_span),
1502                    value: self.parse_introduced_string_expr()?.into(),
1503                })
1504            }
1516            Token::Arrow if self.dialect.supports_lambda_functions() => {
1517                self.expect_token(&Token::Arrow)?;
1518                Ok(Expr::Lambda(LambdaFunction {
1519                    params: OneOrManyWithParens::One(w.clone().into_ident(w_span)),
1520                    body: Box::new(self.parse_expr()?),
1521                }))
1522            }
1523            _ => Ok(Expr::Identifier(w.clone().into_ident(w_span))),
1524        }
1525    }
1526
1527    /// Parse an expression prefix.
1528    pub fn parse_prefix(&mut self) -> Result<Expr, ParserError> {
1529        // allow the dialect to override prefix parsing
1530        if let Some(prefix) = self.dialect.parse_prefix(self) {
1531            return prefix;
1532        }
1533
1534        // PostgreSQL allows any string literal to be preceded by a type name, indicating that the
1535        // string literal represents a literal of that type. Some examples:
1536        //
1537        //      DATE '2020-05-20'
1538        //      TIMESTAMP WITH TIME ZONE '2020-05-20 7:43:54'
1539        //      BOOL 'true'
1540        //
1541        // The first two are standard SQL, while the latter is a PostgreSQL extension. Complicating
1542        // matters is the fact that INTERVAL string literals may optionally be followed by special
1543        // keywords, e.g.:
1544        //
1545        //      INTERVAL '7' DAY
1546        //
1547        // Note also that naively `SELECT date` looks like a syntax error because the `date` type
1548        // name is not followed by a string literal, but in fact in PostgreSQL it is a valid
1549        // expression that should parse as the column name "date".
1550        let loc = self.peek_token_ref().span.start;
1551        let opt_expr = self.maybe_parse(|parser| {
1552            match parser.parse_data_type()? {
1553                DataType::Interval { .. } => parser.parse_interval(),
1554                // PostgreSQL allows almost any identifier to be used as custom data type name,
1555                // and we support that in `parse_data_type()`. But unlike Postgres we don't
1556                // have a list of globally reserved keywords (since they vary across dialects),
1557                // so given `NOT 'a' LIKE 'b'`, we'd accept `NOT` as a possible custom data type
1558                // name, resulting in `NOT 'a'` being recognized as a `TypedString` instead of
1559                // an unary negation `NOT ('a' LIKE 'b')`. To solve this, we don't accept the
1560                // `type 'string'` syntax for the custom data types at all.
1561                DataType::Custom(..) => parser_err!("dummy", loc),
1562                data_type => Ok(Expr::TypedString(TypedString {
1563                    data_type,
1564                    value: parser.parse_value()?,
1565                    uses_odbc_syntax: false,
1566                })),
1567            }
1568        })?;
1569
1570        if let Some(expr) = opt_expr {
1571            return Ok(expr);
1572        }
1573
1574        // Cache some dialect properties to avoid lifetime issues with the
1575        // next_token reference.
1576
1577        let dialect = self.dialect;
1578
1579        self.advance_token();
1580        let next_token_index = self.get_current_index();
1581        let next_token = self.get_current_token();
1582        let span = next_token.span;
1583        let expr = match &next_token.token {
1584            Token::Word(w) => {
1585                // The word we consumed may fall into one of two cases: it has a special meaning, or not.
1586                // For example, in Snowflake, the word `interval` may have two meanings depending on the context:
1587                // `SELECT CURRENT_DATE() + INTERVAL '1 DAY', MAX(interval) FROM tbl;`
1588                //                          ^^^^^^^^^^^^^^^^      ^^^^^^^^
1589                //                         interval expression   identifier
1590                //
1591                // We first try to parse the word and following tokens as a special expression, and if that fails,
1592                // we rollback and try to parse it as an identifier.
1593                let w = w.clone();
1594                match self.try_parse(|parser| parser.parse_expr_prefix_by_reserved_word(&w, span)) {
1595                    // This word indicated an expression prefix and parsing was successful
1596                    Ok(Some(expr)) => Ok(expr),
1597
1598                    // No expression prefix associated with this word
1599                    Ok(None) => Ok(self.parse_expr_prefix_by_unreserved_word(&w, span)?),
1600
1601                    // If parsing the word as a special expression failed, there are two possibilities:
1602                    // 1. The statement is malformed, e.g. `SELECT INTERVAL '1 DAI'` (`DAI` instead of `DAY`)
1603                    // 2. The word is used as an identifier, e.g. `SELECT MAX(interval) FROM tbl`
1604                    // We first try to parse the word as an identifier and if that fails
1605                    // we rollback and return the parsing error we got from trying to parse a
1606                    // special expression (to maintain backwards compatibility of parsing errors).
1607                    Err(e) => {
1608                        if !self.dialect.is_reserved_for_identifier(w.keyword) {
1609                            if let Ok(Some(expr)) = self.maybe_parse(|parser| {
1610                                parser.parse_expr_prefix_by_unreserved_word(&w, span)
1611                            }) {
1612                                return Ok(expr);
1613                            }
1614                        }
1615                        return Err(e);
1616                    }
1617                }
1618            } // End of Token::Word
1619            // array `[1, 2, 3]`
1620            Token::LBracket => self.parse_array_expr(false),
1621            tok @ Token::Minus | tok @ Token::Plus => {
1622                let op = if *tok == Token::Plus {
1623                    UnaryOperator::Plus
1624                } else {
1625                    UnaryOperator::Minus
1626                };
1627                Ok(Expr::UnaryOp {
1628                    op,
1629                    expr: Box::new(
1630                        self.parse_subexpr(self.dialect.prec_value(Precedence::MulDivModOp))?,
1631                    ),
1632                })
1633            }
1634            Token::ExclamationMark if dialect.supports_bang_not_operator() => Ok(Expr::UnaryOp {
1635                op: UnaryOperator::BangNot,
1636                expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
1637            }),
1638            tok @ Token::DoubleExclamationMark
1639            | tok @ Token::PGSquareRoot
1640            | tok @ Token::PGCubeRoot
1641            | tok @ Token::AtSign
1642                if dialect_is!(dialect is PostgreSqlDialect) =>
1643            {
1644                let op = match tok {
1645                    Token::DoubleExclamationMark => UnaryOperator::PGPrefixFactorial,
1646                    Token::PGSquareRoot => UnaryOperator::PGSquareRoot,
1647                    Token::PGCubeRoot => UnaryOperator::PGCubeRoot,
1648                    Token::AtSign => UnaryOperator::PGAbs,
1649                    _ => {
1650                        return Err(ParserError::ParserError(
1651                            "Internal parser error: unexpected unary operator token".to_string(),
1652                        ))
1653                    }
1654                };
1655                Ok(Expr::UnaryOp {
1656                    op,
1657                    expr: Box::new(
1658                        self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1659                    ),
1660                })
1661            }
1662            Token::Tilde => Ok(Expr::UnaryOp {
1663                op: UnaryOperator::BitwiseNot,
1664                expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?),
1665            }),
1666            tok @ Token::Sharp
1667            | tok @ Token::AtDashAt
1668            | tok @ Token::AtAt
1669            | tok @ Token::QuestionMarkDash
1670            | tok @ Token::QuestionPipe
1671                if self.dialect.supports_geometric_types() =>
1672            {
1673                let op = match tok {
1674                    Token::Sharp => UnaryOperator::Hash,
1675                    Token::AtDashAt => UnaryOperator::AtDashAt,
1676                    Token::AtAt => UnaryOperator::DoubleAt,
1677                    Token::QuestionMarkDash => UnaryOperator::QuestionDash,
1678                    Token::QuestionPipe => UnaryOperator::QuestionPipe,
1679                    _ => {
1680                        return Err(ParserError::ParserError(format!(
1681                            "Unexpected token in unary operator parsing: {tok:?}"
1682                        )))
1683                    }
1684                };
1685                Ok(Expr::UnaryOp {
1686                    op,
1687                    expr: Box::new(
1688                        self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1689                    ),
1690                })
1691            }
1692            Token::EscapedStringLiteral(_) if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) =>
1693            {
1694                self.prev_token();
1695                Ok(Expr::Value(self.parse_value()?))
1696            }
1697            Token::UnicodeStringLiteral(_) => {
1698                self.prev_token();
1699                Ok(Expr::Value(self.parse_value()?))
1700            }
1701            Token::Number(_, _)
1702            | Token::SingleQuotedString(_)
1703            | Token::DoubleQuotedString(_)
1704            | Token::TripleSingleQuotedString(_)
1705            | Token::TripleDoubleQuotedString(_)
1706            | Token::DollarQuotedString(_)
1707            | Token::SingleQuotedByteStringLiteral(_)
1708            | Token::DoubleQuotedByteStringLiteral(_)
1709            | Token::TripleSingleQuotedByteStringLiteral(_)
1710            | Token::TripleDoubleQuotedByteStringLiteral(_)
1711            | Token::SingleQuotedRawStringLiteral(_)
1712            | Token::DoubleQuotedRawStringLiteral(_)
1713            | Token::TripleSingleQuotedRawStringLiteral(_)
1714            | Token::TripleDoubleQuotedRawStringLiteral(_)
1715            | Token::NationalStringLiteral(_)
1716            | Token::HexStringLiteral(_) => {
1717                self.prev_token();
1718                Ok(Expr::Value(self.parse_value()?))
1719            }
1720            Token::LParen => {
1721                let expr =
1722                    if let Some(expr) = self.try_parse_expr_sub_query()? {
1723                        expr
1724                    } else if let Some(lambda) = self.try_parse_lambda()? {
1725                        return Ok(lambda);
1726                    } else {
1727                        let exprs = self.parse_comma_separated(Parser::parse_expr)?;
1728                        match exprs.len() {
1729                            0 => return Err(ParserError::ParserError(
1730                                "Internal parser error: parse_comma_separated returned empty list"
1731                                    .to_string(),
1732                            )),
1733                            1 => Expr::Nested(Box::new(exprs.into_iter().next().unwrap())),
1734                            _ => Expr::Tuple(exprs),
1735                        }
1736                    };
1737                self.expect_token(&Token::RParen)?;
1738                Ok(expr)
1739            }
1740            Token::Placeholder(_) | Token::Colon | Token::AtSign => {
1741                self.prev_token();
1742                Ok(Expr::Value(self.parse_value()?))
1743            }
1744            Token::LBrace => {
1745                self.prev_token();
1746                self.parse_lbrace_expr()
1747            }
1748            _ => self.expected_at("an expression", next_token_index),
1749        }?;
1750
1751        if !self.in_column_definition_state() && self.parse_keyword(Keyword::COLLATE) {
1752            Ok(Expr::Collate {
1753                expr: Box::new(expr),
1754                collation: self.parse_object_name(false)?,
1755            })
1756        } else {
1757            Ok(expr)
1758        }
1759    }
1760
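    /// Parses the quoted value of a geometric literal whose type keyword has
    /// already been consumed, e.g. (an illustrative PostgreSQL-style fragment):
    ///
    /// ```sql
    /// POINT '(1, 2)'
    /// ```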
1761    fn parse_geometric_type(&mut self, kind: GeometricTypeKind) -> Result<Expr, ParserError> {
1762        Ok(Expr::TypedString(TypedString {
1763            data_type: DataType::GeometricType(kind),
1764            value: self.parse_value()?,
1765            uses_odbc_syntax: false,
1766        }))
1767    }
1768
1769    /// Try to parse an [Expr::CompoundFieldAccess] like `a.b.c` or `a.b[1].c`.
1770    /// If all the fields are `Expr::Identifier`s, return an [Expr::CompoundIdentifier] instead.
1771    /// If only the root exists, return the root.
1772    /// Compound expressions may be delimited by period (`.`) or bracket
1773    /// notation, for example `a.b.c` or `a.b[1]`.
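    ///
    /// Other forms handled here (illustrative fragments; dialect support varies):
    ///
    /// ```sql
    /// tbl.*       -- qualified wildcard
    /// t.col(+)    -- outer join operator
    /// ```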
1775    pub fn parse_compound_expr(
1776        &mut self,
1777        root: Expr,
1778        mut chain: Vec<AccessExpr>,
1779    ) -> Result<Expr, ParserError> {
1780        let mut ending_wildcard: Option<TokenWithSpan> = None;
1781        loop {
1782            if self.consume_token(&Token::Period) {
1783                let next_token = self.peek_token_ref();
1784                match &next_token.token {
1785                    Token::Mul => {
1786                        // Postgres explicitly allows funcnm(tablenm.*); for example,
1787                        // the array_agg function takes this code path.
1788                        if dialect_of!(self is PostgreSqlDialect) {
1789                            ending_wildcard = Some(self.next_token());
1790                        } else {
1791                            // Put back the consumed `.` tokens before exiting.
1792                            // If this expression is being parsed in the
1793                            // context of a projection, then the `.*` could imply
1794                            // a wildcard expansion. For example:
1795                            // `SELECT STRUCT('foo').* FROM T`
1796                            self.prev_token(); // .
1797                        }
1798
1799                        break;
1800                    }
1801                    Token::SingleQuotedString(s) => {
1802                        let expr =
1803                            Expr::Identifier(Ident::with_quote_and_span('\'', next_token.span, s));
1804                        chain.push(AccessExpr::Dot(expr));
1805                        self.advance_token(); // The consumed string
1806                    }
1807                    // Fallback to parsing an arbitrary expression.
1808                    _ => match self.parse_subexpr(self.dialect.prec_value(Precedence::Period))? {
1809                        // If we get back a compound field access or identifier,
1810                        // we flatten the nested expression.
1811                        // For example if the current root is `foo`
1812                        // and we get back a compound identifier expression `bar.baz`
1813                        // The full expression should be `foo.bar.baz` (i.e.
1814                        // a root with an access chain with 2 entries) and not
1815                        // `foo.(bar.baz)` (i.e. a root with an access chain with
1816                        // 1 entry).
1817                        Expr::CompoundFieldAccess { root, access_chain } => {
1818                            chain.push(AccessExpr::Dot(*root));
1819                            chain.extend(access_chain);
1820                        }
1821                        Expr::CompoundIdentifier(parts) => chain
1822                            .extend(parts.into_iter().map(Expr::Identifier).map(AccessExpr::Dot)),
1823                        expr => {
1824                            chain.push(AccessExpr::Dot(expr));
1825                        }
1826                    },
1827                }
1828            } else if !self.dialect.supports_partiql()
1829                && self.peek_token_ref().token == Token::LBracket
1830            {
1831                self.parse_multi_dim_subscript(&mut chain)?;
1832            } else {
1833                break;
1834            }
1835        }
1836
1837        let tok_index = self.get_current_index();
1838        if let Some(wildcard_token) = ending_wildcard {
1839            if !Self::is_all_ident(&root, &chain) {
1840                return self.expected("an identifier or a '*' after '.'", self.peek_token());
1841            };
1842            Ok(Expr::QualifiedWildcard(
1843                ObjectName::from(Self::exprs_to_idents(root, chain)?),
1844                AttachedToken(wildcard_token),
1845            ))
1846        } else if self.maybe_parse_outer_join_operator() {
1847            if !Self::is_all_ident(&root, &chain) {
1848                return self.expected_at("column identifier before (+)", tok_index);
1849            };
1850            let expr = if chain.is_empty() {
1851                root
1852            } else {
1853                Expr::CompoundIdentifier(Self::exprs_to_idents(root, chain)?)
1854            };
1855            Ok(Expr::OuterJoin(expr.into()))
1856        } else {
1857            Self::build_compound_expr(root, chain)
1858        }
1859    }
1860
1861    /// Combines a root expression and an access chain to form
1862    /// a compound expression, which may be an [Expr::CompoundFieldAccess]
1863    /// or another special-cased expression such as [Expr::CompoundIdentifier]
1864    /// or [Expr::OuterJoin].
1865    fn build_compound_expr(
1866        root: Expr,
1867        mut access_chain: Vec<AccessExpr>,
1868    ) -> Result<Expr, ParserError> {
1869        if access_chain.is_empty() {
1870            return Ok(root);
1871        }
1872
1873        if Self::is_all_ident(&root, &access_chain) {
1874            return Ok(Expr::CompoundIdentifier(Self::exprs_to_idents(
1875                root,
1876                access_chain,
1877            )?));
1878        }
1879
1880        // Flatten qualified function calls.
1881        // For example, the expression `a.b.c.foo(1,2,3)` should
1882        // represent a function called `a.b.c.foo`, rather than
1883        // a composite expression.
1884        if matches!(root, Expr::Identifier(_))
1885            && matches!(
1886                access_chain.last(),
1887                Some(AccessExpr::Dot(Expr::Function(_)))
1888            )
1889            && access_chain
1890                .iter()
1891                .rev()
1892                .skip(1) // All except the Function
1893                .all(|access| matches!(access, AccessExpr::Dot(Expr::Identifier(_))))
1894        {
1895            let Some(AccessExpr::Dot(Expr::Function(mut func))) = access_chain.pop() else {
1896                return parser_err!("expected function expression", root.span().start);
1897            };
1898
1899            let compound_func_name = [root]
1900                .into_iter()
1901                .chain(access_chain.into_iter().flat_map(|access| match access {
1902                    AccessExpr::Dot(expr) => Some(expr),
1903                    _ => None,
1904                }))
1905                .flat_map(|expr| match expr {
1906                    Expr::Identifier(ident) => Some(ident),
1907                    _ => None,
1908                })
1909                .map(ObjectNamePart::Identifier)
1910                .chain(func.name.0)
1911                .collect::<Vec<_>>();
1912            func.name = ObjectName(compound_func_name);
1913
1914            return Ok(Expr::Function(func));
1915        }
1916
1917        // Flatten qualified outer join expressions.
1918        // For example, the expression `T.foo(+)` should
1919        // represent an outer join on the column name `T.foo`
1920        // rather than a composite expression.
1921        if access_chain.len() == 1
1922            && matches!(
1923                access_chain.last(),
1924                Some(AccessExpr::Dot(Expr::OuterJoin(_)))
1925            )
1926        {
1927            let Some(AccessExpr::Dot(Expr::OuterJoin(inner_expr))) = access_chain.pop() else {
1928                return parser_err!("expected (+) expression", root.span().start);
1929            };
1930
1931            if !Self::is_all_ident(&root, &[]) {
1932                return parser_err!("column identifier before (+)", root.span().start);
1933            };
1934
1935            let token_start = root.span().start;
1936            let mut idents = Self::exprs_to_idents(root, vec![])?;
1937            match *inner_expr {
1938                Expr::CompoundIdentifier(suffix) => idents.extend(suffix),
1939                Expr::Identifier(suffix) => idents.push(suffix),
1940                _ => {
1941                    return parser_err!("column identifier before (+)", token_start);
1942                }
1943            }
1944
1945            return Ok(Expr::OuterJoin(Expr::CompoundIdentifier(idents).into()));
1946        }
1947
1948        Ok(Expr::CompoundFieldAccess {
1949            root: Box::new(root),
1950            access_chain,
1951        })
1952    }
1953
1954    fn keyword_to_modifier(k: Keyword) -> Option<ContextModifier> {
1955        match k {
1956            Keyword::LOCAL => Some(ContextModifier::Local),
1957            Keyword::GLOBAL => Some(ContextModifier::Global),
1958            Keyword::SESSION => Some(ContextModifier::Session),
1959            _ => None,
1960        }
1961    }
1962
1963    /// Check if the root is an identifier and all fields are identifiers.
1964    fn is_all_ident(root: &Expr, fields: &[AccessExpr]) -> bool {
1965        if !matches!(root, Expr::Identifier(_)) {
1966            return false;
1967        }
1968        fields
1969            .iter()
1970            .all(|x| matches!(x, AccessExpr::Dot(Expr::Identifier(_))))
1971    }
1972
1973    /// Convert a root and a list of fields to a list of identifiers.
1974    fn exprs_to_idents(root: Expr, fields: Vec<AccessExpr>) -> Result<Vec<Ident>, ParserError> {
1975        let mut idents = vec![];
1976        if let Expr::Identifier(root) = root {
1977            idents.push(root);
1978            for x in fields {
1979                if let AccessExpr::Dot(Expr::Identifier(ident)) = x {
1980                    idents.push(ident);
1981                } else {
1982                    return parser_err!(
1983                        format!("Expected identifier, found: {}", x),
1984                        x.span().start
1985                    );
1986                }
1987            }
1988            Ok(idents)
1989        } else {
1990            parser_err!(
1991                format!("Expected identifier, found: {}", root),
1992                root.span().start
1993            )
1994        }
1995    }
1996
1997    /// Returns true if the next tokens indicate the outer join operator `(+)`.
1998    fn peek_outer_join_operator(&mut self) -> bool {
1999        if !self.dialect.supports_outer_join_operator() {
2000            return false;
2001        }
2002
2003        let [maybe_lparen, maybe_plus, maybe_rparen] = self.peek_tokens_ref();
2004        Token::LParen == maybe_lparen.token
2005            && Token::Plus == maybe_plus.token
2006            && Token::RParen == maybe_rparen.token
2007    }
2008
2009    /// If the next tokens indicate the outer join operator `(+)`, consume
2010    /// the tokens and return true.
2011    fn maybe_parse_outer_join_operator(&mut self) -> bool {
2012        self.dialect.supports_outer_join_operator()
2013            && self.consume_tokens(&[Token::LParen, Token::Plus, Token::RParen])
2014    }
2015
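    /// Parses a parenthesized, comma-separated list of utility options, where each
    /// option is a name with an optional argument. For example, the option list of a
    /// PostgreSQL-style `EXPLAIN` (an illustrative fragment):
    ///
    /// ```sql
    /// (ANALYZE, FORMAT JSON)
    /// ```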
2016    pub fn parse_utility_options(&mut self) -> Result<Vec<UtilityOption>, ParserError> {
2017        self.expect_token(&Token::LParen)?;
2018        let options = self.parse_comma_separated(Self::parse_utility_option)?;
2019        self.expect_token(&Token::RParen)?;
2020
2021        Ok(options)
2022    }
2023
2024    fn parse_utility_option(&mut self) -> Result<UtilityOption, ParserError> {
2025        let name = self.parse_identifier()?;
2026
2027        let next_token = self.peek_token();
2028        if next_token == Token::Comma || next_token == Token::RParen {
2029            return Ok(UtilityOption { name, arg: None });
2030        }
2031        let arg = self.parse_expr()?;
2032
2033        Ok(UtilityOption {
2034            name,
2035            arg: Some(arg),
2036        })
2037    }
2038
2039    fn try_parse_expr_sub_query(&mut self) -> Result<Option<Expr>, ParserError> {
2040        if !self.peek_sub_query() {
2041            return Ok(None);
2042        }
2043
2044        Ok(Some(Expr::Subquery(self.parse_query()?)))
2045    }
2046
2047    fn try_parse_lambda(&mut self) -> Result<Option<Expr>, ParserError> {
2048        if !self.dialect.supports_lambda_functions() {
2049            return Ok(None);
2050        }
2051        self.maybe_parse(|p| {
2052            let params = p.parse_comma_separated(|p| p.parse_identifier())?;
2053            p.expect_token(&Token::RParen)?;
2054            p.expect_token(&Token::Arrow)?;
2055            let expr = p.parse_expr()?;
2056            Ok(Expr::Lambda(LambdaFunction {
2057                params: OneOrManyWithParens::Many(params),
2058                body: Box::new(expr),
2059            }))
2060        })
2061    }
2062
2063    /// Tries to parse the body of an ODBC escape sequence,
2064    /// i.e. without the enclosing braces.
2065    ///
2066    /// Currently implemented: scalar function calls; date, time, and timestamp literals.
2067    ///
2068    /// See <https://learn.microsoft.com/en-us/sql/odbc/reference/develop-app/escape-sequences-in-odbc?view=sql-server-2017>
2069    fn maybe_parse_odbc_body(&mut self) -> Result<Option<Expr>, ParserError> {
2070        // Attempt 1: Try to parse it as a function.
2071        if let Some(expr) = self.maybe_parse_odbc_fn_body()? {
2072            return Ok(Some(expr));
2073        }
2074        // Attempt 2: Try to parse it as a Date, Time or Timestamp Literal
2075        self.maybe_parse_odbc_body_datetime()
2076    }
2077
2078    /// Tries to parse the body of an [ODBC Date, Time, and Timestamp Literals] escape sequence.
2079    ///
2080    /// ```sql
2081    /// {d '2025-07-17'}
2082    /// {t '14:12:01'}
2083    /// {ts '2025-07-17 14:12:01'}
2084    /// ```
2085    ///
2086    /// [ODBC Date, Time, and Timestamp Literals]:
2087    /// https://learn.microsoft.com/en-us/sql/odbc/reference/develop-app/date-time-and-timestamp-literals?view=sql-server-2017
2088    fn maybe_parse_odbc_body_datetime(&mut self) -> Result<Option<Expr>, ParserError> {
2089        self.maybe_parse(|p| {
2090            let token = p.next_token().clone();
2091            let word_string = token.token.to_string();
2092            let data_type = match word_string.as_str() {
2093                "t" => DataType::Time(None, TimezoneInfo::None),
2094                "d" => DataType::Date,
2095                "ts" => DataType::Timestamp(None, TimezoneInfo::None),
2096                _ => return p.expected("ODBC datetime keyword (t, d, or ts)", token),
2097            };
2098            let value = p.parse_value()?;
2099            Ok(Expr::TypedString(TypedString {
2100                data_type,
2101                value,
2102                uses_odbc_syntax: true,
2103            }))
2104        })
2105    }
2106
2107    /// Tries to parse the body of an [ODBC function] call.
2108    /// i.e. without the enclosing braces
2109    ///
2110    /// ```sql
2111    /// fn myfunc(1,2,3)
2112    /// ```
2113    ///
2114    /// [ODBC function]: https://learn.microsoft.com/en-us/sql/odbc/reference/develop-app/scalar-function-calls?view=sql-server-2017
2115    fn maybe_parse_odbc_fn_body(&mut self) -> Result<Option<Expr>, ParserError> {
2116        self.maybe_parse(|p| {
2117            p.expect_keyword(Keyword::FN)?;
2118            let fn_name = p.parse_object_name(false)?;
2119            let mut fn_call = p.parse_function_call(fn_name)?;
2120            fn_call.uses_odbc_syntax = true;
2121            Ok(Expr::Function(fn_call))
2122        })
2123    }
2124
2125    pub fn parse_function(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2126        self.parse_function_call(name).map(Expr::Function)
2127    }
2128
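    /// Parses the argument list and trailing clauses of a function call whose name has
    /// already been parsed, e.g. (illustrative fragments; clause support is dialect-dependent):
    ///
    /// ```sql
    /// COUNT(*) FILTER (WHERE x > 0) OVER (PARTITION BY y)
    /// PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY x)
    /// ```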
2129    fn parse_function_call(&mut self, name: ObjectName) -> Result<Function, ParserError> {
2130        self.expect_token(&Token::LParen)?;
2131
2132        // Snowflake permits a subquery to be passed as an argument without
2133        // an enclosing set of parens if it's the only argument.
2134        if dialect_of!(self is SnowflakeDialect) && self.peek_sub_query() {
2135            let subquery = self.parse_query()?;
2136            self.expect_token(&Token::RParen)?;
2137            return Ok(Function {
2138                name,
2139                uses_odbc_syntax: false,
2140                parameters: FunctionArguments::None,
2141                args: FunctionArguments::Subquery(subquery),
2142                filter: None,
2143                null_treatment: None,
2144                over: None,
2145                within_group: vec![],
2146            });
2147        }
2148
2149        let mut args = self.parse_function_argument_list()?;
2150        let mut parameters = FunctionArguments::None;
2151        // ClickHouse aggregations support parametric functions like `HISTOGRAM(0.5, 0.6)(x, y)`
2152        // in which `(0.5, 0.6)` are the parameters to the function.
2153        if dialect_of!(self is ClickHouseDialect | GenericDialect)
2154            && self.consume_token(&Token::LParen)
2155        {
2156            parameters = FunctionArguments::List(args);
2157            args = self.parse_function_argument_list()?;
2158        }
2159
2160        let within_group = if self.parse_keywords(&[Keyword::WITHIN, Keyword::GROUP]) {
2161            self.expect_token(&Token::LParen)?;
2162            self.expect_keywords(&[Keyword::ORDER, Keyword::BY])?;
2163            let order_by = self.parse_comma_separated(Parser::parse_order_by_expr)?;
2164            self.expect_token(&Token::RParen)?;
2165            order_by
2166        } else {
2167            vec![]
2168        };
2169
2170        let filter = if self.dialect.supports_filter_during_aggregation()
2171            && self.parse_keyword(Keyword::FILTER)
2172            && self.consume_token(&Token::LParen)
2173            && self.parse_keyword(Keyword::WHERE)
2174        {
2175            let filter = Some(Box::new(self.parse_expr()?));
2176            self.expect_token(&Token::RParen)?;
2177            filter
2178        } else {
2179            None
2180        };
2181
2182        // Syntax for null treatment shows up either in the args list
2183        // or after the function call, but not both.
2184        let null_treatment = if args
2185            .clauses
2186            .iter()
2187            .all(|clause| !matches!(clause, FunctionArgumentClause::IgnoreOrRespectNulls(_)))
2188        {
2189            self.parse_null_treatment()?
2190        } else {
2191            None
2192        };
2193
2194        let over = if self.parse_keyword(Keyword::OVER) {
2195            if self.consume_token(&Token::LParen) {
2196                let window_spec = self.parse_window_spec()?;
2197                Some(WindowType::WindowSpec(window_spec))
2198            } else {
2199                Some(WindowType::NamedWindow(self.parse_identifier()?))
2200            }
2201        } else {
2202            None
2203        };
2204
2205        Ok(Function {
2206            name,
2207            uses_odbc_syntax: false,
2208            parameters,
2209            args: FunctionArguments::List(args),
2210            null_treatment,
2211            filter,
2212            over,
2213            within_group,
2214        })
2215    }
2216
2217    /// Optionally parses a null treatment clause.
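    ///
    /// For example (illustrative fragments):
    ///
    /// ```sql
    /// IGNORE NULLS
    /// RESPECT NULLS
    /// ```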
2218    fn parse_null_treatment(&mut self) -> Result<Option<NullTreatment>, ParserError> {
2219        match self.parse_one_of_keywords(&[Keyword::RESPECT, Keyword::IGNORE]) {
2220            Some(keyword) => {
2221                self.expect_keyword_is(Keyword::NULLS)?;
2222
2223                Ok(match keyword {
2224                    Keyword::RESPECT => Some(NullTreatment::RespectNulls),
2225                    Keyword::IGNORE => Some(NullTreatment::IgnoreNulls),
2226                    _ => None,
2227                })
2228            }
2229            None => Ok(None),
2230        }
2231    }
2232
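    /// Parses the optional argument list of a time-related function whose name has
    /// already been parsed, e.g. (illustrative fragments):
    ///
    /// ```sql
    /// CURRENT_TIMESTAMP
    /// CURRENT_TIMESTAMP(3)
    /// ```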
2233    pub fn parse_time_functions(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2234        let args = if self.consume_token(&Token::LParen) {
2235            FunctionArguments::List(self.parse_function_argument_list()?)
2236        } else {
2237            FunctionArguments::None
2238        };
2239        Ok(Expr::Function(Function {
2240            name,
2241            uses_odbc_syntax: false,
2242            parameters: FunctionArguments::None,
2243            args,
2244            filter: None,
2245            over: None,
2246            null_treatment: None,
2247            within_group: vec![],
2248        }))
2249    }
2250
2251    pub fn parse_window_frame_units(&mut self) -> Result<WindowFrameUnits, ParserError> {
2252        let next_token = self.next_token();
2253        match &next_token.token {
2254            Token::Word(w) => match w.keyword {
2255                Keyword::ROWS => Ok(WindowFrameUnits::Rows),
2256                Keyword::RANGE => Ok(WindowFrameUnits::Range),
2257                Keyword::GROUPS => Ok(WindowFrameUnits::Groups),
2258                _ => self.expected("ROWS, RANGE, GROUPS", next_token)?,
2259            },
2260            _ => self.expected("ROWS, RANGE, GROUPS", next_token),
2261        }
2262    }
2263
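    /// Parses a window frame clause, e.g. (illustrative fragments):
    ///
    /// ```sql
    /// ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW
    /// RANGE 1 PRECEDING
    /// ```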
2264    pub fn parse_window_frame(&mut self) -> Result<WindowFrame, ParserError> {
2265        let units = self.parse_window_frame_units()?;
2266        let (start_bound, end_bound) = if self.parse_keyword(Keyword::BETWEEN) {
2267            let start_bound = self.parse_window_frame_bound()?;
2268            self.expect_keyword_is(Keyword::AND)?;
2269            let end_bound = Some(self.parse_window_frame_bound()?);
2270            (start_bound, end_bound)
2271        } else {
2272            (self.parse_window_frame_bound()?, None)
2273        };
2274        Ok(WindowFrame {
2275            units,
2276            start_bound,
2277            end_bound,
2278        })
2279    }
2280
2281    /// Parse `CURRENT ROW` or `{ <positive number> | UNBOUNDED } { PRECEDING | FOLLOWING }`
2282    pub fn parse_window_frame_bound(&mut self) -> Result<WindowFrameBound, ParserError> {
2283        if self.parse_keywords(&[Keyword::CURRENT, Keyword::ROW]) {
2284            Ok(WindowFrameBound::CurrentRow)
2285        } else {
2286            let rows = if self.parse_keyword(Keyword::UNBOUNDED) {
2287                None
2288            } else {
2289                Some(Box::new(match self.peek_token().token {
2290                    Token::SingleQuotedString(_) => self.parse_interval()?,
2291                    _ => self.parse_expr()?,
2292                }))
2293            };
2294            if self.parse_keyword(Keyword::PRECEDING) {
2295                Ok(WindowFrameBound::Preceding(rows))
2296            } else if self.parse_keyword(Keyword::FOLLOWING) {
2297                Ok(WindowFrameBound::Following(rows))
2298            } else {
2299                self.expected("PRECEDING or FOLLOWING", self.peek_token())
2300            }
2301        }
2302    }
2303
2304    /// Parse a GROUP BY expression, which can be a grouping set, ROLLUP, CUBE, or a simple expression.
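    ///
    /// For example (illustrative fragments):
    ///
    /// ```sql
    /// GROUPING SETS ((a), (b), ())
    /// ROLLUP (a, b)
    /// CUBE (a, b)
    /// ```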
2305    fn parse_group_by_expr(&mut self) -> Result<Expr, ParserError> {
2306        if self.dialect.supports_group_by_expr() {
2307            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
2308                self.expect_token(&Token::LParen)?;
2309                let result = self.parse_comma_separated(|p| p.parse_tuple(false, true))?;
2310                self.expect_token(&Token::RParen)?;
2311                Ok(Expr::GroupingSets(result))
2312            } else if self.parse_keyword(Keyword::CUBE) {
2313                self.expect_token(&Token::LParen)?;
2314                let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2315                self.expect_token(&Token::RParen)?;
2316                Ok(Expr::Cube(result))
2317            } else if self.parse_keyword(Keyword::ROLLUP) {
2318                self.expect_token(&Token::LParen)?;
2319                let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2320                self.expect_token(&Token::RParen)?;
2321                Ok(Expr::Rollup(result))
2322            } else if self.consume_tokens(&[Token::LParen, Token::RParen]) {
2323                // PostgreSQL allows an empty tuple as a GROUP BY expression,
2324                // e.g. `GROUP BY (), name`. See the GROUP BY Clause section in
2325                // [PostgreSQL](https://www.postgresql.org/docs/16/sql-select.html)
2326                Ok(Expr::Tuple(vec![]))
2327            } else {
2328                self.parse_expr()
2329            }
2330        } else {
2331            // TODO parse rollup for other dialects
2332            self.parse_expr()
2333        }
2334    }
2335
2336    /// Parse a tuple with `(` and `)`.
2337    /// If `lift_singleton` is true, a single expression without parentheses is accepted and lifted to a tuple of length 1; otherwise the parentheses are required.
2338    /// If `allow_empty` is true, then an empty tuple is allowed.
2339    fn parse_tuple(
2340        &mut self,
2341        lift_singleton: bool,
2342        allow_empty: bool,
2343    ) -> Result<Vec<Expr>, ParserError> {
2344        if lift_singleton {
2345            if self.consume_token(&Token::LParen) {
2346                let result = if allow_empty && self.consume_token(&Token::RParen) {
2347                    vec![]
2348                } else {
2349                    let result = self.parse_comma_separated(Parser::parse_expr)?;
2350                    self.expect_token(&Token::RParen)?;
2351                    result
2352                };
2353                Ok(result)
2354            } else {
2355                Ok(vec![self.parse_expr()?])
2356            }
2357        } else {
2358            self.expect_token(&Token::LParen)?;
2359            let result = if allow_empty && self.consume_token(&Token::RParen) {
2360                vec![]
2361            } else {
2362                let result = self.parse_comma_separated(Parser::parse_expr)?;
2363                self.expect_token(&Token::RParen)?;
2364                result
2365            };
2366            Ok(result)
2367        }
2368    }
2369
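    /// Parses a `CASE` expression; the `CASE` keyword has already been consumed.
    /// For example (illustrative fragments):
    ///
    /// ```sql
    /// CASE WHEN a > 0 THEN 'pos' ELSE 'non-pos' END
    /// CASE a WHEN 1 THEN 'one' WHEN 2 THEN 'two' END
    /// ```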
2370    pub fn parse_case_expr(&mut self) -> Result<Expr, ParserError> {
2371        let case_token = AttachedToken(self.get_current_token().clone());
2372        let mut operand = None;
2373        if !self.parse_keyword(Keyword::WHEN) {
2374            operand = Some(Box::new(self.parse_expr()?));
2375            self.expect_keyword_is(Keyword::WHEN)?;
2376        }
2377        let mut conditions = vec![];
2378        loop {
2379            let condition = self.parse_expr()?;
2380            self.expect_keyword_is(Keyword::THEN)?;
2381            let result = self.parse_expr()?;
2382            conditions.push(CaseWhen { condition, result });
2383            if !self.parse_keyword(Keyword::WHEN) {
2384                break;
2385            }
2386        }
2387        let else_result = if self.parse_keyword(Keyword::ELSE) {
2388            Some(Box::new(self.parse_expr()?))
2389        } else {
2390            None
2391        };
2392        let end_token = AttachedToken(self.expect_keyword(Keyword::END)?);
2393        Ok(Expr::Case {
2394            case_token,
2395            end_token,
2396            operand,
2397            conditions,
2398            else_result,
2399        })
2400    }
2401
2402    pub fn parse_optional_cast_format(&mut self) -> Result<Option<CastFormat>, ParserError> {
2403        if self.parse_keyword(Keyword::FORMAT) {
2404            let value = self.parse_value()?.value;
2405            match self.parse_optional_time_zone()? {
2406                Some(tz) => Ok(Some(CastFormat::ValueAtTimeZone(value, tz))),
2407                None => Ok(Some(CastFormat::Value(value))),
2408            }
2409        } else {
2410            Ok(None)
2411        }
2412    }
2413
2414    pub fn parse_optional_time_zone(&mut self) -> Result<Option<Value>, ParserError> {
2415        if self.parse_keywords(&[Keyword::AT, Keyword::TIME, Keyword::ZONE]) {
2416            self.parse_value().map(|v| Some(v.value))
2417        } else {
2418            Ok(None)
2419        }
2420    }
2421
2422    /// Parse an MSSQL-style `CONVERT` function, where the target type precedes the value.
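    ///
    /// For example (an illustrative fragment; the trailing style argument is optional):
    ///
    /// ```sql
    /// CONVERT(VARCHAR(10), birth_date, 112)
    /// ```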
2423    fn parse_mssql_convert(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2424        self.expect_token(&Token::LParen)?;
2425        let data_type = self.parse_data_type()?;
2426        self.expect_token(&Token::Comma)?;
2427        let expr = self.parse_expr()?;
2428        let styles = if self.consume_token(&Token::Comma) {
2429            self.parse_comma_separated(Parser::parse_expr)?
2430        } else {
2431            Default::default()
2432        };
2433        self.expect_token(&Token::RParen)?;
2434        Ok(Expr::Convert {
2435            is_try,
2436            expr: Box::new(expr),
2437            data_type: Some(data_type),
2438            charset: None,
2439            target_before_value: true,
2440            styles,
2441        })
2442    }
2443
2444    /// Parse a SQL CONVERT function:
2445    ///  - `CONVERT('héhé' USING utf8mb4)` (MySQL)
2446    ///  - `CONVERT('héhé', CHAR CHARACTER SET utf8mb4)` (MySQL)
2447    ///  - `CONVERT(DECIMAL(10, 5), 42)` (MSSQL) - the type comes first
2448    pub fn parse_convert_expr(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2449        if self.dialect.convert_type_before_value() {
2450            return self.parse_mssql_convert(is_try);
2451        }
2452        self.expect_token(&Token::LParen)?;
2453        let expr = self.parse_expr()?;
2454        if self.parse_keyword(Keyword::USING) {
2455            let charset = self.parse_object_name(false)?;
2456            self.expect_token(&Token::RParen)?;
2457            return Ok(Expr::Convert {
2458                is_try,
2459                expr: Box::new(expr),
2460                data_type: None,
2461                charset: Some(charset),
2462                target_before_value: false,
2463                styles: vec![],
2464            });
2465        }
2466        self.expect_token(&Token::Comma)?;
2467        let data_type = self.parse_data_type()?;
2468        let charset = if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
2469            Some(self.parse_object_name(false)?)
2470        } else {
2471            None
2472        };
2473        self.expect_token(&Token::RParen)?;
2474        Ok(Expr::Convert {
2475            is_try,
2476            expr: Box::new(expr),
2477            data_type: Some(data_type),
2478            charset,
2479            target_before_value: false,
2480            styles: vec![],
2481        })
2482    }
2483
2484    /// Parse a SQL CAST function e.g. `CAST(expr AS FLOAT)`
2485    pub fn parse_cast_expr(&mut self, kind: CastKind) -> Result<Expr, ParserError> {
2486        self.expect_token(&Token::LParen)?;
2487        let expr = self.parse_expr()?;
2488        self.expect_keyword_is(Keyword::AS)?;
2489        let data_type = self.parse_data_type()?;
2490        let format = self.parse_optional_cast_format()?;
2491        self.expect_token(&Token::RParen)?;
2492        Ok(Expr::Cast {
2493            kind,
2494            expr: Box::new(expr),
2495            data_type,
2496            format,
2497        })
2498    }
2499
2500    /// Parse a SQL EXISTS expression e.g. `WHERE EXISTS(SELECT ...)`.
2501    pub fn parse_exists_expr(&mut self, negated: bool) -> Result<Expr, ParserError> {
2502        self.expect_token(&Token::LParen)?;
2503        let exists_node = Expr::Exists {
2504            negated,
2505            subquery: self.parse_query()?,
2506        };
2507        self.expect_token(&Token::RParen)?;
2508        Ok(exists_node)
2509    }
2510
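    /// Parses the body of an `EXTRACT` expression, e.g. (illustrative fragments;
    /// the comma form is Snowflake/generic only):
    ///
    /// ```sql
    /// EXTRACT(YEAR FROM order_date)
    /// EXTRACT(YEAR, order_date)
    /// ```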
2511    pub fn parse_extract_expr(&mut self) -> Result<Expr, ParserError> {
2512        self.expect_token(&Token::LParen)?;
2513        let field = self.parse_date_time_field()?;
2514
2515        let syntax = if self.parse_keyword(Keyword::FROM) {
2516            ExtractSyntax::From
2517        } else if self.consume_token(&Token::Comma)
2518            && dialect_of!(self is SnowflakeDialect | GenericDialect)
2519        {
2520            ExtractSyntax::Comma
2521        } else {
2522            return Err(ParserError::ParserError(
2523                "Expected 'FROM' or ','".to_string(),
2524            ));
2525        };
2526
2527        let expr = self.parse_expr()?;
2528        self.expect_token(&Token::RParen)?;
2529        Ok(Expr::Extract {
2530            field,
2531            expr: Box::new(expr),
2532            syntax,
2533        })
2534    }
2535
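    /// Parses the body of a `CEIL` or `FLOOR` call, e.g. (illustrative fragments):
    ///
    /// ```sql
    /// CEIL(ts TO DAY)
    /// FLOOR(price, 2)
    /// FLOOR(price)
    /// ```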
2536    pub fn parse_ceil_floor_expr(&mut self, is_ceil: bool) -> Result<Expr, ParserError> {
2537        self.expect_token(&Token::LParen)?;
2538        let expr = self.parse_expr()?;
2539        // Parse `CEIL/FLOOR(expr)`
2540        let field = if self.parse_keyword(Keyword::TO) {
2541            // Parse `CEIL/FLOOR(expr TO DateTimeField)`
2542            CeilFloorKind::DateTimeField(self.parse_date_time_field()?)
2543        } else if self.consume_token(&Token::Comma) {
2544            // Parse `CEIL/FLOOR(expr, scale)`
2545            match self.parse_value()?.value {
2546                Value::Number(n, s) => CeilFloorKind::Scale(Value::Number(n, s)),
2547                _ => {
2548                    return Err(ParserError::ParserError(
2549                        "Scale field can only be of number type".to_string(),
2550                    ))
2551                }
2552            }
2553        } else {
2554            CeilFloorKind::DateTimeField(DateTimeField::NoDateTime)
2555        };
2556        self.expect_token(&Token::RParen)?;
2557        if is_ceil {
2558            Ok(Expr::Ceil {
2559                expr: Box::new(expr),
2560                field,
2561            })
2562        } else {
2563            Ok(Expr::Floor {
2564                expr: Box::new(expr),
2565                field,
2566            })
2567        }
2568    }
2569
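    /// Parses a `POSITION` call. The special `IN` form is tried first; if that fails,
    /// `position` is parsed as an ordinary function call, e.g. (illustrative fragments):
    ///
    /// ```sql
    /// POSITION('@' IN email)
    /// POSITION('@', email)    -- ordinary function-call fallback
    /// ```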
2570    pub fn parse_position_expr(&mut self, ident: Ident) -> Result<Expr, ParserError> {
2571        let between_prec = self.dialect.prec_value(Precedence::Between);
2572        let position_expr = self.maybe_parse(|p| {
2573            // Parse the special form, e.g. `POSITION('@' IN field)`
2574            p.expect_token(&Token::LParen)?;
2575
2576            // Parse the subexpr till the IN keyword
2577            let expr = p.parse_subexpr(between_prec)?;
2578            p.expect_keyword_is(Keyword::IN)?;
2579            let from = p.parse_expr()?;
2580            p.expect_token(&Token::RParen)?;
2581            Ok(Expr::Position {
2582                expr: Box::new(expr),
2583                r#in: Box::new(from),
2584            })
2585        })?;
2586        match position_expr {
2587            Some(expr) => Ok(expr),
2588            // Snowflake supports `position` as an ordinary function call
2589            // without the special `IN` syntax.
2590            None => self.parse_function(ObjectName::from(vec![ident])),
2591        }
2592    }
2593
2594    // { SUBSTRING | SUBSTR } (<EXPR> [FROM 1] [FOR 3])
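    //
    // For example (illustrative fragments):
    //   SUBSTRING('sqlparser' FROM 4 FOR 5)
    //   SUBSTR('sqlparser', 4, 5)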
2595    pub fn parse_substring(&mut self) -> Result<Expr, ParserError> {
2596        let shorthand = match self.expect_one_of_keywords(&[Keyword::SUBSTR, Keyword::SUBSTRING])? {
2597            Keyword::SUBSTR => true,
2598            Keyword::SUBSTRING => false,
2599            _ => {
2600                self.prev_token();
2601                return self.expected("SUBSTR or SUBSTRING", self.peek_token());
2602            }
2603        };
2604        self.expect_token(&Token::LParen)?;
2605        let expr = self.parse_expr()?;
2606        let mut from_expr = None;
2607        let special = self.consume_token(&Token::Comma);
2608        if special || self.parse_keyword(Keyword::FROM) {
2609            from_expr = Some(self.parse_expr()?);
2610        }
2611
2612        let mut to_expr = None;
2613        if self.parse_keyword(Keyword::FOR) || self.consume_token(&Token::Comma) {
2614            to_expr = Some(self.parse_expr()?);
2615        }
2616        self.expect_token(&Token::RParen)?;
2617
2618        Ok(Expr::Substring {
2619            expr: Box::new(expr),
2620            substring_from: from_expr.map(Box::new),
2621            substring_for: to_expr.map(Box::new),
2622            special,
2623            shorthand,
2624        })
2625    }
2626
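    /// Parses the body of an `OVERLAY` expression, e.g. (an illustrative fragment):
    ///
    /// ```sql
    /// OVERLAY('abcdef' PLACING 'xx' FROM 2 FOR 3)
    /// ```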
2627    pub fn parse_overlay_expr(&mut self) -> Result<Expr, ParserError> {
2628        // Parse `OVERLAY(<expr> PLACING <expr> FROM <expr> [FOR <expr>])`
2629        self.expect_token(&Token::LParen)?;
2630        let expr = self.parse_expr()?;
2631        self.expect_keyword_is(Keyword::PLACING)?;
2632        let what_expr = self.parse_expr()?;
2633        self.expect_keyword_is(Keyword::FROM)?;
2634        let from_expr = self.parse_expr()?;
2635        let mut for_expr = None;
2636        if self.parse_keyword(Keyword::FOR) {
2637            for_expr = Some(self.parse_expr()?);
2638        }
2639        self.expect_token(&Token::RParen)?;
2640
2641        Ok(Expr::Overlay {
2642            expr: Box::new(expr),
2643            overlay_what: Box::new(what_expr),
2644            overlay_from: Box::new(from_expr),
2645            overlay_for: for_expr.map(Box::new),
2646        })
2647    }
2648
2649    /// ```sql
2650    /// TRIM ([WHERE] ['text' FROM] 'text')
2651    /// TRIM ('text')
2652    /// TRIM(<expr> [, characters]) -- only Snowflake, BigQuery, or DuckDB
2653    /// ```
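    ///
    /// For example (illustrative fragments; dialect support varies for the comma form):
    ///
    /// ```sql
    /// TRIM(BOTH 'x' FROM 'xxhixx')
    /// TRIM('  hi  ', 'x')
    /// ```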
2654    pub fn parse_trim_expr(&mut self) -> Result<Expr, ParserError> {
2655        self.expect_token(&Token::LParen)?;
2656        let mut trim_where = None;
2657        if let Token::Word(word) = self.peek_token().token {
2658            if [Keyword::BOTH, Keyword::LEADING, Keyword::TRAILING].contains(&word.keyword) {
2659                trim_where = Some(self.parse_trim_where()?);
2660            }
2661        }
2662        let expr = self.parse_expr()?;
2663        if self.parse_keyword(Keyword::FROM) {
2664            let trim_what = Box::new(expr);
2665            let expr = self.parse_expr()?;
2666            self.expect_token(&Token::RParen)?;
2667            Ok(Expr::Trim {
2668                expr: Box::new(expr),
2669                trim_where,
2670                trim_what: Some(trim_what),
2671                trim_characters: None,
2672            })
2673        } else if self.consume_token(&Token::Comma)
2674            && dialect_of!(self is DuckDbDialect | SnowflakeDialect | BigQueryDialect | GenericDialect)
2675        {
2676            let characters = self.parse_comma_separated(Parser::parse_expr)?;
2677            self.expect_token(&Token::RParen)?;
2678            Ok(Expr::Trim {
2679                expr: Box::new(expr),
2680                trim_where: None,
2681                trim_what: None,
2682                trim_characters: Some(characters),
2683            })
2684        } else {
2685            self.expect_token(&Token::RParen)?;
2686            Ok(Expr::Trim {
2687                expr: Box::new(expr),
2688                trim_where,
2689                trim_what: None,
2690                trim_characters: None,
2691            })
2692        }
2693    }
2694
2695    pub fn parse_trim_where(&mut self) -> Result<TrimWhereField, ParserError> {
2696        let next_token = self.next_token();
2697        match &next_token.token {
2698            Token::Word(w) => match w.keyword {
2699                Keyword::BOTH => Ok(TrimWhereField::Both),
2700                Keyword::LEADING => Ok(TrimWhereField::Leading),
2701                Keyword::TRAILING => Ok(TrimWhereField::Trailing),
2702                _ => self.expected("trim_where field", next_token)?,
2703            },
2704            _ => self.expected("trim_where field", next_token),
2705        }
2706    }
2707
2708    /// Parses an array expression `[ex1, ex2, ..]`
2709    /// If `named` is `true`, the array came from an expression like `ARRAY[ex1, ex2]`.
2710    pub fn parse_array_expr(&mut self, named: bool) -> Result<Expr, ParserError> {
2711        let exprs = self.parse_comma_separated0(Parser::parse_expr, Token::RBracket)?;
2712        self.expect_token(&Token::RBracket)?;
2713        Ok(Expr::Array(Array { elem: exprs, named }))
2714    }
2715
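    /// Parse the optional `ON OVERFLOW` clause of a `LISTAGG` aggregate.
    ///
    /// An illustrative example (SQL:2016 syntax; the column and filler are arbitrary):
    ///
    /// ```sql
    /// LISTAGG(name, ',' ON OVERFLOW TRUNCATE '...' WITH COUNT)
    /// ```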
2716    pub fn parse_listagg_on_overflow(&mut self) -> Result<Option<ListAggOnOverflow>, ParserError> {
2717        if self.parse_keywords(&[Keyword::ON, Keyword::OVERFLOW]) {
2718            if self.parse_keyword(Keyword::ERROR) {
2719                Ok(Some(ListAggOnOverflow::Error))
2720            } else {
2721                self.expect_keyword_is(Keyword::TRUNCATE)?;
2722                let filler = match self.peek_token().token {
2723                    Token::Word(w)
2724                        if w.keyword == Keyword::WITH || w.keyword == Keyword::WITHOUT =>
2725                    {
2726                        None
2727                    }
2728                    Token::SingleQuotedString(_)
2729                    | Token::EscapedStringLiteral(_)
2730                    | Token::UnicodeStringLiteral(_)
2731                    | Token::NationalStringLiteral(_)
2732                    | Token::HexStringLiteral(_) => Some(Box::new(self.parse_expr()?)),
2733                    _ => self.expected(
2734                        "either filler, WITH, or WITHOUT in LISTAGG",
2735                        self.peek_token(),
2736                    )?,
2737                };
2738                let with_count = self.parse_keyword(Keyword::WITH);
2739                if !with_count && !self.parse_keyword(Keyword::WITHOUT) {
2740                    self.expected("either WITH or WITHOUT in LISTAGG", self.peek_token())?;
2741                }
2742                self.expect_keyword_is(Keyword::COUNT)?;
2743                Ok(Some(ListAggOnOverflow::Truncate { filler, with_count }))
2744            }
2745        } else {
2746            Ok(None)
2747        }
2748    }
2749
2750    // This function parses date/time fields for the EXTRACT function-like
2751    // operator, interval qualifiers, and the ceil/floor operations.
2752    // EXTRACT supports a wider set of date/time fields than interval qualifiers,
2753    // so this function may need to be split in two.
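    /// Illustrative inputs whose date/time field is parsed by this function
    /// (availability of each form depends on the dialect; the identifiers are arbitrary):
    ///
    /// ```sql
    /// EXTRACT(YEAR FROM order_date)
    /// INTERVAL '1:30' HOUR TO MINUTE
    /// CEIL(event_time TO DAY)
    /// ```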
2754    pub fn parse_date_time_field(&mut self) -> Result<DateTimeField, ParserError> {
2755        let next_token = self.next_token();
2756        match &next_token.token {
2757            Token::Word(w) => match w.keyword {
2758                Keyword::YEAR => Ok(DateTimeField::Year),
2759                Keyword::YEARS => Ok(DateTimeField::Years),
2760                Keyword::MONTH => Ok(DateTimeField::Month),
2761                Keyword::MONTHS => Ok(DateTimeField::Months),
2762                Keyword::WEEK => {
2763                    let week_day = if dialect_of!(self is BigQueryDialect | GenericDialect)
2764                        && self.consume_token(&Token::LParen)
2765                    {
2766                        let week_day = self.parse_identifier()?;
2767                        self.expect_token(&Token::RParen)?;
2768                        Some(week_day)
2769                    } else {
2770                        None
2771                    };
2772                    Ok(DateTimeField::Week(week_day))
2773                }
2774                Keyword::WEEKS => Ok(DateTimeField::Weeks),
2775                Keyword::DAY => Ok(DateTimeField::Day),
2776                Keyword::DAYOFWEEK => Ok(DateTimeField::DayOfWeek),
2777                Keyword::DAYOFYEAR => Ok(DateTimeField::DayOfYear),
2778                Keyword::DAYS => Ok(DateTimeField::Days),
2779                Keyword::DATE => Ok(DateTimeField::Date),
2780                Keyword::DATETIME => Ok(DateTimeField::Datetime),
2781                Keyword::HOUR => Ok(DateTimeField::Hour),
2782                Keyword::HOURS => Ok(DateTimeField::Hours),
2783                Keyword::MINUTE => Ok(DateTimeField::Minute),
2784                Keyword::MINUTES => Ok(DateTimeField::Minutes),
2785                Keyword::SECOND => Ok(DateTimeField::Second),
2786                Keyword::SECONDS => Ok(DateTimeField::Seconds),
2787                Keyword::CENTURY => Ok(DateTimeField::Century),
2788                Keyword::DECADE => Ok(DateTimeField::Decade),
2789                Keyword::DOY => Ok(DateTimeField::Doy),
2790                Keyword::DOW => Ok(DateTimeField::Dow),
2791                Keyword::EPOCH => Ok(DateTimeField::Epoch),
2792                Keyword::ISODOW => Ok(DateTimeField::Isodow),
2793                Keyword::ISOYEAR => Ok(DateTimeField::Isoyear),
2794                Keyword::ISOWEEK => Ok(DateTimeField::IsoWeek),
2795                Keyword::JULIAN => Ok(DateTimeField::Julian),
2796                Keyword::MICROSECOND => Ok(DateTimeField::Microsecond),
2797                Keyword::MICROSECONDS => Ok(DateTimeField::Microseconds),
2798                Keyword::MILLENIUM => Ok(DateTimeField::Millenium),
2799                Keyword::MILLENNIUM => Ok(DateTimeField::Millennium),
2800                Keyword::MILLISECOND => Ok(DateTimeField::Millisecond),
2801                Keyword::MILLISECONDS => Ok(DateTimeField::Milliseconds),
2802                Keyword::NANOSECOND => Ok(DateTimeField::Nanosecond),
2803                Keyword::NANOSECONDS => Ok(DateTimeField::Nanoseconds),
2804                Keyword::QUARTER => Ok(DateTimeField::Quarter),
2805                Keyword::TIME => Ok(DateTimeField::Time),
2806                Keyword::TIMEZONE => Ok(DateTimeField::Timezone),
2807                Keyword::TIMEZONE_ABBR => Ok(DateTimeField::TimezoneAbbr),
2808                Keyword::TIMEZONE_HOUR => Ok(DateTimeField::TimezoneHour),
2809                Keyword::TIMEZONE_MINUTE => Ok(DateTimeField::TimezoneMinute),
2810                Keyword::TIMEZONE_REGION => Ok(DateTimeField::TimezoneRegion),
2811                _ if self.dialect.allow_extract_custom() => {
2812                    self.prev_token();
2813                    let custom = self.parse_identifier()?;
2814                    Ok(DateTimeField::Custom(custom))
2815                }
2816                _ => self.expected("date/time field", next_token),
2817            },
2818            Token::SingleQuotedString(_) if self.dialect.allow_extract_single_quotes() => {
2819                self.prev_token();
2820                let custom = self.parse_identifier()?;
2821                Ok(DateTimeField::Custom(custom))
2822            }
2823            _ => self.expected("date/time field", next_token),
2824        }
2825    }
2826
2827    pub fn parse_not(&mut self) -> Result<Expr, ParserError> {
2828        match self.peek_token().token {
2829            Token::Word(w) => match w.keyword {
2830                Keyword::EXISTS => {
2831                    let negated = true;
2832                    let _ = self.parse_keyword(Keyword::EXISTS);
2833                    self.parse_exists_expr(negated)
2834                }
2835                _ => Ok(Expr::UnaryOp {
2836                    op: UnaryOperator::Not,
2837                    expr: Box::new(
2838                        self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?,
2839                    ),
2840                }),
2841            },
2842            _ => Ok(Expr::UnaryOp {
2843                op: UnaryOperator::Not,
2844                expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
2845            }),
2846        }
2847    }
2848
2849    /// Parse expression types that start with a left brace '{'.
2850    /// Examples:
2851    /// ```sql
2852    /// -- Dictionary expr.
2853    /// {'key1': 'value1', 'key2': 'value2'}
2854    ///
2855    /// -- Function call using the ODBC syntax.
2856    /// { fn CONCAT('foo', 'bar') }
2857    /// ```
2858    fn parse_lbrace_expr(&mut self) -> Result<Expr, ParserError> {
2859        let token = self.expect_token(&Token::LBrace)?;
2860
2861        if let Some(fn_expr) = self.maybe_parse_odbc_body()? {
2862            self.expect_token(&Token::RBrace)?;
2863            return Ok(fn_expr);
2864        }
2865
2866        if self.dialect.supports_dictionary_syntax() {
2867            self.prev_token(); // Put back the '{'
2868            return self.parse_dictionary();
2869        }
2870
2871        self.expected("an expression", token)
2872    }
2873
2874    /// Parses fulltext expressions [`sqlparser::ast::Expr::MatchAgainst`]
2875    ///
2876    /// # Errors
2877    /// This method will raise an error if the column list is empty or contains invalid identifiers,
2878    /// if the match expression is not a literal string, or if the search modifier is not valid.
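    ///
    /// An illustrative example (MySQL full-text search syntax; the columns and
    /// search string are arbitrary):
    ///
    /// ```sql
    /// MATCH (title, body) AGAINST ('full text search' IN NATURAL LANGUAGE MODE)
    /// ```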
2879    pub fn parse_match_against(&mut self) -> Result<Expr, ParserError> {
2880        let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
2881
2882        self.expect_keyword_is(Keyword::AGAINST)?;
2883
2884        self.expect_token(&Token::LParen)?;
2885
2886        // MySQL is quite permissive about the value, so we cannot validate it fully at the syntax level.
2887        let match_value = self.parse_value()?.value;
2888
2889        let in_natural_language_mode_keywords = &[
2890            Keyword::IN,
2891            Keyword::NATURAL,
2892            Keyword::LANGUAGE,
2893            Keyword::MODE,
2894        ];
2895
2896        let with_query_expansion_keywords = &[Keyword::WITH, Keyword::QUERY, Keyword::EXPANSION];
2897
2898        let in_boolean_mode_keywords = &[Keyword::IN, Keyword::BOOLEAN, Keyword::MODE];
2899
2900        let opt_search_modifier = if self.parse_keywords(in_natural_language_mode_keywords) {
2901            if self.parse_keywords(with_query_expansion_keywords) {
2902                Some(SearchModifier::InNaturalLanguageModeWithQueryExpansion)
2903            } else {
2904                Some(SearchModifier::InNaturalLanguageMode)
2905            }
2906        } else if self.parse_keywords(in_boolean_mode_keywords) {
2907            Some(SearchModifier::InBooleanMode)
2908        } else if self.parse_keywords(with_query_expansion_keywords) {
2909            Some(SearchModifier::WithQueryExpansion)
2910        } else {
2911            None
2912        };
2913
2914        self.expect_token(&Token::RParen)?;
2915
2916        Ok(Expr::MatchAgainst {
2917            columns,
2918            match_value,
2919            opt_search_modifier,
2920        })
2921    }
2922
2923    /// Parse an `INTERVAL` expression.
2924    ///
2925    /// Some syntactically valid intervals:
2926    ///
2927    /// ```sql
2928    ///   1. INTERVAL '1' DAY
2929    ///   2. INTERVAL '1-1' YEAR TO MONTH
2930    ///   3. INTERVAL '1' SECOND
2931    ///   4. INTERVAL '1:1:1.1' HOUR (5) TO SECOND (5)
2932    ///   5. INTERVAL '1.1' SECOND (2, 2)
2933    ///   6. INTERVAL '1:1' HOUR (5) TO MINUTE (5)
2934    ///   7. (MySQL & BigQuery only): INTERVAL 1 DAY
2935    /// ```
2936    ///
2937    /// Note that we do not currently attempt to parse the quoted value.
2938    pub fn parse_interval(&mut self) -> Result<Expr, ParserError> {
2939        // The SQL standard allows an optional sign before the value string, but
2940        // it is not clear if any implementations support that syntax, so we
2941        // don't currently try to parse it. (The sign can instead be included
2942        // inside the value string.)
2943
2944        // to match the different flavours of INTERVAL syntax, we only allow expressions
2945        // if the dialect requires an interval qualifier,
2946        // see https://github.com/sqlparser-rs/sqlparser-rs/pull/1398 for more details
2947        let value = if self.dialect.require_interval_qualifier() {
2948            // parse a whole expression so `INTERVAL 1 + 1 DAY` is valid
2949            self.parse_expr()?
2950        } else {
2951            // parse a prefix expression so `INTERVAL 1 DAY` is valid, but `INTERVAL 1 + 1 DAY` is not;
2952            // this also means that `INTERVAL '5 days' > INTERVAL '1 day'` is treated properly
2953            self.parse_prefix()?
2954        };
2955
2956        // Following the string literal is a qualifier which indicates the units
2957        // of the duration specified in the string literal.
2958        //
2959        // Note that PostgreSQL allows omitting the qualifier, so we provide
2960        // this more general implementation.
2961        let leading_field = if self.next_token_is_temporal_unit() {
2962            Some(self.parse_date_time_field()?)
2963        } else if self.dialect.require_interval_qualifier() {
2964            return parser_err!(
2965                "INTERVAL requires a unit after the literal value",
2966                self.peek_token().span.start
2967            );
2968        } else {
2969            None
2970        };
2971
2972        let (leading_precision, last_field, fsec_precision) =
2973            if leading_field == Some(DateTimeField::Second) {
2974                // SQL mandates special syntax for `SECOND TO SECOND` literals.
2975                // Instead of
2976                //     `SECOND [(<leading precision>)] TO SECOND[(<fractional seconds precision>)]`
2977                // one must use the special format:
2978                //     `SECOND [( <leading precision> [ , <fractional seconds precision>] )]`
2979                let last_field = None;
2980                let (leading_precision, fsec_precision) = self.parse_optional_precision_scale()?;
2981                (leading_precision, last_field, fsec_precision)
2982            } else {
2983                let leading_precision = self.parse_optional_precision()?;
2984                if self.parse_keyword(Keyword::TO) {
2985                    let last_field = Some(self.parse_date_time_field()?);
2986                    let fsec_precision = if last_field == Some(DateTimeField::Second) {
2987                        self.parse_optional_precision()?
2988                    } else {
2989                        None
2990                    };
2991                    (leading_precision, last_field, fsec_precision)
2992                } else {
2993                    (leading_precision, None, None)
2994                }
2995            };
2996
2997        Ok(Expr::Interval(Interval {
2998            value: Box::new(value),
2999            leading_field,
3000            leading_precision,
3001            last_field,
3002            fractional_seconds_precision: fsec_precision,
3003        }))
3004    }
3005
3006    /// Peek at the next token and determine if it is a temporal unit
3007    /// like `second`.
3008    pub fn next_token_is_temporal_unit(&mut self) -> bool {
3009        if let Token::Word(word) = self.peek_token().token {
3010            matches!(
3011                word.keyword,
3012                Keyword::YEAR
3013                    | Keyword::YEARS
3014                    | Keyword::MONTH
3015                    | Keyword::MONTHS
3016                    | Keyword::WEEK
3017                    | Keyword::WEEKS
3018                    | Keyword::DAY
3019                    | Keyword::DAYS
3020                    | Keyword::HOUR
3021                    | Keyword::HOURS
3022                    | Keyword::MINUTE
3023                    | Keyword::MINUTES
3024                    | Keyword::SECOND
3025                    | Keyword::SECONDS
3026                    | Keyword::CENTURY
3027                    | Keyword::DECADE
3028                    | Keyword::DOW
3029                    | Keyword::DOY
3030                    | Keyword::EPOCH
3031                    | Keyword::ISODOW
3032                    | Keyword::ISOYEAR
3033                    | Keyword::JULIAN
3034                    | Keyword::MICROSECOND
3035                    | Keyword::MICROSECONDS
3036                    | Keyword::MILLENIUM
3037                    | Keyword::MILLENNIUM
3038                    | Keyword::MILLISECOND
3039                    | Keyword::MILLISECONDS
3040                    | Keyword::NANOSECOND
3041                    | Keyword::NANOSECONDS
3042                    | Keyword::QUARTER
3043                    | Keyword::TIMEZONE
3044                    | Keyword::TIMEZONE_HOUR
3045                    | Keyword::TIMEZONE_MINUTE
3046            )
3047        } else {
3048            false
3049        }
3050    }
3051
3052    /// Syntax
3053    /// ```sql
3054    /// -- typed
3055    /// STRUCT<[field_name] field_type, ...>( expr1 [, ... ])
3056    /// -- typeless
3057    /// STRUCT( expr1 [AS field_name] [, ... ])
3058    /// ```
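    ///
    /// Illustrative examples (BigQuery-style; the field names and values are arbitrary):
    ///
    /// ```sql
    /// -- typed
    /// STRUCT<x INT64, y STRING>(1, 'foo')
    /// -- typeless
    /// STRUCT(1 AS x, 'foo' AS y)
    /// ```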
3059    fn parse_struct_literal(&mut self) -> Result<Expr, ParserError> {
3060        // Parse the field definitions if they exist: `<[field_name] field_type, ...>`
3061        self.prev_token();
3062        let (fields, trailing_bracket) =
3063            self.parse_struct_type_def(Self::parse_struct_field_def)?;
3064        if trailing_bracket.0 {
3065            return parser_err!(
3066                "unmatched > in STRUCT literal",
3067                self.peek_token().span.start
3068            );
3069        }
3070
3071        // Parse the struct values `(expr1 [, ... ])`
3072        self.expect_token(&Token::LParen)?;
3073        let values = self
3074            .parse_comma_separated(|parser| parser.parse_struct_field_expr(!fields.is_empty()))?;
3075        self.expect_token(&Token::RParen)?;
3076
3077        Ok(Expr::Struct { values, fields })
3078    }
3079
3080    /// Parse an expression value for a struct literal
3081    /// Syntax
3082    /// ```sql
3083    /// expr [AS name]
3084    /// ```
3085    ///
3086    /// For BigQuery [1], the `typed_syntax` parameter is set to `true` if the expression
3087    /// is to be parsed as a field expression declared using the typed
3088    /// struct syntax [2], and `false` if using the typeless struct syntax [3].
3089    ///
3090    /// [1]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#constructing_a_struct
3091    /// [2]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#typed_struct_syntax
3092    /// [3]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#typeless_struct_syntax
3093    fn parse_struct_field_expr(&mut self, typed_syntax: bool) -> Result<Expr, ParserError> {
3094        let expr = self.parse_expr()?;
3095        if self.parse_keyword(Keyword::AS) {
3096            if typed_syntax {
3097                return parser_err!("Typed syntax does not allow AS", {
3098                    self.prev_token();
3099                    self.peek_token().span.start
3100                });
3101            }
3102            let field_name = self.parse_identifier()?;
3103            Ok(Expr::Named {
3104                expr: expr.into(),
3105                name: field_name,
3106            })
3107        } else {
3108            Ok(expr)
3109        }
3110    }
3111
3112    /// Parse a struct type definition as a sequence of field name/type pairs.
3113    /// The syntax of a struct element differs by dialect, so it is customised
3114    /// by the `elem_parser` argument.
3115    ///
3116    /// Syntax
3117    /// ```sql
3118    /// Hive:
3119    /// STRUCT<field_name: field_type>
3120    ///
3121    /// BigQuery:
3122    /// STRUCT<[field_name] field_type>
3123    /// ```
3124    fn parse_struct_type_def<F>(
3125        &mut self,
3126        mut elem_parser: F,
3127    ) -> Result<(Vec<StructField>, MatchedTrailingBracket), ParserError>
3128    where
3129        F: FnMut(&mut Parser<'a>) -> Result<(StructField, MatchedTrailingBracket), ParserError>,
3130    {
3131        self.expect_keyword_is(Keyword::STRUCT)?;
3132
3133        // Nothing to do if we have no type information.
3134        if Token::Lt != self.peek_token() {
3135            return Ok((Default::default(), false.into()));
3136        }
3137        self.next_token();
3138
3139        let mut field_defs = vec![];
3140        let trailing_bracket = loop {
3141            let (def, trailing_bracket) = elem_parser(self)?;
3142            field_defs.push(def);
3143            // The list of field definitions is finished when `>>` is matched or there is no trailing comma.
3144            if trailing_bracket.0 || !self.consume_token(&Token::Comma) {
3145                break trailing_bracket;
3146            }
3147        };
3148
3149        Ok((
3150            field_defs,
3151            self.expect_closing_angle_bracket(trailing_bracket)?,
3152        ))
3153    }
3154
3155    /// DuckDB struct data type <https://duckdb.org/docs/sql/data_types/struct.html#retrieving-from-structs>
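    ///
    /// Illustrative example (DuckDB syntax; the field names and types are arbitrary):
    ///
    /// ```sql
    /// STRUCT(name VARCHAR, age INTEGER)
    /// ```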
3156    fn parse_duckdb_struct_type_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3157        self.expect_keyword_is(Keyword::STRUCT)?;
3158        self.expect_token(&Token::LParen)?;
3159        let struct_body = self.parse_comma_separated(|parser| {
3160            let field_name = parser.parse_identifier()?;
3161            let field_type = parser.parse_data_type()?;
3162
3163            Ok(StructField {
3164                field_name: Some(field_name),
3165                field_type,
3166                options: None,
3167            })
3168        });
3169        self.expect_token(&Token::RParen)?;
3170        struct_body
3171    }
3172
3173    /// Parse a field definition in a [struct] or [tuple].
3174    /// Syntax:
3175    ///
3176    /// ```sql
3177    /// [field_name] field_type
3178    /// ```
3179    ///
3180    /// [struct]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#declaring_a_struct_type
3181    /// [tuple]: https://clickhouse.com/docs/en/sql-reference/data-types/tuple
3182    fn parse_struct_field_def(
3183        &mut self,
3184    ) -> Result<(StructField, MatchedTrailingBracket), ParserError> {
3185        // Look beyond the next item to infer whether both field name
3186        // and type are specified.
3187        let is_anonymous_field = !matches!(
3188            (self.peek_nth_token(0).token, self.peek_nth_token(1).token),
3189            (Token::Word(_), Token::Word(_))
3190        );
3191
3192        let field_name = if is_anonymous_field {
3193            None
3194        } else {
3195            Some(self.parse_identifier()?)
3196        };
3197
3198        let (field_type, trailing_bracket) = self.parse_data_type_helper()?;
3199
3200        let options = self.maybe_parse_options(Keyword::OPTIONS)?;
3201        Ok((
3202            StructField {
3203                field_name,
3204                field_type,
3205                options,
3206            },
3207            trailing_bracket,
3208        ))
3209    }
3210
3211    /// DuckDB specific: Parse a `UNION` type definition as a sequence of field name/type pairs.
3212    ///
3213    /// Syntax:
3214    ///
3215    /// ```sql
3216    /// UNION(field_name field_type[,...])
3217    /// ```
3218    ///
3219    /// [1]: https://duckdb.org/docs/sql/data_types/union.html
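    ///
    /// Illustrative example (DuckDB syntax; the field names and types are arbitrary):
    ///
    /// ```sql
    /// UNION(num INTEGER, str VARCHAR)
    /// ```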
3220    fn parse_union_type_def(&mut self) -> Result<Vec<UnionField>, ParserError> {
3221        self.expect_keyword_is(Keyword::UNION)?;
3222
3223        self.expect_token(&Token::LParen)?;
3224
3225        let fields = self.parse_comma_separated(|p| {
3226            Ok(UnionField {
3227                field_name: p.parse_identifier()?,
3228                field_type: p.parse_data_type()?,
3229            })
3230        })?;
3231
3232        self.expect_token(&Token::RParen)?;
3233
3234        Ok(fields)
3235    }
3236
3237    /// DuckDB and ClickHouse specific: Parse a duckdb [dictionary] or a clickhouse [map] setting
3238    ///
3239    /// Syntax:
3240    ///
3241    /// ```sql
3242    /// {'field_name': expr1[, ... ]}
3243    /// ```
3244    ///
3245    /// [dictionary]: https://duckdb.org/docs/sql/data_types/struct#creating-structs
3246    /// [map]: https://clickhouse.com/docs/operations/settings/settings#additional_table_filters
3247    fn parse_dictionary(&mut self) -> Result<Expr, ParserError> {
3248        self.expect_token(&Token::LBrace)?;
3249
3250        let fields = self.parse_comma_separated0(Self::parse_dictionary_field, Token::RBrace)?;
3251
3252        self.expect_token(&Token::RBrace)?;
3253
3254        Ok(Expr::Dictionary(fields))
3255    }
3256
3257    /// Parse a field for a duckdb [dictionary] or a clickhouse [map] setting
3258    ///
3259    /// Syntax
3260    ///
3261    /// ```sql
3262    /// 'name': expr
3263    /// ```
3264    ///
3265    /// [dictionary]: https://duckdb.org/docs/sql/data_types/struct#creating-structs
3266    /// [map]: https://clickhouse.com/docs/operations/settings/settings#additional_table_filters
3267    fn parse_dictionary_field(&mut self) -> Result<DictionaryField, ParserError> {
3268        let key = self.parse_identifier()?;
3269
3270        self.expect_token(&Token::Colon)?;
3271
3272        let expr = self.parse_expr()?;
3273
3274        Ok(DictionaryField {
3275            key,
3276            value: Box::new(expr),
3277        })
3278    }
3279
3280    /// DuckDB specific: Parse a duckdb [map]
3281    ///
3282    /// Syntax:
3283    ///
3284    /// ```sql
3285    /// Map {key1: value1[, ... ]}
3286    /// ```
3287    ///
3288    /// [map]: https://duckdb.org/docs/sql/data_types/map.html#creating-maps
3289    fn parse_duckdb_map_literal(&mut self) -> Result<Expr, ParserError> {
3290        self.expect_token(&Token::LBrace)?;
3291        let fields = self.parse_comma_separated0(Self::parse_duckdb_map_field, Token::RBrace)?;
3292        self.expect_token(&Token::RBrace)?;
3293        Ok(Expr::Map(Map { entries: fields }))
3294    }
3295
3296    /// Parse a field for a duckdb [map]
3297    ///
3298    /// Syntax
3299    ///
3300    /// ```sql
3301    /// key: value
3302    /// ```
3303    ///
3304    /// [map]: https://duckdb.org/docs/sql/data_types/map.html#creating-maps
3305    fn parse_duckdb_map_field(&mut self) -> Result<MapEntry, ParserError> {
3306        let key = self.parse_expr()?;
3307
3308        self.expect_token(&Token::Colon)?;
3309
3310        let value = self.parse_expr()?;
3311
3312        Ok(MapEntry {
3313            key: Box::new(key),
3314            value: Box::new(value),
3315        })
3316    }
3317
3318    /// Parse clickhouse [map]
3319    ///
3320    /// Syntax
3321    ///
3322    /// ```sql
3323    /// Map(key_data_type, value_data_type)
3324    /// ```
3325    ///
3326    /// [map]: https://clickhouse.com/docs/en/sql-reference/data-types/map
3327    fn parse_click_house_map_def(&mut self) -> Result<(DataType, DataType), ParserError> {
3328        self.expect_keyword_is(Keyword::MAP)?;
3329        self.expect_token(&Token::LParen)?;
3330        let key_data_type = self.parse_data_type()?;
3331        self.expect_token(&Token::Comma)?;
3332        let value_data_type = self.parse_data_type()?;
3333        self.expect_token(&Token::RParen)?;
3334
3335        Ok((key_data_type, value_data_type))
3336    }
3337
3338    /// Parse clickhouse [tuple]
3339    ///
3340    /// Syntax
3341    ///
3342    /// ```sql
3343    /// Tuple([field_name] field_type, ...)
3344    /// ```
3345    ///
3346    /// [tuple]: https://clickhouse.com/docs/en/sql-reference/data-types/tuple
3347    fn parse_click_house_tuple_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3348        self.expect_keyword_is(Keyword::TUPLE)?;
3349        self.expect_token(&Token::LParen)?;
3350        let mut field_defs = vec![];
3351        loop {
3352            let (def, _) = self.parse_struct_field_def()?;
3353            field_defs.push(def);
3354            if !self.consume_token(&Token::Comma) {
3355                break;
3356            }
3357        }
3358        self.expect_token(&Token::RParen)?;
3359
3360        Ok(field_defs)
3361    }
3362
3363    /// For nested types that use the angle bracket syntax, this matches either
3364    /// `>`, `>>`, or nothing, depending on which variant is expected (specified by the previously
3365    /// matched `trailing_bracket` argument). It returns whether a trailing `>` is still
3366    /// left to be matched (i.e. whether `>>` was matched).
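    ///
    /// For example (illustrative), in the nested type below the inner `STRUCT`
    /// consumes the `>>` token and reports that a trailing `>` was matched, so the
    /// outer `STRUCT` does not look for another closing bracket:
    ///
    /// ```sql
    /// STRUCT<a STRUCT<b INT64>>
    /// ```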
3367    fn expect_closing_angle_bracket(
3368        &mut self,
3369        trailing_bracket: MatchedTrailingBracket,
3370    ) -> Result<MatchedTrailingBracket, ParserError> {
3371        let trailing_bracket = if !trailing_bracket.0 {
3372            match self.peek_token().token {
3373                Token::Gt => {
3374                    self.next_token();
3375                    false.into()
3376                }
3377                Token::ShiftRight => {
3378                    self.next_token();
3379                    true.into()
3380                }
3381                _ => return self.expected(">", self.peek_token()),
3382            }
3383        } else {
3384            false.into()
3385        };
3386
3387        Ok(trailing_bracket)
3388    }
3389
3390    /// Parse an operator following an expression
3391    pub fn parse_infix(&mut self, expr: Expr, precedence: u8) -> Result<Expr, ParserError> {
3392        // allow the dialect to override infix parsing
3393        if let Some(infix) = self.dialect.parse_infix(self, &expr, precedence) {
3394            return infix;
3395        }
3396
3397        let dialect = self.dialect;
3398
3399        self.advance_token();
3400        let tok = self.get_current_token();
3401        debug!("infix: {tok:?}");
3402        let tok_index = self.get_current_index();
3403        let span = tok.span;
3404        let regular_binary_operator = match &tok.token {
3405            Token::Spaceship => Some(BinaryOperator::Spaceship),
3406            Token::DoubleEq => Some(BinaryOperator::Eq),
3407            Token::Assignment => Some(BinaryOperator::Assignment),
3408            Token::Eq => Some(BinaryOperator::Eq),
3409            Token::Neq => Some(BinaryOperator::NotEq),
3410            Token::Gt => Some(BinaryOperator::Gt),
3411            Token::GtEq => Some(BinaryOperator::GtEq),
3412            Token::Lt => Some(BinaryOperator::Lt),
3413            Token::LtEq => Some(BinaryOperator::LtEq),
3414            Token::Plus => Some(BinaryOperator::Plus),
3415            Token::Minus => Some(BinaryOperator::Minus),
3416            Token::Mul => Some(BinaryOperator::Multiply),
3417            Token::Mod => Some(BinaryOperator::Modulo),
3418            Token::StringConcat => Some(BinaryOperator::StringConcat),
3419            Token::Pipe => Some(BinaryOperator::BitwiseOr),
3420            Token::Caret => {
3421                // In PostgreSQL, ^ stands for the exponentiation operation,
3422                // and # stands for XOR. See https://www.postgresql.org/docs/current/functions-math.html
3423                if dialect_is!(dialect is PostgreSqlDialect) {
3424                    Some(BinaryOperator::PGExp)
3425                } else {
3426                    Some(BinaryOperator::BitwiseXor)
3427                }
3428            }
3429            Token::Ampersand => Some(BinaryOperator::BitwiseAnd),
3430            Token::Div => Some(BinaryOperator::Divide),
3431            Token::DuckIntDiv if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
3432                Some(BinaryOperator::DuckIntegerDivide)
3433            }
3434            Token::ShiftLeft if dialect_is!(dialect is PostgreSqlDialect | DuckDbDialect | GenericDialect | RedshiftSqlDialect) => {
3435                Some(BinaryOperator::PGBitwiseShiftLeft)
3436            }
3437            Token::ShiftRight if dialect_is!(dialect is PostgreSqlDialect | DuckDbDialect | GenericDialect | RedshiftSqlDialect) => {
3438                Some(BinaryOperator::PGBitwiseShiftRight)
3439            }
3440            Token::Sharp if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3441                Some(BinaryOperator::PGBitwiseXor)
3442            }
3443            Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3444                Some(BinaryOperator::PGOverlap)
3445            }
3446            Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3447                Some(BinaryOperator::PGOverlap)
3448            }
3449            Token::CaretAt if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3450                Some(BinaryOperator::PGStartsWith)
3451            }
3452            Token::Tilde => Some(BinaryOperator::PGRegexMatch),
3453            Token::TildeAsterisk => Some(BinaryOperator::PGRegexIMatch),
3454            Token::ExclamationMarkTilde => Some(BinaryOperator::PGRegexNotMatch),
3455            Token::ExclamationMarkTildeAsterisk => Some(BinaryOperator::PGRegexNotIMatch),
3456            Token::DoubleTilde => Some(BinaryOperator::PGLikeMatch),
3457            Token::DoubleTildeAsterisk => Some(BinaryOperator::PGILikeMatch),
3458            Token::ExclamationMarkDoubleTilde => Some(BinaryOperator::PGNotLikeMatch),
3459            Token::ExclamationMarkDoubleTildeAsterisk => Some(BinaryOperator::PGNotILikeMatch),
3460            Token::Arrow => Some(BinaryOperator::Arrow),
3461            Token::LongArrow => Some(BinaryOperator::LongArrow),
3462            Token::HashArrow => Some(BinaryOperator::HashArrow),
3463            Token::HashLongArrow => Some(BinaryOperator::HashLongArrow),
3464            Token::AtArrow => Some(BinaryOperator::AtArrow),
3465            Token::ArrowAt => Some(BinaryOperator::ArrowAt),
3466            Token::HashMinus => Some(BinaryOperator::HashMinus),
3467            Token::AtQuestion => Some(BinaryOperator::AtQuestion),
3468            Token::AtAt => Some(BinaryOperator::AtAt),
3469            Token::Question => Some(BinaryOperator::Question),
3470            Token::QuestionAnd => Some(BinaryOperator::QuestionAnd),
3471            Token::QuestionPipe => Some(BinaryOperator::QuestionPipe),
3472            Token::CustomBinaryOperator(s) => Some(BinaryOperator::Custom(s.clone())),
3473            Token::DoubleSharp if self.dialect.supports_geometric_types() => {
3474                Some(BinaryOperator::DoubleHash)
3475            }
3476
3477            Token::AmpersandLeftAngleBracket if self.dialect.supports_geometric_types() => {
3478                Some(BinaryOperator::AndLt)
3479            }
3480            Token::AmpersandRightAngleBracket if self.dialect.supports_geometric_types() => {
3481                Some(BinaryOperator::AndGt)
3482            }
3483            Token::QuestionMarkDash if self.dialect.supports_geometric_types() => {
3484                Some(BinaryOperator::QuestionDash)
3485            }
3486            Token::AmpersandLeftAngleBracketVerticalBar
3487                if self.dialect.supports_geometric_types() =>
3488            {
3489                Some(BinaryOperator::AndLtPipe)
3490            }
3491            Token::VerticalBarAmpersandRightAngleBracket
3492                if self.dialect.supports_geometric_types() =>
3493            {
3494                Some(BinaryOperator::PipeAndGt)
3495            }
3496            Token::TwoWayArrow if self.dialect.supports_geometric_types() => {
3497                Some(BinaryOperator::LtDashGt)
3498            }
3499            Token::LeftAngleBracketCaret if self.dialect.supports_geometric_types() => {
3500                Some(BinaryOperator::LtCaret)
3501            }
3502            Token::RightAngleBracketCaret if self.dialect.supports_geometric_types() => {
3503                Some(BinaryOperator::GtCaret)
3504            }
3505            Token::QuestionMarkSharp if self.dialect.supports_geometric_types() => {
3506                Some(BinaryOperator::QuestionHash)
3507            }
3508            Token::QuestionMarkDoubleVerticalBar if self.dialect.supports_geometric_types() => {
3509                Some(BinaryOperator::QuestionDoublePipe)
3510            }
3511            Token::QuestionMarkDashVerticalBar if self.dialect.supports_geometric_types() => {
3512                Some(BinaryOperator::QuestionDashPipe)
3513            }
3514            Token::TildeEqual if self.dialect.supports_geometric_types() => {
3515                Some(BinaryOperator::TildeEq)
3516            }
3517            Token::ShiftLeftVerticalBar if self.dialect.supports_geometric_types() => {
3518                Some(BinaryOperator::LtLtPipe)
3519            }
3520            Token::VerticalBarShiftRight if self.dialect.supports_geometric_types() => {
3521                Some(BinaryOperator::PipeGtGt)
3522            }
3523            Token::AtSign if self.dialect.supports_geometric_types() => Some(BinaryOperator::At),
3524
3525            Token::Word(w) => match w.keyword {
3526                Keyword::AND => Some(BinaryOperator::And),
3527                Keyword::OR => Some(BinaryOperator::Or),
3528                Keyword::XOR => Some(BinaryOperator::Xor),
3529                Keyword::OVERLAPS => Some(BinaryOperator::Overlaps),
3530                Keyword::OPERATOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3531                    self.expect_token(&Token::LParen)?;
3532                    // there are special rules for operator names in
3533                    // postgres so we can not use 'parse_object'
3534                    // or similar.
3535                    // See https://www.postgresql.org/docs/current/sql-createoperator.html
3536                    let mut idents = vec![];
3537                    loop {
3538                        self.advance_token();
3539                        idents.push(self.get_current_token().to_string());
3540                        if !self.consume_token(&Token::Period) {
3541                            break;
3542                        }
3543                    }
3544                    self.expect_token(&Token::RParen)?;
3545                    Some(BinaryOperator::PGCustomBinaryOperator(idents))
3546                }
3547                _ => None,
3548            },
3549            _ => None,
3550        };
3551
3552        let tok = self.token_at(tok_index);
3553        if let Some(op) = regular_binary_operator {
3554            if let Some(keyword) =
3555                self.parse_one_of_keywords(&[Keyword::ANY, Keyword::ALL, Keyword::SOME])
3556            {
3557                self.expect_token(&Token::LParen)?;
3558                let right = if self.peek_sub_query() {
3559                    // We have a subquery ahead (SELECT/WITH ...), so we need to rewind and
3560                    // use the parenthesis for parsing the subquery as an expression.
3561                    self.prev_token(); // LParen
3562                    self.parse_subexpr(precedence)?
3563                } else {
3564                    // Non-subquery expression
3565                    let right = self.parse_subexpr(precedence)?;
3566                    self.expect_token(&Token::RParen)?;
3567                    right
3568                };
3569
3570                if !matches!(
3571                    op,
3572                    BinaryOperator::Gt
3573                        | BinaryOperator::Lt
3574                        | BinaryOperator::GtEq
3575                        | BinaryOperator::LtEq
3576                        | BinaryOperator::Eq
3577                        | BinaryOperator::NotEq
3578                        | BinaryOperator::PGRegexMatch
3579                        | BinaryOperator::PGRegexIMatch
3580                        | BinaryOperator::PGRegexNotMatch
3581                        | BinaryOperator::PGRegexNotIMatch
3582                        | BinaryOperator::PGLikeMatch
3583                        | BinaryOperator::PGILikeMatch
3584                        | BinaryOperator::PGNotLikeMatch
3585                        | BinaryOperator::PGNotILikeMatch
3586                ) {
3587                    return parser_err!(
3588                        format!(
3589                        "Expected one of [=, >, <, >=, <=, !=, ~, ~*, !~, !~*, ~~, ~~*, !~~, !~~*] as comparison operator, found: {op}"
3590                    ),
3591                        span.start
3592                    );
3593                };
3594
3595                Ok(match keyword {
3596                    Keyword::ALL => Expr::AllOp {
3597                        left: Box::new(expr),
3598                        compare_op: op,
3599                        right: Box::new(right),
3600                    },
3601                    Keyword::ANY | Keyword::SOME => Expr::AnyOp {
3602                        left: Box::new(expr),
3603                        compare_op: op,
3604                        right: Box::new(right),
3605                        is_some: keyword == Keyword::SOME,
3606                    },
3607                    unexpected_keyword => return Err(ParserError::ParserError(
3608                        format!("Internal parser error: expected any of {{ALL, ANY, SOME}}, got {unexpected_keyword:?}"),
3609                    )),
3610                })
3611            } else {
3612                Ok(Expr::BinaryOp {
3613                    left: Box::new(expr),
3614                    op,
3615                    right: Box::new(self.parse_subexpr(precedence)?),
3616                })
3617            }
3618        } else if let Token::Word(w) = &tok.token {
3619            match w.keyword {
3620                Keyword::IS => {
3621                    if self.parse_keyword(Keyword::NULL) {
3622                        Ok(Expr::IsNull(Box::new(expr)))
3623                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
3624                        Ok(Expr::IsNotNull(Box::new(expr)))
3625                    } else if self.parse_keywords(&[Keyword::TRUE]) {
3626                        Ok(Expr::IsTrue(Box::new(expr)))
3627                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::TRUE]) {
3628                        Ok(Expr::IsNotTrue(Box::new(expr)))
3629                    } else if self.parse_keywords(&[Keyword::FALSE]) {
3630                        Ok(Expr::IsFalse(Box::new(expr)))
3631                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::FALSE]) {
3632                        Ok(Expr::IsNotFalse(Box::new(expr)))
3633                    } else if self.parse_keywords(&[Keyword::UNKNOWN]) {
3634                        Ok(Expr::IsUnknown(Box::new(expr)))
3635                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::UNKNOWN]) {
3636                        Ok(Expr::IsNotUnknown(Box::new(expr)))
3637                    } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::FROM]) {
3638                        let expr2 = self.parse_expr()?;
3639                        Ok(Expr::IsDistinctFrom(Box::new(expr), Box::new(expr2)))
3640                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::DISTINCT, Keyword::FROM])
3641                    {
3642                        let expr2 = self.parse_expr()?;
3643                        Ok(Expr::IsNotDistinctFrom(Box::new(expr), Box::new(expr2)))
3644                    } else if let Ok(is_normalized) = self.parse_unicode_is_normalized(expr) {
3645                        Ok(is_normalized)
3646                    } else {
3647                        self.expected(
3648                            "[NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS",
3649                            self.peek_token(),
3650                        )
3651                    }
3652                }
3653                Keyword::AT => {
3654                    self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
3655                    Ok(Expr::AtTimeZone {
3656                        timestamp: Box::new(expr),
3657                        time_zone: Box::new(self.parse_subexpr(precedence)?),
3658                    })
3659                }
3660                Keyword::NOT
3661                | Keyword::IN
3662                | Keyword::BETWEEN
3663                | Keyword::LIKE
3664                | Keyword::ILIKE
3665                | Keyword::SIMILAR
3666                | Keyword::REGEXP
3667                | Keyword::RLIKE => {
3668                    self.prev_token();
3669                    let negated = self.parse_keyword(Keyword::NOT);
3670                    let regexp = self.parse_keyword(Keyword::REGEXP);
3671                    let rlike = self.parse_keyword(Keyword::RLIKE);
3672                    let null = if !self.in_column_definition_state() {
3673                        self.parse_keyword(Keyword::NULL)
3674                    } else {
3675                        false
3676                    };
3677                    if regexp || rlike {
3678                        Ok(Expr::RLike {
3679                            negated,
3680                            expr: Box::new(expr),
3681                            pattern: Box::new(
3682                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3683                            ),
3684                            regexp,
3685                        })
3686                    } else if negated && null {
3687                        Ok(Expr::IsNotNull(Box::new(expr)))
3688                    } else if self.parse_keyword(Keyword::IN) {
3689                        self.parse_in(expr, negated)
3690                    } else if self.parse_keyword(Keyword::BETWEEN) {
3691                        self.parse_between(expr, negated)
3692                    } else if self.parse_keyword(Keyword::LIKE) {
3693                        Ok(Expr::Like {
3694                            negated,
3695                            any: self.parse_keyword(Keyword::ANY),
3696                            expr: Box::new(expr),
3697                            pattern: Box::new(
3698                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3699                            ),
3700                            escape_char: self.parse_escape_char()?,
3701                        })
3702                    } else if self.parse_keyword(Keyword::ILIKE) {
3703                        Ok(Expr::ILike {
3704                            negated,
3705                            any: self.parse_keyword(Keyword::ANY),
3706                            expr: Box::new(expr),
3707                            pattern: Box::new(
3708                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3709                            ),
3710                            escape_char: self.parse_escape_char()?,
3711                        })
3712                    } else if self.parse_keywords(&[Keyword::SIMILAR, Keyword::TO]) {
3713                        Ok(Expr::SimilarTo {
3714                            negated,
3715                            expr: Box::new(expr),
3716                            pattern: Box::new(
3717                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3718                            ),
3719                            escape_char: self.parse_escape_char()?,
3720                        })
3721                    } else {
3722                        self.expected("IN or BETWEEN after NOT", self.peek_token())
3723                    }
3724                }
3725                Keyword::NOTNULL if dialect.supports_notnull_operator() => {
3726                    Ok(Expr::IsNotNull(Box::new(expr)))
3727                }
3728                Keyword::MEMBER => {
3729                    if self.parse_keyword(Keyword::OF) {
3730                        self.expect_token(&Token::LParen)?;
3731                        let array = self.parse_expr()?;
3732                        self.expect_token(&Token::RParen)?;
3733                        Ok(Expr::MemberOf(MemberOf {
3734                            value: Box::new(expr),
3735                            array: Box::new(array),
3736                        }))
3737                    } else {
3738                        self.expected("OF after MEMBER", self.peek_token())
3739                    }
3740                }
3741                // Can only happen if `get_next_precedence` got out of sync with this function
3742                _ => parser_err!(
3743                    format!("No infix parser for token {:?}", tok.token),
3744                    tok.span.start
3745                ),
3746            }
3747        } else if Token::DoubleColon == *tok {
3748            Ok(Expr::Cast {
3749                kind: CastKind::DoubleColon,
3750                expr: Box::new(expr),
3751                data_type: self.parse_data_type()?,
3752                format: None,
3753            })
3754        } else if Token::ExclamationMark == *tok && self.dialect.supports_factorial_operator() {
3755            Ok(Expr::UnaryOp {
3756                op: UnaryOperator::PGPostfixFactorial,
3757                expr: Box::new(expr),
3758            })
3759        } else if Token::LBracket == *tok && self.dialect.supports_partiql()
3760            || (dialect_of!(self is SnowflakeDialect | GenericDialect) && Token::Colon == *tok)
3761        {
3762            self.prev_token();
3763            self.parse_json_access(expr)
3764        } else {
3765            // Can only happen if `get_next_precedence` got out of sync with this function
3766            parser_err!(
3767                format!("No infix parser for token {:?}", tok.token),
3768                tok.span.start
3769            )
3770        }
3771    }
3772
3773    /// Parse the `ESCAPE CHAR` portion of `LIKE`, `ILIKE`, and `SIMILAR TO`
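    ///
    /// Illustrative example (the pattern and escape character are arbitrary):
    ///
    /// ```sql
    /// name LIKE '50!%%' ESCAPE '!'
    /// ```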
3774    pub fn parse_escape_char(&mut self) -> Result<Option<Value>, ParserError> {
3775        if self.parse_keyword(Keyword::ESCAPE) {
3776            Ok(Some(self.parse_value()?.into()))
3777        } else {
3778            Ok(None)
3779        }
3780    }
3781
3782    /// Parses an array subscript like
3783    /// * `[:]`
3784    /// * `[l]`
3785    /// * `[l:]`
3786    /// * `[:u]`
3787    /// * `[l:u]`
3788    /// * `[l:u:s]`
3789    ///
3790    /// Parser is right after `[`
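    ///
    /// Illustrative examples (the array name and bounds are arbitrary):
    ///
    /// ```sql
    /// my_array[2]
    /// my_array[2:5]
    /// my_array[2:10:2]
    /// ```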
3791    fn parse_subscript_inner(&mut self) -> Result<Subscript, ParserError> {
3792        // at either `<lower>:(rest)` or `:(rest)]`
3793        let lower_bound = if self.consume_token(&Token::Colon) {
3794            None
3795        } else {
3796            Some(self.parse_expr()?)
3797        };
3798
3799        // check for end
3800        if self.consume_token(&Token::RBracket) {
3801            if let Some(lower_bound) = lower_bound {
3802                return Ok(Subscript::Index { index: lower_bound });
3803            };
3804            return Ok(Subscript::Slice {
3805                lower_bound,
3806                upper_bound: None,
3807                stride: None,
3808            });
3809        }
3810
3811        // consume the `:`
3812        if lower_bound.is_some() {
3813            self.expect_token(&Token::Colon)?;
3814        }
3815
3816        // we are now at either `]` or `<upper>(rest)]`
3817        let upper_bound = if self.consume_token(&Token::RBracket) {
3818            return Ok(Subscript::Slice {
3819                lower_bound,
3820                upper_bound: None,
3821                stride: None,
3822            });
3823        } else {
3824            Some(self.parse_expr()?)
3825        };
3826
3827        // check for end
3828        if self.consume_token(&Token::RBracket) {
3829            return Ok(Subscript::Slice {
3830                lower_bound,
3831                upper_bound,
3832                stride: None,
3833            });
3834        }
3835
3836        // we are now at `:]` or `:stride]`
3837        self.expect_token(&Token::Colon)?;
3838        let stride = if self.consume_token(&Token::RBracket) {
3839            None
3840        } else {
3841            Some(self.parse_expr()?)
3842        };
3843
3844        if stride.is_some() {
3845            self.expect_token(&Token::RBracket)?;
3846        }
3847
3848        Ok(Subscript::Slice {
3849            lower_bound,
3850            upper_bound,
3851            stride,
3852        })
3853    }
3854
3855    /// Parse a multi-dimension array accessing like `[1:3][1][1]`
3856    pub fn parse_multi_dim_subscript(
3857        &mut self,
3858        chain: &mut Vec<AccessExpr>,
3859    ) -> Result<(), ParserError> {
3860        while self.consume_token(&Token::LBracket) {
3861            self.parse_subscript(chain)?;
3862        }
3863        Ok(())
3864    }
3865
3866    /// Parses an array subscript like `[1:3]`
3867    ///
3868    /// Parser is right after `[`
3869    fn parse_subscript(&mut self, chain: &mut Vec<AccessExpr>) -> Result<(), ParserError> {
3870        let subscript = self.parse_subscript_inner()?;
3871        chain.push(AccessExpr::Subscript(subscript));
3872        Ok(())
3873    }
3874
3875    fn parse_json_path_object_key(&mut self) -> Result<JsonPathElem, ParserError> {
3876        let token = self.next_token();
3877        match token.token {
3878            Token::Word(Word {
3879                value,
3880                // path segments in Snowflake dot notation can be unquoted or double-quoted
3881                quote_style: quote_style @ (Some('"') | None),
3882                // some experimentation suggests that snowflake permits
3883                // any keyword here unquoted.
3884                keyword: _,
3885            }) => Ok(JsonPathElem::Dot {
3886                key: value,
3887                quoted: quote_style.is_some(),
3888            }),
3889
3890            // This token should never be generated on snowflake or generic
3891            // dialects, but we handle it just in case this is used on future
3892            // dialects.
3893            Token::DoubleQuotedString(key) => Ok(JsonPathElem::Dot { key, quoted: true }),
3894
3895            _ => self.expected("variant object key name", token),
3896        }
3897    }
3898
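    /// Parse a JSON access following an expression, producing an [`Expr::JsonAccess`].
    ///
    /// Illustrative example (Snowflake-style path syntax; the names are arbitrary):
    ///
    /// ```sql
    /// payload:store.book[0]
    /// ```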
3899    fn parse_json_access(&mut self, expr: Expr) -> Result<Expr, ParserError> {
3900        let path = self.parse_json_path()?;
3901        Ok(Expr::JsonAccess {
3902            value: Box::new(expr),
3903            path,
3904        })
3905    }
3906
3907    fn parse_json_path(&mut self) -> Result<JsonPath, ParserError> {
3908        let mut path = Vec::new();
3909        loop {
3910            match self.next_token().token {
3911                Token::Colon if path.is_empty() => {
3912                    path.push(self.parse_json_path_object_key()?);
3913                }
3914                Token::Period if !path.is_empty() => {
3915                    path.push(self.parse_json_path_object_key()?);
3916                }
3917                Token::LBracket => {
3918                    let key = self.parse_expr()?;
3919                    self.expect_token(&Token::RBracket)?;
3920
3921                    path.push(JsonPathElem::Bracket { key });
3922                }
3923                _ => {
3924                    self.prev_token();
3925                    break;
3926                }
3927            };
3928        }
3929
3930        debug_assert!(!path.is_empty());
3931        Ok(JsonPath { path })
3932    }
3933
3934    /// Parses the parens following the `[ NOT ] IN` operator.
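    ///
    /// Illustrative examples (the column, subquery, and array names are arbitrary;
    /// the `UNNEST` form is BigQuery-specific):
    ///
    /// ```sql
    /// id IN (1, 2, 3)
    /// id IN (SELECT id FROM other_table)
    /// id IN UNNEST(id_array)
    /// ```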
3935    pub fn parse_in(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
3936        // BigQuery allows `IN UNNEST(array_expression)`
3937        // https://cloud.google.com/bigquery/docs/reference/standard-sql/operators#in_operators
3938        if self.parse_keyword(Keyword::UNNEST) {
3939            self.expect_token(&Token::LParen)?;
3940            let array_expr = self.parse_expr()?;
3941            self.expect_token(&Token::RParen)?;
3942            return Ok(Expr::InUnnest {
3943                expr: Box::new(expr),
3944                array_expr: Box::new(array_expr),
3945                negated,
3946            });
3947        }
3948        self.expect_token(&Token::LParen)?;
3949        let in_op = match self.maybe_parse(|p| p.parse_query())? {
3950            Some(subquery) => Expr::InSubquery {
3951                expr: Box::new(expr),
3952                subquery,
3953                negated,
3954            },
3955            None => Expr::InList {
3956                expr: Box::new(expr),
3957                list: if self.dialect.supports_in_empty_list() {
3958                    self.parse_comma_separated0(Parser::parse_expr, Token::RParen)?
3959                } else {
3960                    self.parse_comma_separated(Parser::parse_expr)?
3961                },
3962                negated,
3963            },
3964        };
3965        self.expect_token(&Token::RParen)?;
3966        Ok(in_op)
3967    }
3968
3969    /// Parses `BETWEEN <low> AND <high>`, assuming the `BETWEEN` keyword was already consumed.
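    ///
    /// Example (an illustrative sketch added for clarity): the caller has
    /// already consumed `x BETWEEN`, so only `<low> AND <high>` remains.
    /// ```rust
    /// # use sqlparser::ast::{Expr, Ident};
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("1 AND 5").unwrap();
    /// let lhs = Expr::Identifier(Ident::new("x"));
    /// let expr = parser.parse_between(lhs, false).unwrap();
    /// assert!(matches!(expr, Expr::Between { negated: false, .. }));
    /// ```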
3970    pub fn parse_between(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
3971        // Stop parsing subexpressions for <low> and <high> on tokens with
3972        // precedence lower than that of `BETWEEN`, such as `AND`, `IS`, etc.
3973        let low = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
3974        self.expect_keyword_is(Keyword::AND)?;
3975        let high = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
3976        Ok(Expr::Between {
3977            expr: Box::new(expr),
3978            negated,
3979            low: Box::new(low),
3980            high: Box::new(high),
3981        })
3982    }
3983
3984    /// Parse a PostgreSQL casting style which is in the form of `expr::datatype`.
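    ///
    /// Example (an illustrative sketch added for clarity): the caller has
    /// already consumed `x::`, so only the data type remains.
    /// ```rust
    /// # use sqlparser::ast::{CastKind, Expr, Ident};
    /// # use sqlparser::dialect::PostgreSqlDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = PostgreSqlDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("BIGINT").unwrap();
    /// let expr = parser.parse_pg_cast(Expr::Identifier(Ident::new("x"))).unwrap();
    /// assert!(matches!(expr, Expr::Cast { kind: CastKind::DoubleColon, .. }));
    /// ```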
3985    pub fn parse_pg_cast(&mut self, expr: Expr) -> Result<Expr, ParserError> {
3986        Ok(Expr::Cast {
3987            kind: CastKind::DoubleColon,
3988            expr: Box::new(expr),
3989            data_type: self.parse_data_type()?,
3990            format: None,
3991        })
3992    }
3993
3994    /// Get the precedence of the next token
3995    pub fn get_next_precedence(&self) -> Result<u8, ParserError> {
3996        self.dialect.get_next_precedence_default(self)
3997    }
3998
3999    /// Return the token at the given location, or EOF if the index is beyond
4000    /// the length of the current set of tokens.
4001    pub fn token_at(&self, index: usize) -> &TokenWithSpan {
4002        self.tokens.get(index).unwrap_or(&EOF_TOKEN)
4003    }
4004
4005    /// Return the first non-whitespace token that has not yet been processed
4006    /// or Token::EOF
4007    ///
4008    /// See [`Self::peek_token_ref`] to avoid the copy.
4009    pub fn peek_token(&self) -> TokenWithSpan {
4010        self.peek_nth_token(0)
4011    }
4012
4013    /// Return a reference to the first non-whitespace token that has not yet
4014    /// been processed or Token::EOF
4015    pub fn peek_token_ref(&self) -> &TokenWithSpan {
4016        self.peek_nth_token_ref(0)
4017    }
4018
4019    /// Returns the `N` next non-whitespace tokens that have not yet been
4020    /// processed.
4021    ///
4022    /// Example:
4023    /// ```rust
4024    /// # use sqlparser::dialect::GenericDialect;
4025    /// # use sqlparser::parser::Parser;
4026    /// # use sqlparser::keywords::Keyword;
4027    /// # use sqlparser::tokenizer::{Token, Word};
4028    /// let dialect = GenericDialect {};
4029    /// let mut parser = Parser::new(&dialect).try_with_sql("ORDER BY foo, bar").unwrap();
4030    ///
4031    /// // Note that Rust infers the number of tokens to peek based on the
4032    /// // length of the slice pattern!
4033    /// assert!(matches!(
4034    ///     parser.peek_tokens(),
4035    ///     [
4036    ///         Token::Word(Word { keyword: Keyword::ORDER, .. }),
4037    ///         Token::Word(Word { keyword: Keyword::BY, .. }),
4038    ///     ]
4039    /// ));
4040    /// ```
4041    pub fn peek_tokens<const N: usize>(&self) -> [Token; N] {
4042        self.peek_tokens_with_location()
4043            .map(|with_loc| with_loc.token)
4044    }
4045
4046    /// Returns the `N` next non-whitespace tokens with locations that have not
4047    /// yet been processed.
4048    ///
4049    /// See [`Self::peek_tokens`] for an example.
4050    pub fn peek_tokens_with_location<const N: usize>(&self) -> [TokenWithSpan; N] {
4051        let mut index = self.index;
4052        core::array::from_fn(|_| loop {
4053            let token = self.tokens.get(index);
4054            index += 1;
4055            if let Some(TokenWithSpan {
4056                token: Token::Whitespace(_),
4057                span: _,
4058            }) = token
4059            {
4060                continue;
4061            }
4062            break token.cloned().unwrap_or(TokenWithSpan {
4063                token: Token::EOF,
4064                span: Span::empty(),
4065            });
4066        })
4067    }
4068
4069    /// Returns references to the `N` next non-whitespace tokens
4070    /// that have not yet been processed.
4071    ///
4072    /// See [`Self::peek_tokens`] for an example.
4073    pub fn peek_tokens_ref<const N: usize>(&self) -> [&TokenWithSpan; N] {
4074        let mut index = self.index;
4075        core::array::from_fn(|_| loop {
4076            let token = self.tokens.get(index);
4077            index += 1;
4078            if let Some(TokenWithSpan {
4079                token: Token::Whitespace(_),
4080                span: _,
4081            }) = token
4082            {
4083                continue;
4084            }
4085            break token.unwrap_or(&EOF_TOKEN);
4086        })
4087    }
4088
4089    /// Return nth non-whitespace token that has not yet been processed
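    ///
    /// Example (an illustrative sketch added for clarity); peeking does not
    /// advance the parser.
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::keywords::Keyword;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::tokenizer::{Token, Word};
    /// let dialect = GenericDialect {};
    /// let parser = Parser::new(&dialect).try_with_sql("ORDER BY foo").unwrap();
    /// // `n` is zero-based: 0 peeks `ORDER`, 1 peeks `BY`.
    /// assert!(matches!(
    ///     parser.peek_nth_token(1).token,
    ///     Token::Word(Word { keyword: Keyword::BY, .. })
    /// ));
    /// ```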
4090    pub fn peek_nth_token(&self, n: usize) -> TokenWithSpan {
4091        self.peek_nth_token_ref(n).clone()
4092    }
4093
4094    /// Return nth non-whitespace token that has not yet been processed
4095    pub fn peek_nth_token_ref(&self, mut n: usize) -> &TokenWithSpan {
4096        let mut index = self.index;
4097        loop {
4098            index += 1;
4099            match self.tokens.get(index - 1) {
4100                Some(TokenWithSpan {
4101                    token: Token::Whitespace(_),
4102                    span: _,
4103                }) => continue,
4104                non_whitespace => {
4105                    if n == 0 {
4106                        return non_whitespace.unwrap_or(&EOF_TOKEN);
4107                    }
4108                    n -= 1;
4109                }
4110            }
4111        }
4112    }
4113
4114    /// Return the first token, possibly whitespace, that has not yet been processed
4115    /// (or None if reached end-of-file).
4116    pub fn peek_token_no_skip(&self) -> TokenWithSpan {
4117        self.peek_nth_token_no_skip(0)
4118    }
4119
4120    /// Return nth token, possibly whitespace, that has not yet been processed.
4121    pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithSpan {
4122        self.tokens
4123            .get(self.index + n)
4124            .cloned()
4125            .unwrap_or(TokenWithSpan {
4126                token: Token::EOF,
4127                span: Span::empty(),
4128            })
4129    }
4130
4131    /// Return true if the next tokens exactly match `expected`
4132    ///
4133    /// Does not advance the current token.
4134    fn peek_keywords(&mut self, expected: &[Keyword]) -> bool {
4135        let index = self.index;
4136        let matched = self.parse_keywords(expected);
4137        self.index = index;
4138        matched
4139    }
4140
4141    /// Advances to the next non-whitespace token and returns a copy.
4142    ///
4143    /// Please use [`Self::advance_token`] and [`Self::get_current_token`] to
4144    /// avoid the copy.
4145    pub fn next_token(&mut self) -> TokenWithSpan {
4146        self.advance_token();
4147        self.get_current_token().clone()
4148    }
4149
4150    /// Returns the index of the current token
4151    ///
4152    /// This can be used with APIs that expect an index, such as
4153    /// [`Self::token_at`]
4154    pub fn get_current_index(&self) -> usize {
4155        self.index.saturating_sub(1)
4156    }
4157
4158    /// Return the next unprocessed token, possibly whitespace.
4159    pub fn next_token_no_skip(&mut self) -> Option<&TokenWithSpan> {
4160        self.index += 1;
4161        self.tokens.get(self.index - 1)
4162    }
4163
4164    /// Advances the current token to the next non-whitespace token
4165    ///
4166    /// See [`Self::get_current_token`] to get the current token after advancing
4167    pub fn advance_token(&mut self) {
4168        loop {
4169            self.index += 1;
4170            match self.tokens.get(self.index - 1) {
4171                Some(TokenWithSpan {
4172                    token: Token::Whitespace(_),
4173                    span: _,
4174                }) => continue,
4175                _ => break,
4176            }
4177        }
4178    }
4179
4180    /// Returns a reference to the current token
4181    ///
4182    /// Does not advance the current token.
4183    pub fn get_current_token(&self) -> &TokenWithSpan {
4184        self.token_at(self.index.saturating_sub(1))
4185    }
4186
4187    /// Returns a reference to the previous token
4188    ///
4189    /// Does not advance the current token.
4190    pub fn get_previous_token(&self) -> &TokenWithSpan {
4191        self.token_at(self.index.saturating_sub(2))
4192    }
4193
4194    /// Returns a reference to the next token
4195    ///
4196    /// Does not advance the current token.
4197    pub fn get_next_token(&self) -> &TokenWithSpan {
4198        self.token_at(self.index)
4199    }
4200
4201    /// Seek back to the previous non-whitespace token.
4202    ///
4203    /// Must be called after `next_token()`, otherwise it might panic. It is OK
4204    /// to call it after `next_token()` has returned an EOF.
4205    ///
4206    // TODO rename to backup_token and deprecate prev_token?
4207    pub fn prev_token(&mut self) {
4208        loop {
4209            assert!(self.index > 0);
4210            self.index -= 1;
4211            if let Some(TokenWithSpan {
4212                token: Token::Whitespace(_),
4213                span: _,
4214            }) = self.tokens.get(self.index)
4215            {
4216                continue;
4217            }
4218            return;
4219        }
4220    }
4221
4222    /// Report `found` was encountered instead of `expected`
4223    pub fn expected<T>(&self, expected: &str, found: TokenWithSpan) -> Result<T, ParserError> {
4224        parser_err!(
4225            format!("Expected: {expected}, found: {found}"),
4226            found.span.start
4227        )
4228    }
4229
4230    /// Report `found` was encountered instead of `expected`
4231    pub fn expected_ref<T>(&self, expected: &str, found: &TokenWithSpan) -> Result<T, ParserError> {
4232        parser_err!(
4233            format!("Expected: {expected}, found: {found}"),
4234            found.span.start
4235        )
4236    }
4237
4238    /// Report that the token at `index` was found instead of `expected`.
4239    pub fn expected_at<T>(&self, expected: &str, index: usize) -> Result<T, ParserError> {
4240        let found = self.tokens.get(index).unwrap_or(&EOF_TOKEN);
4241        parser_err!(
4242            format!("Expected: {expected}, found: {found}"),
4243            found.span.start
4244        )
4245    }
4246
4247    /// If the current token is the `expected` keyword, consume it and return
4248    /// true. Otherwise, no tokens are consumed and false is returned.
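    ///
    /// Example (an illustrative sketch added for clarity, using [`GenericDialect`]):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::keywords::Keyword;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("GROUP BY x").unwrap();
    /// // `GROUP` matches and is consumed; `ORDER` does not, so nothing is consumed.
    /// assert!(parser.parse_keyword(Keyword::GROUP));
    /// assert!(!parser.parse_keyword(Keyword::ORDER));
    /// assert!(parser.parse_keyword(Keyword::BY));
    /// ```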
4249    #[must_use]
4250    pub fn parse_keyword(&mut self, expected: Keyword) -> bool {
4251        if self.peek_keyword(expected) {
4252            self.advance_token();
4253            true
4254        } else {
4255            false
4256        }
4257    }
4258
4259    #[must_use]
4260    pub fn peek_keyword(&self, expected: Keyword) -> bool {
4261        matches!(&self.peek_token_ref().token, Token::Word(w) if expected == w.keyword)
4262    }
4263
4264    /// If the current token is the `expected` keyword followed by the
4265    /// specified tokens, consume them and return true.
4266    /// Otherwise, no tokens are consumed and false is returned.
4267    ///
4268    /// Note that if `tokens` is long, this function will not be efficient,
4269    /// as it peeks each of the tokens in turn with `peek_nth_token` on every
4270    /// call.
4271    pub fn parse_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
4272        self.keyword_with_tokens(expected, tokens, true)
4273    }
4274
4275    /// Peeks to see if the current token is the `expected` keyword followed by specified tokens
4276    /// without consuming them.
4277    ///
4278    /// See [Self::parse_keyword_with_tokens] for details.
4279    pub(crate) fn peek_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
4280        self.keyword_with_tokens(expected, tokens, false)
4281    }
4282
4283    fn keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token], consume: bool) -> bool {
4284        match &self.peek_token_ref().token {
4285            Token::Word(w) if expected == w.keyword => {
4286                for (idx, token) in tokens.iter().enumerate() {
4287                    if self.peek_nth_token_ref(idx + 1).token != *token {
4288                        return false;
4289                    }
4290                }
4291
4292                if consume {
4293                    for _ in 0..(tokens.len() + 1) {
4294                        self.advance_token();
4295                    }
4296                }
4297
4298                true
4299            }
4300            _ => false,
4301        }
4302    }
4303
4304    /// If the current and subsequent tokens exactly match the `keywords`
4305    /// sequence, consume them and return true. Otherwise, no tokens are
4306    /// consumed and false is returned.
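    ///
    /// Example (an illustrative sketch added for clarity); matching is
    /// all-or-nothing.
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::keywords::Keyword;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("ORDER BY x").unwrap();
    /// // The whole sequence must match, otherwise the position is restored.
    /// assert!(!parser.parse_keywords(&[Keyword::ORDER, Keyword::LIMIT]));
    /// assert!(parser.parse_keywords(&[Keyword::ORDER, Keyword::BY]));
    /// ```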
4307    #[must_use]
4308    pub fn parse_keywords(&mut self, keywords: &[Keyword]) -> bool {
4309        let index = self.index;
4310        for &keyword in keywords {
4311            if !self.parse_keyword(keyword) {
4312                // println!("parse_keywords aborting .. did not find {:?}", keyword);
4313                // reset index and return immediately
4314                self.index = index;
4315                return false;
4316            }
4317        }
4318        true
4319    }
4320
4321    /// If the current token is one of the given `keywords`, returns the keyword
4322    /// that matches, without consuming the token. Otherwise, returns [`None`].
4323    #[must_use]
4324    pub fn peek_one_of_keywords(&self, keywords: &[Keyword]) -> Option<Keyword> {
4325        for keyword in keywords {
4326            if self.peek_keyword(*keyword) {
4327                return Some(*keyword);
4328            }
4329        }
4330        None
4331    }
4332
4333    /// If the current token is one of the given `keywords`, consume the token
4334    /// and return the keyword that matches. Otherwise, no tokens are consumed
4335    /// and returns [`None`].
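    ///
    /// Example (an illustrative sketch added for clarity, using [`GenericDialect`]):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::keywords::Keyword;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("ASC NULLS FIRST").unwrap();
    /// assert_eq!(
    ///     parser.parse_one_of_keywords(&[Keyword::ASC, Keyword::DESC]),
    ///     Some(Keyword::ASC)
    /// );
    /// // The next token is `NULLS`, which is not in the list.
    /// assert_eq!(parser.parse_one_of_keywords(&[Keyword::LIMIT]), None);
    /// ```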
4336    #[must_use]
4337    pub fn parse_one_of_keywords(&mut self, keywords: &[Keyword]) -> Option<Keyword> {
4338        match &self.peek_token_ref().token {
4339            Token::Word(w) => {
4340                keywords
4341                    .iter()
4342                    .find(|keyword| **keyword == w.keyword)
4343                    .map(|keyword| {
4344                        self.advance_token();
4345                        *keyword
4346                    })
4347            }
4348            _ => None,
4349        }
4350    }
4351
4352    /// If the current token is one of the expected keywords, consume the token
4353    /// and return the keyword that matches. Otherwise, return an error.
4354    pub fn expect_one_of_keywords(&mut self, keywords: &[Keyword]) -> Result<Keyword, ParserError> {
4355        if let Some(keyword) = self.parse_one_of_keywords(keywords) {
4356            Ok(keyword)
4357        } else {
4358            let keywords: Vec<String> = keywords.iter().map(|x| format!("{x:?}")).collect();
4359            self.expected_ref(
4360                &format!("one of {}", keywords.join(" or ")),
4361                self.peek_token_ref(),
4362            )
4363        }
4364    }
4365
4366    /// If the current token is the `expected` keyword, consume the token.
4367    /// Otherwise, return an error.
4368    ///
4369    // TODO: deprecate in favor of expect_keyword_is
4370    pub fn expect_keyword(&mut self, expected: Keyword) -> Result<TokenWithSpan, ParserError> {
4371        if self.parse_keyword(expected) {
4372            Ok(self.get_current_token().clone())
4373        } else {
4374            self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4375        }
4376    }
4377
4378    /// If the current token is the `expected` keyword, consume the token.
4379    /// Otherwise, return an error.
4380    ///
4381    /// This differs from expect_keyword only in that the matched keyword
4382    /// token is not returned.
4383    pub fn expect_keyword_is(&mut self, expected: Keyword) -> Result<(), ParserError> {
4384        if self.parse_keyword(expected) {
4385            Ok(())
4386        } else {
4387            self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4388        }
4389    }
4390
4391    /// If the current and subsequent tokens exactly match the `keywords`
4392    /// sequence, consume them and return `Ok(())`. Otherwise, return an error.
4393    pub fn expect_keywords(&mut self, expected: &[Keyword]) -> Result<(), ParserError> {
4394        for &kw in expected {
4395            self.expect_keyword_is(kw)?;
4396        }
4397        Ok(())
4398    }
4399
4400    /// Consume the next token if it matches the expected token, otherwise return false
4401    ///
4402    /// See [Self::advance_token] to consume the token unconditionally
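    ///
    /// Example (an illustrative sketch added for clarity, using [`GenericDialect`]):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::tokenizer::Token;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("(1)").unwrap();
    /// assert!(parser.consume_token(&Token::LParen));
    /// // The next token is the number `1`, not a comma, so nothing is consumed.
    /// assert!(!parser.consume_token(&Token::Comma));
    /// ```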
4403    #[must_use]
4404    pub fn consume_token(&mut self, expected: &Token) -> bool {
4405        if self.peek_token_ref() == expected {
4406            self.advance_token();
4407            true
4408        } else {
4409            false
4410        }
4411    }
4412
4413    /// If the current and subsequent tokens exactly match the `tokens`
4414    /// sequence, consume them and return true. Otherwise, no tokens are
4415    /// consumed and false is returned.
4416    #[must_use]
4417    pub fn consume_tokens(&mut self, tokens: &[Token]) -> bool {
4418        let index = self.index;
4419        for token in tokens {
4420            if !self.consume_token(token) {
4421                self.index = index;
4422                return false;
4423            }
4424        }
4425        true
4426    }
4427
4428    /// Return an error if the current token is not the `expected` token, or consume it if it is
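    ///
    /// Example (an illustrative sketch added for clarity, using [`GenericDialect`]):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::tokenizer::Token;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("( x").unwrap();
    /// assert!(parser.expect_token(&Token::LParen).is_ok());
    /// // The next token is the word `x`, so expecting `)` returns an error.
    /// assert!(parser.expect_token(&Token::RParen).is_err());
    /// ```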
4429    pub fn expect_token(&mut self, expected: &Token) -> Result<TokenWithSpan, ParserError> {
4430        if self.peek_token_ref() == expected {
4431            Ok(self.next_token())
4432        } else {
4433            self.expected_ref(&expected.to_string(), self.peek_token_ref())
4434        }
4435    }
4436
4437    fn parse<T: FromStr>(s: String, loc: Location) -> Result<T, ParserError>
4438    where
4439        <T as FromStr>::Err: Display,
4440    {
4441        s.parse::<T>().map_err(|e| {
4442            ParserError::ParserError(format!(
4443                "Could not parse '{s}' as {}: {e}{loc}",
4444                core::any::type_name::<T>()
4445            ))
4446        })
4447    }
4448
4449    /// Parse a comma-separated list of 1+ SelectItem
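    ///
    /// Example (an illustrative sketch added for clarity): this parses the
    /// projection of `SELECT a, b AS c`, with the `SELECT` keyword already
    /// consumed by the caller.
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("a, b AS c").unwrap();
    /// let projection = parser.parse_projection().unwrap();
    /// assert_eq!(projection.len(), 2);
    /// ```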
4450    pub fn parse_projection(&mut self) -> Result<Vec<SelectItem>, ParserError> {
4451        // BigQuery and Snowflake allow trailing commas, but only in projection lists
4452        // e.g. `SELECT 1, 2, FROM t`
4453        // https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#trailing_commas
4454        // https://docs.snowflake.com/en/release-notes/2024/8_11#select-supports-trailing-commas
4455
4456        let trailing_commas =
4457            self.options.trailing_commas | self.dialect.supports_projection_trailing_commas();
4458
4459        self.parse_comma_separated_with_trailing_commas(
4460            |p| p.parse_select_item(),
4461            trailing_commas,
4462            Self::is_reserved_for_column_alias,
4463        )
4464    }
4465
4466    pub fn parse_actions_list(&mut self) -> Result<Vec<Action>, ParserError> {
4467        let mut values = vec![];
4468        loop {
4469            values.push(self.parse_grant_permission()?);
4470            if !self.consume_token(&Token::Comma) {
4471                break;
4472            } else if self.options.trailing_commas {
4473                match self.peek_token().token {
4474                    Token::Word(kw) if kw.keyword == Keyword::ON => {
4475                        break;
4476                    }
4477                    Token::RParen
4478                    | Token::SemiColon
4479                    | Token::EOF
4480                    | Token::RBracket
4481                    | Token::RBrace => break,
4482                    _ => continue,
4483                }
4484            }
4485        }
4486        Ok(values)
4487    }
4488
4489    /// Parse a list of [TableWithJoins]
4490    fn parse_table_with_joins(&mut self) -> Result<Vec<TableWithJoins>, ParserError> {
4491        let trailing_commas = self.dialect.supports_from_trailing_commas();
4492
4493        self.parse_comma_separated_with_trailing_commas(
4494            Parser::parse_table_and_joins,
4495            trailing_commas,
4496            |kw, parser| !parser.dialect.is_table_factor(kw, parser),
4497        )
4498    }
4499
4500    /// Parse the comma of a comma-separated syntax element.
4501    /// `R` is a predicate that should return true if the next
4502    /// keyword is a reserved keyword.
4503    /// Allows for control over trailing commas.
4504    ///
4505    /// Returns true if the end of the list was reached (i.e. there is no next element).
4506    fn is_parse_comma_separated_end_with_trailing_commas<R>(
4507        &mut self,
4508        trailing_commas: bool,
4509        is_reserved_keyword: &R,
4510    ) -> bool
4511    where
4512        R: Fn(&Keyword, &mut Parser) -> bool,
4513    {
4514        if !self.consume_token(&Token::Comma) {
4515            true
4516        } else if trailing_commas {
4517            let token = self.next_token().token;
4518            let is_end = match token {
4519                Token::Word(ref kw) if is_reserved_keyword(&kw.keyword, self) => true,
4520                Token::RParen | Token::SemiColon | Token::EOF | Token::RBracket | Token::RBrace => {
4521                    true
4522                }
4523                _ => false,
4524            };
4525            self.prev_token();
4526
4527            is_end
4528        } else {
4529            false
4530        }
4531    }
4532
4533    /// Parse the comma of a comma-separated syntax element.
4534    /// Returns true if the end of the list was reached (i.e. there is no next element).
4535    fn is_parse_comma_separated_end(&mut self) -> bool {
4536        self.is_parse_comma_separated_end_with_trailing_commas(
4537            self.options.trailing_commas,
4538            &Self::is_reserved_for_column_alias,
4539        )
4540    }
4541
4542    /// Parse a comma-separated list of 1+ items accepted by `F`
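    ///
    /// Example (an illustrative sketch added for clarity), parsing a
    /// comma-separated list of identifiers:
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("a, b, c").unwrap();
    /// let idents = parser.parse_comma_separated(Parser::parse_identifier).unwrap();
    /// assert_eq!(idents.len(), 3);
    /// ```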
4543    pub fn parse_comma_separated<T, F>(&mut self, f: F) -> Result<Vec<T>, ParserError>
4544    where
4545        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4546    {
4547        self.parse_comma_separated_with_trailing_commas(
4548            f,
4549            self.options.trailing_commas,
4550            Self::is_reserved_for_column_alias,
4551        )
4552    }
4553
4554    /// Parse a comma-separated list of 1+ items accepted by `F`.
4555    /// `R` is a predicate that should return true if the next
4556    /// keyword is a reserved keyword.
4557    /// Allows for control over trailing commas.
4558    fn parse_comma_separated_with_trailing_commas<T, F, R>(
4559        &mut self,
4560        mut f: F,
4561        trailing_commas: bool,
4562        is_reserved_keyword: R,
4563    ) -> Result<Vec<T>, ParserError>
4564    where
4565        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4566        R: Fn(&Keyword, &mut Parser) -> bool,
4567    {
4568        let mut values = vec![];
4569        loop {
4570            values.push(f(self)?);
4571            if self.is_parse_comma_separated_end_with_trailing_commas(
4572                trailing_commas,
4573                &is_reserved_keyword,
4574            ) {
4575                break;
4576            }
4577        }
4578        Ok(values)
4579    }
4580
4581    /// Parse a period-separated list of 1+ items accepted by `F`
4582    fn parse_period_separated<T, F>(&mut self, mut f: F) -> Result<Vec<T>, ParserError>
4583    where
4584        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4585    {
4586        let mut values = vec![];
4587        loop {
4588            values.push(f(self)?);
4589            if !self.consume_token(&Token::Period) {
4590                break;
4591            }
4592        }
4593        Ok(values)
4594    }
4595
4596    /// Parse a keyword-separated list of 1+ items accepted by `F`
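    ///
    /// Example (an illustrative sketch added for clarity), parsing identifiers
    /// separated by the `AND` keyword:
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::keywords::Keyword;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("a AND b AND c").unwrap();
    /// let idents = parser
    ///     .parse_keyword_separated(Keyword::AND, Parser::parse_identifier)
    ///     .unwrap();
    /// assert_eq!(idents.len(), 3);
    /// ```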
4597    pub fn parse_keyword_separated<T, F>(
4598        &mut self,
4599        keyword: Keyword,
4600        mut f: F,
4601    ) -> Result<Vec<T>, ParserError>
4602    where
4603        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4604    {
4605        let mut values = vec![];
4606        loop {
4607            values.push(f(self)?);
4608            if !self.parse_keyword(keyword) {
4609                break;
4610            }
4611        }
4612        Ok(values)
4613    }
4614
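    /// Parse a single item wrapped in parentheses, i.e. `( <item> )`, using `f`
    /// to parse the item.
    ///
    /// Example (an illustrative sketch added for clarity, using [`GenericDialect`]):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("(foo)").unwrap();
    /// let ident = parser.parse_parenthesized(|p| p.parse_identifier()).unwrap();
    /// assert_eq!(ident.value, "foo");
    /// ```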
4615    pub fn parse_parenthesized<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
4616    where
4617        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4618    {
4619        self.expect_token(&Token::LParen)?;
4620        let res = f(self)?;
4621        self.expect_token(&Token::RParen)?;
4622        Ok(res)
4623    }
4624
4625    /// Parse a comma-separated list of 0+ items accepted by `F`
4626    /// * `end_token` - expected end token for the closure (e.g. [Token::RParen], [Token::RBrace] ...)
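    ///
    /// Example (an illustrative sketch added for clarity): an empty list is
    /// accepted when the next token is already the end token.
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::tokenizer::Token;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql(")").unwrap();
    /// let items = parser
    ///     .parse_comma_separated0(Parser::parse_expr, Token::RParen)
    ///     .unwrap();
    /// assert!(items.is_empty());
    /// ```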
4627    pub fn parse_comma_separated0<T, F>(
4628        &mut self,
4629        f: F,
4630        end_token: Token,
4631    ) -> Result<Vec<T>, ParserError>
4632    where
4633        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4634    {
4635        if self.peek_token().token == end_token {
4636            return Ok(vec![]);
4637        }
4638
4639        if self.options.trailing_commas && self.peek_tokens() == [Token::Comma, end_token] {
4640            let _ = self.consume_token(&Token::Comma);
4641            return Ok(vec![]);
4642        }
4643
4644        self.parse_comma_separated(f)
4645    }
4646
4647    /// Parses 0 or more statements, each followed by a semicolon.
4648    /// If the next token is any of `terminal_keywords` then no more
4649    /// statements will be parsed.
4650    pub(crate) fn parse_statement_list(
4651        &mut self,
4652        terminal_keywords: &[Keyword],
4653    ) -> Result<Vec<Statement>, ParserError> {
4654        let mut values = vec![];
4655        loop {
4656            match &self.peek_nth_token_ref(0).token {
4657                Token::EOF => break,
4658                Token::Word(w) => {
4659                    if w.quote_style.is_none() && terminal_keywords.contains(&w.keyword) {
4660                        break;
4661                    }
4662                }
4663                _ => {}
4664            }
4665
4666            values.push(self.parse_statement()?);
4667            self.expect_token(&Token::SemiColon)?;
4668        }
4669        Ok(values)
4670    }
4671
4672    /// Default implementation of a predicate that returns true if
4673    /// the specified keyword is reserved and cannot be used as a column alias.
4674    /// See [Dialect::is_column_alias]
4675    fn is_reserved_for_column_alias(kw: &Keyword, parser: &mut Parser) -> bool {
4676        !parser.dialect.is_column_alias(kw, parser)
4677    }
4678
4679    /// Run a parser method `f`, reverting back to the current position if unsuccessful.
4680    /// Returns `ParserError::RecursionLimitExceeded` if `f` returns a `RecursionLimitExceeded`.
4681    /// Returns `Ok(None)` if `f` returns any other error.
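    ///
    /// Example (an illustrative sketch added for clarity): a failed attempt
    /// leaves the parser where it started.
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::keywords::Keyword;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("foo").unwrap();
    /// // `SELECT` is not next, so the closure fails and the position is restored.
    /// let kw = parser.maybe_parse(|p| p.expect_keyword(Keyword::SELECT)).unwrap();
    /// assert!(kw.is_none());
    /// assert_eq!(parser.parse_identifier().unwrap().value, "foo");
    /// ```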
4682    pub fn maybe_parse<T, F>(&mut self, f: F) -> Result<Option<T>, ParserError>
4683    where
4684        F: FnMut(&mut Parser) -> Result<T, ParserError>,
4685    {
4686        match self.try_parse(f) {
4687            Ok(t) => Ok(Some(t)),
4688            Err(ParserError::RecursionLimitExceeded) => Err(ParserError::RecursionLimitExceeded),
4689            _ => Ok(None),
4690        }
4691    }
4692
4693    /// Run a parser method `f`, reverting back to the current position if unsuccessful.
4694    pub fn try_parse<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
4695    where
4696        F: FnMut(&mut Parser) -> Result<T, ParserError>,
4697    {
4698        let index = self.index;
4699        match f(self) {
4700            Ok(t) => Ok(t),
4701            Err(e) => {
4702                // Restore the parser position on any error
4703                self.index = index;
4704                Err(e)
4705            }
4706        }
4707    }
4708
4709    /// Parse either `ALL`, `DISTINCT` or `DISTINCT ON (...)`. Returns [`None`] if `ALL` (or neither keyword) is parsed,
4710    /// and returns a [`ParserError`] if both `ALL` and `DISTINCT` are found.
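    ///
    /// Example (an illustrative sketch added for clarity, using [`GenericDialect`]):
    /// ```rust
    /// # use sqlparser::ast::Distinct;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("DISTINCT x").unwrap();
    /// assert_eq!(parser.parse_all_or_distinct().unwrap(), Some(Distinct::Distinct));
    ///
    /// let mut parser = Parser::new(&dialect).try_with_sql("ALL x").unwrap();
    /// assert_eq!(parser.parse_all_or_distinct().unwrap(), None);
    /// ```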
4711    pub fn parse_all_or_distinct(&mut self) -> Result<Option<Distinct>, ParserError> {
4712        let loc = self.peek_token().span.start;
4713        let all = self.parse_keyword(Keyword::ALL);
4714        let distinct = self.parse_keyword(Keyword::DISTINCT);
4715        if !distinct {
4716            return Ok(None);
4717        }
4718        if all {
4719            return parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc);
4720        }
4721        let on = self.parse_keyword(Keyword::ON);
4722        if !on {
4723            return Ok(Some(Distinct::Distinct));
4724        }
4725
4726        self.expect_token(&Token::LParen)?;
4727        let col_names = if self.consume_token(&Token::RParen) {
4728            self.prev_token();
4729            Vec::new()
4730        } else {
4731            self.parse_comma_separated(Parser::parse_expr)?
4732        };
4733        self.expect_token(&Token::RParen)?;
4734        Ok(Some(Distinct::On(col_names)))
4735    }
4736
4737    /// Parse a SQL CREATE statement
4738    pub fn parse_create(&mut self) -> Result<Statement, ParserError> {
4739        let or_replace = self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]);
4740        let or_alter = self.parse_keywords(&[Keyword::OR, Keyword::ALTER]);
4741        let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
4742        let global = self.parse_one_of_keywords(&[Keyword::GLOBAL]).is_some();
4743        let transient = self.parse_one_of_keywords(&[Keyword::TRANSIENT]).is_some();
4744        let global: Option<bool> = if global {
4745            Some(true)
4746        } else if local {
4747            Some(false)
4748        } else {
4749            None
4750        };
4751        let temporary = self
4752            .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
4753            .is_some();
4754        let persistent = dialect_of!(self is DuckDbDialect)
4755            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
4756        let create_view_params = self.parse_create_view_params()?;
4757        if self.parse_keyword(Keyword::TABLE) {
4758            self.parse_create_table(or_replace, temporary, global, transient)
4759        } else if self.peek_keyword(Keyword::MATERIALIZED)
4760            || self.peek_keyword(Keyword::VIEW)
4761            || self.peek_keywords(&[Keyword::SECURE, Keyword::MATERIALIZED, Keyword::VIEW])
4762            || self.peek_keywords(&[Keyword::SECURE, Keyword::VIEW])
4763        {
4764            self.parse_create_view(or_alter, or_replace, temporary, create_view_params)
4765        } else if self.parse_keyword(Keyword::POLICY) {
4766            self.parse_create_policy()
4767        } else if self.parse_keyword(Keyword::EXTERNAL) {
4768            self.parse_create_external_table(or_replace)
4769        } else if self.parse_keyword(Keyword::FUNCTION) {
4770            self.parse_create_function(or_alter, or_replace, temporary)
4771        } else if self.parse_keyword(Keyword::DOMAIN) {
4772            self.parse_create_domain()
4773        } else if self.parse_keyword(Keyword::TRIGGER) {
4774            self.parse_create_trigger(temporary, or_alter, or_replace, false)
4775        } else if self.parse_keywords(&[Keyword::CONSTRAINT, Keyword::TRIGGER]) {
4776            self.parse_create_trigger(temporary, or_alter, or_replace, true)
4777        } else if self.parse_keyword(Keyword::MACRO) {
4778            self.parse_create_macro(or_replace, temporary)
4779        } else if self.parse_keyword(Keyword::SECRET) {
4780            self.parse_create_secret(or_replace, temporary, persistent)
4781        } else if self.parse_keyword(Keyword::USER) {
4782            self.parse_create_user(or_replace)
4783        } else if or_replace {
4784            self.expected(
4785                "[EXTERNAL] TABLE or [MATERIALIZED] VIEW or FUNCTION after CREATE OR REPLACE",
4786                self.peek_token(),
4787            )
4788        } else if self.parse_keyword(Keyword::EXTENSION) {
4789            self.parse_create_extension()
4790        } else if self.parse_keyword(Keyword::INDEX) {
4791            self.parse_create_index(false)
4792        } else if self.parse_keywords(&[Keyword::UNIQUE, Keyword::INDEX]) {
4793            self.parse_create_index(true)
4794        } else if self.parse_keyword(Keyword::VIRTUAL) {
4795            self.parse_create_virtual_table()
4796        } else if self.parse_keyword(Keyword::SCHEMA) {
4797            self.parse_create_schema()
4798        } else if self.parse_keyword(Keyword::DATABASE) {
4799            self.parse_create_database()
4800        } else if self.parse_keyword(Keyword::ROLE) {
4801            self.parse_create_role()
4802        } else if self.parse_keyword(Keyword::SEQUENCE) {
4803            self.parse_create_sequence(temporary)
4804        } else if self.parse_keyword(Keyword::TYPE) {
4805            self.parse_create_type()
4806        } else if self.parse_keyword(Keyword::PROCEDURE) {
4807            self.parse_create_procedure(or_alter)
4808        } else if self.parse_keyword(Keyword::CONNECTOR) {
4809            self.parse_create_connector()
4810        } else if self.parse_keyword(Keyword::OPERATOR) {
4811            // Check if this is CREATE OPERATOR FAMILY or CREATE OPERATOR CLASS
4812            if self.parse_keyword(Keyword::FAMILY) {
4813                self.parse_create_operator_family()
4814            } else if self.parse_keyword(Keyword::CLASS) {
4815                self.parse_create_operator_class()
4816            } else {
4817                self.parse_create_operator()
4818            }
4819        } else if self.parse_keyword(Keyword::SERVER) {
4820            self.parse_pg_create_server()
4821        } else {
4822            self.expected("an object type after CREATE", self.peek_token())
4823        }
4824    }
4825
4826    fn parse_create_user(&mut self, or_replace: bool) -> Result<Statement, ParserError> {
4827        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
4828        let name = self.parse_identifier()?;
4829        let options = self
4830            .parse_key_value_options(false, &[Keyword::WITH, Keyword::TAG])?
4831            .options;
4832        let with_tags = self.parse_keyword(Keyword::WITH);
4833        let tags = if self.parse_keyword(Keyword::TAG) {
4834            self.parse_key_value_options(true, &[])?.options
4835        } else {
4836            vec![]
4837        };
4838        Ok(Statement::CreateUser(CreateUser {
4839            or_replace,
4840            if_not_exists,
4841            name,
4842            options: KeyValueOptions {
4843                options,
4844                delimiter: KeyValueOptionsDelimiter::Space,
4845            },
4846            with_tags,
4847            tags: KeyValueOptions {
4848                options: tags,
4849                delimiter: KeyValueOptionsDelimiter::Comma,
4850            },
4851        }))
4852    }
4853
4854    /// See [DuckDB Docs](https://duckdb.org/docs/sql/statements/create_secret.html) for more details.
4855    pub fn parse_create_secret(
4856        &mut self,
4857        or_replace: bool,
4858        temporary: bool,
4859        persistent: bool,
4860    ) -> Result<Statement, ParserError> {
4861        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
4862
4863        let mut storage_specifier = None;
4864        let mut name = None;
4865        if self.peek_token() != Token::LParen {
4866            if self.parse_keyword(Keyword::IN) {
4867                storage_specifier = self.parse_identifier().ok()
4868            } else {
4869                name = self.parse_identifier().ok();
4870            }
4871
4872            // Storage specifier may follow the name
4873            if storage_specifier.is_none()
4874                && self.peek_token() != Token::LParen
4875                && self.parse_keyword(Keyword::IN)
4876            {
4877                storage_specifier = self.parse_identifier().ok();
4878            }
4879        }
4880
4881        self.expect_token(&Token::LParen)?;
4882        self.expect_keyword_is(Keyword::TYPE)?;
4883        let secret_type = self.parse_identifier()?;
4884
4885        let mut options = Vec::new();
4886        if self.consume_token(&Token::Comma) {
4887            options.append(&mut self.parse_comma_separated(|p| {
4888                let key = p.parse_identifier()?;
4889                let value = p.parse_identifier()?;
4890                Ok(SecretOption { key, value })
4891            })?);
4892        }
4893        self.expect_token(&Token::RParen)?;
4894
4895        let temp = match (temporary, persistent) {
4896            (true, false) => Some(true),
4897            (false, true) => Some(false),
4898            (false, false) => None,
4899            _ => self.expected("TEMPORARY or PERSISTENT", self.peek_token())?,
4900        };
4901
4902        Ok(Statement::CreateSecret {
4903            or_replace,
4904            temporary: temp,
4905            if_not_exists,
4906            name,
4907            storage_specifier,
4908            secret_type,
4909            options,
4910        })
4911    }
4912
4913    /// Parse a CACHE TABLE statement
4914    pub fn parse_cache_table(&mut self) -> Result<Statement, ParserError> {
4915        let (mut table_flag, mut options, mut has_as, mut query) = (None, vec![], false, None);
4916        if self.parse_keyword(Keyword::TABLE) {
4917            let table_name = self.parse_object_name(false)?;
4918            if self.peek_token().token != Token::EOF {
4919                if let Token::Word(word) = self.peek_token().token {
4920                    if word.keyword == Keyword::OPTIONS {
4921                        options = self.parse_options(Keyword::OPTIONS)?
4922                    }
4923                };
4924
4925                if self.peek_token().token != Token::EOF {
4926                    let (a, q) = self.parse_as_query()?;
4927                    has_as = a;
4928                    query = Some(q);
4929                }
4930
4931                Ok(Statement::Cache {
4932                    table_flag,
4933                    table_name,
4934                    has_as,
4935                    options,
4936                    query,
4937                })
4938            } else {
4939                Ok(Statement::Cache {
4940                    table_flag,
4941                    table_name,
4942                    has_as,
4943                    options,
4944                    query,
4945                })
4946            }
4947        } else {
4948            table_flag = Some(self.parse_object_name(false)?);
4949            if self.parse_keyword(Keyword::TABLE) {
4950                let table_name = self.parse_object_name(false)?;
4951                if self.peek_token() != Token::EOF {
4952                    if let Token::Word(word) = self.peek_token().token {
4953                        if word.keyword == Keyword::OPTIONS {
4954                            options = self.parse_options(Keyword::OPTIONS)?
4955                        }
4956                    };
4957
4958                    if self.peek_token() != Token::EOF {
4959                        let (a, q) = self.parse_as_query()?;
4960                        has_as = a;
4961                        query = Some(q);
4962                    }
4963
4964                    Ok(Statement::Cache {
4965                        table_flag,
4966                        table_name,
4967                        has_as,
4968                        options,
4969                        query,
4970                    })
4971                } else {
4972                    Ok(Statement::Cache {
4973                        table_flag,
4974                        table_name,
4975                        has_as,
4976                        options,
4977                        query,
4978                    })
4979                }
4980            } else {
4981                if self.peek_token() == Token::EOF {
4982                    self.prev_token();
4983                }
4984                self.expected("a `TABLE` keyword", self.peek_token())
4985            }
4986        }
4987    }
4988
4989    /// Parse an optional 'AS' before a query, such as `WITH XXX AS SELECT XXX` or `CACHE TABLE AS SELECT XXX`
4990    pub fn parse_as_query(&mut self) -> Result<(bool, Box<Query>), ParserError> {
4991        match self.peek_token().token {
4992            Token::Word(word) => match word.keyword {
4993                Keyword::AS => {
4994                    self.next_token();
4995                    Ok((true, self.parse_query()?))
4996                }
4997                _ => Ok((false, self.parse_query()?)),
4998            },
4999            _ => self.expected("a QUERY statement", self.peek_token()),
5000        }
5001    }
5002
5003    /// Parse an UNCACHE TABLE statement
5004    pub fn parse_uncache_table(&mut self) -> Result<Statement, ParserError> {
5005        self.expect_keyword_is(Keyword::TABLE)?;
5006        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5007        let table_name = self.parse_object_name(false)?;
5008        Ok(Statement::UNCache {
5009            table_name,
5010            if_exists,
5011        })
5012    }
5013
5014    /// SQLite-specific `CREATE VIRTUAL TABLE`
5015    pub fn parse_create_virtual_table(&mut self) -> Result<Statement, ParserError> {
5016        self.expect_keyword_is(Keyword::TABLE)?;
5017        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5018        let table_name = self.parse_object_name(false)?;
5019        self.expect_keyword_is(Keyword::USING)?;
5020        let module_name = self.parse_identifier()?;
5021        // SQLite docs note that module "arguments syntax is sufficiently
5022        // general that the arguments can be made to appear as column
5023        // definitions in a traditional CREATE TABLE statement", but
5024        // we don't implement that.
5025        let module_args = self.parse_parenthesized_column_list(Optional, false)?;
5026        Ok(Statement::CreateVirtualTable {
5027            name: table_name,
5028            if_not_exists,
5029            module_name,
5030            module_args,
5031        })
5032    }
5033
5034    pub fn parse_create_schema(&mut self) -> Result<Statement, ParserError> {
5035        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5036
5037        let schema_name = self.parse_schema_name()?;
5038
5039        let default_collate_spec = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
5040            Some(self.parse_expr()?)
5041        } else {
5042            None
5043        };
5044
5045        let with = if self.peek_keyword(Keyword::WITH) {
5046            Some(self.parse_options(Keyword::WITH)?)
5047        } else {
5048            None
5049        };
5050
5051        let options = if self.peek_keyword(Keyword::OPTIONS) {
5052            Some(self.parse_options(Keyword::OPTIONS)?)
5053        } else {
5054            None
5055        };
5056
5057        let clone = if self.parse_keyword(Keyword::CLONE) {
5058            Some(self.parse_object_name(false)?)
5059        } else {
5060            None
5061        };
5062
5063        Ok(Statement::CreateSchema {
5064            schema_name,
5065            if_not_exists,
5066            with,
5067            options,
5068            default_collate_spec,
5069            clone,
5070        })
5071    }
5072
5073    fn parse_schema_name(&mut self) -> Result<SchemaName, ParserError> {
5074        if self.parse_keyword(Keyword::AUTHORIZATION) {
5075            Ok(SchemaName::UnnamedAuthorization(self.parse_identifier()?))
5076        } else {
5077            let name = self.parse_object_name(false)?;
5078
5079            if self.parse_keyword(Keyword::AUTHORIZATION) {
5080                Ok(SchemaName::NamedAuthorization(
5081                    name,
5082                    self.parse_identifier()?,
5083                ))
5084            } else {
5085                Ok(SchemaName::Simple(name))
5086            }
5087        }
5088    }
5089
5090    pub fn parse_create_database(&mut self) -> Result<Statement, ParserError> {
5091        let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5092        let db_name = self.parse_object_name(false)?;
5093        let mut location = None;
5094        let mut managed_location = None;
5095        loop {
5096            match self.parse_one_of_keywords(&[Keyword::LOCATION, Keyword::MANAGEDLOCATION]) {
5097                Some(Keyword::LOCATION) => location = Some(self.parse_literal_string()?),
5098                Some(Keyword::MANAGEDLOCATION) => {
5099                    managed_location = Some(self.parse_literal_string()?)
5100                }
5101                _ => break,
5102            }
5103        }
5104        let clone = if self.parse_keyword(Keyword::CLONE) {
5105            Some(self.parse_object_name(false)?)
5106        } else {
5107            None
5108        };
5109
5110        Ok(Statement::CreateDatabase {
5111            db_name,
5112            if_not_exists: ine,
5113            location,
5114            managed_location,
5115            or_replace: false,
5116            transient: false,
5117            clone,
5118            data_retention_time_in_days: None,
5119            max_data_extension_time_in_days: None,
5120            external_volume: None,
5121            catalog: None,
5122            replace_invalid_characters: None,
5123            default_ddl_collation: None,
5124            storage_serialization_policy: None,
5125            comment: None,
5126            catalog_sync: None,
5127            catalog_sync_namespace_mode: None,
5128            catalog_sync_namespace_flatten_delimiter: None,
5129            with_tags: None,
5130            with_contacts: None,
5131        })
5132    }
5133
5134    pub fn parse_optional_create_function_using(
5135        &mut self,
5136    ) -> Result<Option<CreateFunctionUsing>, ParserError> {
5137        if !self.parse_keyword(Keyword::USING) {
5138            return Ok(None);
5139        };
5140        let keyword =
5141            self.expect_one_of_keywords(&[Keyword::JAR, Keyword::FILE, Keyword::ARCHIVE])?;
5142
5143        let uri = self.parse_literal_string()?;
5144
5145        match keyword {
5146            Keyword::JAR => Ok(Some(CreateFunctionUsing::Jar(uri))),
5147            Keyword::FILE => Ok(Some(CreateFunctionUsing::File(uri))),
5148            Keyword::ARCHIVE => Ok(Some(CreateFunctionUsing::Archive(uri))),
5149            _ => self.expected(
5150                "JAR, FILE or ARCHIVE, got {:?}",
5151                TokenWithSpan::wrap(Token::make_keyword(format!("{keyword:?}").as_str())),
5152            ),
5153        }
5154    }
5155
5156    pub fn parse_create_function(
5157        &mut self,
5158        or_alter: bool,
5159        or_replace: bool,
5160        temporary: bool,
5161    ) -> Result<Statement, ParserError> {
5162        if dialect_of!(self is HiveDialect) {
5163            self.parse_hive_create_function(or_replace, temporary)
5164        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
5165            self.parse_postgres_create_function(or_replace, temporary)
5166        } else if dialect_of!(self is DuckDbDialect) {
5167            self.parse_create_macro(or_replace, temporary)
5168        } else if dialect_of!(self is BigQueryDialect) {
5169            self.parse_bigquery_create_function(or_replace, temporary)
5170        } else if dialect_of!(self is MsSqlDialect) {
5171            self.parse_mssql_create_function(or_alter, or_replace, temporary)
5172        } else {
5173            self.prev_token();
5174            self.expected("an object type after CREATE", self.peek_token())
5175        }
5176    }
5177
5178    /// Parse `CREATE FUNCTION` for [PostgreSQL]
5179    ///
5180    /// [PostgreSQL]: https://www.postgresql.org/docs/15/sql-createfunction.html
5181    fn parse_postgres_create_function(
5182        &mut self,
5183        or_replace: bool,
5184        temporary: bool,
5185    ) -> Result<Statement, ParserError> {
5186        let name = self.parse_object_name(false)?;
5187
5188        self.expect_token(&Token::LParen)?;
5189        let args = if Token::RParen != self.peek_token_ref().token {
5190            self.parse_comma_separated(Parser::parse_function_arg)?
5191        } else {
5192            vec![]
5193        };
5194        self.expect_token(&Token::RParen)?;
5195
5196        let return_type = if self.parse_keyword(Keyword::RETURNS) {
5197            Some(self.parse_data_type()?)
5198        } else {
5199            None
5200        };
5201
5202        #[derive(Default)]
5203        struct Body {
5204            language: Option<Ident>,
5205            behavior: Option<FunctionBehavior>,
5206            function_body: Option<CreateFunctionBody>,
5207            called_on_null: Option<FunctionCalledOnNull>,
5208            parallel: Option<FunctionParallel>,
5209            security: Option<FunctionSecurity>,
5210        }
5211        let mut body = Body::default();
5212        let mut set_params: Vec<FunctionDefinitionSetParam> = Vec::new();
5213        loop {
5214            fn ensure_not_set<T>(field: &Option<T>, name: &str) -> Result<(), ParserError> {
5215                if field.is_some() {
5216                    return Err(ParserError::ParserError(format!(
5217                        "{name} specified more than once",
5218                    )));
5219                }
5220                Ok(())
5221            }
5222            if self.parse_keyword(Keyword::AS) {
5223                ensure_not_set(&body.function_body, "AS")?;
5224                body.function_body = Some(self.parse_create_function_body_string()?);
5225            } else if self.parse_keyword(Keyword::LANGUAGE) {
5226                ensure_not_set(&body.language, "LANGUAGE")?;
5227                body.language = Some(self.parse_identifier()?);
5228            } else if self.parse_keyword(Keyword::IMMUTABLE) {
5229                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
5230                body.behavior = Some(FunctionBehavior::Immutable);
5231            } else if self.parse_keyword(Keyword::STABLE) {
5232                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
5233                body.behavior = Some(FunctionBehavior::Stable);
5234            } else if self.parse_keyword(Keyword::VOLATILE) {
5235                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
5236                body.behavior = Some(FunctionBehavior::Volatile);
5237            } else if self.parse_keywords(&[
5238                Keyword::CALLED,
5239                Keyword::ON,
5240                Keyword::NULL,
5241                Keyword::INPUT,
5242            ]) {
5243                ensure_not_set(
5244                    &body.called_on_null,
5245                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
5246                )?;
5247                body.called_on_null = Some(FunctionCalledOnNull::CalledOnNullInput);
5248            } else if self.parse_keywords(&[
5249                Keyword::RETURNS,
5250                Keyword::NULL,
5251                Keyword::ON,
5252                Keyword::NULL,
5253                Keyword::INPUT,
5254            ]) {
5255                ensure_not_set(
5256                    &body.called_on_null,
5257                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
5258                )?;
5259                body.called_on_null = Some(FunctionCalledOnNull::ReturnsNullOnNullInput);
5260            } else if self.parse_keyword(Keyword::STRICT) {
5261                ensure_not_set(
5262                    &body.called_on_null,
5263                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
5264                )?;
5265                body.called_on_null = Some(FunctionCalledOnNull::Strict);
5266            } else if self.parse_keyword(Keyword::PARALLEL) {
5267                ensure_not_set(&body.parallel, "PARALLEL { UNSAFE | RESTRICTED | SAFE }")?;
5268                if self.parse_keyword(Keyword::UNSAFE) {
5269                    body.parallel = Some(FunctionParallel::Unsafe);
5270                } else if self.parse_keyword(Keyword::RESTRICTED) {
5271                    body.parallel = Some(FunctionParallel::Restricted);
5272                } else if self.parse_keyword(Keyword::SAFE) {
5273                    body.parallel = Some(FunctionParallel::Safe);
5274                } else {
5275                    return self.expected("one of UNSAFE | RESTRICTED | SAFE", self.peek_token());
5276                }
5277            } else if self.parse_keyword(Keyword::SECURITY) {
5278                ensure_not_set(&body.security, "SECURITY { DEFINER | INVOKER }")?;
5279                if self.parse_keyword(Keyword::DEFINER) {
5280                    body.security = Some(FunctionSecurity::Definer);
5281                } else if self.parse_keyword(Keyword::INVOKER) {
5282                    body.security = Some(FunctionSecurity::Invoker);
5283                } else {
5284                    return self.expected("DEFINER or INVOKER", self.peek_token());
5285                }
5286            } else if self.parse_keyword(Keyword::SET) {
5287                let name = self.parse_identifier()?;
5288                let value = if self.parse_keywords(&[Keyword::FROM, Keyword::CURRENT]) {
5289                    FunctionSetValue::FromCurrent
5290                } else {
5291                    if !self.consume_token(&Token::Eq) && !self.parse_keyword(Keyword::TO) {
5292                        return self.expected("= or TO", self.peek_token());
5293                    }
5294                    let values = self.parse_comma_separated(Parser::parse_expr)?;
5295                    FunctionSetValue::Values(values)
5296                };
5297                set_params.push(FunctionDefinitionSetParam { name, value });
5298            } else if self.parse_keyword(Keyword::RETURN) {
5299                ensure_not_set(&body.function_body, "RETURN")?;
5300                body.function_body = Some(CreateFunctionBody::Return(self.parse_expr()?));
5301            } else {
5302                break;
5303            }
5304        }
5305
5306        Ok(Statement::CreateFunction(CreateFunction {
5307            or_alter: false,
5308            or_replace,
5309            temporary,
5310            name,
5311            args: Some(args),
5312            return_type,
5313            behavior: body.behavior,
5314            called_on_null: body.called_on_null,
5315            parallel: body.parallel,
5316            security: body.security,
5317            set_params,
5318            language: body.language,
5319            function_body: body.function_body,
5320            if_not_exists: false,
5321            using: None,
5322            determinism_specifier: None,
5323            options: None,
5324            remote_connection: None,
5325        }))
5326    }
5327
5328    /// Parse `CREATE FUNCTION` for [Hive]
5329    ///
5330    /// [Hive]: https://cwiki.apache.org/confluence/display/hive/languagemanual+ddl#LanguageManualDDL-Create/Drop/ReloadFunction
5331    fn parse_hive_create_function(
5332        &mut self,
5333        or_replace: bool,
5334        temporary: bool,
5335    ) -> Result<Statement, ParserError> {
5336        let name = self.parse_object_name(false)?;
5337        self.expect_keyword_is(Keyword::AS)?;
5338
5339        let body = self.parse_create_function_body_string()?;
5340        let using = self.parse_optional_create_function_using()?;
5341
5342        Ok(Statement::CreateFunction(CreateFunction {
5343            or_alter: false,
5344            or_replace,
5345            temporary,
5346            name,
5347            function_body: Some(body),
5348            using,
5349            if_not_exists: false,
5350            args: None,
5351            return_type: None,
5352            behavior: None,
5353            called_on_null: None,
5354            parallel: None,
5355            security: None,
5356            set_params: vec![],
5357            language: None,
5358            determinism_specifier: None,
5359            options: None,
5360            remote_connection: None,
5361        }))
5362    }
5363
5364    /// Parse `CREATE FUNCTION` for [BigQuery]
5365    ///
5366    /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_function_statement
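    ///
    /// A representative statement; the dataset, function, and body below are
    /// illustrative only (an `OPTIONS(...)` list may also appear before or
    /// after the body):
    ///
    /// ```sql
    /// CREATE FUNCTION mydataset.double_it(x INT64) RETURNS INT64 AS (x * 2)
    /// ```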
5367    fn parse_bigquery_create_function(
5368        &mut self,
5369        or_replace: bool,
5370        temporary: bool,
5371    ) -> Result<Statement, ParserError> {
5372        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5373        let (name, args) = self.parse_create_function_name_and_params()?;
5374
5375        let return_type = if self.parse_keyword(Keyword::RETURNS) {
5376            Some(self.parse_data_type()?)
5377        } else {
5378            None
5379        };
5380
5381        let determinism_specifier = if self.parse_keyword(Keyword::DETERMINISTIC) {
5382            Some(FunctionDeterminismSpecifier::Deterministic)
5383        } else if self.parse_keywords(&[Keyword::NOT, Keyword::DETERMINISTIC]) {
5384            Some(FunctionDeterminismSpecifier::NotDeterministic)
5385        } else {
5386            None
5387        };
5388
5389        let language = if self.parse_keyword(Keyword::LANGUAGE) {
5390            Some(self.parse_identifier()?)
5391        } else {
5392            None
5393        };
5394
5395        let remote_connection =
5396            if self.parse_keywords(&[Keyword::REMOTE, Keyword::WITH, Keyword::CONNECTION]) {
5397                Some(self.parse_object_name(false)?)
5398            } else {
5399                None
5400            };
5401
5402        // `OPTIONS` may come before or after the function body, but
5403        // may be specified at most once.
5404        let mut options = self.maybe_parse_options(Keyword::OPTIONS)?;
5405
5406        let function_body = if remote_connection.is_none() {
5407            self.expect_keyword_is(Keyword::AS)?;
5408            let expr = self.parse_expr()?;
5409            if options.is_none() {
5410                options = self.maybe_parse_options(Keyword::OPTIONS)?;
5411                Some(CreateFunctionBody::AsBeforeOptions {
5412                    body: expr,
5413                    link_symbol: None,
5414                })
5415            } else {
5416                Some(CreateFunctionBody::AsAfterOptions(expr))
5417            }
5418        } else {
5419            None
5420        };
5421
5422        Ok(Statement::CreateFunction(CreateFunction {
5423            or_alter: false,
5424            or_replace,
5425            temporary,
5426            if_not_exists,
5427            name,
5428            args: Some(args),
5429            return_type,
5430            function_body,
5431            language,
5432            determinism_specifier,
5433            options,
5434            remote_connection,
5435            using: None,
5436            behavior: None,
5437            called_on_null: None,
5438            parallel: None,
5439            security: None,
5440            set_params: vec![],
5441        }))
5442    }
5443
5444    /// Parse `CREATE FUNCTION` for [MsSql]
5445    ///
5446    /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/statements/create-function-transact-sql
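    ///
    /// A representative scalar function; the schema, name, and parameter
    /// below are illustrative only:
    ///
    /// ```sql
    /// CREATE FUNCTION dbo.add_one(@x INT) RETURNS INT AS BEGIN RETURN @x + 1; END
    /// ```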
5447    fn parse_mssql_create_function(
5448        &mut self,
5449        or_alter: bool,
5450        or_replace: bool,
5451        temporary: bool,
5452    ) -> Result<Statement, ParserError> {
5453        let (name, args) = self.parse_create_function_name_and_params()?;
5454
5455        self.expect_keyword(Keyword::RETURNS)?;
5456
5457        let return_table = self.maybe_parse(|p| {
5458            let return_table_name = p.parse_identifier()?;
5459
5460            p.expect_keyword_is(Keyword::TABLE)?;
5461            p.prev_token();
5462
5463            let table_column_defs = match p.parse_data_type()? {
5464                DataType::Table(Some(table_column_defs)) if !table_column_defs.is_empty() => {
5465                    table_column_defs
5466                }
5467                _ => parser_err!(
5468                    "Expected table column definitions after TABLE keyword",
5469                    p.peek_token().span.start
5470                )?,
5471            };
5472
5473            Ok(DataType::NamedTable {
5474                name: ObjectName(vec![ObjectNamePart::Identifier(return_table_name)]),
5475                columns: table_column_defs,
5476            })
5477        })?;
5478
5479        let return_type = if return_table.is_some() {
5480            return_table
5481        } else {
5482            Some(self.parse_data_type()?)
5483        };
5484
5485        let _ = self.parse_keyword(Keyword::AS);
5486
5487        let function_body = if self.peek_keyword(Keyword::BEGIN) {
5488            let begin_token = self.expect_keyword(Keyword::BEGIN)?;
5489            let statements = self.parse_statement_list(&[Keyword::END])?;
5490            let end_token = self.expect_keyword(Keyword::END)?;
5491
5492            Some(CreateFunctionBody::AsBeginEnd(BeginEndStatements {
5493                begin_token: AttachedToken(begin_token),
5494                statements,
5495                end_token: AttachedToken(end_token),
5496            }))
5497        } else if self.parse_keyword(Keyword::RETURN) {
5498            if self.peek_token() == Token::LParen {
5499                Some(CreateFunctionBody::AsReturnExpr(self.parse_expr()?))
5500            } else if self.peek_keyword(Keyword::SELECT) {
5501                let select = self.parse_select()?;
5502                Some(CreateFunctionBody::AsReturnSelect(select))
5503            } else {
5504                parser_err!(
5505                    "Expected a subquery (or bare SELECT statement) after RETURN",
5506                    self.peek_token().span.start
5507                )?
5508            }
5509        } else {
5510            parser_err!("Unparsable function body", self.peek_token().span.start)?
5511        };
5512
5513        Ok(Statement::CreateFunction(CreateFunction {
5514            or_alter,
5515            or_replace,
5516            temporary,
5517            if_not_exists: false,
5518            name,
5519            args: Some(args),
5520            return_type,
5521            function_body,
5522            language: None,
5523            determinism_specifier: None,
5524            options: None,
5525            remote_connection: None,
5526            using: None,
5527            behavior: None,
5528            called_on_null: None,
5529            parallel: None,
5530            security: None,
5531            set_params: vec![],
5532        }))
5533    }
5534
5535    fn parse_create_function_name_and_params(
5536        &mut self,
5537    ) -> Result<(ObjectName, Vec<OperateFunctionArg>), ParserError> {
5538        let name = self.parse_object_name(false)?;
5539        let parse_function_param =
5540            |parser: &mut Parser| -> Result<OperateFunctionArg, ParserError> {
5541                let name = parser.parse_identifier()?;
5542                let data_type = parser.parse_data_type()?;
5543                let default_expr = if parser.consume_token(&Token::Eq) {
5544                    Some(parser.parse_expr()?)
5545                } else {
5546                    None
5547                };
5548
5549                Ok(OperateFunctionArg {
5550                    mode: None,
5551                    name: Some(name),
5552                    data_type,
5553                    default_expr,
5554                })
5555            };
5556        self.expect_token(&Token::LParen)?;
5557        let args = self.parse_comma_separated0(parse_function_param, Token::RParen)?;
5558        self.expect_token(&Token::RParen)?;
5559        Ok((name, args))
5560    }
5561
5562    fn parse_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
5563        let mode = if self.parse_keyword(Keyword::IN) {
5564            Some(ArgMode::In)
5565        } else if self.parse_keyword(Keyword::OUT) {
5566            Some(ArgMode::Out)
5567        } else if self.parse_keyword(Keyword::INOUT) {
5568            Some(ArgMode::InOut)
5569        } else {
5570            None
5571        };
5572
5573        // parse: [ argname ] argtype
5574        let mut name = None;
5575        let mut data_type = self.parse_data_type()?;
5576
5577        // To determine whether the first token is a name or a type, peek at
5578        // what follows: if the next tokens also parse as a data type, the
5579        // first token must have been the argument name rather than its type.
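        // For example, given `a INTEGER` the second parse succeeds, so `a` is
        // the name and `INTEGER` the type; given `INTEGER DEFAULT 1` no second
        // type is parsed, so `INTEGER` remains the type.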
5580        let data_type_idx = self.get_current_index();
5581
5582        // DEFAULT will be parsed as `DataType::Custom`, which is undesirable in this context
5583        fn parse_data_type_no_default(parser: &mut Parser) -> Result<DataType, ParserError> {
5584            if parser.peek_keyword(Keyword::DEFAULT) {
5585                // This dummy error is ignored in `maybe_parse`
5586                parser_err!(
5587                    "The DEFAULT keyword is not a type",
5588                    parser.peek_token().span.start
5589                )
5590            } else {
5591                parser.parse_data_type()
5592            }
5593        }
5594
5595        if let Some(next_data_type) = self.maybe_parse(parse_data_type_no_default)? {
5596            let token = self.token_at(data_type_idx);
5597
5598            // Ensure the token is a `Word` token and not some other special token.
5599            if !matches!(token.token, Token::Word(_)) {
5600                return self.expected("a name or type", token.clone());
5601            }
5602
5603            name = Some(Ident::new(token.to_string()));
5604            data_type = next_data_type;
5605        }
5606
5607        let default_expr = if self.parse_keyword(Keyword::DEFAULT) || self.consume_token(&Token::Eq)
5608        {
5609            Some(self.parse_expr()?)
5610        } else {
5611            None
5612        };
5613        Ok(OperateFunctionArg {
5614            mode,
5615            name,
5616            data_type,
5617            default_expr,
5618        })
5619    }
5620
5621    /// Parse a [Statement::DropTrigger] statement, such as:
5622    ///
5623    /// ```sql
5624    /// DROP TRIGGER [ IF EXISTS ] name ON table_name [ CASCADE | RESTRICT ]
5625    /// ```
5626    pub fn parse_drop_trigger(&mut self) -> Result<Statement, ParserError> {
5627        if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
5628        {
5629            self.prev_token();
5630            return self.expected("an object type after DROP", self.peek_token());
5631        }
5632        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5633        let trigger_name = self.parse_object_name(false)?;
5634        let table_name = if self.parse_keyword(Keyword::ON) {
5635            Some(self.parse_object_name(false)?)
5636        } else {
5637            None
5638        };
5639        let option = match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
5640            Some(Keyword::CASCADE) => Some(ReferentialAction::Cascade),
5641            Some(Keyword::RESTRICT) => Some(ReferentialAction::Restrict),
5642            Some(unexpected_keyword) => return Err(ParserError::ParserError(
5643                format!("Internal parser error: expected any of {{CASCADE, RESTRICT}}, got {unexpected_keyword:?}"),
5644            )),
5645            None => None,
5646        };
5647        Ok(Statement::DropTrigger(DropTrigger {
5648            if_exists,
5649            trigger_name,
5650            table_name,
5651            option,
5652        }))
5653    }
5654
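    /// Parse a `CREATE TRIGGER` statement, for example a PostgreSQL-style
    /// trigger (the trigger, table, and function names are illustrative only):
    ///
    /// ```sql
    /// CREATE TRIGGER check_update BEFORE UPDATE ON accounts FOR EACH ROW EXECUTE FUNCTION check_account_update()
    /// ```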
5655    pub fn parse_create_trigger(
5656        &mut self,
5657        temporary: bool,
5658        or_alter: bool,
5659        or_replace: bool,
5660        is_constraint: bool,
5661    ) -> Result<Statement, ParserError> {
5662        if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
5663        {
5664            self.prev_token();
5665            return self.expected("an object type after CREATE", self.peek_token());
5666        }
5667
5668        let name = self.parse_object_name(false)?;
5669        let period = self.maybe_parse(|parser| parser.parse_trigger_period())?;
5670
5671        let events = self.parse_keyword_separated(Keyword::OR, Parser::parse_trigger_event)?;
5672        self.expect_keyword_is(Keyword::ON)?;
5673        let table_name = self.parse_object_name(false)?;
5674
5675        let referenced_table_name = if self.parse_keyword(Keyword::FROM) {
5676            self.parse_object_name(true).ok()
5677        } else {
5678            None
5679        };
5680
5681        let characteristics = self.parse_constraint_characteristics()?;
5682
5683        let mut referencing = vec![];
5684        if self.parse_keyword(Keyword::REFERENCING) {
5685            while let Some(refer) = self.parse_trigger_referencing()? {
5686                referencing.push(refer);
5687            }
5688        }
5689
5690        let trigger_object = if self.parse_keyword(Keyword::FOR) {
5691            let include_each = self.parse_keyword(Keyword::EACH);
5692            let trigger_object =
5693                match self.expect_one_of_keywords(&[Keyword::ROW, Keyword::STATEMENT])? {
5694                    Keyword::ROW => TriggerObject::Row,
5695                    Keyword::STATEMENT => TriggerObject::Statement,
5696                    unexpected_keyword => return Err(ParserError::ParserError(
5697                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in ROW/STATEMENT"),
5698                    )),
5699                };
5700
5701            Some(if include_each {
5702                TriggerObjectKind::ForEach(trigger_object)
5703            } else {
5704                TriggerObjectKind::For(trigger_object)
5705            })
5706        } else {
5707            let _ = self.parse_keyword(Keyword::FOR);
5708
5709            None
5710        };
5711
5712        let condition = self
5713            .parse_keyword(Keyword::WHEN)
5714            .then(|| self.parse_expr())
5715            .transpose()?;
5716
5717        let mut exec_body = None;
5718        let mut statements = None;
5719        if self.parse_keyword(Keyword::EXECUTE) {
5720            exec_body = Some(self.parse_trigger_exec_body()?);
5721        } else {
5722            statements = Some(self.parse_conditional_statements(&[Keyword::END])?);
5723        }
5724
5725        Ok(CreateTrigger {
5726            or_alter,
5727            temporary,
5728            or_replace,
5729            is_constraint,
5730            name,
5731            period,
5732            period_before_table: true,
5733            events,
5734            table_name,
5735            referenced_table_name,
5736            referencing,
5737            trigger_object,
5738            condition,
5739            exec_body,
5740            statements_as: false,
5741            statements,
5742            characteristics,
5743        }
5744        .into())
5745    }
5746
5747    pub fn parse_trigger_period(&mut self) -> Result<TriggerPeriod, ParserError> {
5748        Ok(
5749            match self.expect_one_of_keywords(&[
5750                Keyword::FOR,
5751                Keyword::BEFORE,
5752                Keyword::AFTER,
5753                Keyword::INSTEAD,
5754            ])? {
5755                Keyword::FOR => TriggerPeriod::For,
5756                Keyword::BEFORE => TriggerPeriod::Before,
5757                Keyword::AFTER => TriggerPeriod::After,
5758                Keyword::INSTEAD => self
5759                    .expect_keyword_is(Keyword::OF)
5760                    .map(|_| TriggerPeriod::InsteadOf)?,
5761                unexpected_keyword => return Err(ParserError::ParserError(
5762                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger period"),
5763                )),
5764            },
5765        )
5766    }
5767
5768    pub fn parse_trigger_event(&mut self) -> Result<TriggerEvent, ParserError> {
5769        Ok(
5770            match self.expect_one_of_keywords(&[
5771                Keyword::INSERT,
5772                Keyword::UPDATE,
5773                Keyword::DELETE,
5774                Keyword::TRUNCATE,
5775            ])? {
5776                Keyword::INSERT => TriggerEvent::Insert,
5777                Keyword::UPDATE => {
5778                    if self.parse_keyword(Keyword::OF) {
5779                        let cols = self.parse_comma_separated(Parser::parse_identifier)?;
5780                        TriggerEvent::Update(cols)
5781                    } else {
5782                        TriggerEvent::Update(vec![])
5783                    }
5784                }
5785                Keyword::DELETE => TriggerEvent::Delete,
5786                Keyword::TRUNCATE => TriggerEvent::Truncate,
5787                unexpected_keyword => return Err(ParserError::ParserError(
5788                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger event"),
5789                )),
5790            },
5791        )
5792    }
5793
5794    pub fn parse_trigger_referencing(&mut self) -> Result<Option<TriggerReferencing>, ParserError> {
5795        let refer_type = match self.parse_one_of_keywords(&[Keyword::OLD, Keyword::NEW]) {
5796            Some(Keyword::OLD) if self.parse_keyword(Keyword::TABLE) => {
5797                TriggerReferencingType::OldTable
5798            }
5799            Some(Keyword::NEW) if self.parse_keyword(Keyword::TABLE) => {
5800                TriggerReferencingType::NewTable
5801            }
5802            _ => {
5803                return Ok(None);
5804            }
5805        };
5806
5807        let is_as = self.parse_keyword(Keyword::AS);
5808        let transition_relation_name = self.parse_object_name(false)?;
5809        Ok(Some(TriggerReferencing {
5810            refer_type,
5811            is_as,
5812            transition_relation_name,
5813        }))
5814    }
5815
5816    pub fn parse_trigger_exec_body(&mut self) -> Result<TriggerExecBody, ParserError> {
5817        Ok(TriggerExecBody {
5818            exec_type: match self
5819                .expect_one_of_keywords(&[Keyword::FUNCTION, Keyword::PROCEDURE])?
5820            {
5821                Keyword::FUNCTION => TriggerExecBodyType::Function,
5822                Keyword::PROCEDURE => TriggerExecBodyType::Procedure,
5823                unexpected_keyword => return Err(ParserError::ParserError(
5824                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger exec body"),
5825                )),
5826            },
5827            func_desc: self.parse_function_desc()?,
5828        })
5829    }
5830
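    /// Parse a DuckDB-style `CREATE MACRO` statement, for example (the macro
    /// name and body are illustrative only):
    ///
    /// ```sql
    /// CREATE MACRO add(a, b) AS a + b
    /// ```
    ///
    /// A table macro instead uses `AS TABLE` followed by a query.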
5831    pub fn parse_create_macro(
5832        &mut self,
5833        or_replace: bool,
5834        temporary: bool,
5835    ) -> Result<Statement, ParserError> {
5836        if dialect_of!(self is DuckDbDialect | GenericDialect) {
5837            let name = self.parse_object_name(false)?;
5838            self.expect_token(&Token::LParen)?;
5839            let args = if self.consume_token(&Token::RParen) {
5840                self.prev_token();
5841                None
5842            } else {
5843                Some(self.parse_comma_separated(Parser::parse_macro_arg)?)
5844            };
5845
5846            self.expect_token(&Token::RParen)?;
5847            self.expect_keyword_is(Keyword::AS)?;
5848
5849            Ok(Statement::CreateMacro {
5850                or_replace,
5851                temporary,
5852                name,
5853                args,
5854                definition: if self.parse_keyword(Keyword::TABLE) {
5855                    MacroDefinition::Table(self.parse_query()?)
5856                } else {
5857                    MacroDefinition::Expr(self.parse_expr()?)
5858                },
5859            })
5860        } else {
5861            self.prev_token();
5862            self.expected("an object type after CREATE", self.peek_token())
5863        }
5864    }
5865
5866    fn parse_macro_arg(&mut self) -> Result<MacroArg, ParserError> {
5867        let name = self.parse_identifier()?;
5868
5869        let default_expr =
5870            if self.consume_token(&Token::Assignment) || self.consume_token(&Token::RArrow) {
5871                Some(self.parse_expr()?)
5872            } else {
5873                None
5874            };
5875        Ok(MacroArg { name, default_expr })
5876    }
5877
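    /// Parse a Hive-style `CREATE EXTERNAL TABLE` statement (the `EXTERNAL`
    /// keyword has already been consumed); the table name, column, and
    /// location below are illustrative only:
    ///
    /// ```sql
    /// CREATE EXTERNAL TABLE events (id INT) STORED AS PARQUET LOCATION '/warehouse/events'
    /// ```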
5878    pub fn parse_create_external_table(
5879        &mut self,
5880        or_replace: bool,
5881    ) -> Result<Statement, ParserError> {
5882        self.expect_keyword_is(Keyword::TABLE)?;
5883        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5884        let table_name = self.parse_object_name(false)?;
5885        let (columns, constraints) = self.parse_columns()?;
5886
5887        let hive_distribution = self.parse_hive_distribution()?;
5888        let hive_formats = self.parse_hive_formats()?;
5889
5890        let file_format = if let Some(ref hf) = hive_formats {
5891            if let Some(ref ff) = hf.storage {
5892                match ff {
5893                    HiveIOFormat::FileFormat { format } => Some(*format),
5894                    _ => None,
5895                }
5896            } else {
5897                None
5898            }
5899        } else {
5900            None
5901        };
5902        let location = hive_formats.as_ref().and_then(|hf| hf.location.clone());
5903        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
5904        let table_options = if !table_properties.is_empty() {
5905            CreateTableOptions::TableProperties(table_properties)
5906        } else {
5907            CreateTableOptions::None
5908        };
5909        Ok(CreateTableBuilder::new(table_name)
5910            .columns(columns)
5911            .constraints(constraints)
5912            .hive_distribution(hive_distribution)
5913            .hive_formats(hive_formats)
5914            .table_options(table_options)
5915            .or_replace(or_replace)
5916            .if_not_exists(if_not_exists)
5917            .external(true)
5918            .file_format(file_format)
5919            .location(location)
5920            .build())
5921    }
5922
5923    pub fn parse_file_format(&mut self) -> Result<FileFormat, ParserError> {
5924        let next_token = self.next_token();
5925        match &next_token.token {
5926            Token::Word(w) => match w.keyword {
5927                Keyword::AVRO => Ok(FileFormat::AVRO),
5928                Keyword::JSONFILE => Ok(FileFormat::JSONFILE),
5929                Keyword::ORC => Ok(FileFormat::ORC),
5930                Keyword::PARQUET => Ok(FileFormat::PARQUET),
5931                Keyword::RCFILE => Ok(FileFormat::RCFILE),
5932                Keyword::SEQUENCEFILE => Ok(FileFormat::SEQUENCEFILE),
5933                Keyword::TEXTFILE => Ok(FileFormat::TEXTFILE),
5934                _ => self.expected("fileformat", next_token),
5935            },
5936            _ => self.expected("fileformat", next_token),
5937        }
5938    }
5939
5940    fn parse_analyze_format_kind(&mut self) -> Result<AnalyzeFormatKind, ParserError> {
5941        if self.consume_token(&Token::Eq) {
5942            Ok(AnalyzeFormatKind::Assignment(self.parse_analyze_format()?))
5943        } else {
5944            Ok(AnalyzeFormatKind::Keyword(self.parse_analyze_format()?))
5945        }
5946    }
5947
5948    pub fn parse_analyze_format(&mut self) -> Result<AnalyzeFormat, ParserError> {
5949        let next_token = self.next_token();
5950        match &next_token.token {
5951            Token::Word(w) => match w.keyword {
5952                Keyword::TEXT => Ok(AnalyzeFormat::TEXT),
5953                Keyword::GRAPHVIZ => Ok(AnalyzeFormat::GRAPHVIZ),
5954                Keyword::JSON => Ok(AnalyzeFormat::JSON),
5955                _ => self.expected("TEXT, GRAPHVIZ or JSON", next_token),
5956            },
5957            _ => self.expected("TEXT, GRAPHVIZ or JSON", next_token),
5958        }
5959    }
5960
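
    /// Parse a `CREATE VIEW` statement (the leading `CREATE [OR REPLACE]
    /// [TEMPORARY]` keywords have already been consumed); the view name and
    /// query below are illustrative only:
    ///
    /// ```sql
    /// CREATE MATERIALIZED VIEW IF NOT EXISTS my_view (col1) AS SELECT 1
    /// ```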
5961    pub fn parse_create_view(
5962        &mut self,
5963        or_alter: bool,
5964        or_replace: bool,
5965        temporary: bool,
5966        create_view_params: Option<CreateViewParams>,
5967    ) -> Result<Statement, ParserError> {
5968        let secure = self.parse_keyword(Keyword::SECURE);
5969        let materialized = self.parse_keyword(Keyword::MATERIALIZED);
5970        self.expect_keyword_is(Keyword::VIEW)?;
5971        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
5972        // Try to parse IF NOT EXISTS either before or after the view name.
5973        // Placing the name before IF NOT EXISTS is accepted by Snowflake, though undocumented.
5974        let if_not_exists_first =
5975            self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5976        let name = self.parse_object_name(allow_unquoted_hyphen)?;
5977        let name_before_not_exists = !if_not_exists_first
5978            && self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5979        let if_not_exists = if_not_exists_first || name_before_not_exists;
5980        // `OR ALTER` right after `CREATE` has already been handled by the caller (see `or_alter`).
5981        // ANSI SQL and Postgres support RECURSIVE here, but we don't support it yet.
5982        let columns = self.parse_view_columns()?;
5983        let mut options = CreateTableOptions::None;
5984        let with_options = self.parse_options(Keyword::WITH)?;
5985        if !with_options.is_empty() {
5986            options = CreateTableOptions::With(with_options);
5987        }
5988
5989        let cluster_by = if self.parse_keyword(Keyword::CLUSTER) {
5990            self.expect_keyword_is(Keyword::BY)?;
5991            self.parse_parenthesized_column_list(Optional, false)?
5992        } else {
5993            vec![]
5994        };
5995
5996        if dialect_of!(self is BigQueryDialect | GenericDialect) {
5997            if let Some(opts) = self.maybe_parse_options(Keyword::OPTIONS)? {
5998                if !opts.is_empty() {
5999                    options = CreateTableOptions::Options(opts);
6000                }
6001            }
6002        }
6003
6004        let to = if dialect_of!(self is ClickHouseDialect | GenericDialect)
6005            && self.parse_keyword(Keyword::TO)
6006        {
6007            Some(self.parse_object_name(false)?)
6008        } else {
6009            None
6010        };
6011
6012        let comment = if dialect_of!(self is SnowflakeDialect | GenericDialect)
6013            && self.parse_keyword(Keyword::COMMENT)
6014        {
6015            self.expect_token(&Token::Eq)?;
6016            Some(self.parse_comment_value()?)
6017        } else {
6018            None
6019        };
6020
6021        self.expect_keyword_is(Keyword::AS)?;
6022        let query = self.parse_query()?;
6023        // Note: `WITH [ CASCADED | LOCAL ] CHECK OPTION`, widely supported here, is not parsed yet.
6024
6025        let with_no_schema_binding = dialect_of!(self is RedshiftSqlDialect | GenericDialect)
6026            && self.parse_keywords(&[
6027                Keyword::WITH,
6028                Keyword::NO,
6029                Keyword::SCHEMA,
6030                Keyword::BINDING,
6031            ]);
6032
6033        Ok(CreateView {
6034            or_alter,
6035            name,
6036            columns,
6037            query,
6038            materialized,
6039            secure,
6040            or_replace,
6041            options,
6042            cluster_by,
6043            comment,
6044            with_no_schema_binding,
6045            if_not_exists,
6046            temporary,
6047            to,
6048            params: create_view_params,
6049            name_before_not_exists,
6050        }
6051        .into())
6052    }
6053
6054    /// Parse optional parameters for the `CREATE VIEW` statement supported by [MySQL].
6055    ///
6056    /// [MySQL]: https://dev.mysql.com/doc/refman/9.1/en/create-view.html
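    ///
    /// For example, this parses the `ALGORITHM`, `DEFINER`, and `SQL SECURITY`
    /// clauses in a statement such as (the view name and definer are
    /// illustrative only):
    ///
    /// ```sql
    /// CREATE ALGORITHM = MERGE DEFINER = 'admin'@'localhost' SQL SECURITY DEFINER VIEW v AS SELECT 1
    /// ```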
6057    fn parse_create_view_params(&mut self) -> Result<Option<CreateViewParams>, ParserError> {
6058        let algorithm = if self.parse_keyword(Keyword::ALGORITHM) {
6059            self.expect_token(&Token::Eq)?;
6060            Some(
6061                match self.expect_one_of_keywords(&[
6062                    Keyword::UNDEFINED,
6063                    Keyword::MERGE,
6064                    Keyword::TEMPTABLE,
6065                ])? {
6066                    Keyword::UNDEFINED => CreateViewAlgorithm::Undefined,
6067                    Keyword::MERGE => CreateViewAlgorithm::Merge,
6068                    Keyword::TEMPTABLE => CreateViewAlgorithm::TempTable,
6069                    _ => {
6070                        self.prev_token();
6071                        let found = self.next_token();
6072                        return self
6073                            .expected("UNDEFINED or MERGE or TEMPTABLE after ALGORITHM =", found);
6074                    }
6075                },
6076            )
6077        } else {
6078            None
6079        };
6080        let definer = if self.parse_keyword(Keyword::DEFINER) {
6081            self.expect_token(&Token::Eq)?;
6082            Some(self.parse_grantee_name()?)
6083        } else {
6084            None
6085        };
6086        let security = if self.parse_keywords(&[Keyword::SQL, Keyword::SECURITY]) {
6087            Some(
6088                match self.expect_one_of_keywords(&[Keyword::DEFINER, Keyword::INVOKER])? {
6089                    Keyword::DEFINER => CreateViewSecurity::Definer,
6090                    Keyword::INVOKER => CreateViewSecurity::Invoker,
6091                    _ => {
6092                        self.prev_token();
6093                        let found = self.next_token();
6094                        return self.expected("DEFINER or INVOKER after SQL SECURITY", found);
6095                    }
6096                },
6097            )
6098        } else {
6099            None
6100        };
6101        if algorithm.is_some() || definer.is_some() || security.is_some() {
6102            Ok(Some(CreateViewParams {
6103                algorithm,
6104                definer,
6105                security,
6106            }))
6107        } else {
6108            Ok(None)
6109        }
6110    }
6111
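    /// Parse a `CREATE ROLE` statement, for example with the PostgreSQL
    /// dialect (the role name and option values are illustrative only):
    ///
    /// ```sql
    /// CREATE ROLE reader WITH LOGIN PASSWORD 'secret' CONNECTION LIMIT 10
    /// ```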
6112    pub fn parse_create_role(&mut self) -> Result<Statement, ParserError> {
6113        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6114        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
6115
6116        let _ = self.parse_keyword(Keyword::WITH); // [ WITH ]
6117
6118        let optional_keywords = if dialect_of!(self is MsSqlDialect) {
6119            vec![Keyword::AUTHORIZATION]
6120        } else if dialect_of!(self is PostgreSqlDialect) {
6121            vec![
6122                Keyword::LOGIN,
6123                Keyword::NOLOGIN,
6124                Keyword::INHERIT,
6125                Keyword::NOINHERIT,
6126                Keyword::BYPASSRLS,
6127                Keyword::NOBYPASSRLS,
6128                Keyword::PASSWORD,
6129                Keyword::CREATEDB,
6130                Keyword::NOCREATEDB,
6131                Keyword::CREATEROLE,
6132                Keyword::NOCREATEROLE,
6133                Keyword::SUPERUSER,
6134                Keyword::NOSUPERUSER,
6135                Keyword::REPLICATION,
6136                Keyword::NOREPLICATION,
6137                Keyword::CONNECTION,
6138                Keyword::VALID,
6139                Keyword::IN,
6140                Keyword::ROLE,
6141                Keyword::ADMIN,
6142                Keyword::USER,
6143            ]
6144        } else {
6145            vec![]
6146        };
6147
6148        // MSSQL
6149        let mut authorization_owner = None;
6150        // Postgres
6151        let mut login = None;
6152        let mut inherit = None;
6153        let mut bypassrls = None;
6154        let mut password = None;
6155        let mut create_db = None;
6156        let mut create_role = None;
6157        let mut superuser = None;
6158        let mut replication = None;
6159        let mut connection_limit = None;
6160        let mut valid_until = None;
6161        let mut in_role = vec![];
6162        let mut in_group = vec![];
6163        let mut role = vec![];
6164        let mut user = vec![];
6165        let mut admin = vec![];
6166
6167        while let Some(keyword) = self.parse_one_of_keywords(&optional_keywords) {
6168            let loc = self
6169                .tokens
6170                .get(self.index - 1)
6171                .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
6172            match keyword {
6173                Keyword::AUTHORIZATION => {
6174                    if authorization_owner.is_some() {
6175                        parser_err!("Found multiple AUTHORIZATION", loc)
6176                    } else {
6177                        authorization_owner = Some(self.parse_object_name(false)?);
6178                        Ok(())
6179                    }
6180                }
6181                Keyword::LOGIN | Keyword::NOLOGIN => {
6182                    if login.is_some() {
6183                        parser_err!("Found multiple LOGIN or NOLOGIN", loc)
6184                    } else {
6185                        login = Some(keyword == Keyword::LOGIN);
6186                        Ok(())
6187                    }
6188                }
6189                Keyword::INHERIT | Keyword::NOINHERIT => {
6190                    if inherit.is_some() {
6191                        parser_err!("Found multiple INHERIT or NOINHERIT", loc)
6192                    } else {
6193                        inherit = Some(keyword == Keyword::INHERIT);
6194                        Ok(())
6195                    }
6196                }
6197                Keyword::BYPASSRLS | Keyword::NOBYPASSRLS => {
6198                    if bypassrls.is_some() {
6199                        parser_err!("Found multiple BYPASSRLS or NOBYPASSRLS", loc)
6200                    } else {
6201                        bypassrls = Some(keyword == Keyword::BYPASSRLS);
6202                        Ok(())
6203                    }
6204                }
6205                Keyword::CREATEDB | Keyword::NOCREATEDB => {
6206                    if create_db.is_some() {
6207                        parser_err!("Found multiple CREATEDB or NOCREATEDB", loc)
6208                    } else {
6209                        create_db = Some(keyword == Keyword::CREATEDB);
6210                        Ok(())
6211                    }
6212                }
6213                Keyword::CREATEROLE | Keyword::NOCREATEROLE => {
6214                    if create_role.is_some() {
6215                        parser_err!("Found multiple CREATEROLE or NOCREATEROLE", loc)
6216                    } else {
6217                        create_role = Some(keyword == Keyword::CREATEROLE);
6218                        Ok(())
6219                    }
6220                }
6221                Keyword::SUPERUSER | Keyword::NOSUPERUSER => {
6222                    if superuser.is_some() {
6223                        parser_err!("Found multiple SUPERUSER or NOSUPERUSER", loc)
6224                    } else {
6225                        superuser = Some(keyword == Keyword::SUPERUSER);
6226                        Ok(())
6227                    }
6228                }
6229                Keyword::REPLICATION | Keyword::NOREPLICATION => {
6230                    if replication.is_some() {
6231                        parser_err!("Found multiple REPLICATION or NOREPLICATION", loc)
6232                    } else {
6233                        replication = Some(keyword == Keyword::REPLICATION);
6234                        Ok(())
6235                    }
6236                }
6237                Keyword::PASSWORD => {
6238                    if password.is_some() {
6239                        parser_err!("Found multiple PASSWORD", loc)
6240                    } else {
6241                        password = if self.parse_keyword(Keyword::NULL) {
6242                            Some(Password::NullPassword)
6243                        } else {
6244                            Some(Password::Password(Expr::Value(self.parse_value()?)))
6245                        };
6246                        Ok(())
6247                    }
6248                }
6249                Keyword::CONNECTION => {
6250                    self.expect_keyword_is(Keyword::LIMIT)?;
6251                    if connection_limit.is_some() {
6252                        parser_err!("Found multiple CONNECTION LIMIT", loc)
6253                    } else {
6254                        connection_limit = Some(Expr::Value(self.parse_number_value()?));
6255                        Ok(())
6256                    }
6257                }
6258                Keyword::VALID => {
6259                    self.expect_keyword_is(Keyword::UNTIL)?;
6260                    if valid_until.is_some() {
6261                        parser_err!("Found multiple VALID UNTIL", loc)
6262                    } else {
6263                        valid_until = Some(Expr::Value(self.parse_value()?));
6264                        Ok(())
6265                    }
6266                }
6267                Keyword::IN => {
6268                    if self.parse_keyword(Keyword::ROLE) {
6269                        if !in_role.is_empty() {
6270                            parser_err!("Found multiple IN ROLE", loc)
6271                        } else {
6272                            in_role = self.parse_comma_separated(|p| p.parse_identifier())?;
6273                            Ok(())
6274                        }
6275                    } else if self.parse_keyword(Keyword::GROUP) {
6276                        if !in_group.is_empty() {
6277                            parser_err!("Found multiple IN GROUP", loc)
6278                        } else {
6279                            in_group = self.parse_comma_separated(|p| p.parse_identifier())?;
6280                            Ok(())
6281                        }
6282                    } else {
6283                        self.expected("ROLE or GROUP after IN", self.peek_token())
6284                    }
6285                }
6286                Keyword::ROLE => {
6287                    if !role.is_empty() {
6288                        parser_err!("Found multiple ROLE", loc)
6289                    } else {
6290                        role = self.parse_comma_separated(|p| p.parse_identifier())?;
6291                        Ok(())
6292                    }
6293                }
6294                Keyword::USER => {
6295                    if !user.is_empty() {
6296                        parser_err!("Found multiple USER", loc)
6297                    } else {
6298                        user = self.parse_comma_separated(|p| p.parse_identifier())?;
6299                        Ok(())
6300                    }
6301                }
6302                Keyword::ADMIN => {
6303                    if !admin.is_empty() {
6304                        parser_err!("Found multiple ADMIN", loc)
6305                    } else {
6306                        admin = self.parse_comma_separated(|p| p.parse_identifier())?;
6307                        Ok(())
6308                    }
6309                }
6310                _ => break,
6311            }?
6312        }
6313
6314        Ok(CreateRole {
6315            names,
6316            if_not_exists,
6317            login,
6318            inherit,
6319            bypassrls,
6320            password,
6321            create_db,
6322            create_role,
6323            replication,
6324            superuser,
6325            connection_limit,
6326            valid_until,
6327            in_role,
6328            in_group,
6329            role,
6330            user,
6331            admin,
6332            authorization_owner,
6333        }
6334        .into())
6335    }
6336
6337    pub fn parse_owner(&mut self) -> Result<Owner, ParserError> {
6338        let owner = match self.parse_one_of_keywords(&[Keyword::CURRENT_USER, Keyword::CURRENT_ROLE, Keyword::SESSION_USER]) {
6339            Some(Keyword::CURRENT_USER) => Owner::CurrentUser,
6340            Some(Keyword::CURRENT_ROLE) => Owner::CurrentRole,
6341            Some(Keyword::SESSION_USER) => Owner::SessionUser,
6342            Some(unexpected_keyword) => return Err(ParserError::ParserError(
6343                format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in owner"),
6344            )),
6345            None => {
6346                match self.parse_identifier() {
6347                    Ok(ident) => Owner::Ident(ident),
6348                    Err(e) => {
6349                        return Err(ParserError::ParserError(format!("Expected: CURRENT_USER, CURRENT_ROLE, SESSION_USER or identifier after OWNER TO. {e}")))
6350                    }
6351                }
6352            }
6353        };
6354        Ok(owner)
6355    }
6356
6357    /// Parses a [Statement::CreateDomain] statement.
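    ///
    /// For example (the domain name and constraint are illustrative only):
    ///
    /// ```sql
    /// CREATE DOMAIN positive_int AS INTEGER DEFAULT 1 CHECK (VALUE > 0)
    /// ```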
6358    fn parse_create_domain(&mut self) -> Result<Statement, ParserError> {
6359        let name = self.parse_object_name(false)?;
6360        self.expect_keyword_is(Keyword::AS)?;
6361        let data_type = self.parse_data_type()?;
6362        let collation = if self.parse_keyword(Keyword::COLLATE) {
6363            Some(self.parse_identifier()?)
6364        } else {
6365            None
6366        };
6367        let default = if self.parse_keyword(Keyword::DEFAULT) {
6368            Some(self.parse_expr()?)
6369        } else {
6370            None
6371        };
6372        let mut constraints = Vec::new();
6373        while let Some(constraint) = self.parse_optional_table_constraint()? {
6374            constraints.push(constraint);
6375        }
6376
6377        Ok(Statement::CreateDomain(CreateDomain {
6378            name,
6379            data_type,
6380            collation,
6381            default,
6382            constraints,
6383        }))
6384    }
6385
6386    /// ```sql
6387    ///     CREATE POLICY name ON table_name [ AS { PERMISSIVE | RESTRICTIVE } ]
6388    ///     [ FOR { ALL | SELECT | INSERT | UPDATE | DELETE } ]
6389    ///     [ TO { role_name | PUBLIC | CURRENT_USER | CURRENT_ROLE | SESSION_USER } [, ...] ]
6390    ///     [ USING ( using_expression ) ]
6391    ///     [ WITH CHECK ( with_check_expression ) ]
6392    /// ```
6393    ///
6394    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createpolicy.html)
6395    pub fn parse_create_policy(&mut self) -> Result<Statement, ParserError> {
6396        let name = self.parse_identifier()?;
6397        self.expect_keyword_is(Keyword::ON)?;
6398        let table_name = self.parse_object_name(false)?;
6399
6400        let policy_type = if self.parse_keyword(Keyword::AS) {
6401            let keyword =
6402                self.expect_one_of_keywords(&[Keyword::PERMISSIVE, Keyword::RESTRICTIVE])?;
6403            Some(match keyword {
6404                Keyword::PERMISSIVE => CreatePolicyType::Permissive,
6405                Keyword::RESTRICTIVE => CreatePolicyType::Restrictive,
6406                unexpected_keyword => return Err(ParserError::ParserError(
6407                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in policy type"),
6408                )),
6409            })
6410        } else {
6411            None
6412        };
6413
6414        let command = if self.parse_keyword(Keyword::FOR) {
6415            let keyword = self.expect_one_of_keywords(&[
6416                Keyword::ALL,
6417                Keyword::SELECT,
6418                Keyword::INSERT,
6419                Keyword::UPDATE,
6420                Keyword::DELETE,
6421            ])?;
6422            Some(match keyword {
6423                Keyword::ALL => CreatePolicyCommand::All,
6424                Keyword::SELECT => CreatePolicyCommand::Select,
6425                Keyword::INSERT => CreatePolicyCommand::Insert,
6426                Keyword::UPDATE => CreatePolicyCommand::Update,
6427                Keyword::DELETE => CreatePolicyCommand::Delete,
6428                unexpected_keyword => return Err(ParserError::ParserError(
6429                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in policy command"),
6430                )),
6431            })
6432        } else {
6433            None
6434        };
6435
6436        let to = if self.parse_keyword(Keyword::TO) {
6437            Some(self.parse_comma_separated(|p| p.parse_owner())?)
6438        } else {
6439            None
6440        };
6441
6442        let using = if self.parse_keyword(Keyword::USING) {
6443            self.expect_token(&Token::LParen)?;
6444            let expr = self.parse_expr()?;
6445            self.expect_token(&Token::RParen)?;
6446            Some(expr)
6447        } else {
6448            None
6449        };
6450
6451        let with_check = if self.parse_keywords(&[Keyword::WITH, Keyword::CHECK]) {
6452            self.expect_token(&Token::LParen)?;
6453            let expr = self.parse_expr()?;
6454            self.expect_token(&Token::RParen)?;
6455            Some(expr)
6456        } else {
6457            None
6458        };
6459
6460        Ok(CreatePolicy {
6461            name,
6462            table_name,
6463            policy_type,
6464            command,
6465            to,
6466            using,
6467            with_check,
6468        })
6469    }
6470
6471    /// ```sql
6472    /// CREATE CONNECTOR [IF NOT EXISTS] connector_name
6473    /// [TYPE datasource_type]
6474    /// [URL datasource_url]
6475    /// [COMMENT connector_comment]
6476    /// [WITH DCPROPERTIES(property_name=property_value, ...)]
6477    /// ```
6478    ///
6479    /// [Hive Documentation](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362034#LanguageManualDDL-CreateDataConnectorCreateConnector)
6480    pub fn parse_create_connector(&mut self) -> Result<Statement, ParserError> {
6481        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6482        let name = self.parse_identifier()?;
6483
6484        let connector_type = if self.parse_keyword(Keyword::TYPE) {
6485            Some(self.parse_literal_string()?)
6486        } else {
6487            None
6488        };
6489
6490        let url = if self.parse_keyword(Keyword::URL) {
6491            Some(self.parse_literal_string()?)
6492        } else {
6493            None
6494        };
6495
6496        let comment = self.parse_optional_inline_comment()?;
6497
6498        let with_dcproperties =
6499            match self.parse_options_with_keywords(&[Keyword::WITH, Keyword::DCPROPERTIES])? {
6500                properties if !properties.is_empty() => Some(properties),
6501                _ => None,
6502            };
6503
6504        Ok(Statement::CreateConnector(CreateConnector {
6505            name,
6506            if_not_exists,
6507            connector_type,
6508            url,
6509            comment,
6510            with_dcproperties,
6511        }))
6512    }
6513
6514    /// Parse an operator name, which can contain special characters like +, -, <, >, =
6515    /// that are tokenized as operator tokens rather than identifiers.
6516    /// This is used for PostgreSQL CREATE OPERATOR statements.
6517    ///
6518    /// Examples: `+`, `myschema.+`, `pg_catalog.<=`
6519    fn parse_operator_name(&mut self) -> Result<ObjectName, ParserError> {
6520        let mut parts = vec![];
6521        loop {
6522            parts.push(ObjectNamePart::Identifier(Ident::new(
6523                self.next_token().to_string(),
6524            )));
6525            if !self.consume_token(&Token::Period) {
6526                break;
6527            }
6528        }
6529        Ok(ObjectName(parts))
6530    }
6531
6532    /// Parse a [Statement::CreateOperator]
6533    ///
6534    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createoperator.html)
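    ///
    /// For example (the schema, function, and argument types are illustrative
    /// only):
    ///
    /// ```sql
    /// CREATE OPERATOR myschema.+ (FUNCTION = int4pl, LEFTARG = INTEGER, RIGHTARG = INTEGER, COMMUTATOR = +)
    /// ```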
6535    pub fn parse_create_operator(&mut self) -> Result<Statement, ParserError> {
6536        let name = self.parse_operator_name()?;
6537        self.expect_token(&Token::LParen)?;
6538
6539        let mut function: Option<ObjectName> = None;
6540        let mut is_procedure = false;
6541        let mut left_arg: Option<DataType> = None;
6542        let mut right_arg: Option<DataType> = None;
6543        let mut options: Vec<OperatorOption> = Vec::new();
6544
6545        loop {
6546            let keyword = self.expect_one_of_keywords(&[
6547                Keyword::FUNCTION,
6548                Keyword::PROCEDURE,
6549                Keyword::LEFTARG,
6550                Keyword::RIGHTARG,
6551                Keyword::COMMUTATOR,
6552                Keyword::NEGATOR,
6553                Keyword::RESTRICT,
6554                Keyword::JOIN,
6555                Keyword::HASHES,
6556                Keyword::MERGES,
6557            ])?;
6558
6559            match keyword {
6560                Keyword::HASHES if !options.iter().any(|o| matches!(o, OperatorOption::Hashes)) => {
6561                    options.push(OperatorOption::Hashes);
6562                }
6563                Keyword::MERGES if !options.iter().any(|o| matches!(o, OperatorOption::Merges)) => {
6564                    options.push(OperatorOption::Merges);
6565                }
6566                Keyword::FUNCTION | Keyword::PROCEDURE if function.is_none() => {
6567                    self.expect_token(&Token::Eq)?;
6568                    function = Some(self.parse_object_name(false)?);
6569                    is_procedure = keyword == Keyword::PROCEDURE;
6570                }
6571                Keyword::LEFTARG if left_arg.is_none() => {
6572                    self.expect_token(&Token::Eq)?;
6573                    left_arg = Some(self.parse_data_type()?);
6574                }
6575                Keyword::RIGHTARG if right_arg.is_none() => {
6576                    self.expect_token(&Token::Eq)?;
6577                    right_arg = Some(self.parse_data_type()?);
6578                }
6579                Keyword::COMMUTATOR
6580                    if !options
6581                        .iter()
6582                        .any(|o| matches!(o, OperatorOption::Commutator(_))) =>
6583                {
6584                    self.expect_token(&Token::Eq)?;
6585                    if self.parse_keyword(Keyword::OPERATOR) {
6586                        self.expect_token(&Token::LParen)?;
6587                        let op = self.parse_operator_name()?;
6588                        self.expect_token(&Token::RParen)?;
6589                        options.push(OperatorOption::Commutator(op));
6590                    } else {
6591                        options.push(OperatorOption::Commutator(self.parse_operator_name()?));
6592                    }
6593                }
6594                Keyword::NEGATOR
6595                    if !options
6596                        .iter()
6597                        .any(|o| matches!(o, OperatorOption::Negator(_))) =>
6598                {
6599                    self.expect_token(&Token::Eq)?;
6600                    if self.parse_keyword(Keyword::OPERATOR) {
6601                        self.expect_token(&Token::LParen)?;
6602                        let op = self.parse_operator_name()?;
6603                        self.expect_token(&Token::RParen)?;
6604                        options.push(OperatorOption::Negator(op));
6605                    } else {
6606                        options.push(OperatorOption::Negator(self.parse_operator_name()?));
6607                    }
6608                }
6609                Keyword::RESTRICT
6610                    if !options
6611                        .iter()
6612                        .any(|o| matches!(o, OperatorOption::Restrict(_))) =>
6613                {
6614                    self.expect_token(&Token::Eq)?;
6615                    options.push(OperatorOption::Restrict(Some(
6616                        self.parse_object_name(false)?,
6617                    )));
6618                }
6619                Keyword::JOIN if !options.iter().any(|o| matches!(o, OperatorOption::Join(_))) => {
6620                    self.expect_token(&Token::Eq)?;
6621                    options.push(OperatorOption::Join(Some(self.parse_object_name(false)?)));
6622                }
6623                _ => {
6624                    return Err(ParserError::ParserError(format!(
6625                        "Duplicate or unexpected keyword {:?} in CREATE OPERATOR",
6626                        keyword
6627                    )))
6628                }
6629            }
6630
6631            if !self.consume_token(&Token::Comma) {
6632                break;
6633            }
6634        }
6635
6636        // Expect closing parenthesis
6637        self.expect_token(&Token::RParen)?;
6638
6639        // FUNCTION is required
6640        let function = function.ok_or_else(|| {
6641            ParserError::ParserError("CREATE OPERATOR requires FUNCTION parameter".to_string())
6642        })?;
6643
6644        Ok(Statement::CreateOperator(CreateOperator {
6645            name,
6646            function,
6647            is_procedure,
6648            left_arg,
6649            right_arg,
6650            options,
6651        }))
6652    }
6653
6654    /// Parse a [Statement::CreateOperatorFamily]
6655    ///
6656    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createopfamily.html)
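    ///
    /// For example (the family name is illustrative only):
    ///
    /// ```sql
    /// CREATE OPERATOR FAMILY my_int_ops USING btree
    /// ```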
6657    pub fn parse_create_operator_family(&mut self) -> Result<Statement, ParserError> {
6658        let name = self.parse_object_name(false)?;
6659        self.expect_keyword(Keyword::USING)?;
6660        let using = self.parse_identifier()?;
6661
6662        Ok(Statement::CreateOperatorFamily(CreateOperatorFamily {
6663            name,
6664            using,
6665        }))
6666    }
6667
6668    /// Parse a [Statement::CreateOperatorClass]
6669    ///
6670    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createopclass.html)
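    ///
    /// For example (the class, type, and function names are illustrative only):
    ///
    /// ```sql
    /// CREATE OPERATOR CLASS my_int_ops DEFAULT FOR TYPE INTEGER USING btree AS
    ///     OPERATOR 1 <, FUNCTION 1 my_int_cmp(INTEGER, INTEGER)
    /// ```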
6671    pub fn parse_create_operator_class(&mut self) -> Result<Statement, ParserError> {
6672        let name = self.parse_object_name(false)?;
6673        let default = self.parse_keyword(Keyword::DEFAULT);
6674        self.expect_keywords(&[Keyword::FOR, Keyword::TYPE])?;
6675        let for_type = self.parse_data_type()?;
6676        self.expect_keyword(Keyword::USING)?;
6677        let using = self.parse_identifier()?;
6678
6679        let family = if self.parse_keyword(Keyword::FAMILY) {
6680            Some(self.parse_object_name(false)?)
6681        } else {
6682            None
6683        };
6684
6685        self.expect_keyword(Keyword::AS)?;
6686
6687        let mut items = vec![];
6688        loop {
6689            if self.parse_keyword(Keyword::OPERATOR) {
6690                let strategy_number = self.parse_literal_uint()? as u32;
6691                let operator_name = self.parse_operator_name()?;
6692
6693                // Optional operator argument types
6694                let op_types = if self.consume_token(&Token::LParen) {
6695                    let left = self.parse_data_type()?;
6696                    self.expect_token(&Token::Comma)?;
6697                    let right = self.parse_data_type()?;
6698                    self.expect_token(&Token::RParen)?;
6699                    Some(OperatorArgTypes { left, right })
6700                } else {
6701                    None
6702                };
6703
6704                // Optional purpose
6705                let purpose = if self.parse_keyword(Keyword::FOR) {
6706                    if self.parse_keyword(Keyword::SEARCH) {
6707                        Some(OperatorPurpose::ForSearch)
6708                    } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
6709                        let sort_family = self.parse_object_name(false)?;
6710                        Some(OperatorPurpose::ForOrderBy { sort_family })
6711                    } else {
6712                        return self.expected("SEARCH or ORDER BY after FOR", self.peek_token());
6713                    }
6714                } else {
6715                    None
6716                };
6717
6718                items.push(OperatorClassItem::Operator {
6719                    strategy_number,
6720                    operator_name,
6721                    op_types,
6722                    purpose,
6723                });
6724            } else if self.parse_keyword(Keyword::FUNCTION) {
6725                let support_number = self.parse_literal_uint()? as u32;
6726
6727                // Optional operator argument types; an empty list `()` is also accepted
6728                let op_types = if self.consume_token(&Token::LParen) {
6729                    if self.consume_token(&Token::RParen) {
6730                        Some(vec![])
6731                    } else {
6732                        let mut types = vec![];
6733                        loop {
6734                            types.push(self.parse_data_type()?);
6735                            if !self.consume_token(&Token::Comma) {
6736                                break;
6737                            }
6738                        }
6739                        self.expect_token(&Token::RParen)?;
6740                        Some(types)
6741                    }
6742                } else {
6743                    None
6744                };
6745
6746                let function_name = self.parse_object_name(false)?;
6747
6748                // Function argument types
6749                let argument_types = if self.consume_token(&Token::LParen) {
6750                    let mut types = vec![];
6751                    loop {
6752                        if self.peek_token() == Token::RParen {
6753                            break;
6754                        }
6755                        types.push(self.parse_data_type()?);
6756                        if !self.consume_token(&Token::Comma) {
6757                            break;
6758                        }
6759                    }
6760                    self.expect_token(&Token::RParen)?;
6761                    types
6762                } else {
6763                    vec![]
6764                };
6765
6766                items.push(OperatorClassItem::Function {
6767                    support_number,
6768                    op_types,
6769                    function_name,
6770                    argument_types,
6771                });
6772            } else if self.parse_keyword(Keyword::STORAGE) {
6773                let storage_type = self.parse_data_type()?;
6774                items.push(OperatorClassItem::Storage { storage_type });
6775            } else {
6776                break;
6777            }
6778
6779            // Check for comma separator
6780            if !self.consume_token(&Token::Comma) {
6781                break;
6782            }
6783        }
6784
6785        Ok(Statement::CreateOperatorClass(CreateOperatorClass {
6786            name,
6787            default,
6788            for_type,
6789            using,
6790            family,
6791            items,
6792        }))
6793    }
6794
6795    pub fn parse_drop(&mut self) -> Result<Statement, ParserError> {
6796        // MySQL dialect supports `TEMPORARY`
6797        let temporary = dialect_of!(self is MySqlDialect | GenericDialect | DuckDbDialect)
6798            && self.parse_keyword(Keyword::TEMPORARY);
6799        let persistent = dialect_of!(self is DuckDbDialect)
6800            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
6801
6802        let object_type = if self.parse_keyword(Keyword::TABLE) {
6803            ObjectType::Table
6804        } else if self.parse_keyword(Keyword::VIEW) {
6805            ObjectType::View
6806        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEW]) {
6807            ObjectType::MaterializedView
6808        } else if self.parse_keyword(Keyword::INDEX) {
6809            ObjectType::Index
6810        } else if self.parse_keyword(Keyword::ROLE) {
6811            ObjectType::Role
6812        } else if self.parse_keyword(Keyword::SCHEMA) {
6813            ObjectType::Schema
6814        } else if self.parse_keyword(Keyword::DATABASE) {
6815            ObjectType::Database
6816        } else if self.parse_keyword(Keyword::SEQUENCE) {
6817            ObjectType::Sequence
6818        } else if self.parse_keyword(Keyword::STAGE) {
6819            ObjectType::Stage
6820        } else if self.parse_keyword(Keyword::TYPE) {
6821            ObjectType::Type
6822        } else if self.parse_keyword(Keyword::USER) {
6823            ObjectType::User
6824        } else if self.parse_keyword(Keyword::STREAM) {
6825            ObjectType::Stream
6826        } else if self.parse_keyword(Keyword::FUNCTION) {
6827            return self.parse_drop_function();
6828        } else if self.parse_keyword(Keyword::POLICY) {
6829            return self.parse_drop_policy();
6830        } else if self.parse_keyword(Keyword::CONNECTOR) {
6831            return self.parse_drop_connector();
6832        } else if self.parse_keyword(Keyword::DOMAIN) {
6833            return self.parse_drop_domain();
6834        } else if self.parse_keyword(Keyword::PROCEDURE) {
6835            return self.parse_drop_procedure();
6836        } else if self.parse_keyword(Keyword::SECRET) {
6837            return self.parse_drop_secret(temporary, persistent);
6838        } else if self.parse_keyword(Keyword::TRIGGER) {
6839            return self.parse_drop_trigger();
6840        } else if self.parse_keyword(Keyword::EXTENSION) {
6841            return self.parse_drop_extension();
6842        } else if self.parse_keyword(Keyword::OPERATOR) {
6843            // Check if this is DROP OPERATOR FAMILY or DROP OPERATOR CLASS
6844            return if self.parse_keyword(Keyword::FAMILY) {
6845                self.parse_drop_operator_family()
6846            } else if self.parse_keyword(Keyword::CLASS) {
6847                self.parse_drop_operator_class()
6848            } else {
6849                self.parse_drop_operator()
6850            };
6851        } else {
6852            return self.expected(
6853                "CONNECTOR, DATABASE, EXTENSION, FUNCTION, INDEX, OPERATOR, POLICY, PROCEDURE, ROLE, SCHEMA, SECRET, SEQUENCE, STAGE, TABLE, TRIGGER, TYPE, VIEW, MATERIALIZED VIEW or USER after DROP",
6854                self.peek_token(),
6855            );
6856        };
6857        // Many dialects support the non-standard `IF EXISTS` clause and allow
6858        // specifying multiple objects to delete in a single statement
6859        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6860        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
6861
6862        let loc = self.peek_token().span.start;
6863        let cascade = self.parse_keyword(Keyword::CASCADE);
6864        let restrict = self.parse_keyword(Keyword::RESTRICT);
6865        let purge = self.parse_keyword(Keyword::PURGE);
6866        if cascade && restrict {
6867            return parser_err!("Cannot specify both CASCADE and RESTRICT in DROP", loc);
6868        }
6869        if object_type == ObjectType::Role && (cascade || restrict || purge) {
6870            return parser_err!(
6871                "Cannot specify CASCADE, RESTRICT, or PURGE in DROP ROLE",
6872                loc
6873            );
6874        }
6875        let table = if self.parse_keyword(Keyword::ON) {
6876            Some(self.parse_object_name(false)?)
6877        } else {
6878            None
6879        };
6880        Ok(Statement::Drop {
6881            object_type,
6882            if_exists,
6883            names,
6884            cascade,
6885            restrict,
6886            purge,
6887            temporary,
6888            table,
6889        })
6890    }
6891
6892    fn parse_optional_drop_behavior(&mut self) -> Option<DropBehavior> {
6893        match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
6894            Some(Keyword::CASCADE) => Some(DropBehavior::Cascade),
6895            Some(Keyword::RESTRICT) => Some(DropBehavior::Restrict),
6896            _ => None,
6897        }
6898    }
6899
6900    /// ```sql
6901    /// DROP FUNCTION [ IF EXISTS ] name [ ( [ [ argmode ] [ argname ] argtype [, ...] ] ) ] [, ...]
6902    /// [ CASCADE | RESTRICT ]
6903    /// ```
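    ///
    /// For example (identifiers are illustrative):
    /// ```sql
    /// DROP FUNCTION IF EXISTS sqrt(integer) CASCADE
    /// ```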
6904    fn parse_drop_function(&mut self) -> Result<Statement, ParserError> {
6905        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6906        let func_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
6907        let drop_behavior = self.parse_optional_drop_behavior();
6908        Ok(Statement::DropFunction(DropFunction {
6909            if_exists,
6910            func_desc,
6911            drop_behavior,
6912        }))
6913    }
6914
6915    /// ```sql
6916    /// DROP POLICY [ IF EXISTS ] name ON table_name [ CASCADE | RESTRICT ]
6917    /// ```
6918    ///
6919    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-droppolicy.html)
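    ///
    /// For example (identifiers are illustrative):
    /// ```sql
    /// DROP POLICY IF EXISTS my_policy ON my_table RESTRICT
    /// ```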
6920    fn parse_drop_policy(&mut self) -> Result<Statement, ParserError> {
6921        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6922        let name = self.parse_identifier()?;
6923        self.expect_keyword_is(Keyword::ON)?;
6924        let table_name = self.parse_object_name(false)?;
6925        let drop_behavior = self.parse_optional_drop_behavior();
6926        Ok(Statement::DropPolicy {
6927            if_exists,
6928            name,
6929            table_name,
6930            drop_behavior,
6931        })
6932    }
6933    /// ```sql
6934    /// DROP CONNECTOR [IF EXISTS] name
6935    /// ```
6936    ///
6937    /// See [Hive](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362034#LanguageManualDDL-DropConnector)
6938    fn parse_drop_connector(&mut self) -> Result<Statement, ParserError> {
6939        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6940        let name = self.parse_identifier()?;
6941        Ok(Statement::DropConnector { if_exists, name })
6942    }
6943
6944    /// ```sql
6945    /// DROP DOMAIN [ IF EXISTS ] name [ CASCADE | RESTRICT ]
6946    /// ```
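    ///
    /// For example (the domain name is illustrative):
    /// ```sql
    /// DROP DOMAIN IF EXISTS my_domain CASCADE
    /// ```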
6947    fn parse_drop_domain(&mut self) -> Result<Statement, ParserError> {
6948        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6949        let name = self.parse_object_name(false)?;
6950        let drop_behavior = self.parse_optional_drop_behavior();
6951        Ok(Statement::DropDomain(DropDomain {
6952            if_exists,
6953            name,
6954            drop_behavior,
6955        }))
6956    }
6957
6958    /// ```sql
6959    /// DROP PROCEDURE [ IF EXISTS ] name [ ( [ [ argmode ] [ argname ] argtype [, ...] ] ) ] [, ...]
6960    /// [ CASCADE | RESTRICT ]
6961    /// ```
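    ///
    /// For example (identifiers and argument types are illustrative):
    /// ```sql
    /// DROP PROCEDURE IF EXISTS my_proc(integer, text) RESTRICT
    /// ```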
6962    fn parse_drop_procedure(&mut self) -> Result<Statement, ParserError> {
6963        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6964        let proc_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
6965        let drop_behavior = self.parse_optional_drop_behavior();
6966        Ok(Statement::DropProcedure {
6967            if_exists,
6968            proc_desc,
6969            drop_behavior,
6970        })
6971    }
6972
6973    fn parse_function_desc(&mut self) -> Result<FunctionDesc, ParserError> {
6974        let name = self.parse_object_name(false)?;
6975
6976        let args = if self.consume_token(&Token::LParen) {
6977            if self.consume_token(&Token::RParen) {
6978                Some(vec![])
6979            } else {
6980                let args = self.parse_comma_separated(Parser::parse_function_arg)?;
6981                self.expect_token(&Token::RParen)?;
6982                Some(args)
6983            }
6984        } else {
6985            None
6986        };
6987
6988        Ok(FunctionDesc { name, args })
6989    }
6990
6991    /// See [DuckDB Docs](https://duckdb.org/docs/sql/statements/create_secret.html) for more details.
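    ///
    /// For example, a statement of the following shape is expected to parse
    /// (the secret name is illustrative):
    /// ```sql
    /// DROP TEMPORARY SECRET IF EXISTS my_secret
    /// ```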
6992    fn parse_drop_secret(
6993        &mut self,
6994        temporary: bool,
6995        persistent: bool,
6996    ) -> Result<Statement, ParserError> {
6997        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6998        let name = self.parse_identifier()?;
6999        let storage_specifier = if self.parse_keyword(Keyword::FROM) {
7000            self.parse_identifier().ok()
7001        } else {
7002            None
7003        };
7004        let temp = match (temporary, persistent) {
7005            (true, false) => Some(true),
7006            (false, true) => Some(false),
7007            (false, false) => None,
7008            _ => self.expected("TEMPORARY or PERSISTENT", self.peek_token())?,
7009        };
7010
7011        Ok(Statement::DropSecret {
7012            if_exists,
7013            temporary: temp,
7014            name,
7015            storage_specifier,
7016        })
7017    }
7018
7019    /// Parse a `DECLARE` statement.
7020    ///
7021    /// ```sql
7022    /// DECLARE name [ BINARY ] [ ASENSITIVE | INSENSITIVE ] [ [ NO ] SCROLL ]
7023    ///     CURSOR [ { WITH | WITHOUT } HOLD ] FOR query
7024    /// ```
7025    ///
7026    /// The syntax can vary significantly between warehouses. See the grammar
7027    /// documented on the warehouse-specific functions in such cases.
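    ///
    /// For example, a cursor declaration of the following shape is expected to
    /// parse (identifiers are illustrative):
    /// ```sql
    /// DECLARE my_cursor CURSOR WITH HOLD FOR SELECT * FROM my_table
    /// ```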
7028    pub fn parse_declare(&mut self) -> Result<Statement, ParserError> {
7029        if dialect_of!(self is BigQueryDialect) {
7030            return self.parse_big_query_declare();
7031        }
7032        if dialect_of!(self is SnowflakeDialect) {
7033            return self.parse_snowflake_declare();
7034        }
7035        if dialect_of!(self is MsSqlDialect) {
7036            return self.parse_mssql_declare();
7037        }
7038
7039        let name = self.parse_identifier()?;
7040
7041        let binary = Some(self.parse_keyword(Keyword::BINARY));
7042        let sensitive = if self.parse_keyword(Keyword::INSENSITIVE) {
7043            Some(true)
7044        } else if self.parse_keyword(Keyword::ASENSITIVE) {
7045            Some(false)
7046        } else {
7047            None
7048        };
7049        let scroll = if self.parse_keyword(Keyword::SCROLL) {
7050            Some(true)
7051        } else if self.parse_keywords(&[Keyword::NO, Keyword::SCROLL]) {
7052            Some(false)
7053        } else {
7054            None
7055        };
7056
7057        self.expect_keyword_is(Keyword::CURSOR)?;
7058        let declare_type = Some(DeclareType::Cursor);
7059
7060        let hold = match self.parse_one_of_keywords(&[Keyword::WITH, Keyword::WITHOUT]) {
7061            Some(keyword) => {
7062                self.expect_keyword_is(Keyword::HOLD)?;
7063
7064                match keyword {
7065                    Keyword::WITH => Some(true),
7066                    Keyword::WITHOUT => Some(false),
7067                    unexpected_keyword => return Err(ParserError::ParserError(
7068                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in cursor hold"),
7069                    )),
7070                }
7071            }
7072            None => None,
7073        };
7074
7075        self.expect_keyword_is(Keyword::FOR)?;
7076
7077        let query = Some(self.parse_query()?);
7078
7079        Ok(Statement::Declare {
7080            stmts: vec![Declare {
7081                names: vec![name],
7082                data_type: None,
7083                assignment: None,
7084                declare_type,
7085                binary,
7086                sensitive,
7087                scroll,
7088                hold,
7089                for_query: query,
7090            }],
7091        })
7092    }
7093
7094    /// Parse a [BigQuery] `DECLARE` statement.
7095    ///
7096    /// Syntax:
7097    /// ```text
7098    /// DECLARE variable_name[, ...] [{ <variable_type> | <DEFAULT expression> }];
7099    /// ```
7100    /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#declare
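    ///
    /// For example (variable names are illustrative):
    /// ```sql
    /// DECLARE x, y INT64 DEFAULT 0
    /// ```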
7101    pub fn parse_big_query_declare(&mut self) -> Result<Statement, ParserError> {
7102        let names = self.parse_comma_separated(Parser::parse_identifier)?;
7103
7104        let data_type = match self.peek_token().token {
7105            Token::Word(w) if w.keyword == Keyword::DEFAULT => None,
7106            _ => Some(self.parse_data_type()?),
7107        };
7108
7109        let expr = if data_type.is_some() {
7110            if self.parse_keyword(Keyword::DEFAULT) {
7111                Some(self.parse_expr()?)
7112            } else {
7113                None
7114            }
7115        } else {
7116            // If no variable type is given, a default expression must be specified, per BQ docs.
7117            // i.e. `DECLARE foo;` is invalid.
7118            self.expect_keyword_is(Keyword::DEFAULT)?;
7119            Some(self.parse_expr()?)
7120        };
7121
7122        Ok(Statement::Declare {
7123            stmts: vec![Declare {
7124                names,
7125                data_type,
7126                assignment: expr.map(|expr| DeclareAssignment::Default(Box::new(expr))),
7127                declare_type: None,
7128                binary: None,
7129                sensitive: None,
7130                scroll: None,
7131                hold: None,
7132                for_query: None,
7133            }],
7134        })
7135    }
7136
7137    /// Parse a [Snowflake] `DECLARE` statement.
7138    ///
7139    /// Syntax:
7140    /// ```text
7141    /// DECLARE
7142    ///   [{ <variable_declaration>
7143    ///      | <cursor_declaration>
7144    ///      | <resultset_declaration>
7145    ///      | <exception_declaration> }; ... ]
7146    ///
7147    /// <variable_declaration>
7148    /// <variable_name> [<type>] [ { DEFAULT | := } <expression>]
7149    ///
7150    /// <cursor_declaration>
7151    /// <cursor_name> CURSOR FOR <query>
7152    ///
7153    /// <resultset_declaration>
7154    /// <resultset_name> RESULTSET [ { DEFAULT | := } ( <query> ) ] ;
7155    ///
7156    /// <exception_declaration>
7157    /// <exception_name> EXCEPTION [ ( <exception_number> , '<exception_message>' ) ] ;
7158    /// ```
7159    ///
7160    /// [Snowflake]: https://docs.snowflake.com/en/sql-reference/snowflake-scripting/declare
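    ///
    /// For example, declarations of the following shape are expected to parse
    /// (identifiers are illustrative):
    /// ```sql
    /// DECLARE
    ///   profit NUMBER(38, 2) DEFAULT 0.0;
    ///   c1 CURSOR FOR SELECT price FROM invoices;
    /// ```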
7161    pub fn parse_snowflake_declare(&mut self) -> Result<Statement, ParserError> {
7162        let mut stmts = vec![];
7163        loop {
7164            let name = self.parse_identifier()?;
7165            let (declare_type, for_query, assigned_expr, data_type) =
7166                if self.parse_keyword(Keyword::CURSOR) {
7167                    self.expect_keyword_is(Keyword::FOR)?;
7168                    match self.peek_token().token {
7169                        Token::Word(w) if w.keyword == Keyword::SELECT => (
7170                            Some(DeclareType::Cursor),
7171                            Some(self.parse_query()?),
7172                            None,
7173                            None,
7174                        ),
7175                        _ => (
7176                            Some(DeclareType::Cursor),
7177                            None,
7178                            Some(DeclareAssignment::For(Box::new(self.parse_expr()?))),
7179                            None,
7180                        ),
7181                    }
7182                } else if self.parse_keyword(Keyword::RESULTSET) {
7183                    let assigned_expr = if self.peek_token().token != Token::SemiColon {
7184                        self.parse_snowflake_variable_declaration_expression()?
7185                    } else {
7186                        // Nothing more to do. The statement has no further parameters.
7187                        None
7188                    };
7189
7190                    (Some(DeclareType::ResultSet), None, assigned_expr, None)
7191                } else if self.parse_keyword(Keyword::EXCEPTION) {
7192                    let assigned_expr = if self.peek_token().token == Token::LParen {
7193                        Some(DeclareAssignment::Expr(Box::new(self.parse_expr()?)))
7194                    } else {
7195                        // Nothing more to do. The statement has no further parameters.
7196                        None
7197                    };
7198
7199                    (Some(DeclareType::Exception), None, assigned_expr, None)
7200                } else {
7201                    // Without an explicit keyword, the only valid option is variable declaration.
7202                    let (assigned_expr, data_type) = if let Some(assigned_expr) =
7203                        self.parse_snowflake_variable_declaration_expression()?
7204                    {
7205                        (Some(assigned_expr), None)
7206                    } else if let Token::Word(_) = self.peek_token().token {
7207                        let data_type = self.parse_data_type()?;
7208                        (
7209                            self.parse_snowflake_variable_declaration_expression()?,
7210                            Some(data_type),
7211                        )
7212                    } else {
7213                        (None, None)
7214                    };
7215                    (None, None, assigned_expr, data_type)
7216                };
7217            let stmt = Declare {
7218                names: vec![name],
7219                data_type,
7220                assignment: assigned_expr,
7221                declare_type,
7222                binary: None,
7223                sensitive: None,
7224                scroll: None,
7225                hold: None,
7226                for_query,
7227            };
7228
7229            stmts.push(stmt);
7230            if self.consume_token(&Token::SemiColon) {
7231                match self.peek_token().token {
7232                    Token::Word(w)
7233                        if ALL_KEYWORDS
7234                            .binary_search(&w.value.to_uppercase().as_str())
7235                            .is_err() =>
7236                    {
7237                        // Not a keyword - start of a new declaration.
7238                        continue;
7239                    }
7240                    _ => {
7241                        // Put back the semicolon, this is the end of the DECLARE statement.
7242                        self.prev_token();
7243                    }
7244                }
7245            }
7246
7247            break;
7248        }
7249
7250        Ok(Statement::Declare { stmts })
7251    }
7252
7253    /// Parse a [MsSql] `DECLARE` statement.
7254    ///
7255    /// Syntax:
7256    /// ```text
7257    /// DECLARE
7258    /// {
7259    ///   { @local_variable [AS] data_type [ = value ] }
7260    ///   | { @cursor_variable_name CURSOR [ FOR ] }
7261    /// } [ ,...n ]
7262    /// ```
7263    /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/language-elements/declare-local-variable-transact-sql?view=sql-server-ver16
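    ///
    /// For example (variable names are illustrative):
    /// ```sql
    /// DECLARE @start_date DATE = '2025-01-01', @max_rows INT
    /// ```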
7264    pub fn parse_mssql_declare(&mut self) -> Result<Statement, ParserError> {
7265        let stmts = self.parse_comma_separated(Parser::parse_mssql_declare_stmt)?;
7266
7267        Ok(Statement::Declare { stmts })
7268    }
7269
7270    /// Parse the body of a [MsSql] `DECLARE` statement.
7271    ///
7272    /// Syntax:
7273    /// ```text
7274    /// {
7275    ///   { @local_variable [AS] data_type [ = value ] }
7276    ///   | { @cursor_variable_name CURSOR [ FOR ] }
7277    /// } [ ,...n ]
7278    /// ```
7279    /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/language-elements/declare-local-variable-transact-sql?view=sql-server-ver16
7280    pub fn parse_mssql_declare_stmt(&mut self) -> Result<Declare, ParserError> {
7281        let name = {
7282            let ident = self.parse_identifier()?;
7283            if !ident.value.starts_with('@')
7284                && !matches!(
7285                    self.peek_token().token,
7286                    Token::Word(w) if w.keyword == Keyword::CURSOR
7287                )
7288            {
7289                Err(ParserError::TokenizerError(
7290                    "Invalid MsSql variable declaration.".to_string(),
7291                ))
7292            } else {
7293                Ok(ident)
7294            }
7295        }?;
7296
7297        let (declare_type, data_type) = match self.peek_token().token {
7298            Token::Word(w) => match w.keyword {
7299                Keyword::CURSOR => {
7300                    self.next_token();
7301                    (Some(DeclareType::Cursor), None)
7302                }
7303                Keyword::AS => {
7304                    self.next_token();
7305                    (None, Some(self.parse_data_type()?))
7306                }
7307                _ => (None, Some(self.parse_data_type()?)),
7308            },
7309            _ => (None, Some(self.parse_data_type()?)),
7310        };
7311
7312        let (for_query, assignment) = if self.peek_keyword(Keyword::FOR) {
7313            self.next_token();
7314            let query = Some(self.parse_query()?);
7315            (query, None)
7316        } else {
7317            let assignment = self.parse_mssql_variable_declaration_expression()?;
7318            (None, assignment)
7319        };
7320
7321        Ok(Declare {
7322            names: vec![name],
7323            data_type,
7324            assignment,
7325            declare_type,
7326            binary: None,
7327            sensitive: None,
7328            scroll: None,
7329            hold: None,
7330            for_query,
7331        })
7332    }
7333
7334    /// Parses the assigned expression in a variable declaration.
7335    ///
7336    /// Syntax:
7337    /// ```text
7338    /// [ { DEFAULT | := } <expression>]
7339    /// ```
7340    /// <https://docs.snowflake.com/en/sql-reference/snowflake-scripting/declare#variable-declaration-syntax>
7341    pub fn parse_snowflake_variable_declaration_expression(
7342        &mut self,
7343    ) -> Result<Option<DeclareAssignment>, ParserError> {
7344        Ok(match self.peek_token().token {
7345            Token::Word(w) if w.keyword == Keyword::DEFAULT => {
7346                self.next_token(); // Skip `DEFAULT`
7347                Some(DeclareAssignment::Default(Box::new(self.parse_expr()?)))
7348            }
7349            Token::Assignment => {
7350                self.next_token(); // Skip `:=`
7351                Some(DeclareAssignment::DuckAssignment(Box::new(
7352                    self.parse_expr()?,
7353                )))
7354            }
7355            _ => None,
7356        })
7357    }
7358
7359    /// Parses the assigned expression in a variable declaration.
7360    ///
7361    /// Syntax:
7362    /// ```text
7363    /// [ = <expression>]
7364    /// ```
7365    pub fn parse_mssql_variable_declaration_expression(
7366        &mut self,
7367    ) -> Result<Option<DeclareAssignment>, ParserError> {
7368        Ok(match self.peek_token().token {
7369            Token::Eq => {
7370                self.next_token(); // Skip `=`
7371                Some(DeclareAssignment::MsSqlAssignment(Box::new(
7372                    self.parse_expr()?,
7373                )))
7374            }
7375            _ => None,
7376        })
7377    }
7378
7379    // FETCH direction { FROM | IN } cursor [ INTO target ]
7380    pub fn parse_fetch_statement(&mut self) -> Result<Statement, ParserError> {
7381        let direction = if self.parse_keyword(Keyword::NEXT) {
7382            FetchDirection::Next
7383        } else if self.parse_keyword(Keyword::PRIOR) {
7384            FetchDirection::Prior
7385        } else if self.parse_keyword(Keyword::FIRST) {
7386            FetchDirection::First
7387        } else if self.parse_keyword(Keyword::LAST) {
7388            FetchDirection::Last
7389        } else if self.parse_keyword(Keyword::ABSOLUTE) {
7390            FetchDirection::Absolute {
7391                limit: self.parse_number_value()?.value,
7392            }
7393        } else if self.parse_keyword(Keyword::RELATIVE) {
7394            FetchDirection::Relative {
7395                limit: self.parse_number_value()?.value,
7396            }
7397        } else if self.parse_keyword(Keyword::FORWARD) {
7398            if self.parse_keyword(Keyword::ALL) {
7399                FetchDirection::ForwardAll
7400            } else {
7401                FetchDirection::Forward {
7402                    // TODO: Support optional
7403                    limit: Some(self.parse_number_value()?.value),
7404                }
7405            }
7406        } else if self.parse_keyword(Keyword::BACKWARD) {
7407            if self.parse_keyword(Keyword::ALL) {
7408                FetchDirection::BackwardAll
7409            } else {
7410                FetchDirection::Backward {
7411                    // TODO: Support optional
7412                    limit: Some(self.parse_number_value()?.value),
7413                }
7414            }
7415        } else if self.parse_keyword(Keyword::ALL) {
7416            FetchDirection::All
7417        } else {
7418            FetchDirection::Count {
7419                limit: self.parse_number_value()?.value,
7420            }
7421        };
7422
7423        let position = if self.peek_keyword(Keyword::FROM) {
7424            self.expect_keyword(Keyword::FROM)?;
7425            FetchPosition::From
7426        } else if self.peek_keyword(Keyword::IN) {
7427            self.expect_keyword(Keyword::IN)?;
7428            FetchPosition::In
7429        } else {
7430            return parser_err!("Expected FROM or IN", self.peek_token().span.start);
7431        };
7432
7433        let name = self.parse_identifier()?;
7434
7435        let into = if self.parse_keyword(Keyword::INTO) {
7436            Some(self.parse_object_name(false)?)
7437        } else {
7438            None
7439        };
7440
7441        Ok(Statement::Fetch {
7442            name,
7443            direction,
7444            position,
7445            into,
7446        })
7447    }
7448
7449    pub fn parse_discard(&mut self) -> Result<Statement, ParserError> {
7450        let object_type = if self.parse_keyword(Keyword::ALL) {
7451            DiscardObject::ALL
7452        } else if self.parse_keyword(Keyword::PLANS) {
7453            DiscardObject::PLANS
7454        } else if self.parse_keyword(Keyword::SEQUENCES) {
7455            DiscardObject::SEQUENCES
7456        } else if self.parse_keyword(Keyword::TEMP) || self.parse_keyword(Keyword::TEMPORARY) {
7457            DiscardObject::TEMP
7458        } else {
7459            return self.expected(
7460                "ALL, PLANS, SEQUENCES, TEMP or TEMPORARY after DISCARD",
7461                self.peek_token(),
7462            );
7463        };
7464        Ok(Statement::Discard { object_type })
7465    }
7466
7467    pub fn parse_create_index(&mut self, unique: bool) -> Result<Statement, ParserError> {
7468        let concurrently = self.parse_keyword(Keyword::CONCURRENTLY);
7469        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7470
7471        let mut using = None;
7472
7473        let index_name = if if_not_exists || !self.parse_keyword(Keyword::ON) {
7474            let index_name = self.parse_object_name(false)?;
7475            // MySQL allows `USING index_type` either before or after `ON table_name`
7476            using = self.parse_optional_using_then_index_type()?;
7477            self.expect_keyword_is(Keyword::ON)?;
7478            Some(index_name)
7479        } else {
7480            None
7481        };
7482
7483        let table_name = self.parse_object_name(false)?;
7484
7485        // MySQL allows having two `USING` clauses.
7486        // In that case, the second clause overwrites the first.
7487        using = self.parse_optional_using_then_index_type()?.or(using);
7488
7489        let columns = self.parse_parenthesized_index_column_list()?;
7490
7491        let include = if self.parse_keyword(Keyword::INCLUDE) {
7492            self.expect_token(&Token::LParen)?;
7493            let columns = self.parse_comma_separated(|p| p.parse_identifier())?;
7494            self.expect_token(&Token::RParen)?;
7495            columns
7496        } else {
7497            vec![]
7498        };
7499
7500        let nulls_distinct = if self.parse_keyword(Keyword::NULLS) {
7501            let not = self.parse_keyword(Keyword::NOT);
7502            self.expect_keyword_is(Keyword::DISTINCT)?;
7503            Some(!not)
7504        } else {
7505            None
7506        };
7507
7508        let with = if self.dialect.supports_create_index_with_clause()
7509            && self.parse_keyword(Keyword::WITH)
7510        {
7511            self.expect_token(&Token::LParen)?;
7512            let with_params = self.parse_comma_separated(Parser::parse_expr)?;
7513            self.expect_token(&Token::RParen)?;
7514            with_params
7515        } else {
7516            Vec::new()
7517        };
7518
7519        let predicate = if self.parse_keyword(Keyword::WHERE) {
7520            Some(self.parse_expr()?)
7521        } else {
7522            None
7523        };
7524
7525        // MySQL options (including the modern style of `USING` after the column list, rather than
7526        // the deprecated position before it) shouldn't conflict with other preceding options (e.g. `WITH
7527        // PARSER` won't be caught by the above `WITH` clause parsing because MySQL doesn't set that
7528        // support flag). This is probably invalid syntax for other dialects, but it is simpler to
7529        // parse it anyway (as we do inside `ALTER TABLE` and `CREATE TABLE` parsing).
7530        let index_options = self.parse_index_options()?;
7531
7532        // MySQL allows `ALGORITHM` and `LOCK` options. Unlike in `ALTER TABLE`, they need not be comma separated.
7533        let mut alter_options = Vec::new();
7534        while self
7535            .peek_one_of_keywords(&[Keyword::ALGORITHM, Keyword::LOCK])
7536            .is_some()
7537        {
7538            alter_options.push(self.parse_alter_table_operation()?)
7539        }
7540
7541        Ok(Statement::CreateIndex(CreateIndex {
7542            name: index_name,
7543            table_name,
7544            using,
7545            columns,
7546            unique,
7547            concurrently,
7548            if_not_exists,
7549            include,
7550            nulls_distinct,
7551            with,
7552            predicate,
7553            index_options,
7554            alter_options,
7555        }))
7556    }
7557
7558    pub fn parse_create_extension(&mut self) -> Result<Statement, ParserError> {
7559        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7560        let name = self.parse_identifier()?;
7561
7562        let (schema, version, cascade) = if self.parse_keyword(Keyword::WITH) {
7563            let schema = if self.parse_keyword(Keyword::SCHEMA) {
7564                Some(self.parse_identifier()?)
7565            } else {
7566                None
7567            };
7568
7569            let version = if self.parse_keyword(Keyword::VERSION) {
7570                Some(self.parse_identifier()?)
7571            } else {
7572                None
7573            };
7574
7575            let cascade = self.parse_keyword(Keyword::CASCADE);
7576
7577            (schema, version, cascade)
7578        } else {
7579            (None, None, false)
7580        };
7581
7582        Ok(CreateExtension {
7583            name,
7584            if_not_exists,
7585            schema,
7586            version,
7587            cascade,
7588        }
7589        .into())
7590    }
7591
7592    /// Parse a PostgreSQL-specific [Statement::DropExtension] statement.
7593    pub fn parse_drop_extension(&mut self) -> Result<Statement, ParserError> {
7594        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7595        let names = self.parse_comma_separated(|p| p.parse_identifier())?;
7596        let cascade_or_restrict =
7597            self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]);
7598        Ok(Statement::DropExtension(DropExtension {
7599            names,
7600            if_exists,
7601            cascade_or_restrict: cascade_or_restrict
7602                .map(|k| match k {
7603                    Keyword::CASCADE => Ok(ReferentialAction::Cascade),
7604                    Keyword::RESTRICT => Ok(ReferentialAction::Restrict),
7605                    _ => self.expected("CASCADE or RESTRICT", self.peek_token()),
7606                })
7607                .transpose()?,
7608        }))
7609    }
7610
7611    /// Parse a [Statement::DropOperator] statement.
7612    ///
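    /// For example, a statement of the following shape is expected to parse
    /// (the operator and operand types are illustrative):
    /// ```sql
    /// DROP OPERATOR IF EXISTS ^ (integer, integer) CASCADE
    /// ```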
7613    pub fn parse_drop_operator(&mut self) -> Result<Statement, ParserError> {
7614        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7615        let operators = self.parse_comma_separated(|p| p.parse_drop_operator_signature())?;
7616        let drop_behavior = self.parse_optional_drop_behavior();
7617        Ok(Statement::DropOperator(DropOperator {
7618            if_exists,
7619            operators,
7620            drop_behavior,
7621        }))
7622    }
7623
7624    /// Parse an operator signature for a [Statement::DropOperator]
7625    /// Format: `name ( { left_type | NONE } , right_type )`
7626    fn parse_drop_operator_signature(&mut self) -> Result<DropOperatorSignature, ParserError> {
7627        let name = self.parse_operator_name()?;
7628        self.expect_token(&Token::LParen)?;
7629
7630        // Parse left operand type (or NONE for prefix operators)
7631        let left_type = if self.parse_keyword(Keyword::NONE) {
7632            None
7633        } else {
7634            Some(self.parse_data_type()?)
7635        };
7636
7637        self.expect_token(&Token::Comma)?;
7638
7639        // Parse right operand type (always required)
7640        let right_type = self.parse_data_type()?;
7641
7642        self.expect_token(&Token::RParen)?;
7643
7644        Ok(DropOperatorSignature {
7645            name,
7646            left_type,
7647            right_type,
7648        })
7649    }
7650
7651    /// Parse a [Statement::DropOperatorFamily]
7652    ///
7653    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-dropopfamily.html)
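    ///
    /// For example (the family and index method names are illustrative):
    /// ```sql
    /// DROP OPERATOR FAMILY IF EXISTS float_ops USING btree CASCADE
    /// ```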
7654    pub fn parse_drop_operator_family(&mut self) -> Result<Statement, ParserError> {
7655        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7656        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
7657        self.expect_keyword(Keyword::USING)?;
7658        let using = self.parse_identifier()?;
7659        let drop_behavior = self.parse_optional_drop_behavior();
7660        Ok(Statement::DropOperatorFamily(DropOperatorFamily {
7661            if_exists,
7662            names,
7663            using,
7664            drop_behavior,
7665        }))
7666    }
7667
7668    /// Parse a [Statement::DropOperatorClass]
7669    ///
7670    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-dropopclass.html)
7671    pub fn parse_drop_operator_class(&mut self) -> Result<Statement, ParserError> {
7672        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7673        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
7674        self.expect_keyword(Keyword::USING)?;
7675        let using = self.parse_identifier()?;
7676        let drop_behavior = self.parse_optional_drop_behavior();
7677        Ok(Statement::DropOperatorClass(DropOperatorClass {
7678            if_exists,
7679            names,
7680            using,
7681            drop_behavior,
7682        }))
7683    }
7684
7685    // TODO: Implement parsing for Skewed
7686    pub fn parse_hive_distribution(&mut self) -> Result<HiveDistributionStyle, ParserError> {
7687        if self.parse_keywords(&[Keyword::PARTITIONED, Keyword::BY]) {
7688            self.expect_token(&Token::LParen)?;
7689            let columns = self.parse_comma_separated(Parser::parse_column_def)?;
7690            self.expect_token(&Token::RParen)?;
7691            Ok(HiveDistributionStyle::PARTITIONED { columns })
7692        } else {
7693            Ok(HiveDistributionStyle::NONE)
7694        }
7695    }
7696
7697    pub fn parse_hive_formats(&mut self) -> Result<Option<HiveFormat>, ParserError> {
7698        let mut hive_format: Option<HiveFormat> = None;
7699        loop {
7700            match self.parse_one_of_keywords(&[
7701                Keyword::ROW,
7702                Keyword::STORED,
7703                Keyword::LOCATION,
7704                Keyword::WITH,
7705            ]) {
7706                Some(Keyword::ROW) => {
7707                    hive_format
7708                        .get_or_insert_with(HiveFormat::default)
7709                        .row_format = Some(self.parse_row_format()?);
7710                }
7711                Some(Keyword::STORED) => {
7712                    self.expect_keyword_is(Keyword::AS)?;
7713                    if self.parse_keyword(Keyword::INPUTFORMAT) {
7714                        let input_format = self.parse_expr()?;
7715                        self.expect_keyword_is(Keyword::OUTPUTFORMAT)?;
7716                        let output_format = self.parse_expr()?;
7717                        hive_format.get_or_insert_with(HiveFormat::default).storage =
7718                            Some(HiveIOFormat::IOF {
7719                                input_format,
7720                                output_format,
7721                            });
7722                    } else {
7723                        let format = self.parse_file_format()?;
7724                        hive_format.get_or_insert_with(HiveFormat::default).storage =
7725                            Some(HiveIOFormat::FileFormat { format });
7726                    }
7727                }
7728                Some(Keyword::LOCATION) => {
7729                    hive_format.get_or_insert_with(HiveFormat::default).location =
7730                        Some(self.parse_literal_string()?);
7731                }
7732                Some(Keyword::WITH) => {
7733                    self.prev_token();
7734                    let properties = self
7735                        .parse_options_with_keywords(&[Keyword::WITH, Keyword::SERDEPROPERTIES])?;
7736                    if !properties.is_empty() {
7737                        hive_format
7738                            .get_or_insert_with(HiveFormat::default)
7739                            .serde_properties = Some(properties);
7740                    } else {
7741                        break;
7742                    }
7743                }
7744                None => break,
7745                _ => break,
7746            }
7747        }
7748
7749        Ok(hive_format)
7750    }
7751
7752    pub fn parse_row_format(&mut self) -> Result<HiveRowFormat, ParserError> {
7753        self.expect_keyword_is(Keyword::FORMAT)?;
7754        match self.parse_one_of_keywords(&[Keyword::SERDE, Keyword::DELIMITED]) {
7755            Some(Keyword::SERDE) => {
7756                let class = self.parse_literal_string()?;
7757                Ok(HiveRowFormat::SERDE { class })
7758            }
7759            _ => {
7760                let mut row_delimiters = vec![];
7761
7762                loop {
7763                    match self.parse_one_of_keywords(&[
7764                        Keyword::FIELDS,
7765                        Keyword::COLLECTION,
7766                        Keyword::MAP,
7767                        Keyword::LINES,
7768                        Keyword::NULL,
7769                    ]) {
7770                        Some(Keyword::FIELDS) => {
7771                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
7772                                row_delimiters.push(HiveRowDelimiter {
7773                                    delimiter: HiveDelimiter::FieldsTerminatedBy,
7774                                    char: self.parse_identifier()?,
7775                                });
7776
7777                                if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) {
7778                                    row_delimiters.push(HiveRowDelimiter {
7779                                        delimiter: HiveDelimiter::FieldsEscapedBy,
7780                                        char: self.parse_identifier()?,
7781                                    });
7782                                }
7783                            } else {
7784                                break;
7785                            }
7786                        }
7787                        Some(Keyword::COLLECTION) => {
7788                            if self.parse_keywords(&[
7789                                Keyword::ITEMS,
7790                                Keyword::TERMINATED,
7791                                Keyword::BY,
7792                            ]) {
7793                                row_delimiters.push(HiveRowDelimiter {
7794                                    delimiter: HiveDelimiter::CollectionItemsTerminatedBy,
7795                                    char: self.parse_identifier()?,
7796                                });
7797                            } else {
7798                                break;
7799                            }
7800                        }
7801                        Some(Keyword::MAP) => {
7802                            if self.parse_keywords(&[
7803                                Keyword::KEYS,
7804                                Keyword::TERMINATED,
7805                                Keyword::BY,
7806                            ]) {
7807                                row_delimiters.push(HiveRowDelimiter {
7808                                    delimiter: HiveDelimiter::MapKeysTerminatedBy,
7809                                    char: self.parse_identifier()?,
7810                                });
7811                            } else {
7812                                break;
7813                            }
7814                        }
7815                        Some(Keyword::LINES) => {
7816                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
7817                                row_delimiters.push(HiveRowDelimiter {
7818                                    delimiter: HiveDelimiter::LinesTerminatedBy,
7819                                    char: self.parse_identifier()?,
7820                                });
7821                            } else {
7822                                break;
7823                            }
7824                        }
7825                        Some(Keyword::NULL) => {
7826                            if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) {
7827                                row_delimiters.push(HiveRowDelimiter {
7828                                    delimiter: HiveDelimiter::NullDefinedAs,
7829                                    char: self.parse_identifier()?,
7830                                });
7831                            } else {
7832                                break;
7833                            }
7834                        }
7835                        _ => {
7836                            break;
7837                        }
7838                    }
7839                }
7840
7841                Ok(HiveRowFormat::DELIMITED {
7842                    delimiters: row_delimiters,
7843                })
7844            }
7845        }
7846    }
7847
7848    fn parse_optional_on_cluster(&mut self) -> Result<Option<Ident>, ParserError> {
7849        if self.parse_keywords(&[Keyword::ON, Keyword::CLUSTER]) {
7850            Ok(Some(self.parse_identifier()?))
7851        } else {
7852            Ok(None)
7853        }
7854    }
7855
7856    pub fn parse_create_table(
7857        &mut self,
7858        or_replace: bool,
7859        temporary: bool,
7860        global: Option<bool>,
7861        transient: bool,
7862    ) -> Result<Statement, ParserError> {
7863        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
7864        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7865        let table_name = self.parse_object_name(allow_unquoted_hyphen)?;
7866
7867        // PostgreSQL PARTITION OF for child partition tables
7868        let partition_of = if dialect_of!(self is PostgreSqlDialect | GenericDialect)
7869            && self.parse_keywords(&[Keyword::PARTITION, Keyword::OF])
7870        {
7871            Some(self.parse_object_name(allow_unquoted_hyphen)?)
7872        } else {
7873            None
7874        };
7875
7876        // Clickhouse has `ON CLUSTER 'cluster'` syntax for DDLs
7877        let on_cluster = self.parse_optional_on_cluster()?;
7878
7879        let like = self.maybe_parse_create_table_like(allow_unquoted_hyphen)?;
7880
7881        let clone = if self.parse_keyword(Keyword::CLONE) {
7882            self.parse_object_name(allow_unquoted_hyphen).ok()
7883        } else {
7884            None
7885        };
7886
7887        // parse optional column list (schema)
7888        let (columns, constraints) = self.parse_columns()?;
7889        let comment_after_column_def =
7890            if dialect_of!(self is HiveDialect) && self.parse_keyword(Keyword::COMMENT) {
7891                let next_token = self.next_token();
7892                match next_token.token {
7893                    Token::SingleQuotedString(str) => Some(CommentDef::WithoutEq(str)),
7894                    _ => self.expected("comment", next_token)?,
7895                }
7896            } else {
7897                None
7898            };
7899
7900        // PostgreSQL PARTITION OF: partition bound specification
7901        let for_values = if partition_of.is_some() {
7902            Some(self.parse_partition_for_values()?)
7903        } else {
7904            None
7905        };
7906
7907        // SQLite supports `WITHOUT ROWID` at the end of `CREATE TABLE`
7908        let without_rowid = self.parse_keywords(&[Keyword::WITHOUT, Keyword::ROWID]);
7909
7910        let hive_distribution = self.parse_hive_distribution()?;
7911        let clustered_by = self.parse_optional_clustered_by()?;
7912        let hive_formats = self.parse_hive_formats()?;
7913
7914        let create_table_config = self.parse_optional_create_table_config()?;
7915
7916        // ClickHouse supports `PRIMARY KEY`, before `ORDER BY`
7917        // https://clickhouse.com/docs/en/sql-reference/statements/create/table#primary-key
7918        let primary_key = if dialect_of!(self is ClickHouseDialect | GenericDialect)
7919            && self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY])
7920        {
7921            Some(Box::new(self.parse_expr()?))
7922        } else {
7923            None
7924        };
7925
7926        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
7927            if self.consume_token(&Token::LParen) {
7928                let columns = if self.peek_token() != Token::RParen {
7929                    self.parse_comma_separated(|p| p.parse_expr())?
7930                } else {
7931                    vec![]
7932                };
7933                self.expect_token(&Token::RParen)?;
7934                Some(OneOrManyWithParens::Many(columns))
7935            } else {
7936                Some(OneOrManyWithParens::One(self.parse_expr()?))
7937            }
7938        } else {
7939            None
7940        };
7941
7942        let on_commit = if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT]) {
7943            Some(self.parse_create_table_on_commit()?)
7944        } else {
7945            None
7946        };
7947
7948        let strict = self.parse_keyword(Keyword::STRICT);
7949
7950        // Parse optional `AS ( query )`
7951        let query = if self.parse_keyword(Keyword::AS) {
7952            Some(self.parse_query()?)
7953        } else if self.dialect.supports_create_table_select() && self.parse_keyword(Keyword::SELECT)
7954        {
7955            // rewind the SELECT keyword
7956            self.prev_token();
7957            Some(self.parse_query()?)
7958        } else {
7959            None
7960        };
7961
7962        Ok(CreateTableBuilder::new(table_name)
7963            .temporary(temporary)
7964            .columns(columns)
7965            .constraints(constraints)
7966            .or_replace(or_replace)
7967            .if_not_exists(if_not_exists)
7968            .transient(transient)
7969            .hive_distribution(hive_distribution)
7970            .hive_formats(hive_formats)
7971            .global(global)
7972            .query(query)
7973            .without_rowid(without_rowid)
7974            .like(like)
7975            .clone_clause(clone)
7976            .comment_after_column_def(comment_after_column_def)
7977            .order_by(order_by)
7978            .on_commit(on_commit)
7979            .on_cluster(on_cluster)
7980            .clustered_by(clustered_by)
7981            .partition_by(create_table_config.partition_by)
7982            .cluster_by(create_table_config.cluster_by)
7983            .inherits(create_table_config.inherits)
7984            .partition_of(partition_of)
7985            .for_values(for_values)
7986            .table_options(create_table_config.table_options)
7987            .primary_key(primary_key)
7988            .strict(strict)
7989            .build())
7990    }
7991
7992    fn maybe_parse_create_table_like(
7993        &mut self,
7994        allow_unquoted_hyphen: bool,
7995    ) -> Result<Option<CreateTableLikeKind>, ParserError> {
7996        let like = if self.dialect.supports_create_table_like_parenthesized()
7997            && self.consume_token(&Token::LParen)
7998        {
7999            if self.parse_keyword(Keyword::LIKE) {
8000                let name = self.parse_object_name(allow_unquoted_hyphen)?;
8001                let defaults = if self.parse_keywords(&[Keyword::INCLUDING, Keyword::DEFAULTS]) {
8002                    Some(CreateTableLikeDefaults::Including)
8003                } else if self.parse_keywords(&[Keyword::EXCLUDING, Keyword::DEFAULTS]) {
8004                    Some(CreateTableLikeDefaults::Excluding)
8005                } else {
8006                    None
8007                };
8008                self.expect_token(&Token::RParen)?;
8009                Some(CreateTableLikeKind::Parenthesized(CreateTableLike {
8010                    name,
8011                    defaults,
8012                }))
8013            } else {
8014                // Rollback the '(' it's probably the columns list
8015                self.prev_token();
8016                None
8017            }
8018        } else if self.parse_keyword(Keyword::LIKE) || self.parse_keyword(Keyword::ILIKE) {
8019            let name = self.parse_object_name(allow_unquoted_hyphen)?;
8020            Some(CreateTableLikeKind::Plain(CreateTableLike {
8021                name,
8022                defaults: None,
8023            }))
8024        } else {
8025            None
8026        };
8027        Ok(like)
8028    }
8029
8030    pub(crate) fn parse_create_table_on_commit(&mut self) -> Result<OnCommit, ParserError> {
8031        if self.parse_keywords(&[Keyword::DELETE, Keyword::ROWS]) {
8032            Ok(OnCommit::DeleteRows)
8033        } else if self.parse_keywords(&[Keyword::PRESERVE, Keyword::ROWS]) {
8034            Ok(OnCommit::PreserveRows)
8035        } else if self.parse_keywords(&[Keyword::DROP]) {
8036            Ok(OnCommit::Drop)
8037        } else {
8038            parser_err!(
8039                "Expecting DELETE ROWS, PRESERVE ROWS or DROP",
8040                self.peek_token()
8041            )
8042        }
8043    }
8044
8045    /// Parse PostgreSQL partition bound specification for PARTITION OF.
8046    ///
8047    /// Parses: `FOR VALUES partition_bound_spec | DEFAULT`
8048    ///
8049    /// [PostgreSQL](https://www.postgresql.org/docs/current/sql-createtable.html)
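    ///
    /// For example, bound specifications of the following shapes are accepted
    /// (the values are illustrative):
    /// ```sql
    /// FOR VALUES IN ('us', 'eu')
    /// FOR VALUES FROM (MINVALUE) TO (100)
    /// FOR VALUES WITH (MODULUS 4, REMAINDER 0)
    /// ```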
8050    fn parse_partition_for_values(&mut self) -> Result<ForValues, ParserError> {
8051        if self.parse_keyword(Keyword::DEFAULT) {
8052            return Ok(ForValues::Default);
8053        }
8054
8055        self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;
8056
8057        if self.parse_keyword(Keyword::IN) {
8058            // FOR VALUES IN (expr, ...)
8059            self.expect_token(&Token::LParen)?;
8060            let values = self.parse_comma_separated(Parser::parse_expr)?;
8061            self.expect_token(&Token::RParen)?;
8062            Ok(ForValues::In(values))
8063        } else if self.parse_keyword(Keyword::FROM) {
8064            // FOR VALUES FROM (...) TO (...)
8065            self.expect_token(&Token::LParen)?;
8066            let from = self.parse_comma_separated(Parser::parse_partition_bound_value)?;
8067            self.expect_token(&Token::RParen)?;
8068            self.expect_keyword(Keyword::TO)?;
8069            self.expect_token(&Token::LParen)?;
8070            let to = self.parse_comma_separated(Parser::parse_partition_bound_value)?;
8071            self.expect_token(&Token::RParen)?;
8072            Ok(ForValues::From { from, to })
8073        } else if self.parse_keyword(Keyword::WITH) {
8074            // FOR VALUES WITH (MODULUS n, REMAINDER r)
8075            self.expect_token(&Token::LParen)?;
8076            self.expect_keyword(Keyword::MODULUS)?;
8077            let modulus = self.parse_literal_uint()?;
8078            self.expect_token(&Token::Comma)?;
8079            self.expect_keyword(Keyword::REMAINDER)?;
8080            let remainder = self.parse_literal_uint()?;
8081            self.expect_token(&Token::RParen)?;
8082            Ok(ForValues::With { modulus, remainder })
8083        } else {
8084            self.expected("IN, FROM, or WITH after FOR VALUES", self.peek_token())
8085        }
8086    }
8087
8088    /// Parse a single partition bound value (MINVALUE, MAXVALUE, or expression).
8089    fn parse_partition_bound_value(&mut self) -> Result<PartitionBoundValue, ParserError> {
8090        if self.parse_keyword(Keyword::MINVALUE) {
8091            Ok(PartitionBoundValue::MinValue)
8092        } else if self.parse_keyword(Keyword::MAXVALUE) {
8093            Ok(PartitionBoundValue::MaxValue)
8094        } else {
8095            Ok(PartitionBoundValue::Expr(self.parse_expr()?))
8096        }
8097    }
8098
8099    /// Parse table-creation configuration such as inheritance, partitioning, and clustering information.
8100    ///
8101    /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#syntax_2)
8102    /// [PostgreSQL](https://www.postgresql.org/docs/current/ddl-partitioning.html)
8103    /// [MySql](https://dev.mysql.com/doc/refman/8.4/en/create-table.html)
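    ///
    /// For example, the trailing clauses of statements such as (illustrative and
    /// dialect-dependent):
    ///
    /// ```sql
    /// CREATE TABLE t (a INT) INHERITS (parent) WITH (fillfactor = 70)
    /// CREATE TABLE t (a INT64) PARTITION BY a CLUSTER BY a OPTIONS (description = 'x')
    /// ```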
8104    fn parse_optional_create_table_config(
8105        &mut self,
8106    ) -> Result<CreateTableConfiguration, ParserError> {
8107        let mut table_options = CreateTableOptions::None;
8108
8109        let inherits = if self.parse_keyword(Keyword::INHERITS) {
8110            Some(self.parse_parenthesized_qualified_column_list(IsOptional::Mandatory, false)?)
8111        } else {
8112            None
8113        };
8114
8115        // PostgreSQL supports `WITH ( options )`, before `AS`
8116        let with_options = self.parse_options(Keyword::WITH)?;
8117        if !with_options.is_empty() {
8118            table_options = CreateTableOptions::With(with_options)
8119        }
8120
8121        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
8122        if !table_properties.is_empty() {
8123            table_options = CreateTableOptions::TableProperties(table_properties);
8124        }
8125        let partition_by = if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
8126            && self.parse_keywords(&[Keyword::PARTITION, Keyword::BY])
8127        {
8128            Some(Box::new(self.parse_expr()?))
8129        } else {
8130            None
8131        };
8132
8133        let mut cluster_by = None;
8134        if dialect_of!(self is BigQueryDialect | GenericDialect) {
8135            if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
8136                cluster_by = Some(WrappedCollection::NoWrapping(
8137                    self.parse_comma_separated(|p| p.parse_expr())?,
8138                ));
8139            };
8140
8141            if let Token::Word(word) = self.peek_token().token {
8142                if word.keyword == Keyword::OPTIONS {
8143                    table_options =
8144                        CreateTableOptions::Options(self.parse_options(Keyword::OPTIONS)?)
8145                }
8146            };
8147        }
8148
8149        if !dialect_of!(self is HiveDialect) && table_options == CreateTableOptions::None {
8150            let plain_options = self.parse_plain_options()?;
8151            if !plain_options.is_empty() {
8152                table_options = CreateTableOptions::Plain(plain_options)
8153            }
8154        };
8155
8156        Ok(CreateTableConfiguration {
8157            partition_by,
8158            cluster_by,
8159            inherits,
8160            table_options,
8161        })
8162    }
8163
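    /// Parse a single plain (non-parenthesized) table option as found in MySQL- and
    /// ClickHouse-style `CREATE TABLE` statements, e.g. (illustrative values):
    ///
    /// ```sql
    /// ENGINE = InnoDB
    /// COMMENT 'user accounts'
    /// DEFAULT CHARSET = utf8mb4
    /// AUTO_INCREMENT = 100
    /// ```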
8164    fn parse_plain_option(&mut self) -> Result<Option<SqlOption>, ParserError> {
8165        // Single parameter option
8166        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
8167        if self.parse_keywords(&[Keyword::START, Keyword::TRANSACTION]) {
8168            return Ok(Some(SqlOption::Ident(Ident::new("START TRANSACTION"))));
8169        }
8170
8171        // COMMENT option
8172        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
8173        if self.parse_keywords(&[Keyword::COMMENT]) {
8174            let has_eq = self.consume_token(&Token::Eq);
8175            let value = self.next_token();
8176
8177            let comment = match (has_eq, value.token) {
8178                (true, Token::SingleQuotedString(s)) => {
8179                    Ok(Some(SqlOption::Comment(CommentDef::WithEq(s))))
8180                }
8181                (false, Token::SingleQuotedString(s)) => {
8182                    Ok(Some(SqlOption::Comment(CommentDef::WithoutEq(s))))
8183                }
8184                (_, token) => {
8185                    self.expected("Token::SingleQuotedString", TokenWithSpan::wrap(token))
8186                }
8187            };
8188            return comment;
8189        }
8190
8191        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
8192        // <https://clickhouse.com/docs/sql-reference/statements/create/table>
8193        if self.parse_keywords(&[Keyword::ENGINE]) {
8194            let _ = self.consume_token(&Token::Eq);
8195            let value = self.next_token();
8196
8197            let engine = match value.token {
8198                Token::Word(w) => {
8199                    let parameters = if self.peek_token() == Token::LParen {
8200                        self.parse_parenthesized_identifiers()?
8201                    } else {
8202                        vec![]
8203                    };
8204
8205                    Ok(Some(SqlOption::NamedParenthesizedList(
8206                        NamedParenthesizedList {
8207                            key: Ident::new("ENGINE"),
8208                            name: Some(Ident::new(w.value)),
8209                            values: parameters,
8210                        },
8211                    )))
8212                }
8213                _ => {
8214                    return self.expected("Token::Word", value)?;
8215                }
8216            };
8217
8218            return engine;
8219        }
8220
8221        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
8222        if self.parse_keywords(&[Keyword::TABLESPACE]) {
8223            let _ = self.consume_token(&Token::Eq);
8224            let value = self.next_token();
8225
8226            let tablespace = match value.token {
8227                Token::Word(Word { value: name, .. }) | Token::SingleQuotedString(name) => {
8228                    let storage = match self.parse_keyword(Keyword::STORAGE) {
8229                        true => {
8230                            let _ = self.consume_token(&Token::Eq);
8231                            let storage_token = self.next_token();
8232                            match &storage_token.token {
8233                                Token::Word(w) => match w.value.to_uppercase().as_str() {
8234                                    "DISK" => Some(StorageType::Disk),
8235                                    "MEMORY" => Some(StorageType::Memory),
8236                                    _ => self
8237                                        .expected("Storage type (DISK or MEMORY)", storage_token)?,
8238                                },
8239                                _ => self.expected("Token::Word", storage_token)?,
8240                            }
8241                        }
8242                        false => None,
8243                    };
8244
8245                    Ok(Some(SqlOption::TableSpace(TablespaceOption {
8246                        name,
8247                        storage,
8248                    })))
8249                }
8250                _ => {
8251                    return self.expected("Token::Word", value)?;
8252                }
8253            };
8254
8255            return tablespace;
8256        }
8257
8258        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
8259        if self.parse_keyword(Keyword::UNION) {
8260            let _ = self.consume_token(&Token::Eq);
8261            let value = self.next_token();
8262
8263            match value.token {
8264                Token::LParen => {
8265                    let tables: Vec<Ident> =
8266                        self.parse_comma_separated0(Parser::parse_identifier, Token::RParen)?;
8267                    self.expect_token(&Token::RParen)?;
8268
8269                    return Ok(Some(SqlOption::NamedParenthesizedList(
8270                        NamedParenthesizedList {
8271                            key: Ident::new("UNION"),
8272                            name: None,
8273                            values: tables,
8274                        },
8275                    )));
8276                }
8277                _ => {
8278                    return self.expected("Token::LParen", value)?;
8279                }
8280            }
8281        }
8282
8283        // Key/Value parameter option
8284        let key = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARSET]) {
8285            Ident::new("DEFAULT CHARSET")
8286        } else if self.parse_keyword(Keyword::CHARSET) {
8287            Ident::new("CHARSET")
8288        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARACTER, Keyword::SET]) {
8289            Ident::new("DEFAULT CHARACTER SET")
8290        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
8291            Ident::new("CHARACTER SET")
8292        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
8293            Ident::new("DEFAULT COLLATE")
8294        } else if self.parse_keyword(Keyword::COLLATE) {
8295            Ident::new("COLLATE")
8296        } else if self.parse_keywords(&[Keyword::DATA, Keyword::DIRECTORY]) {
8297            Ident::new("DATA DIRECTORY")
8298        } else if self.parse_keywords(&[Keyword::INDEX, Keyword::DIRECTORY]) {
8299            Ident::new("INDEX DIRECTORY")
8300        } else if self.parse_keyword(Keyword::KEY_BLOCK_SIZE) {
8301            Ident::new("KEY_BLOCK_SIZE")
8302        } else if self.parse_keyword(Keyword::ROW_FORMAT) {
8303            Ident::new("ROW_FORMAT")
8304        } else if self.parse_keyword(Keyword::PACK_KEYS) {
8305            Ident::new("PACK_KEYS")
8306        } else if self.parse_keyword(Keyword::STATS_AUTO_RECALC) {
8307            Ident::new("STATS_AUTO_RECALC")
8308        } else if self.parse_keyword(Keyword::STATS_PERSISTENT) {
8309            Ident::new("STATS_PERSISTENT")
8310        } else if self.parse_keyword(Keyword::STATS_SAMPLE_PAGES) {
8311            Ident::new("STATS_SAMPLE_PAGES")
8312        } else if self.parse_keyword(Keyword::DELAY_KEY_WRITE) {
8313            Ident::new("DELAY_KEY_WRITE")
8314        } else if self.parse_keyword(Keyword::COMPRESSION) {
8315            Ident::new("COMPRESSION")
8316        } else if self.parse_keyword(Keyword::ENCRYPTION) {
8317            Ident::new("ENCRYPTION")
8318        } else if self.parse_keyword(Keyword::MAX_ROWS) {
8319            Ident::new("MAX_ROWS")
8320        } else if self.parse_keyword(Keyword::MIN_ROWS) {
8321            Ident::new("MIN_ROWS")
8322        } else if self.parse_keyword(Keyword::AUTOEXTEND_SIZE) {
8323            Ident::new("AUTOEXTEND_SIZE")
8324        } else if self.parse_keyword(Keyword::AVG_ROW_LENGTH) {
8325            Ident::new("AVG_ROW_LENGTH")
8326        } else if self.parse_keyword(Keyword::CHECKSUM) {
8327            Ident::new("CHECKSUM")
8328        } else if self.parse_keyword(Keyword::CONNECTION) {
8329            Ident::new("CONNECTION")
8330        } else if self.parse_keyword(Keyword::ENGINE_ATTRIBUTE) {
8331            Ident::new("ENGINE_ATTRIBUTE")
8332        } else if self.parse_keyword(Keyword::PASSWORD) {
8333            Ident::new("PASSWORD")
8334        } else if self.parse_keyword(Keyword::SECONDARY_ENGINE_ATTRIBUTE) {
8335            Ident::new("SECONDARY_ENGINE_ATTRIBUTE")
8336        } else if self.parse_keyword(Keyword::INSERT_METHOD) {
8337            Ident::new("INSERT_METHOD")
8338        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
8339            Ident::new("AUTO_INCREMENT")
8340        } else {
8341            return Ok(None);
8342        };
8343
8344        let _ = self.consume_token(&Token::Eq);
8345
8346        let value = match self
8347            .maybe_parse(|parser| parser.parse_value())?
8348            .map(Expr::Value)
8349        {
8350            Some(expr) => expr,
8351            None => Expr::Identifier(self.parse_identifier()?),
8352        };
8353
8354        Ok(Some(SqlOption::KeyValue { key, value }))
8355    }
8356
8357    pub fn parse_plain_options(&mut self) -> Result<Vec<SqlOption>, ParserError> {
8358        let mut options = Vec::new();
8359
8360        while let Some(option) = self.parse_plain_option()? {
8361            options.push(option);
8362            // Some dialects support comma-separated options; consuming an optional comma here
8363            // shouldn't introduce ambiguity for the other dialects.
8364            let _ = self.consume_token(&Token::Comma);
8365        }
8366
8367        Ok(options)
8368    }
8369
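    /// Parse an optional inline `COMMENT` clause, with or without `=`,
    /// e.g. (illustrative text):
    ///
    /// ```sql
    /// COMMENT 'some comment'
    /// COMMENT = 'some comment'
    /// ```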
8370    pub fn parse_optional_inline_comment(&mut self) -> Result<Option<CommentDef>, ParserError> {
8371        let comment = if self.parse_keyword(Keyword::COMMENT) {
8372            let has_eq = self.consume_token(&Token::Eq);
8373            let comment = self.parse_comment_value()?;
8374            Some(if has_eq {
8375                CommentDef::WithEq(comment)
8376            } else {
8377                CommentDef::WithoutEq(comment)
8378            })
8379        } else {
8380            None
8381        };
8382        Ok(comment)
8383    }
8384
8385    pub fn parse_comment_value(&mut self) -> Result<String, ParserError> {
8386        let next_token = self.next_token();
8387        let value = match next_token.token {
8388            Token::SingleQuotedString(str) => str,
8389            Token::DollarQuotedString(str) => str.value,
8390            _ => self.expected("string literal", next_token)?,
8391        };
8392        Ok(value)
8393    }
8394
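    /// Parse an optional parenthesized list of procedure parameters,
    /// e.g. (illustrative names and types):
    ///
    /// ```sql
    /// (IN a INT, OUT b VARCHAR(10), c INT = 0)
    /// ```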
8395    pub fn parse_optional_procedure_parameters(
8396        &mut self,
8397    ) -> Result<Option<Vec<ProcedureParam>>, ParserError> {
8398        let mut params = vec![];
8399        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
8400            return Ok(Some(params));
8401        }
8402        loop {
8403            if let Token::Word(_) = self.peek_token().token {
8404                params.push(self.parse_procedure_param()?)
8405            }
8406            let comma = self.consume_token(&Token::Comma);
8407            if self.consume_token(&Token::RParen) {
8408                // allow a trailing comma, even though it's not in the SQL standard
8409                break;
8410            } else if !comma {
8411                return self.expected("',' or ')' after parameter definition", self.peek_token());
8412            }
8413        }
8414        Ok(Some(params))
8415    }
8416
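    /// Parse a parenthesized, comma-separated list of column definitions and/or
    /// table constraints, e.g. (illustrative schema):
    ///
    /// ```sql
    /// (id INT PRIMARY KEY, name TEXT NOT NULL, CONSTRAINT uq_name UNIQUE (name))
    /// ```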
8417    pub fn parse_columns(&mut self) -> Result<(Vec<ColumnDef>, Vec<TableConstraint>), ParserError> {
8418        let mut columns = vec![];
8419        let mut constraints = vec![];
8420        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
8421            return Ok((columns, constraints));
8422        }
8423
8424        loop {
8425            if let Some(constraint) = self.parse_optional_table_constraint()? {
8426                constraints.push(constraint);
8427            } else if let Token::Word(_) = self.peek_token().token {
8428                columns.push(self.parse_column_def()?);
8429            } else {
8430                return self.expected("column name or constraint definition", self.peek_token());
8431            }
8432
8433            let comma = self.consume_token(&Token::Comma);
8434            let rparen = self.peek_token().token == Token::RParen;
8435
8436            if !comma && !rparen {
8437                return self.expected("',' or ')' after column definition", self.peek_token());
8438            };
8439
8440            if rparen
8441                && (!comma
8442                    || self.dialect.supports_column_definition_trailing_commas()
8443                    || self.options.trailing_commas)
8444            {
8445                let _ = self.consume_token(&Token::RParen);
8446                break;
8447            }
8448        }
8449
8450        Ok((columns, constraints))
8451    }
8452
8453    pub fn parse_procedure_param(&mut self) -> Result<ProcedureParam, ParserError> {
8454        let mode = if self.parse_keyword(Keyword::IN) {
8455            Some(ArgMode::In)
8456        } else if self.parse_keyword(Keyword::OUT) {
8457            Some(ArgMode::Out)
8458        } else if self.parse_keyword(Keyword::INOUT) {
8459            Some(ArgMode::InOut)
8460        } else {
8461            None
8462        };
8463        let name = self.parse_identifier()?;
8464        let data_type = self.parse_data_type()?;
8465        let default = if self.consume_token(&Token::Eq) {
8466            Some(self.parse_expr()?)
8467        } else {
8468            None
8469        };
8470
8471        Ok(ProcedureParam {
8472            name,
8473            data_type,
8474            mode,
8475            default,
8476        })
8477    }
8478
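    /// Parse a single column definition: an identifier, an optional data type
    /// (SQLite allows omitting it), and any column options, e.g. (illustrative):
    ///
    /// ```sql
    /// name VARCHAR(100) NOT NULL DEFAULT 'unknown'
    /// ```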
8479    pub fn parse_column_def(&mut self) -> Result<ColumnDef, ParserError> {
8480        let col_name = self.parse_identifier()?;
8481        let data_type = if self.is_column_type_sqlite_unspecified() {
8482            DataType::Unspecified
8483        } else {
8484            self.parse_data_type()?
8485        };
8486        let mut options = vec![];
8487        loop {
8488            if self.parse_keyword(Keyword::CONSTRAINT) {
8489                let name = Some(self.parse_identifier()?);
8490                if let Some(option) = self.parse_optional_column_option()? {
8491                    options.push(ColumnOptionDef { name, option });
8492                } else {
8493                    return self.expected(
8494                        "constraint details after CONSTRAINT <name>",
8495                        self.peek_token(),
8496                    );
8497                }
8498            } else if let Some(option) = self.parse_optional_column_option()? {
8499                options.push(ColumnOptionDef { name: None, option });
8500            } else {
8501                break;
8502            };
8503        }
8504        Ok(ColumnDef {
8505            name: col_name,
8506            data_type,
8507            options,
8508        })
8509    }
8510
8511    fn is_column_type_sqlite_unspecified(&mut self) -> bool {
8512        if dialect_of!(self is SQLiteDialect) {
8513            match self.peek_token().token {
8514                Token::Word(word) => matches!(
8515                    word.keyword,
8516                    Keyword::CONSTRAINT
8517                        | Keyword::PRIMARY
8518                        | Keyword::NOT
8519                        | Keyword::UNIQUE
8520                        | Keyword::CHECK
8521                        | Keyword::DEFAULT
8522                        | Keyword::COLLATE
8523                        | Keyword::REFERENCES
8524                        | Keyword::GENERATED
8525                        | Keyword::AS
8526                ),
8527                _ => true, // e.g. comma immediately after column name
8528            }
8529        } else {
8530            false
8531        }
8532    }
8533
8534    pub fn parse_optional_column_option(&mut self) -> Result<Option<ColumnOption>, ParserError> {
8535        if let Some(option) = self.dialect.parse_column_option(self)? {
8536            return option;
8537        }
8538
8539        self.with_state(
8540            ColumnDefinition,
8541            |parser| -> Result<Option<ColumnOption>, ParserError> {
8542                parser.parse_optional_column_option_inner()
8543            },
8544        )
8545    }
8546
8547    fn parse_optional_column_option_inner(&mut self) -> Result<Option<ColumnOption>, ParserError> {
8548        if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
8549            Ok(Some(ColumnOption::CharacterSet(
8550                self.parse_object_name(false)?,
8551            )))
8552        } else if self.parse_keywords(&[Keyword::COLLATE]) {
8553            Ok(Some(ColumnOption::Collation(
8554                self.parse_object_name(false)?,
8555            )))
8556        } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
8557            Ok(Some(ColumnOption::NotNull))
8558        } else if self.parse_keywords(&[Keyword::COMMENT]) {
8559            Ok(Some(ColumnOption::Comment(self.parse_comment_value()?)))
8560        } else if self.parse_keyword(Keyword::NULL) {
8561            Ok(Some(ColumnOption::Null))
8562        } else if self.parse_keyword(Keyword::DEFAULT) {
8563            Ok(Some(ColumnOption::Default(
8564                self.parse_column_option_expr()?,
8565            )))
8566        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
8567            && self.parse_keyword(Keyword::MATERIALIZED)
8568        {
8569            Ok(Some(ColumnOption::Materialized(
8570                self.parse_column_option_expr()?,
8571            )))
8572        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
8573            && self.parse_keyword(Keyword::ALIAS)
8574        {
8575            Ok(Some(ColumnOption::Alias(self.parse_column_option_expr()?)))
8576        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
8577            && self.parse_keyword(Keyword::EPHEMERAL)
8578        {
8579            // The expression is optional for the EPHEMERAL syntax, so we need to check
8580            // if the column definition has remaining tokens before parsing the expression.
8581            if matches!(self.peek_token().token, Token::Comma | Token::RParen) {
8582                Ok(Some(ColumnOption::Ephemeral(None)))
8583            } else {
8584                Ok(Some(ColumnOption::Ephemeral(Some(
8585                    self.parse_column_option_expr()?,
8586                ))))
8587            }
8588        } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
8589            let characteristics = self.parse_constraint_characteristics()?;
8590            Ok(Some(
8591                PrimaryKeyConstraint {
8592                    name: None,
8593                    index_name: None,
8594                    index_type: None,
8595                    columns: vec![],
8596                    index_options: vec![],
8597                    characteristics,
8598                }
8599                .into(),
8600            ))
8601        } else if self.parse_keyword(Keyword::UNIQUE) {
8602            let characteristics = self.parse_constraint_characteristics()?;
8603            Ok(Some(
8604                UniqueConstraint {
8605                    name: None,
8606                    index_name: None,
8607                    index_type_display: KeyOrIndexDisplay::None,
8608                    index_type: None,
8609                    columns: vec![],
8610                    index_options: vec![],
8611                    characteristics,
8612                    nulls_distinct: NullsDistinctOption::None,
8613                }
8614                .into(),
8615            ))
8616        } else if self.parse_keyword(Keyword::REFERENCES) {
8617            let foreign_table = self.parse_object_name(false)?;
8618            // PostgreSQL allows omitting the column list and
8619            // uses the primary key column of the foreign table by default
8620            let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
8621            let mut match_kind = None;
8622            let mut on_delete = None;
8623            let mut on_update = None;
8624            loop {
8625                if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
8626                    match_kind = Some(self.parse_match_kind()?);
8627                } else if on_delete.is_none()
8628                    && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
8629                {
8630                    on_delete = Some(self.parse_referential_action()?);
8631                } else if on_update.is_none()
8632                    && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
8633                {
8634                    on_update = Some(self.parse_referential_action()?);
8635                } else {
8636                    break;
8637                }
8638            }
8639            let characteristics = self.parse_constraint_characteristics()?;
8640
8641            Ok(Some(
8642                ForeignKeyConstraint {
8643                    name: None,       // Column-level constraints don't have names
8644                    index_name: None, // Not applicable for column-level constraints
8645                    columns: vec![],  // Not applicable for column-level constraints
8646                    foreign_table,
8647                    referred_columns,
8648                    on_delete,
8649                    on_update,
8650                    match_kind,
8651                    characteristics,
8652                }
8653                .into(),
8654            ))
8655        } else if self.parse_keyword(Keyword::CHECK) {
8656            self.expect_token(&Token::LParen)?;
8657            // since `CHECK` requires parentheses, we can parse the inner expression in ParserState::Normal
8658            let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
8659            self.expect_token(&Token::RParen)?;
8660            Ok(Some(
8661                CheckConstraint {
8662                    name: None, // Column-level check constraints don't have names
8663                    expr: Box::new(expr),
8664                    enforced: None, // Could be extended later to support MySQL ENFORCED/NOT ENFORCED
8665                }
8666                .into(),
8667            ))
8668        } else if self.parse_keyword(Keyword::AUTO_INCREMENT)
8669            && dialect_of!(self is MySqlDialect | GenericDialect)
8670        {
8671            // Support AUTO_INCREMENT for MySQL
8672            Ok(Some(ColumnOption::DialectSpecific(vec![
8673                Token::make_keyword("AUTO_INCREMENT"),
8674            ])))
8675        } else if self.parse_keyword(Keyword::AUTOINCREMENT)
8676            && dialect_of!(self is SQLiteDialect |  GenericDialect)
8677        {
8678            // Support AUTOINCREMENT for SQLite
8679            Ok(Some(ColumnOption::DialectSpecific(vec![
8680                Token::make_keyword("AUTOINCREMENT"),
8681            ])))
8682        } else if self.parse_keyword(Keyword::ASC)
8683            && self.dialect.supports_asc_desc_in_column_definition()
8684        {
8685            // Support ASC for SQLite
8686            Ok(Some(ColumnOption::DialectSpecific(vec![
8687                Token::make_keyword("ASC"),
8688            ])))
8689        } else if self.parse_keyword(Keyword::DESC)
8690            && self.dialect.supports_asc_desc_in_column_definition()
8691        {
8692            // Support DESC for SQLite
8693            Ok(Some(ColumnOption::DialectSpecific(vec![
8694                Token::make_keyword("DESC"),
8695            ])))
8696        } else if self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
8697            && dialect_of!(self is MySqlDialect | GenericDialect)
8698        {
8699            let expr = self.parse_column_option_expr()?;
8700            Ok(Some(ColumnOption::OnUpdate(expr)))
8701        } else if self.parse_keyword(Keyword::GENERATED) {
8702            self.parse_optional_column_option_generated()
8703        } else if dialect_of!(self is BigQueryDialect | GenericDialect)
8704            && self.parse_keyword(Keyword::OPTIONS)
8705        {
8706            self.prev_token();
8707            Ok(Some(ColumnOption::Options(
8708                self.parse_options(Keyword::OPTIONS)?,
8709            )))
8710        } else if self.parse_keyword(Keyword::AS)
8711            && dialect_of!(self is MySqlDialect | SQLiteDialect | DuckDbDialect | GenericDialect)
8712        {
8713            self.parse_optional_column_option_as()
8714        } else if self.parse_keyword(Keyword::SRID)
8715            && dialect_of!(self is MySqlDialect | GenericDialect)
8716        {
8717            Ok(Some(ColumnOption::Srid(Box::new(
8718                self.parse_column_option_expr()?,
8719            ))))
8720        } else if self.parse_keyword(Keyword::IDENTITY)
8721            && dialect_of!(self is MsSqlDialect | GenericDialect)
8722        {
8723            let parameters = if self.consume_token(&Token::LParen) {
8724                let seed = self.parse_number()?;
8725                self.expect_token(&Token::Comma)?;
8726                let increment = self.parse_number()?;
8727                self.expect_token(&Token::RParen)?;
8728
8729                Some(IdentityPropertyFormatKind::FunctionCall(
8730                    IdentityParameters { seed, increment },
8731                ))
8732            } else {
8733                None
8734            };
8735            Ok(Some(ColumnOption::Identity(
8736                IdentityPropertyKind::Identity(IdentityProperty {
8737                    parameters,
8738                    order: None,
8739                }),
8740            )))
8741        } else if dialect_of!(self is SQLiteDialect | GenericDialect)
8742            && self.parse_keywords(&[Keyword::ON, Keyword::CONFLICT])
8743        {
8744            // Support ON CONFLICT for SQLite
8745            Ok(Some(ColumnOption::OnConflict(
8746                self.expect_one_of_keywords(&[
8747                    Keyword::ROLLBACK,
8748                    Keyword::ABORT,
8749                    Keyword::FAIL,
8750                    Keyword::IGNORE,
8751                    Keyword::REPLACE,
8752                ])?,
8753            )))
8754        } else if self.parse_keyword(Keyword::INVISIBLE) {
8755            Ok(Some(ColumnOption::Invisible))
8756        } else {
8757            Ok(None)
8758        }
8759    }
8760
8761    /// When parsing some column option expressions we need to revert to [ParserState::Normal] since
8762    /// `NOT NULL` is allowed as an alias for `IS NOT NULL`.
8763    /// In those cases we use this helper instead of calling [Parser::parse_expr] directly.
8764    ///
8765    /// For example, consider these `CREATE TABLE` statements:
8766    /// ```sql
8767    /// CREATE TABLE foo (abc BOOL DEFAULT (42 NOT NULL) NOT NULL);
8768    /// ```
8769    /// vs
8770    /// ```sql
8771    /// CREATE TABLE foo (abc BOOL NOT NULL);
8772    /// ```
8773    ///
8774    /// In the first statement we should parse the inner portion of `(42 NOT NULL)` as [Expr::IsNotNull],
8775    /// whereas in both statements the trailing `NOT NULL` should only be parsed as a
8776    /// [ColumnOption::NotNull].
8777    fn parse_column_option_expr(&mut self) -> Result<Expr, ParserError> {
8778        if self.peek_token_ref().token == Token::LParen {
8779            let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_prefix())?;
8780            Ok(expr)
8781        } else {
8782            Ok(self.parse_expr()?)
8783        }
8784    }
8785
8786    pub(crate) fn parse_tag(&mut self) -> Result<Tag, ParserError> {
8787        let name = self.parse_object_name(false)?;
8788        self.expect_token(&Token::Eq)?;
8789        let value = self.parse_literal_string()?;
8790
8791        Ok(Tag::new(name, value))
8792    }
8793
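    /// Parse the remainder of a `GENERATED ...` column option; the `GENERATED`
    /// keyword itself has already been consumed by the caller.
    /// For example (illustrative):
    ///
    /// ```sql
    /// GENERATED ALWAYS AS IDENTITY (START WITH 1 INCREMENT BY 1)
    /// GENERATED BY DEFAULT AS IDENTITY
    /// GENERATED ALWAYS AS (a + b) STORED
    /// ```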
8794    fn parse_optional_column_option_generated(
8795        &mut self,
8796    ) -> Result<Option<ColumnOption>, ParserError> {
8797        if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS, Keyword::IDENTITY]) {
8798            let mut sequence_options = vec![];
8799            if self.expect_token(&Token::LParen).is_ok() {
8800                sequence_options = self.parse_create_sequence_options()?;
8801                self.expect_token(&Token::RParen)?;
8802            }
8803            Ok(Some(ColumnOption::Generated {
8804                generated_as: GeneratedAs::Always,
8805                sequence_options: Some(sequence_options),
8806                generation_expr: None,
8807                generation_expr_mode: None,
8808                generated_keyword: true,
8809            }))
8810        } else if self.parse_keywords(&[
8811            Keyword::BY,
8812            Keyword::DEFAULT,
8813            Keyword::AS,
8814            Keyword::IDENTITY,
8815        ]) {
8816            let mut sequence_options = vec![];
8817            if self.expect_token(&Token::LParen).is_ok() {
8818                sequence_options = self.parse_create_sequence_options()?;
8819                self.expect_token(&Token::RParen)?;
8820            }
8821            Ok(Some(ColumnOption::Generated {
8822                generated_as: GeneratedAs::ByDefault,
8823                sequence_options: Some(sequence_options),
8824                generation_expr: None,
8825                generation_expr_mode: None,
8826                generated_keyword: true,
8827            }))
8828        } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS]) {
8829            if self.expect_token(&Token::LParen).is_ok() {
8830                let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
8831                self.expect_token(&Token::RParen)?;
8832                let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
8833                    Ok((
8834                        GeneratedAs::ExpStored,
8835                        Some(GeneratedExpressionMode::Stored),
8836                    ))
8837                } else if dialect_of!(self is PostgreSqlDialect) {
8838                    // Postgres' AS IDENTITY branches are handled above; this form requires STORED
8839                    self.expected("STORED", self.peek_token())
8840                } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
8841                    Ok((GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual)))
8842                } else {
8843                    Ok((GeneratedAs::Always, None))
8844                }?;
8845
8846                Ok(Some(ColumnOption::Generated {
8847                    generated_as: gen_as,
8848                    sequence_options: None,
8849                    generation_expr: Some(expr),
8850                    generation_expr_mode: expr_mode,
8851                    generated_keyword: true,
8852                }))
8853            } else {
8854                Ok(None)
8855            }
8856        } else {
8857            Ok(None)
8858        }
8859    }
8860
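    /// Parse the shorthand generated-column syntax `AS (expr) [STORED | VIRTUAL]`;
    /// the `AS` keyword has already been consumed by the caller.
    /// For example (illustrative):
    ///
    /// ```sql
    /// total INT AS (price * qty) STORED
    /// ```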
8861    fn parse_optional_column_option_as(&mut self) -> Result<Option<ColumnOption>, ParserError> {
8862        // Some DBs allow 'AS (expr)', shorthand for GENERATED ALWAYS AS
8863        self.expect_token(&Token::LParen)?;
8864        let expr = self.parse_expr()?;
8865        self.expect_token(&Token::RParen)?;
8866
8867        let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
8868            (
8869                GeneratedAs::ExpStored,
8870                Some(GeneratedExpressionMode::Stored),
8871            )
8872        } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
8873            (GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual))
8874        } else {
8875            (GeneratedAs::Always, None)
8876        };
8877
8878        Ok(Some(ColumnOption::Generated {
8879            generated_as: gen_as,
8880            sequence_options: None,
8881            generation_expr: Some(expr),
8882            generation_expr_mode: expr_mode,
8883            generated_keyword: false,
8884        }))
8885    }
8886
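    /// Parse an optional Hive-style `CLUSTERED BY` clause,
    /// e.g. (illustrative columns):
    ///
    /// ```sql
    /// CLUSTERED BY (user_id) SORTED BY (ts DESC) INTO 32 BUCKETS
    /// ```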
8887    pub fn parse_optional_clustered_by(&mut self) -> Result<Option<ClusteredBy>, ParserError> {
8888        let clustered_by = if dialect_of!(self is HiveDialect|GenericDialect)
8889            && self.parse_keywords(&[Keyword::CLUSTERED, Keyword::BY])
8890        {
8891            let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
8892
8893            let sorted_by = if self.parse_keywords(&[Keyword::SORTED, Keyword::BY]) {
8894                self.expect_token(&Token::LParen)?;
8895                let sorted_by_columns = self.parse_comma_separated(|p| p.parse_order_by_expr())?;
8896                self.expect_token(&Token::RParen)?;
8897                Some(sorted_by_columns)
8898            } else {
8899                None
8900            };
8901
8902            self.expect_keyword_is(Keyword::INTO)?;
8903            let num_buckets = self.parse_number_value()?.value;
8904            self.expect_keyword_is(Keyword::BUCKETS)?;
8905            Some(ClusteredBy {
8906                columns,
8907                sorted_by,
8908                num_buckets,
8909            })
8910        } else {
8911            None
8912        };
8913        Ok(clustered_by)
8914    }
8915
8916    pub fn parse_referential_action(&mut self) -> Result<ReferentialAction, ParserError> {
8917        if self.parse_keyword(Keyword::RESTRICT) {
8918            Ok(ReferentialAction::Restrict)
8919        } else if self.parse_keyword(Keyword::CASCADE) {
8920            Ok(ReferentialAction::Cascade)
8921        } else if self.parse_keywords(&[Keyword::SET, Keyword::NULL]) {
8922            Ok(ReferentialAction::SetNull)
8923        } else if self.parse_keywords(&[Keyword::NO, Keyword::ACTION]) {
8924            Ok(ReferentialAction::NoAction)
8925        } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
8926            Ok(ReferentialAction::SetDefault)
8927        } else {
8928            self.expected(
8929                "one of RESTRICT, CASCADE, SET NULL, NO ACTION or SET DEFAULT",
8930                self.peek_token(),
8931            )
8932        }
8933    }
8934
8935    pub fn parse_match_kind(&mut self) -> Result<ConstraintReferenceMatchKind, ParserError> {
8936        if self.parse_keyword(Keyword::FULL) {
8937            Ok(ConstraintReferenceMatchKind::Full)
8938        } else if self.parse_keyword(Keyword::PARTIAL) {
8939            Ok(ConstraintReferenceMatchKind::Partial)
8940        } else if self.parse_keyword(Keyword::SIMPLE) {
8941            Ok(ConstraintReferenceMatchKind::Simple)
8942        } else {
8943            self.expected("one of FULL, PARTIAL or SIMPLE", self.peek_token())
8944        }
8945    }
8946
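    /// Parse optional constraint characteristics, in any order,
    /// e.g. (illustrative):
    ///
    /// ```sql
    /// DEFERRABLE INITIALLY DEFERRED
    /// NOT DEFERRABLE INITIALLY IMMEDIATE
    /// NOT ENFORCED
    /// ```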
8947    pub fn parse_constraint_characteristics(
8948        &mut self,
8949    ) -> Result<Option<ConstraintCharacteristics>, ParserError> {
8950        let mut cc = ConstraintCharacteristics::default();
8951
8952        loop {
8953            if cc.deferrable.is_none() && self.parse_keywords(&[Keyword::NOT, Keyword::DEFERRABLE])
8954            {
8955                cc.deferrable = Some(false);
8956            } else if cc.deferrable.is_none() && self.parse_keyword(Keyword::DEFERRABLE) {
8957                cc.deferrable = Some(true);
8958            } else if cc.initially.is_none() && self.parse_keyword(Keyword::INITIALLY) {
8959                if self.parse_keyword(Keyword::DEFERRED) {
8960                    cc.initially = Some(DeferrableInitial::Deferred);
8961                } else if self.parse_keyword(Keyword::IMMEDIATE) {
8962                    cc.initially = Some(DeferrableInitial::Immediate);
8963                } else {
8964                    self.expected("one of DEFERRED or IMMEDIATE", self.peek_token())?;
8965                }
8966            } else if cc.enforced.is_none() && self.parse_keyword(Keyword::ENFORCED) {
8967                cc.enforced = Some(true);
8968            } else if cc.enforced.is_none()
8969                && self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED])
8970            {
8971                cc.enforced = Some(false);
8972            } else {
8973                break;
8974            }
8975        }
8976
8977        if cc.deferrable.is_some() || cc.initially.is_some() || cc.enforced.is_some() {
8978            Ok(Some(cc))
8979        } else {
8980            Ok(None)
8981        }
8982    }
8983
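    /// Parse an optional table-level constraint,
    /// e.g. (illustrative names and columns):
    ///
    /// ```sql
    /// CONSTRAINT uq_email UNIQUE (email)
    /// PRIMARY KEY (id)
    /// FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE
    /// CHECK (qty > 0)
    /// ```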
8984    pub fn parse_optional_table_constraint(
8985        &mut self,
8986    ) -> Result<Option<TableConstraint>, ParserError> {
8987        let name = if self.parse_keyword(Keyword::CONSTRAINT) {
8988            Some(self.parse_identifier()?)
8989        } else {
8990            None
8991        };
8992
8993        let next_token = self.next_token();
8994        match next_token.token {
8995            Token::Word(w) if w.keyword == Keyword::UNIQUE => {
8996                let index_type_display = self.parse_index_type_display();
8997                if !dialect_of!(self is GenericDialect | MySqlDialect)
8998                    && !index_type_display.is_none()
8999                {
9000                    return self
9001                        .expected("`index_name` or `(column_name [, ...])`", self.peek_token());
9002                }
9003
9004                let nulls_distinct = self.parse_optional_nulls_distinct()?;
9005
9006                // optional index name
9007                let index_name = self.parse_optional_ident()?;
9008                let index_type = self.parse_optional_using_then_index_type()?;
9009
9010                let columns = self.parse_parenthesized_index_column_list()?;
9011                let index_options = self.parse_index_options()?;
9012                let characteristics = self.parse_constraint_characteristics()?;
9013                Ok(Some(
9014                    UniqueConstraint {
9015                        name,
9016                        index_name,
9017                        index_type_display,
9018                        index_type,
9019                        columns,
9020                        index_options,
9021                        characteristics,
9022                        nulls_distinct,
9023                    }
9024                    .into(),
9025                ))
9026            }
9027            Token::Word(w) if w.keyword == Keyword::PRIMARY => {
9028                // `PRIMARY` is always followed by `KEY`
9029                self.expect_keyword_is(Keyword::KEY)?;
9030
9031                // optional index name
9032                let index_name = self.parse_optional_ident()?;
9033                let index_type = self.parse_optional_using_then_index_type()?;
9034
9035                let columns = self.parse_parenthesized_index_column_list()?;
9036                let index_options = self.parse_index_options()?;
9037                let characteristics = self.parse_constraint_characteristics()?;
9038                Ok(Some(
9039                    PrimaryKeyConstraint {
9040                        name,
9041                        index_name,
9042                        index_type,
9043                        columns,
9044                        index_options,
9045                        characteristics,
9046                    }
9047                    .into(),
9048                ))
9049            }
9050            Token::Word(w) if w.keyword == Keyword::FOREIGN => {
9051                self.expect_keyword_is(Keyword::KEY)?;
9052                let index_name = self.parse_optional_ident()?;
9053                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
9054                self.expect_keyword_is(Keyword::REFERENCES)?;
9055                let foreign_table = self.parse_object_name(false)?;
9056                let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
9057                let mut match_kind = None;
9058                let mut on_delete = None;
9059                let mut on_update = None;
9060                loop {
9061                    if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
9062                        match_kind = Some(self.parse_match_kind()?);
9063                    } else if on_delete.is_none()
9064                        && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
9065                    {
9066                        on_delete = Some(self.parse_referential_action()?);
9067                    } else if on_update.is_none()
9068                        && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
9069                    {
9070                        on_update = Some(self.parse_referential_action()?);
9071                    } else {
9072                        break;
9073                    }
9074                }
9075
9076                let characteristics = self.parse_constraint_characteristics()?;
9077
9078                Ok(Some(
9079                    ForeignKeyConstraint {
9080                        name,
9081                        index_name,
9082                        columns,
9083                        foreign_table,
9084                        referred_columns,
9085                        on_delete,
9086                        on_update,
9087                        match_kind,
9088                        characteristics,
9089                    }
9090                    .into(),
9091                ))
9092            }
9093            Token::Word(w) if w.keyword == Keyword::CHECK => {
9094                self.expect_token(&Token::LParen)?;
9095                let expr = Box::new(self.parse_expr()?);
9096                self.expect_token(&Token::RParen)?;
9097
9098                let enforced = if self.parse_keyword(Keyword::ENFORCED) {
9099                    Some(true)
9100                } else if self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED]) {
9101                    Some(false)
9102                } else {
9103                    None
9104                };
9105
9106                Ok(Some(
9107                    CheckConstraint {
9108                        name,
9109                        expr,
9110                        enforced,
9111                    }
9112                    .into(),
9113                ))
9114            }
9115            Token::Word(w)
9116                if (w.keyword == Keyword::INDEX || w.keyword == Keyword::KEY)
9117                    && dialect_of!(self is GenericDialect | MySqlDialect)
9118                    && name.is_none() =>
9119            {
9120                let display_as_key = w.keyword == Keyword::KEY;
9121
9122                let name = match self.peek_token().token {
9123                    Token::Word(word) if word.keyword == Keyword::USING => None,
9124                    _ => self.parse_optional_ident()?,
9125                };
9126
9127                let index_type = self.parse_optional_using_then_index_type()?;
9128                let columns = self.parse_parenthesized_index_column_list()?;
9129                let index_options = self.parse_index_options()?;
9130
9131                Ok(Some(
9132                    IndexConstraint {
9133                        display_as_key,
9134                        name,
9135                        index_type,
9136                        columns,
9137                        index_options,
9138                    }
9139                    .into(),
9140                ))
9141            }
9142            Token::Word(w)
9143                if (w.keyword == Keyword::FULLTEXT || w.keyword == Keyword::SPATIAL)
9144                    && dialect_of!(self is GenericDialect | MySqlDialect) =>
9145            {
9146                if let Some(name) = name {
9147                    return self.expected(
9148                        "FULLTEXT or SPATIAL option without constraint name",
9149                        TokenWithSpan {
9150                            token: Token::make_keyword(&name.to_string()),
9151                            span: next_token.span,
9152                        },
9153                    );
9154                }
9155
9156                let fulltext = w.keyword == Keyword::FULLTEXT;
9157
9158                let index_type_display = self.parse_index_type_display();
9159
9160                let opt_index_name = self.parse_optional_ident()?;
9161
9162                let columns = self.parse_parenthesized_index_column_list()?;
9163
9164                Ok(Some(
9165                    FullTextOrSpatialConstraint {
9166                        fulltext,
9167                        index_type_display,
9168                        opt_index_name,
9169                        columns,
9170                    }
9171                    .into(),
9172                ))
9173            }
9174            _ => {
9175                if name.is_some() {
9176                    self.expected("PRIMARY, UNIQUE, FOREIGN, or CHECK", next_token)
9177                } else {
9178                    self.prev_token();
9179                    Ok(None)
9180                }
9181            }
9182        }
9183    }
9184
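    /// Parse an optional `NULLS [NOT] DISTINCT` clause of a `UNIQUE` constraint,
    /// e.g. (illustrative column):
    ///
    /// ```sql
    /// UNIQUE NULLS NOT DISTINCT (email)
    /// ```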
9185    fn parse_optional_nulls_distinct(&mut self) -> Result<NullsDistinctOption, ParserError> {
9186        Ok(if self.parse_keyword(Keyword::NULLS) {
9187            let not = self.parse_keyword(Keyword::NOT);
9188            self.expect_keyword_is(Keyword::DISTINCT)?;
9189            if not {
9190                NullsDistinctOption::NotDistinct
9191            } else {
9192                NullsDistinctOption::Distinct
9193            }
9194        } else {
9195            NullsDistinctOption::None
9196        })
9197    }
9198
9199    pub fn maybe_parse_options(
9200        &mut self,
9201        keyword: Keyword,
9202    ) -> Result<Option<Vec<SqlOption>>, ParserError> {
9203        if let Token::Word(word) = self.peek_token().token {
9204            if word.keyword == keyword {
9205                return Ok(Some(self.parse_options(keyword)?));
9206            }
9207        };
9208        Ok(None)
9209    }
9210
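    /// Parse `keyword ( option [, ...] )` if the given keyword is present,
    /// returning an empty list otherwise. For example, with `Keyword::WITH`
    /// (illustrative options):
    ///
    /// ```sql
    /// WITH (fillfactor = 70, autovacuum_enabled = false)
    /// ```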
9211    pub fn parse_options(&mut self, keyword: Keyword) -> Result<Vec<SqlOption>, ParserError> {
9212        if self.parse_keyword(keyword) {
9213            self.expect_token(&Token::LParen)?;
9214            let options = self.parse_comma_separated0(Parser::parse_sql_option, Token::RParen)?;
9215            self.expect_token(&Token::RParen)?;
9216            Ok(options)
9217        } else {
9218            Ok(vec![])
9219        }
9220    }
9221
9222    pub fn parse_options_with_keywords(
9223        &mut self,
9224        keywords: &[Keyword],
9225    ) -> Result<Vec<SqlOption>, ParserError> {
9226        if self.parse_keywords(keywords) {
9227            self.expect_token(&Token::LParen)?;
9228            let options = self.parse_comma_separated(Parser::parse_sql_option)?;
9229            self.expect_token(&Token::RParen)?;
9230            Ok(options)
9231        } else {
9232            Ok(vec![])
9233        }
9234    }
9235
9236    pub fn parse_index_type(&mut self) -> Result<IndexType, ParserError> {
9237        Ok(if self.parse_keyword(Keyword::BTREE) {
9238            IndexType::BTree
9239        } else if self.parse_keyword(Keyword::HASH) {
9240            IndexType::Hash
9241        } else if self.parse_keyword(Keyword::GIN) {
9242            IndexType::GIN
9243        } else if self.parse_keyword(Keyword::GIST) {
9244            IndexType::GiST
9245        } else if self.parse_keyword(Keyword::SPGIST) {
9246            IndexType::SPGiST
9247        } else if self.parse_keyword(Keyword::BRIN) {
9248            IndexType::BRIN
9249        } else if self.parse_keyword(Keyword::BLOOM) {
9250            IndexType::Bloom
9251        } else {
9252            IndexType::Custom(self.parse_identifier()?)
9253        })
9254    }
9255
9256    /// Optionally parse the `USING` keyword, followed by an [IndexType]
9257    /// Example:
9258    /// ```sql
9259    /// USING BTREE (name, age DESC)
9260    /// ```
9261    pub fn parse_optional_using_then_index_type(
9262        &mut self,
9263    ) -> Result<Option<IndexType>, ParserError> {
9264        if self.parse_keyword(Keyword::USING) {
9265            Ok(Some(self.parse_index_type()?))
9266        } else {
9267            Ok(None)
9268        }
9269    }
9270
9271    /// Parse an optional `ident`, typically a name such as
9272    /// `window_name` or `index_name`.
9273    pub fn parse_optional_ident(&mut self) -> Result<Option<Ident>, ParserError> {
9274        self.maybe_parse(|parser| parser.parse_identifier())
9275    }
9276
9277    #[must_use]
9278    pub fn parse_index_type_display(&mut self) -> KeyOrIndexDisplay {
9279        if self.parse_keyword(Keyword::KEY) {
9280            KeyOrIndexDisplay::Key
9281        } else if self.parse_keyword(Keyword::INDEX) {
9282            KeyOrIndexDisplay::Index
9283        } else {
9284            KeyOrIndexDisplay::None
9285        }
9286    }
9287
9288    pub fn parse_optional_index_option(&mut self) -> Result<Option<IndexOption>, ParserError> {
9289        if let Some(index_type) = self.parse_optional_using_then_index_type()? {
9290            Ok(Some(IndexOption::Using(index_type)))
9291        } else if self.parse_keyword(Keyword::COMMENT) {
9292            let s = self.parse_literal_string()?;
9293            Ok(Some(IndexOption::Comment(s)))
9294        } else {
9295            Ok(None)
9296        }
9297    }
9298
9299    pub fn parse_index_options(&mut self) -> Result<Vec<IndexOption>, ParserError> {
9300        let mut options = Vec::new();
9301
9302        loop {
9303            match self.parse_optional_index_option()? {
9304                Some(index_option) => options.push(index_option),
9305                None => return Ok(options),
9306            }
9307        }
9308    }
9309
9310    pub fn parse_sql_option(&mut self) -> Result<SqlOption, ParserError> {
9311        let is_mssql = dialect_of!(self is MsSqlDialect|GenericDialect);
9312
9313        match self.peek_token().token {
9314            Token::Word(w) if w.keyword == Keyword::HEAP && is_mssql => {
9315                Ok(SqlOption::Ident(self.parse_identifier()?))
9316            }
9317            Token::Word(w) if w.keyword == Keyword::PARTITION && is_mssql => {
9318                self.parse_option_partition()
9319            }
9320            Token::Word(w) if w.keyword == Keyword::CLUSTERED && is_mssql => {
9321                self.parse_option_clustered()
9322            }
9323            _ => {
9324                let name = self.parse_identifier()?;
9325                self.expect_token(&Token::Eq)?;
9326                let value = self.parse_expr()?;
9327
9328                Ok(SqlOption::KeyValue { key: name, value })
9329            }
9330        }
9331    }
9332
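    /// Parse an MSSQL-style `CLUSTERED ...` table option,
    /// e.g. (illustrative columns):
    ///
    /// ```sql
    /// CLUSTERED COLUMNSTORE INDEX
    /// CLUSTERED COLUMNSTORE INDEX ORDER (a, b)
    /// CLUSTERED INDEX (a ASC, b DESC)
    /// ```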
9333    pub fn parse_option_clustered(&mut self) -> Result<SqlOption, ParserError> {
9334        if self.parse_keywords(&[
9335            Keyword::CLUSTERED,
9336            Keyword::COLUMNSTORE,
9337            Keyword::INDEX,
9338            Keyword::ORDER,
9339        ]) {
9340            Ok(SqlOption::Clustered(
9341                TableOptionsClustered::ColumnstoreIndexOrder(
9342                    self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
9343                ),
9344            ))
9345        } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::COLUMNSTORE, Keyword::INDEX]) {
9346            Ok(SqlOption::Clustered(
9347                TableOptionsClustered::ColumnstoreIndex,
9348            ))
9349        } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::INDEX]) {
9350            self.expect_token(&Token::LParen)?;
9351
9352            let columns = self.parse_comma_separated(|p| {
9353                let name = p.parse_identifier()?;
9354                let asc = p.parse_asc_desc();
9355
9356                Ok(ClusteredIndex { name, asc })
9357            })?;
9358
9359            self.expect_token(&Token::RParen)?;
9360
9361            Ok(SqlOption::Clustered(TableOptionsClustered::Index(columns)))
9362        } else {
9363            Err(ParserError::ParserError(
9364                "invalid CLUSTERED sequence".to_string(),
9365            ))
9366        }
9367    }
9368
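    /// Parse an MSSQL-style `PARTITION` table option, e.g. (illustrative):
    /// ```sql
    /// PARTITION (id RANGE LEFT FOR VALUES (10, 100, 1000))
    /// ```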
9369    pub fn parse_option_partition(&mut self) -> Result<SqlOption, ParserError> {
9370        self.expect_keyword_is(Keyword::PARTITION)?;
9371        self.expect_token(&Token::LParen)?;
9372        let column_name = self.parse_identifier()?;
9373
9374        self.expect_keyword_is(Keyword::RANGE)?;
9375        let range_direction = if self.parse_keyword(Keyword::LEFT) {
9376            Some(PartitionRangeDirection::Left)
9377        } else if self.parse_keyword(Keyword::RIGHT) {
9378            Some(PartitionRangeDirection::Right)
9379        } else {
9380            None
9381        };
9382
9383        self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;
9384        self.expect_token(&Token::LParen)?;
9385
9386        let for_values = self.parse_comma_separated(Parser::parse_expr)?;
9387
9388        self.expect_token(&Token::RParen)?;
9389        self.expect_token(&Token::RParen)?;
9390
9391        Ok(SqlOption::Partition {
9392            column_name,
9393            range_direction,
9394            for_values,
9395        })
9396    }
9397
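    /// Parse a parenthesized, comma-separated list of partition expressions,
    /// e.g. (illustrative) the `(year = 2024)` in `ADD PARTITION (year = 2024)`.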
9398    pub fn parse_partition(&mut self) -> Result<Partition, ParserError> {
9399        self.expect_token(&Token::LParen)?;
9400        let partitions = self.parse_comma_separated(Parser::parse_expr)?;
9401        self.expect_token(&Token::RParen)?;
9402        Ok(Partition::Partitions(partitions))
9403    }
9404
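    /// Parse the parenthesized `SELECT` body of a projection (used by the
    /// ClickHouse-gated `ADD PROJECTION` branch below), e.g. (illustrative):
    /// ```sql
    /// (SELECT a, b GROUP BY a ORDER BY b)
    /// ```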
9405    pub fn parse_projection_select(&mut self) -> Result<ProjectionSelect, ParserError> {
9406        self.expect_token(&Token::LParen)?;
9407        self.expect_keyword_is(Keyword::SELECT)?;
9408        let projection = self.parse_projection()?;
9409        let group_by = self.parse_optional_group_by()?;
9410        let order_by = self.parse_optional_order_by()?;
9411        self.expect_token(&Token::RParen)?;
9412        Ok(ProjectionSelect {
9413            projection,
9414            group_by,
9415            order_by,
9416        })
9417    }
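
    /// Parse the remainder of `ALTER TABLE .. ADD PROJECTION`, e.g.
    /// (illustrative) `[IF NOT EXISTS] name (SELECT a, b ORDER BY b)`.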
9418    pub fn parse_alter_table_add_projection(&mut self) -> Result<AlterTableOperation, ParserError> {
9419        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
9420        let name = self.parse_identifier()?;
9421        let query = self.parse_projection_select()?;
9422        Ok(AlterTableOperation::AddProjection {
9423            if_not_exists,
9424            name,
9425            select: query,
9426        })
9427    }
9428
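    /// Parse a single `ALTER TABLE` operation such as (illustrative)
    /// `ADD COLUMN c INT`, `DROP COLUMN c`, `RENAME TO t2`, or
    /// `ALTER COLUMN c SET NOT NULL`. A comma-separated list of these
    /// operations is parsed by [`Parser::parse_alter_table`].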
9429    pub fn parse_alter_table_operation(&mut self) -> Result<AlterTableOperation, ParserError> {
9430        let operation = if self.parse_keyword(Keyword::ADD) {
9431            if let Some(constraint) = self.parse_optional_table_constraint()? {
9432                let not_valid = self.parse_keywords(&[Keyword::NOT, Keyword::VALID]);
9433                AlterTableOperation::AddConstraint {
9434                    constraint,
9435                    not_valid,
9436                }
9437            } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
9438                && self.parse_keyword(Keyword::PROJECTION)
9439            {
9440                return self.parse_alter_table_add_projection();
9441            } else {
9442                let if_not_exists =
9443                    self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
9444                let mut new_partitions = vec![];
9445                loop {
9446                    if self.parse_keyword(Keyword::PARTITION) {
9447                        new_partitions.push(self.parse_partition()?);
9448                    } else {
9449                        break;
9450                    }
9451                }
9452                if !new_partitions.is_empty() {
9453                    AlterTableOperation::AddPartitions {
9454                        if_not_exists,
9455                        new_partitions,
9456                    }
9457                } else {
9458                    let column_keyword = self.parse_keyword(Keyword::COLUMN);
9459
9460                    let if_not_exists = if dialect_of!(self is PostgreSqlDialect | BigQueryDialect | DuckDbDialect | GenericDialect)
9461                    {
9462                        self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS])
9463                            || if_not_exists
9464                    } else {
9465                        false
9466                    };
9467
9468                    let column_def = self.parse_column_def()?;
9469
9470                    let column_position = self.parse_column_position()?;
9471
9472                    AlterTableOperation::AddColumn {
9473                        column_keyword,
9474                        if_not_exists,
9475                        column_def,
9476                        column_position,
9477                    }
9478                }
9479            }
9480        } else if self.parse_keyword(Keyword::RENAME) {
9481            if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::CONSTRAINT) {
9482                let old_name = self.parse_identifier()?;
9483                self.expect_keyword_is(Keyword::TO)?;
9484                let new_name = self.parse_identifier()?;
9485                AlterTableOperation::RenameConstraint { old_name, new_name }
9486            } else if self.parse_keyword(Keyword::TO) {
9487                let table_name = self.parse_object_name(false)?;
9488                AlterTableOperation::RenameTable {
9489                    table_name: RenameTableNameKind::To(table_name),
9490                }
9491            } else if self.parse_keyword(Keyword::AS) {
9492                let table_name = self.parse_object_name(false)?;
9493                AlterTableOperation::RenameTable {
9494                    table_name: RenameTableNameKind::As(table_name),
9495                }
9496            } else {
9497                let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
9498                let old_column_name = self.parse_identifier()?;
9499                self.expect_keyword_is(Keyword::TO)?;
9500                let new_column_name = self.parse_identifier()?;
9501                AlterTableOperation::RenameColumn {
9502                    old_column_name,
9503                    new_column_name,
9504                }
9505            }
9506        } else if self.parse_keyword(Keyword::DISABLE) {
9507            if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
9508                AlterTableOperation::DisableRowLevelSecurity {}
9509            } else if self.parse_keyword(Keyword::RULE) {
9510                let name = self.parse_identifier()?;
9511                AlterTableOperation::DisableRule { name }
9512            } else if self.parse_keyword(Keyword::TRIGGER) {
9513                let name = self.parse_identifier()?;
9514                AlterTableOperation::DisableTrigger { name }
9515            } else {
9516                return self.expected(
9517                    "ROW LEVEL SECURITY, RULE, or TRIGGER after DISABLE",
9518                    self.peek_token(),
9519                );
9520            }
9521        } else if self.parse_keyword(Keyword::ENABLE) {
9522            if self.parse_keywords(&[Keyword::ALWAYS, Keyword::RULE]) {
9523                let name = self.parse_identifier()?;
9524                AlterTableOperation::EnableAlwaysRule { name }
9525            } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::TRIGGER]) {
9526                let name = self.parse_identifier()?;
9527                AlterTableOperation::EnableAlwaysTrigger { name }
9528            } else if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
9529                AlterTableOperation::EnableRowLevelSecurity {}
9530            } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::RULE]) {
9531                let name = self.parse_identifier()?;
9532                AlterTableOperation::EnableReplicaRule { name }
9533            } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::TRIGGER]) {
9534                let name = self.parse_identifier()?;
9535                AlterTableOperation::EnableReplicaTrigger { name }
9536            } else if self.parse_keyword(Keyword::RULE) {
9537                let name = self.parse_identifier()?;
9538                AlterTableOperation::EnableRule { name }
9539            } else if self.parse_keyword(Keyword::TRIGGER) {
9540                let name = self.parse_identifier()?;
9541                AlterTableOperation::EnableTrigger { name }
9542            } else {
9543                return self.expected(
9544                    "ALWAYS, REPLICA, ROW LEVEL SECURITY, RULE, or TRIGGER after ENABLE",
9545                    self.peek_token(),
9546                );
9547            }
9548        } else if self.parse_keywords(&[Keyword::CLEAR, Keyword::PROJECTION])
9549            && dialect_of!(self is ClickHouseDialect|GenericDialect)
9550        {
9551            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9552            let name = self.parse_identifier()?;
9553            let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
9554                Some(self.parse_identifier()?)
9555            } else {
9556                None
9557            };
9558            AlterTableOperation::ClearProjection {
9559                if_exists,
9560                name,
9561                partition,
9562            }
9563        } else if self.parse_keywords(&[Keyword::MATERIALIZE, Keyword::PROJECTION])
9564            && dialect_of!(self is ClickHouseDialect|GenericDialect)
9565        {
9566            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9567            let name = self.parse_identifier()?;
9568            let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
9569                Some(self.parse_identifier()?)
9570            } else {
9571                None
9572            };
9573            AlterTableOperation::MaterializeProjection {
9574                if_exists,
9575                name,
9576                partition,
9577            }
9578        } else if self.parse_keyword(Keyword::DROP) {
9579            if self.parse_keywords(&[Keyword::IF, Keyword::EXISTS, Keyword::PARTITION]) {
9580                self.expect_token(&Token::LParen)?;
9581                let partitions = self.parse_comma_separated(Parser::parse_expr)?;
9582                self.expect_token(&Token::RParen)?;
9583                AlterTableOperation::DropPartitions {
9584                    partitions,
9585                    if_exists: true,
9586                }
9587            } else if self.parse_keyword(Keyword::PARTITION) {
9588                self.expect_token(&Token::LParen)?;
9589                let partitions = self.parse_comma_separated(Parser::parse_expr)?;
9590                self.expect_token(&Token::RParen)?;
9591                AlterTableOperation::DropPartitions {
9592                    partitions,
9593                    if_exists: false,
9594                }
9595            } else if self.parse_keyword(Keyword::CONSTRAINT) {
9596                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9597                let name = self.parse_identifier()?;
9598                let drop_behavior = self.parse_optional_drop_behavior();
9599                AlterTableOperation::DropConstraint {
9600                    if_exists,
9601                    name,
9602                    drop_behavior,
9603                }
9604            } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
9605                let drop_behavior = self.parse_optional_drop_behavior();
9606                AlterTableOperation::DropPrimaryKey { drop_behavior }
9607            } else if self.parse_keywords(&[Keyword::FOREIGN, Keyword::KEY]) {
9608                let name = self.parse_identifier()?;
9609                let drop_behavior = self.parse_optional_drop_behavior();
9610                AlterTableOperation::DropForeignKey {
9611                    name,
9612                    drop_behavior,
9613                }
9614            } else if self.parse_keyword(Keyword::INDEX) {
9615                let name = self.parse_identifier()?;
9616                AlterTableOperation::DropIndex { name }
9617            } else if self.parse_keyword(Keyword::PROJECTION)
9618                && dialect_of!(self is ClickHouseDialect|GenericDialect)
9619            {
9620                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9621                let name = self.parse_identifier()?;
9622                AlterTableOperation::DropProjection { if_exists, name }
9623            } else if self.parse_keywords(&[Keyword::CLUSTERING, Keyword::KEY]) {
9624                AlterTableOperation::DropClusteringKey
9625            } else {
9626                let has_column_keyword = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
9627                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9628                let column_names = if self.dialect.supports_comma_separated_drop_column_list() {
9629                    self.parse_comma_separated(Parser::parse_identifier)?
9630                } else {
9631                    vec![self.parse_identifier()?]
9632                };
9633                let drop_behavior = self.parse_optional_drop_behavior();
9634                AlterTableOperation::DropColumn {
9635                    has_column_keyword,
9636                    column_names,
9637                    if_exists,
9638                    drop_behavior,
9639                }
9640            }
9641        } else if self.parse_keyword(Keyword::PARTITION) {
9642            self.expect_token(&Token::LParen)?;
9643            let before = self.parse_comma_separated(Parser::parse_expr)?;
9644            self.expect_token(&Token::RParen)?;
9645            self.expect_keyword_is(Keyword::RENAME)?;
9646            self.expect_keywords(&[Keyword::TO, Keyword::PARTITION])?;
9647            self.expect_token(&Token::LParen)?;
9648            let renames = self.parse_comma_separated(Parser::parse_expr)?;
9649            self.expect_token(&Token::RParen)?;
9650            AlterTableOperation::RenamePartitions {
9651                old_partitions: before,
9652                new_partitions: renames,
9653            }
9654        } else if self.parse_keyword(Keyword::CHANGE) {
9655            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
9656            let old_name = self.parse_identifier()?;
9657            let new_name = self.parse_identifier()?;
9658            let data_type = self.parse_data_type()?;
9659            let mut options = vec![];
9660            while let Some(option) = self.parse_optional_column_option()? {
9661                options.push(option);
9662            }
9663
9664            let column_position = self.parse_column_position()?;
9665
9666            AlterTableOperation::ChangeColumn {
9667                old_name,
9668                new_name,
9669                data_type,
9670                options,
9671                column_position,
9672            }
9673        } else if self.parse_keyword(Keyword::MODIFY) {
9674            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
9675            let col_name = self.parse_identifier()?;
9676            let data_type = self.parse_data_type()?;
9677            let mut options = vec![];
9678            while let Some(option) = self.parse_optional_column_option()? {
9679                options.push(option);
9680            }
9681
9682            let column_position = self.parse_column_position()?;
9683
9684            AlterTableOperation::ModifyColumn {
9685                col_name,
9686                data_type,
9687                options,
9688                column_position,
9689            }
9690        } else if self.parse_keyword(Keyword::ALTER) {
9691            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
9692            let column_name = self.parse_identifier()?;
9693            let is_postgresql = dialect_of!(self is PostgreSqlDialect);
9694
9695            let op: AlterColumnOperation = if self.parse_keywords(&[
9696                Keyword::SET,
9697                Keyword::NOT,
9698                Keyword::NULL,
9699            ]) {
9700                AlterColumnOperation::SetNotNull {}
9701            } else if self.parse_keywords(&[Keyword::DROP, Keyword::NOT, Keyword::NULL]) {
9702                AlterColumnOperation::DropNotNull {}
9703            } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
9704                AlterColumnOperation::SetDefault {
9705                    value: self.parse_expr()?,
9706                }
9707            } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
9708                AlterColumnOperation::DropDefault {}
9709            } else if self.parse_keywords(&[Keyword::SET, Keyword::DATA, Keyword::TYPE]) {
9710                self.parse_set_data_type(true)?
9711            } else if self.parse_keyword(Keyword::TYPE) {
9712                self.parse_set_data_type(false)?
9713            } else if self.parse_keywords(&[Keyword::ADD, Keyword::GENERATED]) {
9714                let generated_as = if self.parse_keyword(Keyword::ALWAYS) {
9715                    Some(GeneratedAs::Always)
9716                } else if self.parse_keywords(&[Keyword::BY, Keyword::DEFAULT]) {
9717                    Some(GeneratedAs::ByDefault)
9718                } else {
9719                    None
9720                };
9721
9722                self.expect_keywords(&[Keyword::AS, Keyword::IDENTITY])?;
9723
9724                let mut sequence_options: Option<Vec<SequenceOptions>> = None;
9725
9726                if self.peek_token().token == Token::LParen {
9727                    self.expect_token(&Token::LParen)?;
9728                    sequence_options = Some(self.parse_create_sequence_options()?);
9729                    self.expect_token(&Token::RParen)?;
9730                }
9731
9732                AlterColumnOperation::AddGenerated {
9733                    generated_as,
9734                    sequence_options,
9735                }
9736            } else {
9737                let message = if is_postgresql {
9738                    "SET/DROP NOT NULL, SET DEFAULT, SET DATA TYPE, or ADD GENERATED after ALTER COLUMN"
9739                } else {
9740                    "SET/DROP NOT NULL, SET DEFAULT, or SET DATA TYPE after ALTER COLUMN"
9741                };
9742
9743                return self.expected(message, self.peek_token());
9744            };
9745            AlterTableOperation::AlterColumn { column_name, op }
9746        } else if self.parse_keyword(Keyword::SWAP) {
9747            self.expect_keyword_is(Keyword::WITH)?;
9748            let table_name = self.parse_object_name(false)?;
9749            AlterTableOperation::SwapWith { table_name }
9750        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect)
9751            && self.parse_keywords(&[Keyword::OWNER, Keyword::TO])
9752        {
9753            let new_owner = self.parse_owner()?;
9754            AlterTableOperation::OwnerTo { new_owner }
9755        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
9756            && self.parse_keyword(Keyword::ATTACH)
9757        {
9758            AlterTableOperation::AttachPartition {
9759                partition: self.parse_part_or_partition()?,
9760            }
9761        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
9762            && self.parse_keyword(Keyword::DETACH)
9763        {
9764            AlterTableOperation::DetachPartition {
9765                partition: self.parse_part_or_partition()?,
9766            }
9767        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
9768            && self.parse_keyword(Keyword::FREEZE)
9769        {
9770            let partition = self.parse_part_or_partition()?;
9771            let with_name = if self.parse_keyword(Keyword::WITH) {
9772                self.expect_keyword_is(Keyword::NAME)?;
9773                Some(self.parse_identifier()?)
9774            } else {
9775                None
9776            };
9777            AlterTableOperation::FreezePartition {
9778                partition,
9779                with_name,
9780            }
9781        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
9782            && self.parse_keyword(Keyword::UNFREEZE)
9783        {
9784            let partition = self.parse_part_or_partition()?;
9785            let with_name = if self.parse_keyword(Keyword::WITH) {
9786                self.expect_keyword_is(Keyword::NAME)?;
9787                Some(self.parse_identifier()?)
9788            } else {
9789                None
9790            };
9791            AlterTableOperation::UnfreezePartition {
9792                partition,
9793                with_name,
9794            }
9795        } else if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
9796            self.expect_token(&Token::LParen)?;
9797            let exprs = self.parse_comma_separated(|parser| parser.parse_expr())?;
9798            self.expect_token(&Token::RParen)?;
9799            AlterTableOperation::ClusterBy { exprs }
9800        } else if self.parse_keywords(&[Keyword::SUSPEND, Keyword::RECLUSTER]) {
9801            AlterTableOperation::SuspendRecluster
9802        } else if self.parse_keywords(&[Keyword::RESUME, Keyword::RECLUSTER]) {
9803            AlterTableOperation::ResumeRecluster
9804        } else if self.parse_keyword(Keyword::LOCK) {
9805            let equals = self.consume_token(&Token::Eq);
9806            let lock = match self.parse_one_of_keywords(&[
9807                Keyword::DEFAULT,
9808                Keyword::EXCLUSIVE,
9809                Keyword::NONE,
9810                Keyword::SHARED,
9811            ]) {
9812                Some(Keyword::DEFAULT) => AlterTableLock::Default,
9813                Some(Keyword::EXCLUSIVE) => AlterTableLock::Exclusive,
9814                Some(Keyword::NONE) => AlterTableLock::None,
9815                Some(Keyword::SHARED) => AlterTableLock::Shared,
9816                _ => self.expected(
9817                    "DEFAULT, EXCLUSIVE, NONE or SHARED after LOCK [=]",
9818                    self.peek_token(),
9819                )?,
9820            };
9821            AlterTableOperation::Lock { equals, lock }
9822        } else if self.parse_keyword(Keyword::ALGORITHM) {
9823            let equals = self.consume_token(&Token::Eq);
9824            let algorithm = match self.parse_one_of_keywords(&[
9825                Keyword::DEFAULT,
9826                Keyword::INSTANT,
9827                Keyword::INPLACE,
9828                Keyword::COPY,
9829            ]) {
9830                Some(Keyword::DEFAULT) => AlterTableAlgorithm::Default,
9831                Some(Keyword::INSTANT) => AlterTableAlgorithm::Instant,
9832                Some(Keyword::INPLACE) => AlterTableAlgorithm::Inplace,
9833                Some(Keyword::COPY) => AlterTableAlgorithm::Copy,
9834                _ => self.expected(
9835                    "DEFAULT, INSTANT, INPLACE, or COPY after ALGORITHM [=]",
9836                    self.peek_token(),
9837                )?,
9838            };
9839            AlterTableOperation::Algorithm { equals, algorithm }
9840        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
9841            let equals = self.consume_token(&Token::Eq);
9842            let value = self.parse_number_value()?;
9843            AlterTableOperation::AutoIncrement { equals, value }
9844        } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::IDENTITY]) {
9845            let identity = if self.parse_keyword(Keyword::NONE) {
9846                ReplicaIdentity::None
9847            } else if self.parse_keyword(Keyword::FULL) {
9848                ReplicaIdentity::Full
9849            } else if self.parse_keyword(Keyword::DEFAULT) {
9850                ReplicaIdentity::Default
9851            } else if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
9852                ReplicaIdentity::Index(self.parse_identifier()?)
9853            } else {
9854                return self.expected(
9855                    "NONE, FULL, DEFAULT, or USING INDEX index_name after REPLICA IDENTITY",
9856                    self.peek_token(),
9857                );
9858            };
9859
9860            AlterTableOperation::ReplicaIdentity { identity }
9861        } else if self.parse_keywords(&[Keyword::VALIDATE, Keyword::CONSTRAINT]) {
9862            let name = self.parse_identifier()?;
9863            AlterTableOperation::ValidateConstraint { name }
9864        } else {
9865            let mut options =
9866                self.parse_options_with_keywords(&[Keyword::SET, Keyword::TBLPROPERTIES])?;
9867            if !options.is_empty() {
9868                AlterTableOperation::SetTblProperties {
9869                    table_properties: options,
9870                }
9871            } else {
9872                options = self.parse_options(Keyword::SET)?;
9873                if !options.is_empty() {
9874                    AlterTableOperation::SetOptionsParens { options }
9875                } else {
9876                    return self.expected(
9877                        "ADD, RENAME, PARTITION, SWAP, DROP, REPLICA IDENTITY, SET, or SET TBLPROPERTIES after ALTER TABLE",
9878                        self.peek_token(),
9879                    );
9880                }
9881            }
9882        };
9883        Ok(operation)
9884    }
9885
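    /// Parse the remainder of `ALTER COLUMN .. [SET DATA] TYPE`, e.g.
    /// (illustrative) `BIGINT USING c::BIGINT` for dialects that support a
    /// trailing `USING` expression; `had_set` records whether the `SET DATA`
    /// prefix was present.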
9886    fn parse_set_data_type(&mut self, had_set: bool) -> Result<AlterColumnOperation, ParserError> {
9887        let data_type = self.parse_data_type()?;
9888        let using = if self.dialect.supports_alter_column_type_using()
9889            && self.parse_keyword(Keyword::USING)
9890        {
9891            Some(self.parse_expr()?)
9892        } else {
9893            None
9894        };
9895        Ok(AlterColumnOperation::SetDataType {
9896            data_type,
9897            using,
9898            had_set,
9899        })
9900    }
9901
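    /// Parse a `PART <expr>` or `PARTITION <expr>` clause as used by the
    /// ClickHouse-gated `ATTACH`/`DETACH`/`FREEZE`/`UNFREEZE` branches above,
    /// e.g. (illustrative) `PARTITION '2024-01-01'`.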
9902    fn parse_part_or_partition(&mut self) -> Result<Partition, ParserError> {
9903        let keyword = self.expect_one_of_keywords(&[Keyword::PART, Keyword::PARTITION])?;
9904        match keyword {
9905            Keyword::PART => Ok(Partition::Part(self.parse_expr()?)),
9906            Keyword::PARTITION => Ok(Partition::Expr(self.parse_expr()?)),
9907            // unreachable because expect_one_of_keywords used above
9908            unexpected_keyword => Err(ParserError::ParserError(
9909                format!("Internal parser error: expected any of {{PART, PARTITION}}, got {unexpected_keyword:?}"),
9910            )),
9911        }
9912    }
9913
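    /// Parse the statement following an already-consumed `ALTER` keyword,
    /// dispatching on the object type, e.g. (illustrative) `ALTER TABLE ..`,
    /// `ALTER VIEW ..`, or `ALTER ROLE ..`.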
9914    pub fn parse_alter(&mut self) -> Result<Statement, ParserError> {
9915        let object_type = self.expect_one_of_keywords(&[
9916            Keyword::VIEW,
9917            Keyword::TYPE,
9918            Keyword::TABLE,
9919            Keyword::INDEX,
9920            Keyword::ROLE,
9921            Keyword::POLICY,
9922            Keyword::CONNECTOR,
9923            Keyword::ICEBERG,
9924            Keyword::SCHEMA,
9925            Keyword::USER,
9926            Keyword::OPERATOR,
9927        ])?;
9928        match object_type {
9929            Keyword::SCHEMA => {
9930                self.prev_token();
9931                self.prev_token();
9932                self.parse_alter_schema()
9933            }
9934            Keyword::VIEW => self.parse_alter_view(),
9935            Keyword::TYPE => self.parse_alter_type(),
9936            Keyword::TABLE => self.parse_alter_table(false),
9937            Keyword::ICEBERG => {
9938                self.expect_keyword(Keyword::TABLE)?;
9939                self.parse_alter_table(true)
9940            }
9941            Keyword::INDEX => {
9942                let index_name = self.parse_object_name(false)?;
9943                let operation = if self.parse_keyword(Keyword::RENAME) {
9944                    if self.parse_keyword(Keyword::TO) {
9945                        let index_name = self.parse_object_name(false)?;
9946                        AlterIndexOperation::RenameIndex { index_name }
9947                    } else {
9948                        return self.expected("TO after RENAME", self.peek_token());
9949                    }
9950                } else {
9951                    return self.expected("RENAME after ALTER INDEX", self.peek_token());
9952                };
9953
9954                Ok(Statement::AlterIndex {
9955                    name: index_name,
9956                    operation,
9957                })
9958            }
9959            Keyword::OPERATOR => self.parse_alter_operator(),
9960            Keyword::ROLE => self.parse_alter_role(),
9961            Keyword::POLICY => self.parse_alter_policy(),
9962            Keyword::CONNECTOR => self.parse_alter_connector(),
9963            Keyword::USER => self.parse_alter_user(),
9964            // unreachable because expect_one_of_keywords used above
9965            unexpected_keyword => Err(ParserError::ParserError(
9966                format!("Internal parser error: expected any of {{VIEW, TYPE, TABLE, INDEX, ROLE, POLICY, CONNECTOR, ICEBERG, SCHEMA, USER, OPERATOR}}, got {unexpected_keyword:?}"),
9967            )),
9968        }
9969    }
9970
9971    /// Parse a [Statement::AlterTable]
9972    pub fn parse_alter_table(&mut self, iceberg: bool) -> Result<Statement, ParserError> {
9973        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9974        let only = self.parse_keyword(Keyword::ONLY); // [ ONLY ]
9975        let table_name = self.parse_object_name(false)?;
9976        let on_cluster = self.parse_optional_on_cluster()?;
9977        let operations = self.parse_comma_separated(Parser::parse_alter_table_operation)?;
9978
9979        let mut location = None;
9980        if self.parse_keyword(Keyword::LOCATION) {
9981            location = Some(HiveSetLocation {
9982                has_set: false,
9983                location: self.parse_identifier()?,
9984            });
9985        } else if self.parse_keywords(&[Keyword::SET, Keyword::LOCATION]) {
9986            location = Some(HiveSetLocation {
9987                has_set: true,
9988                location: self.parse_identifier()?,
9989            });
9990        }
9991
9992        let end_token = if self.peek_token_ref().token == Token::SemiColon {
9993            self.peek_token_ref().clone()
9994        } else {
9995            self.get_current_token().clone()
9996        };
9997
9998        Ok(AlterTable {
9999            name: table_name,
10000            if_exists,
10001            only,
10002            operations,
10003            location,
10004            on_cluster,
10005            table_type: if iceberg {
10006                Some(AlterTableType::Iceberg)
10007            } else {
10008                None
10009            },
10010            end_token: AttachedToken(end_token),
10011        }
10012        .into())
10013    }
10014
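    /// Parse the remainder of an `ALTER VIEW` statement: the view name, an
    /// optional column list, optional `WITH ( .. )` options, and the `AS`
    /// query, e.g. (illustrative) `v (a, b) AS SELECT 1, 2`.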
10015    pub fn parse_alter_view(&mut self) -> Result<Statement, ParserError> {
10016        let name = self.parse_object_name(false)?;
10017        let columns = self.parse_parenthesized_column_list(Optional, false)?;
10018
10019        let with_options = self.parse_options(Keyword::WITH)?;
10020
10021        self.expect_keyword_is(Keyword::AS)?;
10022        let query = self.parse_query()?;
10023
10024        Ok(Statement::AlterView {
10025            name,
10026            columns,
10027            query,
10028            with_options,
10029        })
10030    }
10031
10032    /// Parse a [Statement::AlterType]
10033    pub fn parse_alter_type(&mut self) -> Result<Statement, ParserError> {
10034        let name = self.parse_object_name(false)?;
10035
10036        if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
10037            let new_name = self.parse_identifier()?;
10038            Ok(Statement::AlterType(AlterType {
10039                name,
10040                operation: AlterTypeOperation::Rename(AlterTypeRename { new_name }),
10041            }))
10042        } else if self.parse_keywords(&[Keyword::ADD, Keyword::VALUE]) {
10043            let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
10044            let new_enum_value = self.parse_identifier()?;
10045            let position = if self.parse_keyword(Keyword::BEFORE) {
10046                Some(AlterTypeAddValuePosition::Before(self.parse_identifier()?))
10047            } else if self.parse_keyword(Keyword::AFTER) {
10048                Some(AlterTypeAddValuePosition::After(self.parse_identifier()?))
10049            } else {
10050                None
10051            };
10052
10053            Ok(Statement::AlterType(AlterType {
10054                name,
10055                operation: AlterTypeOperation::AddValue(AlterTypeAddValue {
10056                    if_not_exists,
10057                    value: new_enum_value,
10058                    position,
10059                }),
10060            }))
10061        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::VALUE]) {
10062            let existing_enum_value = self.parse_identifier()?;
10063            self.expect_keyword(Keyword::TO)?;
10064            let new_enum_value = self.parse_identifier()?;
10065
10066            Ok(Statement::AlterType(AlterType {
10067                name,
10068                operation: AlterTypeOperation::RenameValue(AlterTypeRenameValue {
10069                    from: existing_enum_value,
10070                    to: new_enum_value,
10071                }),
10072            }))
10073        } else {
10074            self.expected_ref(
10075                "{RENAME TO | { RENAME | ADD } VALUE}",
10076                self.peek_token_ref(),
10077            )
10078        }
10079    }
10080
10081    /// Parse a [Statement::AlterOperator]
10082    ///
10083    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-alteroperator.html)
10084    pub fn parse_alter_operator(&mut self) -> Result<Statement, ParserError> {
10085        let name = self.parse_operator_name()?;
10086
10087        // Parse (left_type, right_type)
10088        self.expect_token(&Token::LParen)?;
10089
10090        let left_type = if self.parse_keyword(Keyword::NONE) {
10091            None
10092        } else {
10093            Some(self.parse_data_type()?)
10094        };
10095
10096        self.expect_token(&Token::Comma)?;
10097        let right_type = self.parse_data_type()?;
10098        self.expect_token(&Token::RParen)?;
10099
10100        // Parse the operation
10101        let operation = if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
10102            let owner = if self.parse_keyword(Keyword::CURRENT_ROLE) {
10103                Owner::CurrentRole
10104            } else if self.parse_keyword(Keyword::CURRENT_USER) {
10105                Owner::CurrentUser
10106            } else if self.parse_keyword(Keyword::SESSION_USER) {
10107                Owner::SessionUser
10108            } else {
10109                Owner::Ident(self.parse_identifier()?)
10110            };
10111            AlterOperatorOperation::OwnerTo(owner)
10112        } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
10113            let schema_name = self.parse_object_name(false)?;
10114            AlterOperatorOperation::SetSchema { schema_name }
10115        } else if self.parse_keyword(Keyword::SET) {
10116            self.expect_token(&Token::LParen)?;
10117
10118            let mut options = Vec::new();
10119            loop {
10120                let keyword = self.expect_one_of_keywords(&[
10121                    Keyword::RESTRICT,
10122                    Keyword::JOIN,
10123                    Keyword::COMMUTATOR,
10124                    Keyword::NEGATOR,
10125                    Keyword::HASHES,
10126                    Keyword::MERGES,
10127                ])?;
10128
10129                match keyword {
10130                    Keyword::RESTRICT => {
10131                        self.expect_token(&Token::Eq)?;
10132                        let proc_name = if self.parse_keyword(Keyword::NONE) {
10133                            None
10134                        } else {
10135                            Some(self.parse_object_name(false)?)
10136                        };
10137                        options.push(OperatorOption::Restrict(proc_name));
10138                    }
10139                    Keyword::JOIN => {
10140                        self.expect_token(&Token::Eq)?;
10141                        let proc_name = if self.parse_keyword(Keyword::NONE) {
10142                            None
10143                        } else {
10144                            Some(self.parse_object_name(false)?)
10145                        };
10146                        options.push(OperatorOption::Join(proc_name));
10147                    }
10148                    Keyword::COMMUTATOR => {
10149                        self.expect_token(&Token::Eq)?;
10150                        let op_name = self.parse_operator_name()?;
10151                        options.push(OperatorOption::Commutator(op_name));
10152                    }
10153                    Keyword::NEGATOR => {
10154                        self.expect_token(&Token::Eq)?;
10155                        let op_name = self.parse_operator_name()?;
10156                        options.push(OperatorOption::Negator(op_name));
10157                    }
10158                    Keyword::HASHES => {
10159                        options.push(OperatorOption::Hashes);
10160                    }
10161                    Keyword::MERGES => {
10162                        options.push(OperatorOption::Merges);
10163                    }
10164                    unexpected_keyword => return Err(ParserError::ParserError(
10165                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in operator option"),
10166                    )),
10167                }
10168
10169                if !self.consume_token(&Token::Comma) {
10170                    break;
10171                }
10172            }
10173
10174            self.expect_token(&Token::RParen)?;
10175            AlterOperatorOperation::Set { options }
10176        } else {
10177            return self.expected_ref(
10178                "OWNER TO, SET SCHEMA, or SET after ALTER OPERATOR",
10179                self.peek_token_ref(),
10180            );
10181        };
10182
10183        Ok(Statement::AlterOperator(AlterOperator {
10184            name,
10185            left_type,
10186            right_type,
10187            operation,
10188        }))
10189    }
10190
10191    /// Parse a [Statement::AlterSchema]
10192    /// `ALTER SCHEMA [ IF EXISTS ] schema_name <operation>`
10193    pub fn parse_alter_schema(&mut self) -> Result<Statement, ParserError> {
10194        self.expect_keywords(&[Keyword::ALTER, Keyword::SCHEMA])?;
10195        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10196        let name = self.parse_object_name(false)?;
10197        let operation = if self.parse_keywords(&[Keyword::SET, Keyword::OPTIONS]) {
10198            self.prev_token();
10199            let options = self.parse_options(Keyword::OPTIONS)?;
10200            AlterSchemaOperation::SetOptionsParens { options }
10201        } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT, Keyword::COLLATE]) {
10202            let collate = self.parse_expr()?;
10203            AlterSchemaOperation::SetDefaultCollate { collate }
10204        } else if self.parse_keywords(&[Keyword::ADD, Keyword::REPLICA]) {
10205            let replica = self.parse_identifier()?;
10206            let options = if self.peek_keyword(Keyword::OPTIONS) {
10207                Some(self.parse_options(Keyword::OPTIONS)?)
10208            } else {
10209                None
10210            };
10211            AlterSchemaOperation::AddReplica { replica, options }
10212        } else if self.parse_keywords(&[Keyword::DROP, Keyword::REPLICA]) {
10213            let replica = self.parse_identifier()?;
10214            AlterSchemaOperation::DropReplica { replica }
10215        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
10216            let new_name = self.parse_object_name(false)?;
10217            AlterSchemaOperation::Rename { name: new_name }
10218        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
10219            let owner = self.parse_owner()?;
10220            AlterSchemaOperation::OwnerTo { owner }
10221        } else {
10222            return self.expected_ref("ALTER SCHEMA operation", self.peek_token_ref());
10223        };
10224        Ok(Statement::AlterSchema(AlterSchema {
10225            name,
10226            if_exists,
10227            operations: vec![operation],
10228        }))
10229    }
10230
10231    /// Parse a `CALL procedure_name(arg1, arg2, ...)`
10232    /// or `CALL procedure_name` statement
10233    pub fn parse_call(&mut self) -> Result<Statement, ParserError> {
10234        let object_name = self.parse_object_name(false)?;
10235        if self.peek_token().token == Token::LParen {
10236            match self.parse_function(object_name)? {
10237                Expr::Function(f) => Ok(Statement::Call(f)),
10238                other => parser_err!(
10239                    format!("Expected a simple procedure call but found: {other}"),
10240                    self.peek_token().span.start
10241                ),
10242            }
10243        } else {
10244            Ok(Statement::Call(Function {
10245                name: object_name,
10246                uses_odbc_syntax: false,
10247                parameters: FunctionArguments::None,
10248                args: FunctionArguments::None,
10249                over: None,
10250                filter: None,
10251                null_treatment: None,
10252                within_group: vec![],
10253            }))
10254        }
10255    }
10256
10257    /// Parse a copy statement
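    ///
    /// For example (illustrative):
    /// ```sql
    /// COPY t (a, b) FROM STDIN
    /// COPY (SELECT * FROM t) TO 'out.csv' WITH (FORMAT csv, HEADER true)
    /// ```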
10258    pub fn parse_copy(&mut self) -> Result<Statement, ParserError> {
10259        let source;
10260        if self.consume_token(&Token::LParen) {
10261            source = CopySource::Query(self.parse_query()?);
10262            self.expect_token(&Token::RParen)?;
10263        } else {
10264            let table_name = self.parse_object_name(false)?;
10265            let columns = self.parse_parenthesized_column_list(Optional, false)?;
10266            source = CopySource::Table {
10267                table_name,
10268                columns,
10269            };
10270        }
10271        let to = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::TO]) {
10272            Some(Keyword::FROM) => false,
10273            Some(Keyword::TO) => true,
10274            _ => self.expected("FROM or TO", self.peek_token())?,
10275        };
10276        if !to {
10277            // Use a separate `if` statement to prevent the Rust compiler from complaining about
10278            // "if statement in this position is unstable: https://github.com/rust-lang/rust/issues/53667"
10279            if let CopySource::Query(_) = source {
10280                return Err(ParserError::ParserError(
10281                    "COPY ... FROM does not support query as a source".to_string(),
10282                ));
10283            }
10284        }
10285        let target = if self.parse_keyword(Keyword::STDIN) {
10286            CopyTarget::Stdin
10287        } else if self.parse_keyword(Keyword::STDOUT) {
10288            CopyTarget::Stdout
10289        } else if self.parse_keyword(Keyword::PROGRAM) {
10290            CopyTarget::Program {
10291                command: self.parse_literal_string()?,
10292            }
10293        } else {
10294            CopyTarget::File {
10295                filename: self.parse_literal_string()?,
10296            }
10297        };
10298        let _ = self.parse_keyword(Keyword::WITH); // [ WITH ]
10299        let mut options = vec![];
10300        if self.consume_token(&Token::LParen) {
10301            options = self.parse_comma_separated(Parser::parse_copy_option)?;
10302            self.expect_token(&Token::RParen)?;
10303        }
10304        let mut legacy_options = vec![];
10305        while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
10306            legacy_options.push(opt);
10307        }
10308        let values = if let CopyTarget::Stdin = target {
10309            self.expect_token(&Token::SemiColon)?;
10310            self.parse_tsv()
10311        } else {
10312            vec![]
10313        };
10314        Ok(Statement::Copy {
10315            source,
10316            to,
10317            target,
10318            options,
10319            legacy_options,
10320            values,
10321        })
10322    }
10323
10324    /// Parse [Statement::Open]
10325    fn parse_open(&mut self) -> Result<Statement, ParserError> {
10326        self.expect_keyword(Keyword::OPEN)?;
10327        Ok(Statement::Open(OpenStatement {
10328            cursor_name: self.parse_identifier()?,
10329        }))
10330    }
10331
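    /// Parse a `CLOSE` cursor statement, e.g. (illustrative) `CLOSE ALL`
    /// or `CLOSE my_cursor`.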
10332    pub fn parse_close(&mut self) -> Result<Statement, ParserError> {
10333        let cursor = if self.parse_keyword(Keyword::ALL) {
10334            CloseCursor::All
10335        } else {
10336            let name = self.parse_identifier()?;
10337
10338            CloseCursor::Specific { name }
10339        };
10340
10341        Ok(Statement::Close { cursor })
10342    }
10343
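    /// Parse a single option inside `COPY .. WITH ( .. )`, e.g. (illustrative)
    /// `FORMAT csv`, `DELIMITER ','`, or `FORCE_QUOTE (a, b)`.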
10344    fn parse_copy_option(&mut self) -> Result<CopyOption, ParserError> {
10345        let ret = match self.parse_one_of_keywords(&[
10346            Keyword::FORMAT,
10347            Keyword::FREEZE,
10348            Keyword::DELIMITER,
10349            Keyword::NULL,
10350            Keyword::HEADER,
10351            Keyword::QUOTE,
10352            Keyword::ESCAPE,
10353            Keyword::FORCE_QUOTE,
10354            Keyword::FORCE_NOT_NULL,
10355            Keyword::FORCE_NULL,
10356            Keyword::ENCODING,
10357        ]) {
10358            Some(Keyword::FORMAT) => CopyOption::Format(self.parse_identifier()?),
10359            Some(Keyword::FREEZE) => CopyOption::Freeze(!matches!(
10360                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
10361                Some(Keyword::FALSE)
10362            )),
10363            Some(Keyword::DELIMITER) => CopyOption::Delimiter(self.parse_literal_char()?),
10364            Some(Keyword::NULL) => CopyOption::Null(self.parse_literal_string()?),
10365            Some(Keyword::HEADER) => CopyOption::Header(!matches!(
10366                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
10367                Some(Keyword::FALSE)
10368            )),
10369            Some(Keyword::QUOTE) => CopyOption::Quote(self.parse_literal_char()?),
10370            Some(Keyword::ESCAPE) => CopyOption::Escape(self.parse_literal_char()?),
10371            Some(Keyword::FORCE_QUOTE) => {
10372                CopyOption::ForceQuote(self.parse_parenthesized_column_list(Mandatory, false)?)
10373            }
10374            Some(Keyword::FORCE_NOT_NULL) => {
10375                CopyOption::ForceNotNull(self.parse_parenthesized_column_list(Mandatory, false)?)
10376            }
10377            Some(Keyword::FORCE_NULL) => {
10378                CopyOption::ForceNull(self.parse_parenthesized_column_list(Mandatory, false)?)
10379            }
10380            Some(Keyword::ENCODING) => CopyOption::Encoding(self.parse_literal_string()?),
10381            _ => self.expected("option", self.peek_token())?,
10382        };
10383        Ok(ret)
10384    }
10385
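    /// Parse a single legacy `COPY` option (pre-9.0 PostgreSQL / Redshift
    /// style) written without surrounding parentheses, e.g. (illustrative)
    /// `CSV HEADER`, `DELIMITER AS ','`, or `GZIP`.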
10386    fn parse_copy_legacy_option(&mut self) -> Result<CopyLegacyOption, ParserError> {
10387        // FORMAT [ AS ] is optional
10388        if self.parse_keyword(Keyword::FORMAT) {
10389            let _ = self.parse_keyword(Keyword::AS);
10390        }
10391
10392        let ret = match self.parse_one_of_keywords(&[
10393            Keyword::ACCEPTANYDATE,
10394            Keyword::ACCEPTINVCHARS,
10395            Keyword::ADDQUOTES,
10396            Keyword::ALLOWOVERWRITE,
10397            Keyword::BINARY,
10398            Keyword::BLANKSASNULL,
10399            Keyword::BZIP2,
10400            Keyword::CLEANPATH,
10401            Keyword::COMPUPDATE,
10402            Keyword::CSV,
10403            Keyword::DATEFORMAT,
10404            Keyword::DELIMITER,
10405            Keyword::EMPTYASNULL,
10406            Keyword::ENCRYPTED,
10407            Keyword::ESCAPE,
10408            Keyword::EXTENSION,
10409            Keyword::FIXEDWIDTH,
10410            Keyword::GZIP,
10411            Keyword::HEADER,
10412            Keyword::IAM_ROLE,
10413            Keyword::IGNOREHEADER,
10414            Keyword::JSON,
10415            Keyword::MANIFEST,
10416            Keyword::MAXFILESIZE,
10417            Keyword::NULL,
10418            Keyword::PARALLEL,
10419            Keyword::PARQUET,
10420            Keyword::PARTITION,
10421            Keyword::REGION,
10422            Keyword::REMOVEQUOTES,
10423            Keyword::ROWGROUPSIZE,
10424            Keyword::STATUPDATE,
10425            Keyword::TIMEFORMAT,
10426            Keyword::TRUNCATECOLUMNS,
10427            Keyword::ZSTD,
10428        ]) {
10429            Some(Keyword::ACCEPTANYDATE) => CopyLegacyOption::AcceptAnyDate,
10430            Some(Keyword::ACCEPTINVCHARS) => {
10431                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
10432                let ch = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) {
10433                    Some(self.parse_literal_string()?)
10434                } else {
10435                    None
10436                };
10437                CopyLegacyOption::AcceptInvChars(ch)
10438            }
10439            Some(Keyword::ADDQUOTES) => CopyLegacyOption::AddQuotes,
10440            Some(Keyword::ALLOWOVERWRITE) => CopyLegacyOption::AllowOverwrite,
10441            Some(Keyword::BINARY) => CopyLegacyOption::Binary,
10442            Some(Keyword::BLANKSASNULL) => CopyLegacyOption::BlankAsNull,
10443            Some(Keyword::BZIP2) => CopyLegacyOption::Bzip2,
10444            Some(Keyword::CLEANPATH) => CopyLegacyOption::CleanPath,
10445            Some(Keyword::COMPUPDATE) => {
10446                let preset = self.parse_keyword(Keyword::PRESET);
10447                let enabled = match self.parse_one_of_keywords(&[
10448                    Keyword::TRUE,
10449                    Keyword::FALSE,
10450                    Keyword::ON,
10451                    Keyword::OFF,
10452                ]) {
10453                    Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
10454                    Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
10455                    _ => None,
10456                };
10457                CopyLegacyOption::CompUpdate { preset, enabled }
10458            }
10459            Some(Keyword::CSV) => CopyLegacyOption::Csv({
10460                let mut opts = vec![];
10461                while let Some(opt) =
10462                    self.maybe_parse(|parser| parser.parse_copy_legacy_csv_option())?
10463                {
10464                    opts.push(opt);
10465                }
10466                opts
10467            }),
10468            Some(Keyword::DATEFORMAT) => {
10469                let _ = self.parse_keyword(Keyword::AS);
10470                let fmt = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) {
10471                    Some(self.parse_literal_string()?)
10472                } else {
10473                    None
10474                };
10475                CopyLegacyOption::DateFormat(fmt)
10476            }
10477            Some(Keyword::DELIMITER) => {
10478                let _ = self.parse_keyword(Keyword::AS);
10479                CopyLegacyOption::Delimiter(self.parse_literal_char()?)
10480            }
10481            Some(Keyword::EMPTYASNULL) => CopyLegacyOption::EmptyAsNull,
10482            Some(Keyword::ENCRYPTED) => {
10483                let auto = self.parse_keyword(Keyword::AUTO);
10484                CopyLegacyOption::Encrypted { auto }
10485            }
10486            Some(Keyword::ESCAPE) => CopyLegacyOption::Escape,
10487            Some(Keyword::EXTENSION) => {
10488                let ext = self.parse_literal_string()?;
10489                CopyLegacyOption::Extension(ext)
10490            }
10491            Some(Keyword::FIXEDWIDTH) => {
10492                let spec = self.parse_literal_string()?;
10493                CopyLegacyOption::FixedWidth(spec)
10494            }
10495            Some(Keyword::GZIP) => CopyLegacyOption::Gzip,
10496            Some(Keyword::HEADER) => CopyLegacyOption::Header,
10497            Some(Keyword::IAM_ROLE) => CopyLegacyOption::IamRole(self.parse_iam_role_kind()?),
10498            Some(Keyword::IGNOREHEADER) => {
10499                let _ = self.parse_keyword(Keyword::AS);
10500                let num_rows = self.parse_literal_uint()?;
10501                CopyLegacyOption::IgnoreHeader(num_rows)
10502            }
10503            Some(Keyword::JSON) => CopyLegacyOption::Json,
10504            Some(Keyword::MANIFEST) => {
10505                let verbose = self.parse_keyword(Keyword::VERBOSE);
10506                CopyLegacyOption::Manifest { verbose }
10507            }
10508            Some(Keyword::MAXFILESIZE) => {
10509                let _ = self.parse_keyword(Keyword::AS);
10510                let size = self.parse_number_value()?.value;
10511                let unit = match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
10512                    Some(Keyword::MB) => Some(FileSizeUnit::MB),
10513                    Some(Keyword::GB) => Some(FileSizeUnit::GB),
10514                    _ => None,
10515                };
10516                CopyLegacyOption::MaxFileSize(FileSize { size, unit })
10517            }
10518            Some(Keyword::NULL) => {
10519                let _ = self.parse_keyword(Keyword::AS);
10520                CopyLegacyOption::Null(self.parse_literal_string()?)
10521            }
10522            Some(Keyword::PARALLEL) => {
10523                let enabled = match self.parse_one_of_keywords(&[
10524                    Keyword::TRUE,
10525                    Keyword::FALSE,
10526                    Keyword::ON,
10527                    Keyword::OFF,
10528                ]) {
10529                    Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
10530                    Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
10531                    _ => None,
10532                };
10533                CopyLegacyOption::Parallel(enabled)
10534            }
10535            Some(Keyword::PARQUET) => CopyLegacyOption::Parquet,
10536            Some(Keyword::PARTITION) => {
10537                self.expect_keyword(Keyword::BY)?;
10538                let columns = self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?;
10539                let include = self.parse_keyword(Keyword::INCLUDE);
10540                CopyLegacyOption::PartitionBy(UnloadPartitionBy { columns, include })
10541            }
10542            Some(Keyword::REGION) => {
10543                let _ = self.parse_keyword(Keyword::AS);
10544                let region = self.parse_literal_string()?;
10545                CopyLegacyOption::Region(region)
10546            }
10547            Some(Keyword::REMOVEQUOTES) => CopyLegacyOption::RemoveQuotes,
10548            Some(Keyword::ROWGROUPSIZE) => {
10549                let _ = self.parse_keyword(Keyword::AS);
10550                let file_size = self.parse_file_size()?;
10551                CopyLegacyOption::RowGroupSize(file_size)
10552            }
10553            Some(Keyword::STATUPDATE) => {
10554                let enabled = match self.parse_one_of_keywords(&[
10555                    Keyword::TRUE,
10556                    Keyword::FALSE,
10557                    Keyword::ON,
10558                    Keyword::OFF,
10559                ]) {
10560                    Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
10561                    Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
10562                    _ => None,
10563                };
10564                CopyLegacyOption::StatUpdate(enabled)
10565            }
10566            Some(Keyword::TIMEFORMAT) => {
10567                let _ = self.parse_keyword(Keyword::AS);
10568                let fmt = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) {
10569                    Some(self.parse_literal_string()?)
10570                } else {
10571                    None
10572                };
10573                CopyLegacyOption::TimeFormat(fmt)
10574            }
10575            Some(Keyword::TRUNCATECOLUMNS) => CopyLegacyOption::TruncateColumns,
10576            Some(Keyword::ZSTD) => CopyLegacyOption::Zstd,
10577            _ => self.expected("option", self.peek_token())?,
10578        };
10579        Ok(ret)
10580    }
10581
10582    fn parse_file_size(&mut self) -> Result<FileSize, ParserError> {
10583        let size = self.parse_number_value()?.value;
10584        let unit = self.maybe_parse_file_size_unit();
10585        Ok(FileSize { size, unit })
10586    }
10587
10588    fn maybe_parse_file_size_unit(&mut self) -> Option<FileSizeUnit> {
10589        match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
10590            Some(Keyword::MB) => Some(FileSizeUnit::MB),
10591            Some(Keyword::GB) => Some(FileSizeUnit::GB),
10592            _ => None,
10593        }
10594    }
10595
10596    fn parse_iam_role_kind(&mut self) -> Result<IamRoleKind, ParserError> {
10597        if self.parse_keyword(Keyword::DEFAULT) {
10598            Ok(IamRoleKind::Default)
10599        } else {
10600            let arn = self.parse_literal_string()?;
10601            Ok(IamRoleKind::Arn(arn))
10602        }
10603    }
10604
10605    fn parse_copy_legacy_csv_option(&mut self) -> Result<CopyLegacyCsvOption, ParserError> {
10606        let ret = match self.parse_one_of_keywords(&[
10607            Keyword::HEADER,
10608            Keyword::QUOTE,
10609            Keyword::ESCAPE,
10610            Keyword::FORCE,
10611        ]) {
10612            Some(Keyword::HEADER) => CopyLegacyCsvOption::Header,
10613            Some(Keyword::QUOTE) => {
10614                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
10615                CopyLegacyCsvOption::Quote(self.parse_literal_char()?)
10616            }
10617            Some(Keyword::ESCAPE) => {
10618                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
10619                CopyLegacyCsvOption::Escape(self.parse_literal_char()?)
10620            }
10621            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) => {
10622                CopyLegacyCsvOption::ForceNotNull(
10623                    self.parse_comma_separated(|p| p.parse_identifier())?,
10624                )
10625            }
10626            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::QUOTE]) => {
10627                CopyLegacyCsvOption::ForceQuote(
10628                    self.parse_comma_separated(|p| p.parse_identifier())?,
10629                )
10630            }
10631            _ => self.expected("csv option", self.peek_token())?,
10632        };
10633        Ok(ret)
10634    }
10635
10636    fn parse_literal_char(&mut self) -> Result<char, ParserError> {
10637        let s = self.parse_literal_string()?;
10638        if s.len() != 1 {
10639            let loc = self
10640                .tokens
10641                .get(self.index - 1)
10642                .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
10643            return parser_err!(format!("Expect a char, found {s:?}"), loc);
10644        }
10645        Ok(s.chars().next().unwrap())
10646    }
10647
10648    /// Parse a tab separated values in
10649    /// COPY payload
10650    pub fn parse_tsv(&mut self) -> Vec<Option<String>> {
10651        self.parse_tab_value()
10652    }
10653
10654    pub fn parse_tab_value(&mut self) -> Vec<Option<String>> {
10655        let mut values = vec![];
10656        let mut content = String::from("");
10657        while let Some(t) = self.next_token_no_skip().map(|t| &t.token) {
10658            match t {
10659                Token::Whitespace(Whitespace::Tab) => {
10660                    values.push(Some(content.to_string()));
10661                    content.clear();
10662                }
10663                Token::Whitespace(Whitespace::Newline) => {
10664                    values.push(Some(content.to_string()));
10665                    content.clear();
10666                }
10667                Token::Backslash => {
10668                    if self.consume_token(&Token::Period) {
10669                        return values;
10670                    }
10671                    if let Token::Word(w) = self.next_token().token {
10672                        if w.value == "N" {
10673                            values.push(None);
10674                        }
10675                    }
10676                }
10677                _ => {
10678                    content.push_str(&t.to_string());
10679                }
10680            }
10681        }
10682        values
10683    }
10684
10685    /// Parse a literal value (numbers, strings, date/time, booleans)
10686    pub fn parse_value(&mut self) -> Result<ValueWithSpan, ParserError> {
10687        let next_token = self.next_token();
10688        let span = next_token.span;
10689        let ok_value = |value: Value| Ok(value.with_span(span));
10690        match next_token.token {
10691            Token::Word(w) => match w.keyword {
10692                Keyword::TRUE if self.dialect.supports_boolean_literals() => {
10693                    ok_value(Value::Boolean(true))
10694                }
10695                Keyword::FALSE if self.dialect.supports_boolean_literals() => {
10696                    ok_value(Value::Boolean(false))
10697                }
10698                Keyword::NULL => ok_value(Value::Null),
10699                Keyword::NoKeyword if w.quote_style.is_some() => match w.quote_style {
10700                    Some('"') => ok_value(Value::DoubleQuotedString(w.value)),
10701                    Some('\'') => ok_value(Value::SingleQuotedString(w.value)),
10702                    _ => self.expected(
10703                        "A value?",
10704                        TokenWithSpan {
10705                            token: Token::Word(w),
10706                            span,
10707                        },
10708                    )?,
10709                },
10710                _ => self.expected(
10711                    "a concrete value",
10712                    TokenWithSpan {
10713                        token: Token::Word(w),
10714                        span,
10715                    },
10716                ),
10717            },
10718            // The call to n.parse() returns a bigdecimal when the
10719            // bigdecimal feature is enabled, and is otherwise a no-op
10720            // (i.e., it returns the input string).
10721            Token::Number(n, l) => ok_value(Value::Number(Self::parse(n, span.start)?, l)),
10722            Token::SingleQuotedString(ref s) => ok_value(Value::SingleQuotedString(
10723                self.maybe_concat_string_literal(s.to_string()),
10724            )),
10725            Token::DoubleQuotedString(ref s) => ok_value(Value::DoubleQuotedString(
10726                self.maybe_concat_string_literal(s.to_string()),
10727            )),
10728            Token::TripleSingleQuotedString(ref s) => {
10729                ok_value(Value::TripleSingleQuotedString(s.to_string()))
10730            }
10731            Token::TripleDoubleQuotedString(ref s) => {
10732                ok_value(Value::TripleDoubleQuotedString(s.to_string()))
10733            }
10734            Token::DollarQuotedString(ref s) => ok_value(Value::DollarQuotedString(s.clone())),
10735            Token::SingleQuotedByteStringLiteral(ref s) => {
10736                ok_value(Value::SingleQuotedByteStringLiteral(s.clone()))
10737            }
10738            Token::DoubleQuotedByteStringLiteral(ref s) => {
10739                ok_value(Value::DoubleQuotedByteStringLiteral(s.clone()))
10740            }
10741            Token::TripleSingleQuotedByteStringLiteral(ref s) => {
10742                ok_value(Value::TripleSingleQuotedByteStringLiteral(s.clone()))
10743            }
10744            Token::TripleDoubleQuotedByteStringLiteral(ref s) => {
10745                ok_value(Value::TripleDoubleQuotedByteStringLiteral(s.clone()))
10746            }
10747            Token::SingleQuotedRawStringLiteral(ref s) => {
10748                ok_value(Value::SingleQuotedRawStringLiteral(s.clone()))
10749            }
10750            Token::DoubleQuotedRawStringLiteral(ref s) => {
10751                ok_value(Value::DoubleQuotedRawStringLiteral(s.clone()))
10752            }
10753            Token::TripleSingleQuotedRawStringLiteral(ref s) => {
10754                ok_value(Value::TripleSingleQuotedRawStringLiteral(s.clone()))
10755            }
10756            Token::TripleDoubleQuotedRawStringLiteral(ref s) => {
10757                ok_value(Value::TripleDoubleQuotedRawStringLiteral(s.clone()))
10758            }
10759            Token::NationalStringLiteral(ref s) => {
10760                ok_value(Value::NationalStringLiteral(s.to_string()))
10761            }
10762            Token::EscapedStringLiteral(ref s) => {
10763                ok_value(Value::EscapedStringLiteral(s.to_string()))
10764            }
10765            Token::UnicodeStringLiteral(ref s) => {
10766                ok_value(Value::UnicodeStringLiteral(s.to_string()))
10767            }
10768            Token::HexStringLiteral(ref s) => ok_value(Value::HexStringLiteral(s.to_string())),
10769            Token::Placeholder(ref s) => ok_value(Value::Placeholder(s.to_string())),
10770            tok @ Token::Colon | tok @ Token::AtSign => {
10771                // 1. Not calling self.parse_identifier(false)?
10772                //    because only in placeholder we want to check
10773                //    numbers as idfentifies.  This because snowflake
10774                //    allows numbers as placeholders
10775                // 2. Not calling self.next_token() to enforce `tok`
10776                //    be followed immediately by a word/number, ie.
10777                //    without any whitespace in between
10778                let next_token = self.next_token_no_skip().unwrap_or(&EOF_TOKEN).clone();
10779                let ident = match next_token.token {
10780                    Token::Word(w) => Ok(w.into_ident(next_token.span)),
10781                    Token::Number(w, false) => Ok(Ident::with_span(next_token.span, w)),
10782                    _ => self.expected("placeholder", next_token),
10783                }?;
10784                Ok(Value::Placeholder(tok.to_string() + &ident.value)
10785                    .with_span(Span::new(span.start, ident.span.end)))
10786            }
10787            unexpected => self.expected(
10788                "a value",
10789                TokenWithSpan {
10790                    token: unexpected,
10791                    span,
10792                },
10793            ),
10794        }
10795    }
10796
10797    fn maybe_concat_string_literal(&mut self, mut str: String) -> String {
10798        if self.dialect.supports_string_literal_concatenation() {
10799            while let Token::SingleQuotedString(ref s) | Token::DoubleQuotedString(ref s) =
10800                self.peek_token_ref().token
10801            {
10802                str.push_str(s.clone().as_str());
10803                self.advance_token();
10804            }
10805        }
10806        str
10807    }
10808
10809    /// Parse an unsigned numeric literal
10810    pub fn parse_number_value(&mut self) -> Result<ValueWithSpan, ParserError> {
10811        let value_wrapper = self.parse_value()?;
10812        match &value_wrapper.value {
10813            Value::Number(_, _) => Ok(value_wrapper),
10814            Value::Placeholder(_) => Ok(value_wrapper),
10815            _ => {
10816                self.prev_token();
10817                self.expected("literal number", self.peek_token())
10818            }
10819        }
10820    }
10821
10822    /// Parse a numeric literal as an expression. Returns a [`Expr::UnaryOp`] if the number is signed,
10823    /// otherwise returns a [`Expr::Value`]
10824    pub fn parse_number(&mut self) -> Result<Expr, ParserError> {
10825        let next_token = self.next_token();
10826        match next_token.token {
10827            Token::Plus => Ok(Expr::UnaryOp {
10828                op: UnaryOperator::Plus,
10829                expr: Box::new(Expr::Value(self.parse_number_value()?)),
10830            }),
10831            Token::Minus => Ok(Expr::UnaryOp {
10832                op: UnaryOperator::Minus,
10833                expr: Box::new(Expr::Value(self.parse_number_value()?)),
10834            }),
10835            _ => {
10836                self.prev_token();
10837                Ok(Expr::Value(self.parse_number_value()?))
10838            }
10839        }
10840    }
10841
10842    fn parse_introduced_string_expr(&mut self) -> Result<Expr, ParserError> {
10843        let next_token = self.next_token();
10844        let span = next_token.span;
10845        match next_token.token {
10846            Token::SingleQuotedString(ref s) => Ok(Expr::Value(
10847                Value::SingleQuotedString(s.to_string()).with_span(span),
10848            )),
10849            Token::DoubleQuotedString(ref s) => Ok(Expr::Value(
10850                Value::DoubleQuotedString(s.to_string()).with_span(span),
10851            )),
10852            Token::HexStringLiteral(ref s) => Ok(Expr::Value(
10853                Value::HexStringLiteral(s.to_string()).with_span(span),
10854            )),
10855            unexpected => self.expected(
10856                "a string value",
10857                TokenWithSpan {
10858                    token: unexpected,
10859                    span,
10860                },
10861            ),
10862        }
10863    }
10864
10865    /// Parse an unsigned literal integer/long
10866    pub fn parse_literal_uint(&mut self) -> Result<u64, ParserError> {
10867        let next_token = self.next_token();
10868        match next_token.token {
10869            Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start),
10870            _ => self.expected("literal int", next_token),
10871        }
10872    }
10873
10874    /// Parse the body of a `CREATE FUNCTION` specified as a string.
10875    /// e.g. `CREATE FUNCTION ... AS $$ body $$`.
10876    fn parse_create_function_body_string(&mut self) -> Result<CreateFunctionBody, ParserError> {
10877        let parse_string_expr = |parser: &mut Parser| -> Result<Expr, ParserError> {
10878            let peek_token = parser.peek_token();
10879            let span = peek_token.span;
10880            match peek_token.token {
10881                Token::DollarQuotedString(s) if dialect_of!(parser is PostgreSqlDialect | GenericDialect) =>
10882                {
10883                    parser.next_token();
10884                    Ok(Expr::Value(Value::DollarQuotedString(s).with_span(span)))
10885                }
10886                _ => Ok(Expr::Value(
10887                    Value::SingleQuotedString(parser.parse_literal_string()?).with_span(span),
10888                )),
10889            }
10890        };
10891
10892        Ok(CreateFunctionBody::AsBeforeOptions {
10893            body: parse_string_expr(self)?,
10894            link_symbol: if self.consume_token(&Token::Comma) {
10895                Some(parse_string_expr(self)?)
10896            } else {
10897                None
10898            },
10899        })
10900    }
10901
10902    /// Parse a literal string
10903    pub fn parse_literal_string(&mut self) -> Result<String, ParserError> {
10904        let next_token = self.next_token();
10905        match next_token.token {
10906            Token::Word(Word {
10907                value,
10908                keyword: Keyword::NoKeyword,
10909                ..
10910            }) => Ok(value),
10911            Token::SingleQuotedString(s) => Ok(s),
10912            Token::DoubleQuotedString(s) => Ok(s),
10913            Token::EscapedStringLiteral(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
10914                Ok(s)
10915            }
10916            Token::UnicodeStringLiteral(s) => Ok(s),
10917            _ => self.expected("literal string", next_token),
10918        }
10919    }
10920
10921    /// Parse a boolean string
10922    pub(crate) fn parse_boolean_string(&mut self) -> Result<bool, ParserError> {
10923        match self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]) {
10924            Some(Keyword::TRUE) => Ok(true),
10925            Some(Keyword::FALSE) => Ok(false),
10926            _ => self.expected("TRUE or FALSE", self.peek_token()),
10927        }
10928    }
10929
10930    /// Parse a literal unicode normalization clause
10931    pub fn parse_unicode_is_normalized(&mut self, expr: Expr) -> Result<Expr, ParserError> {
10932        let neg = self.parse_keyword(Keyword::NOT);
10933        let normalized_form = self.maybe_parse(|parser| {
10934            match parser.parse_one_of_keywords(&[
10935                Keyword::NFC,
10936                Keyword::NFD,
10937                Keyword::NFKC,
10938                Keyword::NFKD,
10939            ]) {
10940                Some(Keyword::NFC) => Ok(NormalizationForm::NFC),
10941                Some(Keyword::NFD) => Ok(NormalizationForm::NFD),
10942                Some(Keyword::NFKC) => Ok(NormalizationForm::NFKC),
10943                Some(Keyword::NFKD) => Ok(NormalizationForm::NFKD),
10944                _ => parser.expected("unicode normalization form", parser.peek_token()),
10945            }
10946        })?;
10947        if self.parse_keyword(Keyword::NORMALIZED) {
10948            return Ok(Expr::IsNormalized {
10949                expr: Box::new(expr),
10950                form: normalized_form,
10951                negated: neg,
10952            });
10953        }
10954        self.expected("unicode normalization form", self.peek_token())
10955    }
10956
10957    pub fn parse_enum_values(&mut self) -> Result<Vec<EnumMember>, ParserError> {
10958        self.expect_token(&Token::LParen)?;
10959        let values = self.parse_comma_separated(|parser| {
10960            let name = parser.parse_literal_string()?;
10961            let e = if parser.consume_token(&Token::Eq) {
10962                let value = parser.parse_number()?;
10963                EnumMember::NamedValue(name, value)
10964            } else {
10965                EnumMember::Name(name)
10966            };
10967            Ok(e)
10968        })?;
10969        self.expect_token(&Token::RParen)?;
10970
10971        Ok(values)
10972    }
10973
10974    /// Parse a SQL datatype (in the context of a CREATE TABLE statement for example)
10975    pub fn parse_data_type(&mut self) -> Result<DataType, ParserError> {
10976        let (ty, trailing_bracket) = self.parse_data_type_helper()?;
10977        if trailing_bracket.0 {
10978            return parser_err!(
10979                format!("unmatched > after parsing data type {ty}"),
10980                self.peek_token()
10981            );
10982        }
10983
10984        Ok(ty)
10985    }
10986
10987    fn parse_data_type_helper(
10988        &mut self,
10989    ) -> Result<(DataType, MatchedTrailingBracket), ParserError> {
10990        let dialect = self.dialect;
10991        self.advance_token();
10992        let next_token = self.get_current_token();
10993        let next_token_index = self.get_current_index();
10994
10995        let mut trailing_bracket: MatchedTrailingBracket = false.into();
10996        let mut data = match &next_token.token {
10997            Token::Word(w) => match w.keyword {
10998                Keyword::BOOLEAN => Ok(DataType::Boolean),
10999                Keyword::BOOL => Ok(DataType::Bool),
11000                Keyword::FLOAT => {
11001                    let precision = self.parse_exact_number_optional_precision_scale()?;
11002
11003                    if self.parse_keyword(Keyword::UNSIGNED) {
11004                        Ok(DataType::FloatUnsigned(precision))
11005                    } else {
11006                        Ok(DataType::Float(precision))
11007                    }
11008                }
11009                Keyword::REAL => {
11010                    if self.parse_keyword(Keyword::UNSIGNED) {
11011                        Ok(DataType::RealUnsigned)
11012                    } else {
11013                        Ok(DataType::Real)
11014                    }
11015                }
11016                Keyword::FLOAT4 => Ok(DataType::Float4),
11017                Keyword::FLOAT32 => Ok(DataType::Float32),
11018                Keyword::FLOAT64 => Ok(DataType::Float64),
11019                Keyword::FLOAT8 => Ok(DataType::Float8),
11020                Keyword::DOUBLE => {
11021                    if self.parse_keyword(Keyword::PRECISION) {
11022                        if self.parse_keyword(Keyword::UNSIGNED) {
11023                            Ok(DataType::DoublePrecisionUnsigned)
11024                        } else {
11025                            Ok(DataType::DoublePrecision)
11026                        }
11027                    } else {
11028                        let precision = self.parse_exact_number_optional_precision_scale()?;
11029
11030                        if self.parse_keyword(Keyword::UNSIGNED) {
11031                            Ok(DataType::DoubleUnsigned(precision))
11032                        } else {
11033                            Ok(DataType::Double(precision))
11034                        }
11035                    }
11036                }
11037                Keyword::TINYINT => {
11038                    let optional_precision = self.parse_optional_precision();
11039                    if self.parse_keyword(Keyword::UNSIGNED) {
11040                        Ok(DataType::TinyIntUnsigned(optional_precision?))
11041                    } else {
11042                        if dialect.supports_data_type_signed_suffix() {
11043                            let _ = self.parse_keyword(Keyword::SIGNED);
11044                        }
11045                        Ok(DataType::TinyInt(optional_precision?))
11046                    }
11047                }
11048                Keyword::INT2 => {
11049                    let optional_precision = self.parse_optional_precision();
11050                    if self.parse_keyword(Keyword::UNSIGNED) {
11051                        Ok(DataType::Int2Unsigned(optional_precision?))
11052                    } else {
11053                        Ok(DataType::Int2(optional_precision?))
11054                    }
11055                }
11056                Keyword::SMALLINT => {
11057                    let optional_precision = self.parse_optional_precision();
11058                    if self.parse_keyword(Keyword::UNSIGNED) {
11059                        Ok(DataType::SmallIntUnsigned(optional_precision?))
11060                    } else {
11061                        if dialect.supports_data_type_signed_suffix() {
11062                            let _ = self.parse_keyword(Keyword::SIGNED);
11063                        }
11064                        Ok(DataType::SmallInt(optional_precision?))
11065                    }
11066                }
11067                Keyword::MEDIUMINT => {
11068                    let optional_precision = self.parse_optional_precision();
11069                    if self.parse_keyword(Keyword::UNSIGNED) {
11070                        Ok(DataType::MediumIntUnsigned(optional_precision?))
11071                    } else {
11072                        if dialect.supports_data_type_signed_suffix() {
11073                            let _ = self.parse_keyword(Keyword::SIGNED);
11074                        }
11075                        Ok(DataType::MediumInt(optional_precision?))
11076                    }
11077                }
11078                Keyword::INT => {
11079                    let optional_precision = self.parse_optional_precision();
11080                    if self.parse_keyword(Keyword::UNSIGNED) {
11081                        Ok(DataType::IntUnsigned(optional_precision?))
11082                    } else {
11083                        if dialect.supports_data_type_signed_suffix() {
11084                            let _ = self.parse_keyword(Keyword::SIGNED);
11085                        }
11086                        Ok(DataType::Int(optional_precision?))
11087                    }
11088                }
11089                Keyword::INT4 => {
11090                    let optional_precision = self.parse_optional_precision();
11091                    if self.parse_keyword(Keyword::UNSIGNED) {
11092                        Ok(DataType::Int4Unsigned(optional_precision?))
11093                    } else {
11094                        Ok(DataType::Int4(optional_precision?))
11095                    }
11096                }
11097                Keyword::INT8 => {
11098                    let optional_precision = self.parse_optional_precision();
11099                    if self.parse_keyword(Keyword::UNSIGNED) {
11100                        Ok(DataType::Int8Unsigned(optional_precision?))
11101                    } else {
11102                        Ok(DataType::Int8(optional_precision?))
11103                    }
11104                }
11105                Keyword::INT16 => Ok(DataType::Int16),
11106                Keyword::INT32 => Ok(DataType::Int32),
11107                Keyword::INT64 => Ok(DataType::Int64),
11108                Keyword::INT128 => Ok(DataType::Int128),
11109                Keyword::INT256 => Ok(DataType::Int256),
11110                Keyword::INTEGER => {
11111                    let optional_precision = self.parse_optional_precision();
11112                    if self.parse_keyword(Keyword::UNSIGNED) {
11113                        Ok(DataType::IntegerUnsigned(optional_precision?))
11114                    } else {
11115                        if dialect.supports_data_type_signed_suffix() {
11116                            let _ = self.parse_keyword(Keyword::SIGNED);
11117                        }
11118                        Ok(DataType::Integer(optional_precision?))
11119                    }
11120                }
11121                Keyword::BIGINT => {
11122                    let optional_precision = self.parse_optional_precision();
11123                    if self.parse_keyword(Keyword::UNSIGNED) {
11124                        Ok(DataType::BigIntUnsigned(optional_precision?))
11125                    } else {
11126                        if dialect.supports_data_type_signed_suffix() {
11127                            let _ = self.parse_keyword(Keyword::SIGNED);
11128                        }
11129                        Ok(DataType::BigInt(optional_precision?))
11130                    }
11131                }
11132                Keyword::HUGEINT => Ok(DataType::HugeInt),
11133                Keyword::UBIGINT => Ok(DataType::UBigInt),
11134                Keyword::UHUGEINT => Ok(DataType::UHugeInt),
11135                Keyword::USMALLINT => Ok(DataType::USmallInt),
11136                Keyword::UTINYINT => Ok(DataType::UTinyInt),
11137                Keyword::UINT8 => Ok(DataType::UInt8),
11138                Keyword::UINT16 => Ok(DataType::UInt16),
11139                Keyword::UINT32 => Ok(DataType::UInt32),
11140                Keyword::UINT64 => Ok(DataType::UInt64),
11141                Keyword::UINT128 => Ok(DataType::UInt128),
11142                Keyword::UINT256 => Ok(DataType::UInt256),
11143                Keyword::VARCHAR => Ok(DataType::Varchar(self.parse_optional_character_length()?)),
11144                Keyword::NVARCHAR => {
11145                    Ok(DataType::Nvarchar(self.parse_optional_character_length()?))
11146                }
11147                Keyword::CHARACTER => {
11148                    if self.parse_keyword(Keyword::VARYING) {
11149                        Ok(DataType::CharacterVarying(
11150                            self.parse_optional_character_length()?,
11151                        ))
11152                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
11153                        Ok(DataType::CharacterLargeObject(
11154                            self.parse_optional_precision()?,
11155                        ))
11156                    } else {
11157                        Ok(DataType::Character(self.parse_optional_character_length()?))
11158                    }
11159                }
11160                Keyword::CHAR => {
11161                    if self.parse_keyword(Keyword::VARYING) {
11162                        Ok(DataType::CharVarying(
11163                            self.parse_optional_character_length()?,
11164                        ))
11165                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
11166                        Ok(DataType::CharLargeObject(self.parse_optional_precision()?))
11167                    } else {
11168                        Ok(DataType::Char(self.parse_optional_character_length()?))
11169                    }
11170                }
11171                Keyword::CLOB => Ok(DataType::Clob(self.parse_optional_precision()?)),
11172                Keyword::BINARY => Ok(DataType::Binary(self.parse_optional_precision()?)),
11173                Keyword::VARBINARY => Ok(DataType::Varbinary(self.parse_optional_binary_length()?)),
11174                Keyword::BLOB => Ok(DataType::Blob(self.parse_optional_precision()?)),
11175                Keyword::TINYBLOB => Ok(DataType::TinyBlob),
11176                Keyword::MEDIUMBLOB => Ok(DataType::MediumBlob),
11177                Keyword::LONGBLOB => Ok(DataType::LongBlob),
11178                Keyword::BYTES => Ok(DataType::Bytes(self.parse_optional_precision()?)),
11179                Keyword::BIT => {
11180                    if self.parse_keyword(Keyword::VARYING) {
11181                        Ok(DataType::BitVarying(self.parse_optional_precision()?))
11182                    } else {
11183                        Ok(DataType::Bit(self.parse_optional_precision()?))
11184                    }
11185                }
11186                Keyword::VARBIT => Ok(DataType::VarBit(self.parse_optional_precision()?)),
11187                Keyword::UUID => Ok(DataType::Uuid),
11188                Keyword::DATE => Ok(DataType::Date),
11189                Keyword::DATE32 => Ok(DataType::Date32),
11190                Keyword::DATETIME => Ok(DataType::Datetime(self.parse_optional_precision()?)),
11191                Keyword::DATETIME64 => {
11192                    self.prev_token();
11193                    let (precision, time_zone) = self.parse_datetime_64()?;
11194                    Ok(DataType::Datetime64(precision, time_zone))
11195                }
11196                Keyword::TIMESTAMP => {
11197                    let precision = self.parse_optional_precision()?;
11198                    let tz = if self.parse_keyword(Keyword::WITH) {
11199                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
11200                        TimezoneInfo::WithTimeZone
11201                    } else if self.parse_keyword(Keyword::WITHOUT) {
11202                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
11203                        TimezoneInfo::WithoutTimeZone
11204                    } else {
11205                        TimezoneInfo::None
11206                    };
11207                    Ok(DataType::Timestamp(precision, tz))
11208                }
11209                Keyword::TIMESTAMPTZ => Ok(DataType::Timestamp(
11210                    self.parse_optional_precision()?,
11211                    TimezoneInfo::Tz,
11212                )),
11213                Keyword::TIMESTAMP_NTZ => {
11214                    Ok(DataType::TimestampNtz(self.parse_optional_precision()?))
11215                }
11216                Keyword::TIME => {
11217                    let precision = self.parse_optional_precision()?;
11218                    let tz = if self.parse_keyword(Keyword::WITH) {
11219                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
11220                        TimezoneInfo::WithTimeZone
11221                    } else if self.parse_keyword(Keyword::WITHOUT) {
11222                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
11223                        TimezoneInfo::WithoutTimeZone
11224                    } else {
11225                        TimezoneInfo::None
11226                    };
11227                    Ok(DataType::Time(precision, tz))
11228                }
11229                Keyword::TIMETZ => Ok(DataType::Time(
11230                    self.parse_optional_precision()?,
11231                    TimezoneInfo::Tz,
11232                )),
11233                Keyword::INTERVAL => {
11234                    if self.dialect.supports_interval_options() {
11235                        let fields = self.maybe_parse_optional_interval_fields()?;
11236                        let precision = self.parse_optional_precision()?;
11237                        Ok(DataType::Interval { fields, precision })
11238                    } else {
11239                        Ok(DataType::Interval {
11240                            fields: None,
11241                            precision: None,
11242                        })
11243                    }
11244                }
11245                Keyword::JSON => Ok(DataType::JSON),
11246                Keyword::JSONB => Ok(DataType::JSONB),
11247                Keyword::REGCLASS => Ok(DataType::Regclass),
11248                Keyword::STRING => Ok(DataType::String(self.parse_optional_precision()?)),
11249                Keyword::FIXEDSTRING => {
11250                    self.expect_token(&Token::LParen)?;
11251                    let character_length = self.parse_literal_uint()?;
11252                    self.expect_token(&Token::RParen)?;
11253                    Ok(DataType::FixedString(character_length))
11254                }
11255                Keyword::TEXT => Ok(DataType::Text),
11256                Keyword::TINYTEXT => Ok(DataType::TinyText),
11257                Keyword::MEDIUMTEXT => Ok(DataType::MediumText),
11258                Keyword::LONGTEXT => Ok(DataType::LongText),
11259                Keyword::BYTEA => Ok(DataType::Bytea),
11260                Keyword::NUMERIC => Ok(DataType::Numeric(
11261                    self.parse_exact_number_optional_precision_scale()?,
11262                )),
11263                Keyword::DECIMAL => {
11264                    let precision = self.parse_exact_number_optional_precision_scale()?;
11265
11266                    if self.parse_keyword(Keyword::UNSIGNED) {
11267                        Ok(DataType::DecimalUnsigned(precision))
11268                    } else {
11269                        Ok(DataType::Decimal(precision))
11270                    }
11271                }
11272                Keyword::DEC => {
11273                    let precision = self.parse_exact_number_optional_precision_scale()?;
11274
11275                    if self.parse_keyword(Keyword::UNSIGNED) {
11276                        Ok(DataType::DecUnsigned(precision))
11277                    } else {
11278                        Ok(DataType::Dec(precision))
11279                    }
11280                }
11281                Keyword::BIGNUMERIC => Ok(DataType::BigNumeric(
11282                    self.parse_exact_number_optional_precision_scale()?,
11283                )),
11284                Keyword::BIGDECIMAL => Ok(DataType::BigDecimal(
11285                    self.parse_exact_number_optional_precision_scale()?,
11286                )),
11287                Keyword::ENUM => Ok(DataType::Enum(self.parse_enum_values()?, None)),
11288                Keyword::ENUM8 => Ok(DataType::Enum(self.parse_enum_values()?, Some(8))),
11289                Keyword::ENUM16 => Ok(DataType::Enum(self.parse_enum_values()?, Some(16))),
11290                Keyword::SET => Ok(DataType::Set(self.parse_string_values()?)),
11291                Keyword::ARRAY => {
11292                    if dialect_of!(self is SnowflakeDialect) {
11293                        Ok(DataType::Array(ArrayElemTypeDef::None))
11294                    } else if dialect_of!(self is ClickHouseDialect) {
11295                        Ok(self.parse_sub_type(|internal_type| {
11296                            DataType::Array(ArrayElemTypeDef::Parenthesis(internal_type))
11297                        })?)
11298                    } else {
11299                        self.expect_token(&Token::Lt)?;
11300                        let (inside_type, _trailing_bracket) = self.parse_data_type_helper()?;
11301                        trailing_bracket = self.expect_closing_angle_bracket(_trailing_bracket)?;
11302                        Ok(DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
11303                            inside_type,
11304                        ))))
11305                    }
11306                }
11307                Keyword::STRUCT if dialect_is!(dialect is DuckDbDialect) => {
11308                    self.prev_token();
11309                    let field_defs = self.parse_duckdb_struct_type_def()?;
11310                    Ok(DataType::Struct(field_defs, StructBracketKind::Parentheses))
11311                }
11312                Keyword::STRUCT if dialect_is!(dialect is BigQueryDialect | GenericDialect) => {
11313                    self.prev_token();
11314                    let (field_defs, _trailing_bracket) =
11315                        self.parse_struct_type_def(Self::parse_struct_field_def)?;
11316                    trailing_bracket = _trailing_bracket;
11317                    Ok(DataType::Struct(
11318                        field_defs,
11319                        StructBracketKind::AngleBrackets,
11320                    ))
11321                }
11322                Keyword::UNION if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
11323                    self.prev_token();
11324                    let fields = self.parse_union_type_def()?;
11325                    Ok(DataType::Union(fields))
11326                }
11327                Keyword::NULLABLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11328                    Ok(self.parse_sub_type(DataType::Nullable)?)
11329                }
11330                Keyword::LOWCARDINALITY if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11331                    Ok(self.parse_sub_type(DataType::LowCardinality)?)
11332                }
11333                Keyword::MAP if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11334                    self.prev_token();
11335                    let (key_data_type, value_data_type) = self.parse_click_house_map_def()?;
11336                    Ok(DataType::Map(
11337                        Box::new(key_data_type),
11338                        Box::new(value_data_type),
11339                    ))
11340                }
11341                Keyword::NESTED if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11342                    self.expect_token(&Token::LParen)?;
11343                    let field_defs = self.parse_comma_separated(Parser::parse_column_def)?;
11344                    self.expect_token(&Token::RParen)?;
11345                    Ok(DataType::Nested(field_defs))
11346                }
11347                Keyword::TUPLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11348                    self.prev_token();
11349                    let field_defs = self.parse_click_house_tuple_def()?;
11350                    Ok(DataType::Tuple(field_defs))
11351                }
11352                Keyword::TRIGGER => Ok(DataType::Trigger),
11353                Keyword::ANY if self.peek_keyword(Keyword::TYPE) => {
11354                    let _ = self.parse_keyword(Keyword::TYPE);
11355                    Ok(DataType::AnyType)
11356                }
11357                Keyword::TABLE => {
11358                    // an LParen after the TABLE keyword indicates that table columns are being defined
11359                    // whereas no LParen indicates an anonymous table expression will be returned
11360                    if self.peek_token() == Token::LParen {
11361                        let columns = self.parse_returns_table_columns()?;
11362                        Ok(DataType::Table(Some(columns)))
11363                    } else {
11364                        Ok(DataType::Table(None))
11365                    }
11366                }
11367                Keyword::SIGNED => {
11368                    if self.parse_keyword(Keyword::INTEGER) {
11369                        Ok(DataType::SignedInteger)
11370                    } else {
11371                        Ok(DataType::Signed)
11372                    }
11373                }
11374                Keyword::UNSIGNED => {
11375                    if self.parse_keyword(Keyword::INTEGER) {
11376                        Ok(DataType::UnsignedInteger)
11377                    } else {
11378                        Ok(DataType::Unsigned)
11379                    }
11380                }
11381                Keyword::TSVECTOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
11382                    Ok(DataType::TsVector)
11383                }
11384                Keyword::TSQUERY if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
11385                    Ok(DataType::TsQuery)
11386                }
11387                _ => {
11388                    self.prev_token();
11389                    let type_name = self.parse_object_name(false)?;
11390                    if let Some(modifiers) = self.parse_optional_type_modifiers()? {
11391                        Ok(DataType::Custom(type_name, modifiers))
11392                    } else {
11393                        Ok(DataType::Custom(type_name, vec![]))
11394                    }
11395                }
11396            },
11397            _ => self.expected_at("a data type name", next_token_index),
11398        }?;
11399
11400        if self.dialect.supports_array_typedef_with_brackets() {
11401            while self.consume_token(&Token::LBracket) {
11402                // Parse optional array data type size
11403                let size = self.maybe_parse(|p| p.parse_literal_uint())?;
11404                self.expect_token(&Token::RBracket)?;
11405                data = DataType::Array(ArrayElemTypeDef::SquareBracket(Box::new(data), size))
11406            }
11407        }
11408        Ok((data, trailing_bracket))
11409    }
11410
11411    fn parse_returns_table_column(&mut self) -> Result<ColumnDef, ParserError> {
11412        self.parse_column_def()
11413    }
11414
11415    fn parse_returns_table_columns(&mut self) -> Result<Vec<ColumnDef>, ParserError> {
11416        self.expect_token(&Token::LParen)?;
11417        let columns = self.parse_comma_separated(Parser::parse_returns_table_column)?;
11418        self.expect_token(&Token::RParen)?;
11419        Ok(columns)
11420    }
11421
11422    pub fn parse_string_values(&mut self) -> Result<Vec<String>, ParserError> {
11423        self.expect_token(&Token::LParen)?;
11424        let mut values = Vec::new();
11425        loop {
11426            let next_token = self.next_token();
11427            match next_token.token {
11428                Token::SingleQuotedString(value) => values.push(value),
11429                _ => self.expected("a string", next_token)?,
11430            }
11431            let next_token = self.next_token();
11432            match next_token.token {
11433                Token::Comma => (),
11434                Token::RParen => break,
11435                _ => self.expected(", or }", next_token)?,
11436            }
11437        }
11438        Ok(values)
11439    }
11440
11441    /// Strictly parse `identifier AS identifier`
11442    pub fn parse_identifier_with_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
11443        let ident = self.parse_identifier()?;
11444        self.expect_keyword_is(Keyword::AS)?;
11445        let alias = self.parse_identifier()?;
11446        Ok(IdentWithAlias { ident, alias })
11447    }
11448
11449    /// Parse `identifier [AS] identifier` where the AS keyword is optional
11450    fn parse_identifier_with_optional_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
11451        let ident = self.parse_identifier()?;
11452        let _after_as = self.parse_keyword(Keyword::AS);
11453        let alias = self.parse_identifier()?;
11454        Ok(IdentWithAlias { ident, alias })
11455    }
11456
11457    /// Parse comma-separated list of parenthesized queries for pipe operators
11458    fn parse_pipe_operator_queries(&mut self) -> Result<Vec<Query>, ParserError> {
11459        self.parse_comma_separated(|parser| {
11460            parser.expect_token(&Token::LParen)?;
11461            let query = parser.parse_query()?;
11462            parser.expect_token(&Token::RParen)?;
11463            Ok(*query)
11464        })
11465    }
11466
11467    /// Parse set quantifier for pipe operators that require DISTINCT. E.g. INTERSECT and EXCEPT
11468    fn parse_distinct_required_set_quantifier(
11469        &mut self,
11470        operator_name: &str,
11471    ) -> Result<SetQuantifier, ParserError> {
11472        let quantifier = self.parse_set_quantifier(&Some(SetOperator::Intersect));
11473        match quantifier {
11474            SetQuantifier::Distinct | SetQuantifier::DistinctByName => Ok(quantifier),
11475            _ => Err(ParserError::ParserError(format!(
11476                "{operator_name} pipe operator requires DISTINCT modifier",
11477            ))),
11478        }
11479    }
11480
11481    /// Parse optional identifier alias (with or without AS keyword)
11482    fn parse_identifier_optional_alias(&mut self) -> Result<Option<Ident>, ParserError> {
11483        if self.parse_keyword(Keyword::AS) {
11484            Ok(Some(self.parse_identifier()?))
11485        } else {
11486            // Check if the next token is an identifier (implicit alias)
11487            self.maybe_parse(|parser| parser.parse_identifier())
11488        }
11489    }
11490
11491    /// Optionally parses an alias for a select list item
11492    fn maybe_parse_select_item_alias(&mut self) -> Result<Option<Ident>, ParserError> {
11493        fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
11494            parser.dialect.is_select_item_alias(explicit, kw, parser)
11495        }
11496        self.parse_optional_alias_inner(None, validator)
11497    }
11498
11499    /// Optionally parses an alias for a table like in `... FROM generate_series(1, 10) AS t (col)`.
11500    /// In this case, the alias is allowed to optionally name the columns in the table, in
11501    /// addition to the table itself.
11502    pub fn maybe_parse_table_alias(&mut self) -> Result<Option<TableAlias>, ParserError> {
11503        fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
11504            parser.dialect.is_table_factor_alias(explicit, kw, parser)
11505        }
11506        let explicit = self.peek_keyword(Keyword::AS);
11507        match self.parse_optional_alias_inner(None, validator)? {
11508            Some(name) => {
11509                let columns = self.parse_table_alias_column_defs()?;
11510                Ok(Some(TableAlias {
11511                    explicit,
11512                    name,
11513                    columns,
11514                }))
11515            }
11516            None => Ok(None),
11517        }
11518    }
11519
11520    fn parse_table_index_hints(&mut self) -> Result<Vec<TableIndexHints>, ParserError> {
11521        let mut hints = vec![];
11522        while let Some(hint_type) =
11523            self.parse_one_of_keywords(&[Keyword::USE, Keyword::IGNORE, Keyword::FORCE])
11524        {
11525            let hint_type = match hint_type {
11526                Keyword::USE => TableIndexHintType::Use,
11527                Keyword::IGNORE => TableIndexHintType::Ignore,
11528                Keyword::FORCE => TableIndexHintType::Force,
11529                _ => {
11530                    return self.expected(
11531                        "expected to match USE/IGNORE/FORCE keyword",
11532                        self.peek_token(),
11533                    )
11534                }
11535            };
11536            let index_type = match self.parse_one_of_keywords(&[Keyword::INDEX, Keyword::KEY]) {
11537                Some(Keyword::INDEX) => TableIndexType::Index,
11538                Some(Keyword::KEY) => TableIndexType::Key,
11539                _ => {
11540                    return self.expected("expected to match INDEX/KEY keyword", self.peek_token())
11541                }
11542            };
11543            let for_clause = if self.parse_keyword(Keyword::FOR) {
11544                let clause = if self.parse_keyword(Keyword::JOIN) {
11545                    TableIndexHintForClause::Join
11546                } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
11547                    TableIndexHintForClause::OrderBy
11548                } else if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
11549                    TableIndexHintForClause::GroupBy
11550                } else {
11551                    return self.expected(
11552                        "expected to match FOR/ORDER BY/GROUP BY table hint in for clause",
11553                        self.peek_token(),
11554                    );
11555                };
11556                Some(clause)
11557            } else {
11558                None
11559            };
11560
11561            self.expect_token(&Token::LParen)?;
11562            let index_names = if self.peek_token().token != Token::RParen {
11563                self.parse_comma_separated(Parser::parse_identifier)?
11564            } else {
11565                vec![]
11566            };
11567            self.expect_token(&Token::RParen)?;
11568            hints.push(TableIndexHints {
11569                hint_type,
11570                index_type,
11571                for_clause,
11572                index_names,
11573            });
11574        }
11575        Ok(hints)
11576    }
11577
11578    /// Wrapper for parse_optional_alias_inner, left for backwards-compatibility
11579    /// but new flows should use the context-specific methods such as `maybe_parse_select_item_alias`
11580    /// and `maybe_parse_table_alias`.
11581    pub fn parse_optional_alias(
11582        &mut self,
11583        reserved_kwds: &[Keyword],
11584    ) -> Result<Option<Ident>, ParserError> {
11585        fn validator(_explicit: bool, _kw: &Keyword, _parser: &mut Parser) -> bool {
11586            false
11587        }
11588        self.parse_optional_alias_inner(Some(reserved_kwds), validator)
11589    }
11590
11591    /// Parses an optional alias after a SQL element such as a select list item
11592    /// or a table name.
11593    ///
11594    /// This method accepts an optional list of reserved keywords or a function
11595    /// to call to validate if a keyword should be parsed as an alias, to allow
11596    /// callers to customize the parsing logic based on their context.
11597    fn parse_optional_alias_inner<F>(
11598        &mut self,
11599        reserved_kwds: Option<&[Keyword]>,
11600        validator: F,
11601    ) -> Result<Option<Ident>, ParserError>
11602    where
11603        F: Fn(bool, &Keyword, &mut Parser) -> bool,
11604    {
11605        let after_as = self.parse_keyword(Keyword::AS);
11606
11607        let next_token = self.next_token();
11608        match next_token.token {
11609            // By default, if a word is located after the `AS` keyword we consider it an alias
11610            // as long as it's not reserved.
11611            Token::Word(w)
11612                if after_as || reserved_kwds.is_some_and(|x| !x.contains(&w.keyword)) =>
11613            {
11614                Ok(Some(w.into_ident(next_token.span)))
11615            }
11616            // This pattern allows for customizing the acceptance of words as aliases based on the caller's
11617            // context, such as to what SQL element this word is a potential alias of (select item alias, table name
11618            // alias, etc.) or dialect-specific logic that goes beyond a simple list of reserved keywords.
11619            Token::Word(w) if validator(after_as, &w.keyword, self) => {
11620                Ok(Some(w.into_ident(next_token.span)))
11621            }
11622            // For backwards-compatibility, we accept quoted strings as aliases regardless of the context.
11623            Token::SingleQuotedString(s) => Ok(Some(Ident::with_quote('\'', s))),
11624            Token::DoubleQuotedString(s) => Ok(Some(Ident::with_quote('\"', s))),
11625            _ => {
11626                if after_as {
11627                    return self.expected("an identifier after AS", next_token);
11628                }
11629                self.prev_token();
11630                Ok(None) // no alias found
11631            }
11632        }
11633    }
11634
11635    pub fn parse_optional_group_by(&mut self) -> Result<Option<GroupByExpr>, ParserError> {
11636        if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
11637            let expressions = if self.parse_keyword(Keyword::ALL) {
11638                None
11639            } else {
11640                Some(self.parse_comma_separated(Parser::parse_group_by_expr)?)
11641            };
11642
11643            let mut modifiers = vec![];
11644            if self.dialect.supports_group_by_with_modifier() {
11645                loop {
11646                    if !self.parse_keyword(Keyword::WITH) {
11647                        break;
11648                    }
11649                    let keyword = self.expect_one_of_keywords(&[
11650                        Keyword::ROLLUP,
11651                        Keyword::CUBE,
11652                        Keyword::TOTALS,
11653                    ])?;
11654                    modifiers.push(match keyword {
11655                        Keyword::ROLLUP => GroupByWithModifier::Rollup,
11656                        Keyword::CUBE => GroupByWithModifier::Cube,
11657                        Keyword::TOTALS => GroupByWithModifier::Totals,
11658                        _ => {
11659                            return parser_err!(
11660                                "BUG: expected to match GroupBy modifier keyword",
11661                                self.peek_token().span.start
11662                            )
11663                        }
11664                    });
11665                }
11666            }
11667            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
11668                self.expect_token(&Token::LParen)?;
11669                let result = self.parse_comma_separated(|p| {
11670                    if p.peek_token_ref().token == Token::LParen {
11671                        p.parse_tuple(true, true)
11672                    } else {
11673                        Ok(vec![p.parse_expr()?])
11674                    }
11675                })?;
11676                self.expect_token(&Token::RParen)?;
11677                modifiers.push(GroupByWithModifier::GroupingSets(Expr::GroupingSets(
11678                    result,
11679                )));
11680            };
11681            let group_by = match expressions {
11682                None => GroupByExpr::All(modifiers),
11683                Some(exprs) => GroupByExpr::Expressions(exprs, modifiers),
11684            };
11685            Ok(Some(group_by))
11686        } else {
11687            Ok(None)
11688        }
11689    }
11690
11691    pub fn parse_optional_order_by(&mut self) -> Result<Option<OrderBy>, ParserError> {
11692        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
11693            let order_by =
11694                if self.dialect.supports_order_by_all() && self.parse_keyword(Keyword::ALL) {
11695                    let order_by_options = self.parse_order_by_options()?;
11696                    OrderBy {
11697                        kind: OrderByKind::All(order_by_options),
11698                        interpolate: None,
11699                    }
11700                } else {
11701                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
11702                    let interpolate = if dialect_of!(self is ClickHouseDialect | GenericDialect) {
11703                        self.parse_interpolations()?
11704                    } else {
11705                        None
11706                    };
11707                    OrderBy {
11708                        kind: OrderByKind::Expressions(exprs),
11709                        interpolate,
11710                    }
11711                };
11712            Ok(Some(order_by))
11713        } else {
11714            Ok(None)
11715        }
11716    }
11717
11718    fn parse_optional_limit_clause(&mut self) -> Result<Option<LimitClause>, ParserError> {
11719        let mut offset = if self.parse_keyword(Keyword::OFFSET) {
11720            Some(self.parse_offset()?)
11721        } else {
11722            None
11723        };
11724
11725        let (limit, limit_by) = if self.parse_keyword(Keyword::LIMIT) {
11726            let expr = self.parse_limit()?;
11727
11728            if self.dialect.supports_limit_comma()
11729                && offset.is_none()
11730                && expr.is_some() // ALL not supported with comma
11731                && self.consume_token(&Token::Comma)
11732            {
11733                let offset = expr.ok_or_else(|| {
11734                    ParserError::ParserError(
11735                        "Missing offset for LIMIT <offset>, <limit>".to_string(),
11736                    )
11737                })?;
11738                return Ok(Some(LimitClause::OffsetCommaLimit {
11739                    offset,
11740                    limit: self.parse_expr()?,
11741                }));
11742            }
11743
11744            let limit_by = if dialect_of!(self is ClickHouseDialect | GenericDialect)
11745                && self.parse_keyword(Keyword::BY)
11746            {
11747                Some(self.parse_comma_separated(Parser::parse_expr)?)
11748            } else {
11749                None
11750            };
11751
11752            (Some(expr), limit_by)
11753        } else {
11754            (None, None)
11755        };
11756
11757        if offset.is_none() && limit.is_some() && self.parse_keyword(Keyword::OFFSET) {
11758            offset = Some(self.parse_offset()?);
11759        }
11760
11761        if offset.is_some() || (limit.is_some() && limit != Some(None)) || limit_by.is_some() {
11762            Ok(Some(LimitClause::LimitOffset {
11763                limit: limit.unwrap_or_default(),
11764                offset,
11765                limit_by: limit_by.unwrap_or_default(),
11766            }))
11767        } else {
11768            Ok(None)
11769        }
11770    }
11771
11772    /// Parse a table object for insertion
11773    /// e.g. `some_database.some_table` or `FUNCTION some_table_func(...)`
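    ///
    /// For example, parsing a plain table name (illustrative, using the `GenericDialect`):
    ///
    /// ```rust
    /// use sqlparser::ast::TableObject;
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect)
    ///     .try_with_sql("some_database.some_table")
    ///     .unwrap();
    /// let table = parser.parse_table_object().unwrap();
    /// // A plain (possibly qualified) name parses as `TableObject::TableName`
    /// assert!(matches!(table, TableObject::TableName(_)));
    /// ```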
11774    pub fn parse_table_object(&mut self) -> Result<TableObject, ParserError> {
11775        if self.dialect.supports_insert_table_function() && self.parse_keyword(Keyword::FUNCTION) {
11776            let fn_name = self.parse_object_name(false)?;
11777            self.parse_function_call(fn_name)
11778                .map(TableObject::TableFunction)
11779        } else {
11780            self.parse_object_name(false).map(TableObject::TableName)
11781        }
11782    }
11783
11784    /// Parse a possibly qualified, possibly quoted identifier, e.g.
11785    /// `foo` or `myschema."table"`
11786    ///
11787    /// The `in_table_clause` parameter indicates whether the object name is a table in a FROM, JOIN,
11788    /// or similar table clause. Currently, this is used only to support unquoted hyphenated identifiers
11789    /// in this context on BigQuery.
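    ///
    /// For example (illustrative, using the `GenericDialect`):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let sql = r#"myschema."table""#;
    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
    /// let name = parser.parse_object_name(false).unwrap();
    /// // Quote styles are preserved when the name is displayed back as SQL
    /// assert_eq!(name.to_string(), sql);
    /// ```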
11790    pub fn parse_object_name(&mut self, in_table_clause: bool) -> Result<ObjectName, ParserError> {
11791        self.parse_object_name_inner(in_table_clause, false)
11792    }
11793
11794    /// Parse a possibly qualified, possibly quoted identifier, e.g.
11795    /// `foo` or `myschema."table"`
11796    ///
11797    /// The `in_table_clause` parameter indicates whether the object name is a table in a FROM, JOIN,
11798    /// or similar table clause. Currently, this is used only to support unquoted hyphenated identifiers
11799    /// in this context on BigQuery.
11800    ///
11801    /// The `allow_wildcards` parameter indicates whether to allow for wildcards in the object name
11802    /// e.g. *, *.*, `foo`.*, or "foo"."bar"
11803    fn parse_object_name_inner(
11804        &mut self,
11805        in_table_clause: bool,
11806        allow_wildcards: bool,
11807    ) -> Result<ObjectName, ParserError> {
11808        let mut parts = vec![];
11809        if dialect_of!(self is BigQueryDialect) && in_table_clause {
11810            loop {
11811                let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
11812                parts.push(ObjectNamePart::Identifier(ident));
11813                if !self.consume_token(&Token::Period) && !end_with_period {
11814                    break;
11815                }
11816            }
11817        } else {
11818            loop {
11819                if allow_wildcards && self.peek_token().token == Token::Mul {
11820                    let span = self.next_token().span;
11821                    parts.push(ObjectNamePart::Identifier(Ident {
11822                        value: Token::Mul.to_string(),
11823                        quote_style: None,
11824                        span,
11825                    }));
11826                } else if dialect_of!(self is BigQueryDialect) && in_table_clause {
11827                    let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
11828                    parts.push(ObjectNamePart::Identifier(ident));
11829                    if !self.consume_token(&Token::Period) && !end_with_period {
11830                        break;
11831                    }
11832                } else if self.dialect.supports_object_name_double_dot_notation()
11833                    && parts.len() == 1
11834                    && matches!(self.peek_token().token, Token::Period)
11835                {
11836                    // Empty string here means default schema
11837                    parts.push(ObjectNamePart::Identifier(Ident::new("")));
11838                } else {
11839                    let ident = self.parse_identifier()?;
11840                    let part = if self
11841                        .dialect
11842                        .is_identifier_generating_function_name(&ident, &parts)
11843                    {
11844                        self.expect_token(&Token::LParen)?;
11845                        let args: Vec<FunctionArg> =
11846                            self.parse_comma_separated0(Self::parse_function_args, Token::RParen)?;
11847                        self.expect_token(&Token::RParen)?;
11848                        ObjectNamePart::Function(ObjectNamePartFunction { name: ident, args })
11849                    } else {
11850                        ObjectNamePart::Identifier(ident)
11851                    };
11852                    parts.push(part);
11853                }
11854
11855                if !self.consume_token(&Token::Period) {
11856                    break;
11857                }
11858            }
11859        }
11860
11861        // BigQuery accepts any number of quoted identifiers in a table name.
11862        // https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#quoted_identifiers
11863        if dialect_of!(self is BigQueryDialect)
11864            && parts.iter().any(|part| {
11865                part.as_ident()
11866                    .is_some_and(|ident| ident.value.contains('.'))
11867            })
11868        {
11869            parts = parts
11870                .into_iter()
11871                .flat_map(|part| match part.as_ident() {
11872                    Some(ident) => ident
11873                        .value
11874                        .split('.')
11875                        .map(|value| {
11876                            ObjectNamePart::Identifier(Ident {
11877                                value: value.into(),
11878                                quote_style: ident.quote_style,
11879                                span: ident.span,
11880                            })
11881                        })
11882                        .collect::<Vec<_>>(),
11883                    None => vec![part],
11884                })
11885                .collect()
11886        }
11887
11888        Ok(ObjectName(parts))
11889    }
11890
11891    /// Parse a list of identifiers, collecting word tokens and skipping other tokens until EOF or `=` is reached
11892    pub fn parse_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
11893        let mut idents = vec![];
11894        loop {
11895            match &self.peek_token_ref().token {
11896                Token::Word(w) => {
11897                    idents.push(w.clone().into_ident(self.peek_token_ref().span));
11898                }
11899                Token::EOF | Token::Eq => break,
11900                _ => {}
11901            }
11902            self.advance_token();
11903        }
11904        Ok(idents)
11905    }
11906
11907    /// Parse identifiers of form ident1[.identN]*
11908    ///
11909    /// Similar in functionality to [parse_identifiers], with the difference
11910    /// that this function is much stricter: it only parses a valid multipart identifier
11911    /// and fails if any extraneous tokens are encountered.
11912    ///
11913    /// For example:
11914    ///
11915    /// ```rust
11916    /// use sqlparser::ast::Ident;
11917    /// use sqlparser::dialect::GenericDialect;
11918    /// use sqlparser::parser::Parser;
11919    ///
11920    /// let dialect = GenericDialect {};
11921    /// let expected = vec![Ident::new("one"), Ident::new("two")];
11922    ///
11923    /// // expected usage
11924    /// let sql = "one.two";
11925    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
11926    /// let actual = parser.parse_multipart_identifier().unwrap();
11927    /// assert_eq!(&actual, &expected);
11928    ///
11929    /// // parse_identifiers is more loose on what it allows, parsing successfully
11930    /// let sql = "one + two";
11931    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
11932    /// let actual = parser.parse_identifiers().unwrap();
11933    /// assert_eq!(&actual, &expected);
11934    ///
11935    /// // expected to strictly fail due to + separator
11936    /// let sql = "one + two";
11937    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
11938    /// let actual = parser.parse_multipart_identifier().unwrap_err();
11939    /// assert_eq!(
11940    ///     actual.to_string(),
11941    ///     "sql parser error: Unexpected token in identifier: +"
11942    /// );
11943    /// ```
11944    ///
11945    /// [parse_identifiers]: Parser::parse_identifiers
11946    pub fn parse_multipart_identifier(&mut self) -> Result<Vec<Ident>, ParserError> {
11947        let mut idents = vec![];
11948
11949        // expecting at least one word for identifier
11950        let next_token = self.next_token();
11951        match next_token.token {
11952            Token::Word(w) => idents.push(w.into_ident(next_token.span)),
11953            Token::EOF => {
11954                return Err(ParserError::ParserError(
11955                    "Empty input when parsing identifier".to_string(),
11956                ))?
11957            }
11958            token => {
11959                return Err(ParserError::ParserError(format!(
11960                    "Unexpected token in identifier: {token}"
11961                )))?
11962            }
11963        };
11964
11965        // parse the optional remaining parts of the identifier, if any
11966        loop {
11967            match self.next_token().token {
11968                // ensure that optional period is succeeded by another identifier
11969                Token::Period => {
11970                    let next_token = self.next_token();
11971                    match next_token.token {
11972                        Token::Word(w) => idents.push(w.into_ident(next_token.span)),
11973                        Token::EOF => {
11974                            return Err(ParserError::ParserError(
11975                                "Trailing period in identifier".to_string(),
11976                            ))?
11977                        }
11978                        token => {
11979                            return Err(ParserError::ParserError(format!(
11980                                "Unexpected token following period in identifier: {token}"
11981                            )))?
11982                        }
11983                    }
11984                }
11985                Token::EOF => break,
11986                token => {
11987                    return Err(ParserError::ParserError(format!(
11988                        "Unexpected token in identifier: {token}"
11989                    )))?;
11990                }
11991            }
11992        }
11993
11994        Ok(idents)
11995    }
11996
11997    /// Parse a simple one-word identifier (possibly quoted, possibly a keyword)
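    ///
    /// For example (illustrative, using the `GenericDialect`):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql(r#""quoted id""#).unwrap();
    /// let ident = parser.parse_identifier().unwrap();
    /// // The surrounding quotes are stripped and recorded in `quote_style`
    /// assert_eq!(ident.value, "quoted id");
    /// assert_eq!(ident.quote_style, Some('"'));
    /// ```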
11998    pub fn parse_identifier(&mut self) -> Result<Ident, ParserError> {
11999        let next_token = self.next_token();
12000        match next_token.token {
12001            Token::Word(w) => Ok(w.into_ident(next_token.span)),
12002            Token::SingleQuotedString(s) => Ok(Ident::with_quote('\'', s)),
12003            Token::DoubleQuotedString(s) => Ok(Ident::with_quote('\"', s)),
12004            _ => self.expected("identifier", next_token),
12005        }
12006    }
12007
12008    /// On BigQuery, hyphens are permitted in unquoted identifiers inside of a FROM or
12009    /// TABLE clause.
12010    ///
12011    /// The first segment must be an ordinary unquoted identifier, e.g. it must not start
12012    /// with a digit. Subsequent segments must be either valid identifiers or
12013    /// integers, e.g. foo-123 is allowed, but foo-123a is not.
12014    ///
12015    /// [BigQuery-lexical](https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical)
12016    ///
12017    /// Returns a tuple of the identifier and a boolean indicating whether it ends with a period.
12018    fn parse_unquoted_hyphenated_identifier(&mut self) -> Result<(Ident, bool), ParserError> {
12019        match self.peek_token().token {
12020            Token::Word(w) => {
12021                let quote_style_is_none = w.quote_style.is_none();
12022                let mut requires_whitespace = false;
12023                let mut ident = w.into_ident(self.next_token().span);
12024                if quote_style_is_none {
12025                    while matches!(self.peek_token_no_skip().token, Token::Minus) {
12026                        self.next_token();
12027                        ident.value.push('-');
12028
12029                        let token = self
12030                            .next_token_no_skip()
12031                            .cloned()
12032                            .unwrap_or(TokenWithSpan::wrap(Token::EOF));
12033                        requires_whitespace = match token.token {
12034                            Token::Word(next_word) if next_word.quote_style.is_none() => {
12035                                ident.value.push_str(&next_word.value);
12036                                false
12037                            }
12038                            Token::Number(s, false) => {
12039                                // A number token can represent a decimal value ending with a period, e.g., `Number('123.')`.
12040                                // However, for an [ObjectName], it is part of a hyphenated identifier, e.g., `foo-123.bar`.
12041                                //
12042                                // If a number token is followed by a period, it is part of an [ObjectName].
12043                                // Return the identifier with `true` if the number token is followed by a period, indicating that
12044                                // parsing should continue for the next part of the hyphenated identifier.
12045                                if s.ends_with('.') {
12046                                    let Some(s) = s.split('.').next().filter(|s| {
12047                                        !s.is_empty() && s.chars().all(|c| c.is_ascii_digit())
12048                                    }) else {
12049                                        return self.expected(
12050                                            "continuation of hyphenated identifier",
12051                                            TokenWithSpan::new(Token::Number(s, false), token.span),
12052                                        );
12053                                    };
12054                                    ident.value.push_str(s);
12055                                    return Ok((ident, true));
12056                                } else {
12057                                    ident.value.push_str(&s);
12058                                }
12059                                // If the next token is a period, then this number is part of an ObjectName
12060                                // and we don't expect whitespace after it.
12061                                !matches!(self.peek_token().token, Token::Period)
12062                            }
12063                            _ => {
12064                                return self
12065                                    .expected("continuation of hyphenated identifier", token);
12066                            }
12067                        }
12068                    }
12069
12070                    // If the last segment was a number, we must check that it's followed by whitespace,
12071                    // otherwise foo-123a will be parsed as `foo-123` with the alias `a`.
12072                    if requires_whitespace {
12073                        let token = self.next_token();
12074                        if !matches!(token.token, Token::EOF | Token::Whitespace(_)) {
12075                            return self
12076                                .expected("whitespace following hyphenated identifier", token);
12077                        }
12078                    }
12079                }
12080                Ok((ident, false))
12081            }
12082            _ => Ok((self.parse_identifier()?, false)),
12083        }
12084    }
12085
12086    /// Parses a parenthesized, comma-separated list of column definitions within a view.
12087    fn parse_view_columns(&mut self) -> Result<Vec<ViewColumnDef>, ParserError> {
12088        if self.consume_token(&Token::LParen) {
12089            if self.peek_token().token == Token::RParen {
12090                self.next_token();
12091                Ok(vec![])
12092            } else {
12093                let cols = self.parse_comma_separated_with_trailing_commas(
12094                    Parser::parse_view_column,
12095                    self.dialect.supports_column_definition_trailing_commas(),
12096                    Self::is_reserved_for_column_alias,
12097                )?;
12098                self.expect_token(&Token::RParen)?;
12099                Ok(cols)
12100            }
12101        } else {
12102            Ok(vec![])
12103        }
12104    }
12105
12106    /// Parses a column definition within a view.
12107    fn parse_view_column(&mut self) -> Result<ViewColumnDef, ParserError> {
12108        let name = self.parse_identifier()?;
12109        let options = self.parse_view_column_options()?;
12110        let data_type = if dialect_of!(self is ClickHouseDialect) {
12111            Some(self.parse_data_type()?)
12112        } else {
12113            None
12114        };
12115        Ok(ViewColumnDef {
12116            name,
12117            data_type,
12118            options,
12119        })
12120    }
12121
12122    fn parse_view_column_options(&mut self) -> Result<Option<ColumnOptions>, ParserError> {
12123        let mut options = Vec::new();
12124        loop {
12125            let option = self.parse_optional_column_option()?;
12126            if let Some(option) = option {
12127                options.push(option);
12128            } else {
12129                break;
12130            }
12131        }
12132        if options.is_empty() {
12133            Ok(None)
12134        } else if self.dialect.supports_space_separated_column_options() {
12135            Ok(Some(ColumnOptions::SpaceSeparated(options)))
12136        } else {
12137            Ok(Some(ColumnOptions::CommaSeparated(options)))
12138        }
12139    }
12140
12141    /// Parses a parenthesized comma-separated list of unqualified, possibly quoted identifiers.
12142    /// For example: `(col1, "col 2", ...)`
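    ///
    /// A brief usage sketch (illustrative, using the `GenericDialect`):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::{IsOptional, Parser};
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql(r#"(col1, "col 2")"#).unwrap();
    /// let cols = parser
    ///     .parse_parenthesized_column_list(IsOptional::Mandatory, false)
    ///     .unwrap();
    /// assert_eq!(cols.len(), 2);
    /// assert_eq!(cols[0].value, "col1");
    /// assert_eq!(cols[1].value, "col 2");
    /// ```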
12143    pub fn parse_parenthesized_column_list(
12144        &mut self,
12145        optional: IsOptional,
12146        allow_empty: bool,
12147    ) -> Result<Vec<Ident>, ParserError> {
12148        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| p.parse_identifier())
12149    }
12150
12151    pub fn parse_parenthesized_compound_identifier_list(
12152        &mut self,
12153        optional: IsOptional,
12154        allow_empty: bool,
12155    ) -> Result<Vec<Expr>, ParserError> {
12156        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
12157            Ok(Expr::CompoundIdentifier(
12158                p.parse_period_separated(|p| p.parse_identifier())?,
12159            ))
12160        })
12161    }
12162
12163    /// Parses a parenthesized comma-separated list of index columns, which can be arbitrary
12164    /// expressions with ordering information (and an opclass in some dialects).
12165    fn parse_parenthesized_index_column_list(&mut self) -> Result<Vec<IndexColumn>, ParserError> {
12166        self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
12167            p.parse_create_index_expr()
12168        })
12169    }
12170
12171    /// Parses a parenthesized comma-separated list of qualified, possibly quoted identifiers.
12172    /// For example: `(db1.sc1.tbl1.col1, db1.sc1.tbl1."col 2", ...)`
12173    pub fn parse_parenthesized_qualified_column_list(
12174        &mut self,
12175        optional: IsOptional,
12176        allow_empty: bool,
12177    ) -> Result<Vec<ObjectName>, ParserError> {
12178        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
12179            p.parse_object_name(true)
12180        })
12181    }
12182
12183    /// Parses a parenthesized comma-separated list of columns using
12184    /// the provided function to parse each element.
12185    fn parse_parenthesized_column_list_inner<F, T>(
12186        &mut self,
12187        optional: IsOptional,
12188        allow_empty: bool,
12189        mut f: F,
12190    ) -> Result<Vec<T>, ParserError>
12191    where
12192        F: FnMut(&mut Parser) -> Result<T, ParserError>,
12193    {
12194        if self.consume_token(&Token::LParen) {
12195            if allow_empty && self.peek_token().token == Token::RParen {
12196                self.next_token();
12197                Ok(vec![])
12198            } else {
12199                let cols = self.parse_comma_separated(|p| f(p))?;
12200                self.expect_token(&Token::RParen)?;
12201                Ok(cols)
12202            }
12203        } else if optional == Optional {
12204            Ok(vec![])
12205        } else {
12206            self.expected("a list of columns in parentheses", self.peek_token())
12207        }
12208    }
12209
12210    /// Parses a parenthesized comma-separated list of table alias column definitions.
12211    fn parse_table_alias_column_defs(&mut self) -> Result<Vec<TableAliasColumnDef>, ParserError> {
12212        if self.consume_token(&Token::LParen) {
12213            let cols = self.parse_comma_separated(|p| {
12214                let name = p.parse_identifier()?;
12215                let data_type = p.maybe_parse(|p| p.parse_data_type())?;
12216                Ok(TableAliasColumnDef { name, data_type })
12217            })?;
12218            self.expect_token(&Token::RParen)?;
12219            Ok(cols)
12220        } else {
12221            Ok(vec![])
12222        }
12223    }
12224
12225    pub fn parse_precision(&mut self) -> Result<u64, ParserError> {
12226        self.expect_token(&Token::LParen)?;
12227        let n = self.parse_literal_uint()?;
12228        self.expect_token(&Token::RParen)?;
12229        Ok(n)
12230    }
12231
12232    pub fn parse_optional_precision(&mut self) -> Result<Option<u64>, ParserError> {
12233        if self.consume_token(&Token::LParen) {
12234            let n = self.parse_literal_uint()?;
12235            self.expect_token(&Token::RParen)?;
12236            Ok(Some(n))
12237        } else {
12238            Ok(None)
12239        }
12240    }
12241
12242    fn maybe_parse_optional_interval_fields(
12243        &mut self,
12244    ) -> Result<Option<IntervalFields>, ParserError> {
12245        match self.parse_one_of_keywords(&[
12246            // Can be followed by `TO` option
12247            Keyword::YEAR,
12248            Keyword::DAY,
12249            Keyword::HOUR,
12250            Keyword::MINUTE,
12251            // No `TO` option
12252            Keyword::MONTH,
12253            Keyword::SECOND,
12254        ]) {
12255            Some(Keyword::YEAR) => {
12256                if self.peek_keyword(Keyword::TO) {
12257                    self.expect_keyword(Keyword::TO)?;
12258                    self.expect_keyword(Keyword::MONTH)?;
12259                    Ok(Some(IntervalFields::YearToMonth))
12260                } else {
12261                    Ok(Some(IntervalFields::Year))
12262                }
12263            }
12264            Some(Keyword::DAY) => {
12265                if self.peek_keyword(Keyword::TO) {
12266                    self.expect_keyword(Keyword::TO)?;
12267                    match self.expect_one_of_keywords(&[
12268                        Keyword::HOUR,
12269                        Keyword::MINUTE,
12270                        Keyword::SECOND,
12271                    ])? {
12272                        Keyword::HOUR => Ok(Some(IntervalFields::DayToHour)),
12273                        Keyword::MINUTE => Ok(Some(IntervalFields::DayToMinute)),
12274                        Keyword::SECOND => Ok(Some(IntervalFields::DayToSecond)),
12275                        _ => {
12276                            self.prev_token();
12277                            self.expected("HOUR, MINUTE, or SECOND", self.peek_token())
12278                        }
12279                    }
12280                } else {
12281                    Ok(Some(IntervalFields::Day))
12282                }
12283            }
12284            Some(Keyword::HOUR) => {
12285                if self.peek_keyword(Keyword::TO) {
12286                    self.expect_keyword(Keyword::TO)?;
12287                    match self.expect_one_of_keywords(&[Keyword::MINUTE, Keyword::SECOND])? {
12288                        Keyword::MINUTE => Ok(Some(IntervalFields::HourToMinute)),
12289                        Keyword::SECOND => Ok(Some(IntervalFields::HourToSecond)),
12290                        _ => {
12291                            self.prev_token();
12292                            self.expected("MINUTE or SECOND", self.peek_token())
12293                        }
12294                    }
12295                } else {
12296                    Ok(Some(IntervalFields::Hour))
12297                }
12298            }
12299            Some(Keyword::MINUTE) => {
12300                if self.peek_keyword(Keyword::TO) {
12301                    self.expect_keyword(Keyword::TO)?;
12302                    self.expect_keyword(Keyword::SECOND)?;
12303                    Ok(Some(IntervalFields::MinuteToSecond))
12304                } else {
12305                    Ok(Some(IntervalFields::Minute))
12306                }
12307            }
12308            Some(Keyword::MONTH) => Ok(Some(IntervalFields::Month)),
12309            Some(Keyword::SECOND) => Ok(Some(IntervalFields::Second)),
12310            Some(_) => {
12311                self.prev_token();
12312                self.expected(
12313                    "YEAR, MONTH, DAY, HOUR, MINUTE, or SECOND",
12314                    self.peek_token(),
12315                )
12316            }
12317            None => Ok(None),
12318        }
12319    }
12320
12321    /// Parse a ClickHouse `DateTime64` type [1].
12322    /// Syntax:
12323    /// ```sql
12324    /// DateTime64(precision[, timezone])
12325    /// ```
12326    ///
12327    /// [1]: https://clickhouse.com/docs/en/sql-reference/data-types/datetime64
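    ///
    /// For example (illustrative, using the `ClickHouseDialect`):
    ///
    /// ```rust
    /// use sqlparser::dialect::ClickHouseDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = ClickHouseDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("DateTime64(3, 'UTC')").unwrap();
    /// let (precision, time_zone) = parser.parse_datetime_64().unwrap();
    /// assert_eq!(precision, 3);
    /// assert_eq!(time_zone, Some("UTC".to_string()));
    /// ```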
12328    pub fn parse_datetime_64(&mut self) -> Result<(u64, Option<String>), ParserError> {
12329        self.expect_keyword_is(Keyword::DATETIME64)?;
12330        self.expect_token(&Token::LParen)?;
12331        let precision = self.parse_literal_uint()?;
12332        let time_zone = if self.consume_token(&Token::Comma) {
12333            Some(self.parse_literal_string()?)
12334        } else {
12335            None
12336        };
12337        self.expect_token(&Token::RParen)?;
12338        Ok((precision, time_zone))
12339    }
12340
12341    pub fn parse_optional_character_length(
12342        &mut self,
12343    ) -> Result<Option<CharacterLength>, ParserError> {
12344        if self.consume_token(&Token::LParen) {
12345            let character_length = self.parse_character_length()?;
12346            self.expect_token(&Token::RParen)?;
12347            Ok(Some(character_length))
12348        } else {
12349            Ok(None)
12350        }
12351    }
12352
12353    pub fn parse_optional_binary_length(&mut self) -> Result<Option<BinaryLength>, ParserError> {
12354        if self.consume_token(&Token::LParen) {
12355            let binary_length = self.parse_binary_length()?;
12356            self.expect_token(&Token::RParen)?;
12357            Ok(Some(binary_length))
12358        } else {
12359            Ok(None)
12360        }
12361    }
12362
12363    pub fn parse_character_length(&mut self) -> Result<CharacterLength, ParserError> {
12364        if self.parse_keyword(Keyword::MAX) {
12365            return Ok(CharacterLength::Max);
12366        }
12367        let length = self.parse_literal_uint()?;
12368        let unit = if self.parse_keyword(Keyword::CHARACTERS) {
12369            Some(CharLengthUnits::Characters)
12370        } else if self.parse_keyword(Keyword::OCTETS) {
12371            Some(CharLengthUnits::Octets)
12372        } else {
12373            None
12374        };
12375        Ok(CharacterLength::IntegerLength { length, unit })
12376    }
12377
12378    pub fn parse_binary_length(&mut self) -> Result<BinaryLength, ParserError> {
12379        if self.parse_keyword(Keyword::MAX) {
12380            return Ok(BinaryLength::Max);
12381        }
12382        let length = self.parse_literal_uint()?;
12383        Ok(BinaryLength::IntegerLength { length })
12384    }
12385
12386    pub fn parse_optional_precision_scale(
12387        &mut self,
12388    ) -> Result<(Option<u64>, Option<u64>), ParserError> {
12389        if self.consume_token(&Token::LParen) {
12390            let n = self.parse_literal_uint()?;
12391            let scale = if self.consume_token(&Token::Comma) {
12392                Some(self.parse_literal_uint()?)
12393            } else {
12394                None
12395            };
12396            self.expect_token(&Token::RParen)?;
12397            Ok((Some(n), scale))
12398        } else {
12399            Ok((None, None))
12400        }
12401    }
12402
12403    pub fn parse_exact_number_optional_precision_scale(
12404        &mut self,
12405    ) -> Result<ExactNumberInfo, ParserError> {
12406        if self.consume_token(&Token::LParen) {
12407            let precision = self.parse_literal_uint()?;
12408            let scale = if self.consume_token(&Token::Comma) {
12409                Some(self.parse_signed_integer()?)
12410            } else {
12411                None
12412            };
12413
12414            self.expect_token(&Token::RParen)?;
12415
12416            match scale {
12417                None => Ok(ExactNumberInfo::Precision(precision)),
12418                Some(scale) => Ok(ExactNumberInfo::PrecisionAndScale(precision, scale)),
12419            }
12420        } else {
12421            Ok(ExactNumberInfo::None)
12422        }
12423    }
12424
12425    /// Parse an optionally signed integer literal.
12426    fn parse_signed_integer(&mut self) -> Result<i64, ParserError> {
12427        let is_negative = self.consume_token(&Token::Minus);
12428
12429        if !is_negative {
12430            let _ = self.consume_token(&Token::Plus);
12431        }
12432
12433        let current_token = self.peek_token_ref();
12434        match &current_token.token {
12435            Token::Number(s, _) => {
12436                let s = s.clone();
12437                let span_start = current_token.span.start;
12438                self.advance_token();
12439                let value = Self::parse::<i64>(s, span_start)?;
12440                Ok(if is_negative { -value } else { value })
12441            }
12442            _ => self.expected_ref("number", current_token),
12443        }
12444    }
12445
12446    pub fn parse_optional_type_modifiers(&mut self) -> Result<Option<Vec<String>>, ParserError> {
12447        if self.consume_token(&Token::LParen) {
12448            let mut modifiers = Vec::new();
12449            loop {
12450                let next_token = self.next_token();
12451                match next_token.token {
12452                    Token::Word(w) => modifiers.push(w.to_string()),
12453                    Token::Number(n, _) => modifiers.push(n),
12454                    Token::SingleQuotedString(s) => modifiers.push(s),
12455
12456                    Token::Comma => {
12457                        continue;
12458                    }
12459                    Token::RParen => {
12460                        break;
12461                    }
12462                    _ => self.expected("type modifiers", next_token)?,
12463                }
12464            }
12465
12466            Ok(Some(modifiers))
12467        } else {
12468            Ok(None)
12469        }
12470    }
12471
12472    /// Parse a parenthesized sub data type
12473    fn parse_sub_type<F>(&mut self, parent_type: F) -> Result<DataType, ParserError>
12474    where
12475        F: FnOnce(Box<DataType>) -> DataType,
12476    {
12477        self.expect_token(&Token::LParen)?;
12478        let inside_type = self.parse_data_type()?;
12479        self.expect_token(&Token::RParen)?;
12480        Ok(parent_type(inside_type.into()))
12481    }
12482
12483    /// Parse a DELETE statement, returning a `Box`ed SetExpr
12484    ///
12485    /// This is used to reduce the size of the stack frames in debug builds
12486    fn parse_delete_setexpr_boxed(
12487        &mut self,
12488        delete_token: TokenWithSpan,
12489    ) -> Result<Box<SetExpr>, ParserError> {
12490        Ok(Box::new(SetExpr::Delete(self.parse_delete(delete_token)?)))
12491    }
12492
12493    pub fn parse_delete(&mut self, delete_token: TokenWithSpan) -> Result<Statement, ParserError> {
12494        let (tables, with_from_keyword) = if !self.parse_keyword(Keyword::FROM) {
12495            // `FROM` keyword is optional in BigQuery SQL.
12496            // https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax#delete_statement
12497            if dialect_of!(self is BigQueryDialect | OracleDialect | GenericDialect) {
12498                (vec![], false)
12499            } else {
12500                let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
12501                self.expect_keyword_is(Keyword::FROM)?;
12502                (tables, true)
12503            }
12504        } else {
12505            (vec![], true)
12506        };
12507
12508        let from = self.parse_comma_separated(Parser::parse_table_and_joins)?;
12509        let using = if self.parse_keyword(Keyword::USING) {
12510            Some(self.parse_comma_separated(Parser::parse_table_and_joins)?)
12511        } else {
12512            None
12513        };
12514        let selection = if self.parse_keyword(Keyword::WHERE) {
12515            Some(self.parse_expr()?)
12516        } else {
12517            None
12518        };
12519        let returning = if self.parse_keyword(Keyword::RETURNING) {
12520            Some(self.parse_comma_separated(Parser::parse_select_item)?)
12521        } else {
12522            None
12523        };
12524        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
12525            self.parse_comma_separated(Parser::parse_order_by_expr)?
12526        } else {
12527            vec![]
12528        };
12529        let limit = if self.parse_keyword(Keyword::LIMIT) {
12530            self.parse_limit()?
12531        } else {
12532            None
12533        };
12534
12535        Ok(Statement::Delete(Delete {
12536            delete_token: delete_token.into(),
12537            tables,
12538            from: if with_from_keyword {
12539                FromTable::WithFromKeyword(from)
12540            } else {
12541                FromTable::WithoutKeyword(from)
12542            },
12543            using,
12544            selection,
12545            returning,
12546            order_by,
12547            limit,
12548        }))
12549    }
12550
12551    // KILL [CONNECTION | QUERY | MUTATION] processlist_id
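    ///
    /// Note that the `KILL` keyword itself is expected to have already been consumed by the
    /// caller. A minimal, illustrative example of calling this method directly:
    ///
    /// ```rust
    /// use sqlparser::ast::{KillType, Statement};
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// // The leading `KILL` keyword has already been consumed at this point
    /// let mut parser = Parser::new(&dialect).try_with_sql("CONNECTION 42").unwrap();
    /// let stmt = parser.parse_kill().unwrap();
    /// assert_eq!(
    ///     stmt,
    ///     Statement::Kill {
    ///         modifier: Some(KillType::Connection),
    ///         id: 42,
    ///     }
    /// );
    /// ```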
12552    pub fn parse_kill(&mut self) -> Result<Statement, ParserError> {
12553        let modifier_keyword =
12554            self.parse_one_of_keywords(&[Keyword::CONNECTION, Keyword::QUERY, Keyword::MUTATION]);
12555
12556        let id = self.parse_literal_uint()?;
12557
12558        let modifier = match modifier_keyword {
12559            Some(Keyword::CONNECTION) => Some(KillType::Connection),
12560            Some(Keyword::QUERY) => Some(KillType::Query),
12561            Some(Keyword::MUTATION) => {
12562                if dialect_of!(self is ClickHouseDialect | GenericDialect) {
12563                    Some(KillType::Mutation)
12564                } else {
12565                    self.expected(
12566                        "Unsupported type for KILL, allowed: CONNECTION | QUERY",
12567                        self.peek_token(),
12568                    )?
12569                }
12570            }
12571            _ => None,
12572        };
12573
12574        Ok(Statement::Kill { modifier, id })
12575    }
12576
12577    pub fn parse_explain(
12578        &mut self,
12579        describe_alias: DescribeAlias,
12580    ) -> Result<Statement, ParserError> {
12581        let mut analyze = false;
12582        let mut verbose = false;
12583        let mut query_plan = false;
12584        let mut estimate = false;
12585        let mut format = None;
12586        let mut options = None;
12587
12588        // Note: DuckDB is compatible with PostgreSQL syntax for this statement,
12589        // although not all features may be implemented.
12590        if describe_alias == DescribeAlias::Explain
12591            && self.dialect.supports_explain_with_utility_options()
12592            && self.peek_token().token == Token::LParen
12593        {
12594            options = Some(self.parse_utility_options()?)
12595        } else if self.parse_keywords(&[Keyword::QUERY, Keyword::PLAN]) {
12596            query_plan = true;
12597        } else if self.parse_keyword(Keyword::ESTIMATE) {
12598            estimate = true;
12599        } else {
12600            analyze = self.parse_keyword(Keyword::ANALYZE);
12601            verbose = self.parse_keyword(Keyword::VERBOSE);
12602            if self.parse_keyword(Keyword::FORMAT) {
12603                format = Some(self.parse_analyze_format_kind()?);
12604            }
12605        }
12606
12607        match self.maybe_parse(|parser| parser.parse_statement())? {
12608            Some(Statement::Explain { .. }) | Some(Statement::ExplainTable { .. }) => Err(
12609                ParserError::ParserError("Explain must be root of the plan".to_string()),
12610            ),
12611            Some(statement) => Ok(Statement::Explain {
12612                describe_alias,
12613                analyze,
12614                verbose,
12615                query_plan,
12616                estimate,
12617                statement: Box::new(statement),
12618                format,
12619                options,
12620            }),
12621            _ => {
12622                let hive_format =
12623                    match self.parse_one_of_keywords(&[Keyword::EXTENDED, Keyword::FORMATTED]) {
12624                        Some(Keyword::EXTENDED) => Some(HiveDescribeFormat::Extended),
12625                        Some(Keyword::FORMATTED) => Some(HiveDescribeFormat::Formatted),
12626                        _ => None,
12627                    };
12628
12629                let has_table_keyword = if self.dialect.describe_requires_table_keyword() {
12630                    // only allow the TABLE keyword for DESC|DESCRIBE statements
12631                    self.parse_keyword(Keyword::TABLE)
12632                } else {
12633                    false
12634                };
12635
12636                let table_name = self.parse_object_name(false)?;
12637                Ok(Statement::ExplainTable {
12638                    describe_alias,
12639                    hive_format,
12640                    has_table_keyword,
12641                    table_name,
12642                })
12643            }
12644        }
12645    }
12646
12647    /// Parse a query expression, i.e. a `SELECT` statement optionally
12648    /// preceded by some `WITH` CTE declarations and optionally followed
12649    /// by `ORDER BY`. Unlike some other parse_... methods, this one doesn't
12650    /// expect the initial keyword to have been consumed already.
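    ///
    /// For example (illustrative, using the `GenericDialect`):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let sql = "WITH t AS (SELECT 1 AS x) SELECT x FROM t ORDER BY x LIMIT 10";
    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
    /// let query = parser.parse_query().unwrap();
    /// assert!(query.with.is_some());
    /// assert!(query.order_by.is_some());
    /// assert!(query.limit_clause.is_some());
    /// ```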
12651    pub fn parse_query(&mut self) -> Result<Box<Query>, ParserError> {
12652        let _guard = self.recursion_counter.try_decrease()?;
12653        let with = if self.parse_keyword(Keyword::WITH) {
12654            let with_token = self.get_current_token();
12655            Some(With {
12656                with_token: with_token.clone().into(),
12657                recursive: self.parse_keyword(Keyword::RECURSIVE),
12658                cte_tables: self.parse_comma_separated(Parser::parse_cte)?,
12659            })
12660        } else {
12661            None
12662        };
12663        if self.parse_keyword(Keyword::INSERT) {
12664            Ok(Query {
12665                with,
12666                body: self.parse_insert_setexpr_boxed(self.get_current_token().clone())?,
12667                order_by: None,
12668                limit_clause: None,
12669                fetch: None,
12670                locks: vec![],
12671                for_clause: None,
12672                settings: None,
12673                format_clause: None,
12674                pipe_operators: vec![],
12675            }
12676            .into())
12677        } else if self.parse_keyword(Keyword::UPDATE) {
12678            Ok(Query {
12679                with,
12680                body: self.parse_update_setexpr_boxed(self.get_current_token().clone())?,
12681                order_by: None,
12682                limit_clause: None,
12683                fetch: None,
12684                locks: vec![],
12685                for_clause: None,
12686                settings: None,
12687                format_clause: None,
12688                pipe_operators: vec![],
12689            }
12690            .into())
12691        } else if self.parse_keyword(Keyword::DELETE) {
12692            Ok(Query {
12693                with,
12694                body: self.parse_delete_setexpr_boxed(self.get_current_token().clone())?,
12695                limit_clause: None,
12696                order_by: None,
12697                fetch: None,
12698                locks: vec![],
12699                for_clause: None,
12700                settings: None,
12701                format_clause: None,
12702                pipe_operators: vec![],
12703            }
12704            .into())
12705        } else if self.parse_keyword(Keyword::MERGE) {
12706            Ok(Query {
12707                with,
12708                body: self.parse_merge_setexpr_boxed(self.get_current_token().clone())?,
12709                limit_clause: None,
12710                order_by: None,
12711                fetch: None,
12712                locks: vec![],
12713                for_clause: None,
12714                settings: None,
12715                format_clause: None,
12716                pipe_operators: vec![],
12717            }
12718            .into())
12719        } else {
12720            let body = self.parse_query_body(self.dialect.prec_unknown())?;
12721
12722            let order_by = self.parse_optional_order_by()?;
12723
12724            let limit_clause = self.parse_optional_limit_clause()?;
12725
12726            let settings = self.parse_settings()?;
12727
12728            let fetch = if self.parse_keyword(Keyword::FETCH) {
12729                Some(self.parse_fetch()?)
12730            } else {
12731                None
12732            };
12733
12734            let mut for_clause = None;
12735            let mut locks = Vec::new();
12736            while self.parse_keyword(Keyword::FOR) {
12737                if let Some(parsed_for_clause) = self.parse_for_clause()? {
12738                    for_clause = Some(parsed_for_clause);
12739                    break;
12740                } else {
12741                    locks.push(self.parse_lock()?);
12742                }
12743            }
12744            let format_clause = if dialect_of!(self is ClickHouseDialect | GenericDialect)
12745                && self.parse_keyword(Keyword::FORMAT)
12746            {
12747                if self.parse_keyword(Keyword::NULL) {
12748                    Some(FormatClause::Null)
12749                } else {
12750                    let ident = self.parse_identifier()?;
12751                    Some(FormatClause::Identifier(ident))
12752                }
12753            } else {
12754                None
12755            };
12756
12757            let pipe_operators = if self.dialect.supports_pipe_operator() {
12758                self.parse_pipe_operators()?
12759            } else {
12760                Vec::new()
12761            };
12762
12763            Ok(Query {
12764                with,
12765                body,
12766                order_by,
12767                limit_clause,
12768                fetch,
12769                locks,
12770                for_clause,
12771                settings,
12772                format_clause,
12773                pipe_operators,
12774            }
12775            .into())
12776        }
12777    }
12778
12779    fn parse_pipe_operators(&mut self) -> Result<Vec<PipeOperator>, ParserError> {
12780        let mut pipe_operators = Vec::new();
12781
12782        while self.consume_token(&Token::VerticalBarRightAngleBracket) {
12783            let kw = self.expect_one_of_keywords(&[
12784                Keyword::SELECT,
12785                Keyword::EXTEND,
12786                Keyword::SET,
12787                Keyword::DROP,
12788                Keyword::AS,
12789                Keyword::WHERE,
12790                Keyword::LIMIT,
12791                Keyword::AGGREGATE,
12792                Keyword::ORDER,
12793                Keyword::TABLESAMPLE,
12794                Keyword::RENAME,
12795                Keyword::UNION,
12796                Keyword::INTERSECT,
12797                Keyword::EXCEPT,
12798                Keyword::CALL,
12799                Keyword::PIVOT,
12800                Keyword::UNPIVOT,
12801                Keyword::JOIN,
12802                Keyword::INNER,
12803                Keyword::LEFT,
12804                Keyword::RIGHT,
12805                Keyword::FULL,
12806                Keyword::CROSS,
12807            ])?;
12808            match kw {
12809                Keyword::SELECT => {
12810                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
12811                    pipe_operators.push(PipeOperator::Select { exprs })
12812                }
12813                Keyword::EXTEND => {
12814                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
12815                    pipe_operators.push(PipeOperator::Extend { exprs })
12816                }
12817                Keyword::SET => {
12818                    let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
12819                    pipe_operators.push(PipeOperator::Set { assignments })
12820                }
12821                Keyword::DROP => {
12822                    let columns = self.parse_identifiers()?;
12823                    pipe_operators.push(PipeOperator::Drop { columns })
12824                }
12825                Keyword::AS => {
12826                    let alias = self.parse_identifier()?;
12827                    pipe_operators.push(PipeOperator::As { alias })
12828                }
12829                Keyword::WHERE => {
12830                    let expr = self.parse_expr()?;
12831                    pipe_operators.push(PipeOperator::Where { expr })
12832                }
12833                Keyword::LIMIT => {
12834                    let expr = self.parse_expr()?;
12835                    let offset = if self.parse_keyword(Keyword::OFFSET) {
12836                        Some(self.parse_expr()?)
12837                    } else {
12838                        None
12839                    };
12840                    pipe_operators.push(PipeOperator::Limit { expr, offset })
12841                }
12842                Keyword::AGGREGATE => {
12843                    let full_table_exprs = if self.peek_keyword(Keyword::GROUP) {
12844                        vec![]
12845                    } else {
12846                        self.parse_comma_separated(|parser| {
12847                            parser.parse_expr_with_alias_and_order_by()
12848                        })?
12849                    };
12850
12851                    let group_by_expr = if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
12852                        self.parse_comma_separated(|parser| {
12853                            parser.parse_expr_with_alias_and_order_by()
12854                        })?
12855                    } else {
12856                        vec![]
12857                    };
12858
12859                    pipe_operators.push(PipeOperator::Aggregate {
12860                        full_table_exprs,
12861                        group_by_expr,
12862                    })
12863                }
12864                Keyword::ORDER => {
12865                    self.expect_one_of_keywords(&[Keyword::BY])?;
12866                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
12867                    pipe_operators.push(PipeOperator::OrderBy { exprs })
12868                }
12869                Keyword::TABLESAMPLE => {
12870                    let sample = self.parse_table_sample(TableSampleModifier::TableSample)?;
12871                    pipe_operators.push(PipeOperator::TableSample { sample });
12872                }
12873                Keyword::RENAME => {
12874                    let mappings =
12875                        self.parse_comma_separated(Parser::parse_identifier_with_optional_alias)?;
12876                    pipe_operators.push(PipeOperator::Rename { mappings });
12877                }
12878                Keyword::UNION => {
12879                    let set_quantifier = self.parse_set_quantifier(&Some(SetOperator::Union));
12880                    let queries = self.parse_pipe_operator_queries()?;
12881                    pipe_operators.push(PipeOperator::Union {
12882                        set_quantifier,
12883                        queries,
12884                    });
12885                }
12886                Keyword::INTERSECT => {
12887                    let set_quantifier =
12888                        self.parse_distinct_required_set_quantifier("INTERSECT")?;
12889                    let queries = self.parse_pipe_operator_queries()?;
12890                    pipe_operators.push(PipeOperator::Intersect {
12891                        set_quantifier,
12892                        queries,
12893                    });
12894                }
12895                Keyword::EXCEPT => {
12896                    let set_quantifier = self.parse_distinct_required_set_quantifier("EXCEPT")?;
12897                    let queries = self.parse_pipe_operator_queries()?;
12898                    pipe_operators.push(PipeOperator::Except {
12899                        set_quantifier,
12900                        queries,
12901                    });
12902                }
12903                Keyword::CALL => {
12904                    let function_name = self.parse_object_name(false)?;
12905                    let function_expr = self.parse_function(function_name)?;
12906                    if let Expr::Function(function) = function_expr {
12907                        let alias = self.parse_identifier_optional_alias()?;
12908                        pipe_operators.push(PipeOperator::Call { function, alias });
12909                    } else {
12910                        return Err(ParserError::ParserError(
12911                            "Expected function call after CALL".to_string(),
12912                        ));
12913                    }
12914                }
12915                Keyword::PIVOT => {
12916                    self.expect_token(&Token::LParen)?;
12917                    let aggregate_functions =
12918                        self.parse_comma_separated(Self::parse_aliased_function_call)?;
12919                    self.expect_keyword_is(Keyword::FOR)?;
12920                    let value_column = self.parse_period_separated(|p| p.parse_identifier())?;
12921                    self.expect_keyword_is(Keyword::IN)?;
12922
12923                    self.expect_token(&Token::LParen)?;
12924                    let value_source = if self.parse_keyword(Keyword::ANY) {
12925                        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
12926                            self.parse_comma_separated(Parser::parse_order_by_expr)?
12927                        } else {
12928                            vec![]
12929                        };
12930                        PivotValueSource::Any(order_by)
12931                    } else if self.peek_sub_query() {
12932                        PivotValueSource::Subquery(self.parse_query()?)
12933                    } else {
12934                        PivotValueSource::List(
12935                            self.parse_comma_separated(Self::parse_expr_with_alias)?,
12936                        )
12937                    };
12938                    self.expect_token(&Token::RParen)?;
12939                    self.expect_token(&Token::RParen)?;
12940
12941                    let alias = self.parse_identifier_optional_alias()?;
12942
12943                    pipe_operators.push(PipeOperator::Pivot {
12944                        aggregate_functions,
12945                        value_column,
12946                        value_source,
12947                        alias,
12948                    });
12949                }
12950                Keyword::UNPIVOT => {
12951                    self.expect_token(&Token::LParen)?;
12952                    let value_column = self.parse_identifier()?;
12953                    self.expect_keyword(Keyword::FOR)?;
12954                    let name_column = self.parse_identifier()?;
12955                    self.expect_keyword(Keyword::IN)?;
12956
12957                    self.expect_token(&Token::LParen)?;
12958                    let unpivot_columns = self.parse_comma_separated(Parser::parse_identifier)?;
12959                    self.expect_token(&Token::RParen)?;
12960
12961                    self.expect_token(&Token::RParen)?;
12962
12963                    let alias = self.parse_identifier_optional_alias()?;
12964
12965                    pipe_operators.push(PipeOperator::Unpivot {
12966                        value_column,
12967                        name_column,
12968                        unpivot_columns,
12969                        alias,
12970                    });
12971                }
12972                Keyword::JOIN
12973                | Keyword::INNER
12974                | Keyword::LEFT
12975                | Keyword::RIGHT
12976                | Keyword::FULL
12977                | Keyword::CROSS => {
12978                    self.prev_token();
12979                    let mut joins = self.parse_joins()?;
12980                    if joins.len() != 1 {
12981                        return Err(ParserError::ParserError(
12982                            "Join pipe operator must have a single join".to_string(),
12983                        ));
12984                    }
12985                    let join = joins.swap_remove(0);
12986                    pipe_operators.push(PipeOperator::Join(join))
12987                }
12988                unhandled => {
12989                    return Err(ParserError::ParserError(format!(
12990                    "`expect_one_of_keywords` further up allowed unhandled keyword: {unhandled:?}"
12991                )))
12992                }
12993            }
12994        }
12995        Ok(pipe_operators)
12996    }
12997
12998    fn parse_settings(&mut self) -> Result<Option<Vec<Setting>>, ParserError> {
12999        let settings = if dialect_of!(self is ClickHouseDialect|GenericDialect)
13000            && self.parse_keyword(Keyword::SETTINGS)
13001        {
13002            let key_values = self.parse_comma_separated(|p| {
13003                let key = p.parse_identifier()?;
13004                p.expect_token(&Token::Eq)?;
13005                let value = p.parse_expr()?;
13006                Ok(Setting { key, value })
13007            })?;
13008            Some(key_values)
13009        } else {
13010            None
13011        };
13012        Ok(settings)
13013    }
13014
13015    /// Parse an MSSQL `FOR [XML | JSON | BROWSE]` clause
13016    pub fn parse_for_clause(&mut self) -> Result<Option<ForClause>, ParserError> {
13017        if self.parse_keyword(Keyword::XML) {
13018            Ok(Some(self.parse_for_xml()?))
13019        } else if self.parse_keyword(Keyword::JSON) {
13020            Ok(Some(self.parse_for_json()?))
13021        } else if self.parse_keyword(Keyword::BROWSE) {
13022            Ok(Some(ForClause::Browse))
13023        } else {
13024            Ok(None)
13025        }
13026    }
13027
13028    /// Parse an MSSQL `FOR XML` clause
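    ///
    /// Illustrative clause (the `FOR XML` keywords are consumed before this is
    /// called; names are placeholders):
    /// ```sql
    /// FOR XML PATH('row'), ROOT('rows'), ELEMENTS, BINARY BASE64
    /// ```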
13029    pub fn parse_for_xml(&mut self) -> Result<ForClause, ParserError> {
13030        let for_xml = if self.parse_keyword(Keyword::RAW) {
13031            let mut element_name = None;
13032            if self.peek_token().token == Token::LParen {
13033                self.expect_token(&Token::LParen)?;
13034                element_name = Some(self.parse_literal_string()?);
13035                self.expect_token(&Token::RParen)?;
13036            }
13037            ForXml::Raw(element_name)
13038        } else if self.parse_keyword(Keyword::AUTO) {
13039            ForXml::Auto
13040        } else if self.parse_keyword(Keyword::EXPLICIT) {
13041            ForXml::Explicit
13042        } else if self.parse_keyword(Keyword::PATH) {
13043            let mut element_name = None;
13044            if self.peek_token().token == Token::LParen {
13045                self.expect_token(&Token::LParen)?;
13046                element_name = Some(self.parse_literal_string()?);
13047                self.expect_token(&Token::RParen)?;
13048            }
13049            ForXml::Path(element_name)
13050        } else {
13051            return Err(ParserError::ParserError(
13052                "Expected FOR XML [RAW | AUTO | EXPLICIT | PATH]".to_string(),
13053            ));
13054        };
13055        let mut elements = false;
13056        let mut binary_base64 = false;
13057        let mut root = None;
13058        let mut r#type = false;
13059        while self.peek_token().token == Token::Comma {
13060            self.next_token();
13061            if self.parse_keyword(Keyword::ELEMENTS) {
13062                elements = true;
13063            } else if self.parse_keyword(Keyword::BINARY) {
13064                self.expect_keyword_is(Keyword::BASE64)?;
13065                binary_base64 = true;
13066            } else if self.parse_keyword(Keyword::ROOT) {
13067                self.expect_token(&Token::LParen)?;
13068                root = Some(self.parse_literal_string()?);
13069                self.expect_token(&Token::RParen)?;
13070            } else if self.parse_keyword(Keyword::TYPE) {
13071                r#type = true;
13072            }
13073        }
13074        Ok(ForClause::Xml {
13075            for_xml,
13076            elements,
13077            binary_base64,
13078            root,
13079            r#type,
13080        })
13081    }
13082
13083    /// Parse an MSSQL `FOR JSON` clause
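    ///
    /// Illustrative clause (the `FOR JSON` keywords are consumed before this is
    /// called; names are placeholders):
    /// ```sql
    /// FOR JSON PATH, ROOT('data'), INCLUDE_NULL_VALUES
    /// ```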
13084    pub fn parse_for_json(&mut self) -> Result<ForClause, ParserError> {
13085        let for_json = if self.parse_keyword(Keyword::AUTO) {
13086            ForJson::Auto
13087        } else if self.parse_keyword(Keyword::PATH) {
13088            ForJson::Path
13089        } else {
13090            return Err(ParserError::ParserError(
13091                "Expected FOR JSON [AUTO | PATH]".to_string(),
13092            ));
13093        };
13094        let mut root = None;
13095        let mut include_null_values = false;
13096        let mut without_array_wrapper = false;
13097        while self.peek_token().token == Token::Comma {
13098            self.next_token();
13099            if self.parse_keyword(Keyword::ROOT) {
13100                self.expect_token(&Token::LParen)?;
13101                root = Some(self.parse_literal_string()?);
13102                self.expect_token(&Token::RParen)?;
13103            } else if self.parse_keyword(Keyword::INCLUDE_NULL_VALUES) {
13104                include_null_values = true;
13105            } else if self.parse_keyword(Keyword::WITHOUT_ARRAY_WRAPPER) {
13106                without_array_wrapper = true;
13107            }
13108        }
13109        Ok(ForClause::Json {
13110            for_json,
13111            root,
13112            include_null_values,
13113            without_array_wrapper,
13114        })
13115    }
13116
13117    /// Parse a CTE (`alias [( col1, col2, ... )] AS (subquery)`)
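    ///
    /// Illustrative CTEs as they appear in a `WITH` clause (`MATERIALIZED` /
    /// `NOT MATERIALIZED` is only recognized for the PostgreSQL dialect):
    /// ```sql
    /// cte AS (SELECT 1)
    /// cte (a, b) AS MATERIALIZED (SELECT 1, 2)
    /// ```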
13118    pub fn parse_cte(&mut self) -> Result<Cte, ParserError> {
13119        let name = self.parse_identifier()?;
13120
13121        let mut cte = if self.parse_keyword(Keyword::AS) {
13122            let mut is_materialized = None;
13123            if dialect_of!(self is PostgreSqlDialect) {
13124                if self.parse_keyword(Keyword::MATERIALIZED) {
13125                    is_materialized = Some(CteAsMaterialized::Materialized);
13126                } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
13127                    is_materialized = Some(CteAsMaterialized::NotMaterialized);
13128                }
13129            }
13130            self.expect_token(&Token::LParen)?;
13131
13132            let query = self.parse_query()?;
13133            let closing_paren_token = self.expect_token(&Token::RParen)?;
13134
13135            let alias = TableAlias {
13136                explicit: false,
13137                name,
13138                columns: vec![],
13139            };
13140            Cte {
13141                alias,
13142                query,
13143                from: None,
13144                materialized: is_materialized,
13145                closing_paren_token: closing_paren_token.into(),
13146            }
13147        } else {
13148            let columns = self.parse_table_alias_column_defs()?;
13149            self.expect_keyword_is(Keyword::AS)?;
13150            let mut is_materialized = None;
13151            if dialect_of!(self is PostgreSqlDialect) {
13152                if self.parse_keyword(Keyword::MATERIALIZED) {
13153                    is_materialized = Some(CteAsMaterialized::Materialized);
13154                } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
13155                    is_materialized = Some(CteAsMaterialized::NotMaterialized);
13156                }
13157            }
13158            self.expect_token(&Token::LParen)?;
13159
13160            let query = self.parse_query()?;
13161            let closing_paren_token = self.expect_token(&Token::RParen)?;
13162
13163            let alias = TableAlias {
13164                explicit: false,
13165                name,
13166                columns,
13167            };
13168            Cte {
13169                alias,
13170                query,
13171                from: None,
13172                materialized: is_materialized,
13173                closing_paren_token: closing_paren_token.into(),
13174            }
13175        };
13176        if self.parse_keyword(Keyword::FROM) {
13177            cte.from = Some(self.parse_identifier()?);
13178        }
13179        Ok(cte)
13180    }
13181
13182    /// Parse a "query body", which is an expression with roughly the
13183    /// following grammar:
13184    /// ```sql
13185    ///   query_body ::= restricted_select | '(' subquery ')' | set_operation
13186    ///   restricted_select ::= 'SELECT' [expr_list] [ from ] [ where ] [ groupby_having ]
13187    ///   subquery ::= query_body [ order_by_limit ]
13188    ///   set_operation ::= query_body { 'UNION' | 'EXCEPT' | 'INTERSECT' } [ 'ALL' ] query_body
13189    /// ```
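    ///
    /// `INTERSECT` binds tighter than `UNION`/`EXCEPT`/`MINUS`, so, illustratively:
    /// ```sql
    /// SELECT 1 UNION SELECT 2 INTERSECT SELECT 3
    /// -- parses as: SELECT 1 UNION (SELECT 2 INTERSECT SELECT 3)
    /// ```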
13190    pub fn parse_query_body(&mut self, precedence: u8) -> Result<Box<SetExpr>, ParserError> {
13191        // We parse the expression using a Pratt parser, as in `parse_expr()`.
13192        // Start by parsing a restricted SELECT or a `(subquery)`:
13193        let expr = if self.peek_keyword(Keyword::SELECT)
13194            || (self.peek_keyword(Keyword::FROM) && self.dialect.supports_from_first_select())
13195        {
13196            SetExpr::Select(self.parse_select().map(Box::new)?)
13197        } else if self.consume_token(&Token::LParen) {
13198            // CTEs are not allowed here, but the parser currently accepts them
13199            let subquery = self.parse_query()?;
13200            self.expect_token(&Token::RParen)?;
13201            SetExpr::Query(subquery)
13202        } else if self.parse_keyword(Keyword::VALUES) {
13203            let is_mysql = dialect_of!(self is MySqlDialect);
13204            SetExpr::Values(self.parse_values(is_mysql, false)?)
13205        } else if self.parse_keyword(Keyword::VALUE) {
13206            let is_mysql = dialect_of!(self is MySqlDialect);
13207            SetExpr::Values(self.parse_values(is_mysql, true)?)
13208        } else if self.parse_keyword(Keyword::TABLE) {
13209            SetExpr::Table(Box::new(self.parse_as_table()?))
13210        } else {
13211            return self.expected(
13212                "SELECT, VALUES, or a subquery in the query body",
13213                self.peek_token(),
13214            );
13215        };
13216
13217        self.parse_remaining_set_exprs(expr, precedence)
13218    }
13219
13220    /// Parse any extra set expressions that may be present in a query body
13221    ///
13222    /// (this is its own function to reduce required stack size in debug builds)
13223    fn parse_remaining_set_exprs(
13224        &mut self,
13225        mut expr: SetExpr,
13226        precedence: u8,
13227    ) -> Result<Box<SetExpr>, ParserError> {
13228        loop {
13229            // The query can be optionally followed by a set operator:
13230            let op = self.parse_set_operator(&self.peek_token().token);
13231            let next_precedence = match op {
13232                // UNION, EXCEPT, and MINUS have the same binding power and evaluate left-to-right
13233                Some(SetOperator::Union) | Some(SetOperator::Except) | Some(SetOperator::Minus) => {
13234                    10
13235                }
13236                // INTERSECT has higher precedence than UNION/EXCEPT
13237                Some(SetOperator::Intersect) => 20,
13238                // Unexpected token or EOF => stop parsing the query body
13239                None => break,
13240            };
13241            if precedence >= next_precedence {
13242                break;
13243            }
13244            self.next_token(); // skip past the set operator
13245            let set_quantifier = self.parse_set_quantifier(&op);
13246            expr = SetExpr::SetOperation {
13247                left: Box::new(expr),
13248                op: op.unwrap(),
13249                set_quantifier,
13250                right: self.parse_query_body(next_precedence)?,
13251            };
13252        }
13253
13254        Ok(expr.into())
13255    }
13256
13257    pub fn parse_set_operator(&mut self, token: &Token) -> Option<SetOperator> {
13258        match token {
13259            Token::Word(w) if w.keyword == Keyword::UNION => Some(SetOperator::Union),
13260            Token::Word(w) if w.keyword == Keyword::EXCEPT => Some(SetOperator::Except),
13261            Token::Word(w) if w.keyword == Keyword::INTERSECT => Some(SetOperator::Intersect),
13262            Token::Word(w) if w.keyword == Keyword::MINUS => Some(SetOperator::Minus),
13263            _ => None,
13264        }
13265    }
13266
13267    pub fn parse_set_quantifier(&mut self, op: &Option<SetOperator>) -> SetQuantifier {
13268        match op {
13269            Some(
13270                SetOperator::Except
13271                | SetOperator::Intersect
13272                | SetOperator::Union
13273                | SetOperator::Minus,
13274            ) => {
13275                if self.parse_keywords(&[Keyword::DISTINCT, Keyword::BY, Keyword::NAME]) {
13276                    SetQuantifier::DistinctByName
13277                } else if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
13278                    SetQuantifier::ByName
13279                } else if self.parse_keyword(Keyword::ALL) {
13280                    if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
13281                        SetQuantifier::AllByName
13282                    } else {
13283                        SetQuantifier::All
13284                    }
13285                } else if self.parse_keyword(Keyword::DISTINCT) {
13286                    SetQuantifier::Distinct
13287                } else {
13288                    SetQuantifier::None
13289                }
13290            }
13291            _ => SetQuantifier::None,
13292        }
13293    }
13294
13295    /// Parse a restricted `SELECT` statement (no CTEs / `UNION` / `ORDER BY`)
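    ///
    /// Illustrative inputs; the `FROM`-first form is only accepted when the
    /// dialect reports `supports_from_first_select()` (names are placeholders):
    /// ```sql
    /// SELECT DISTINCT a, b FROM t WHERE a > 1 GROUP BY a, b HAVING COUNT(*) > 1
    /// FROM t SELECT a
    /// ```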
13296    pub fn parse_select(&mut self) -> Result<Select, ParserError> {
13297        let mut from_first = None;
13298
13299        if self.dialect.supports_from_first_select() && self.peek_keyword(Keyword::FROM) {
13300            let from_token = self.expect_keyword(Keyword::FROM)?;
13301            let from = self.parse_table_with_joins()?;
13302            if !self.peek_keyword(Keyword::SELECT) {
13303                return Ok(Select {
13304                    select_token: AttachedToken(from_token),
13305                    distinct: None,
13306                    top: None,
13307                    top_before_distinct: false,
13308                    projection: vec![],
13309                    exclude: None,
13310                    into: None,
13311                    from,
13312                    lateral_views: vec![],
13313                    prewhere: None,
13314                    selection: None,
13315                    group_by: GroupByExpr::Expressions(vec![], vec![]),
13316                    cluster_by: vec![],
13317                    distribute_by: vec![],
13318                    sort_by: vec![],
13319                    having: None,
13320                    named_window: vec![],
13321                    window_before_qualify: false,
13322                    qualify: None,
13323                    value_table_mode: None,
13324                    connect_by: None,
13325                    flavor: SelectFlavor::FromFirstNoSelect,
13326                });
13327            }
13328            from_first = Some(from);
13329        }
13330
13331        let select_token = self.expect_keyword(Keyword::SELECT)?;
13332        let value_table_mode = self.parse_value_table_mode()?;
13333
13334        let mut top_before_distinct = false;
13335        let mut top = None;
13336        if self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
13337            top = Some(self.parse_top()?);
13338            top_before_distinct = true;
13339        }
13340        let distinct = self.parse_all_or_distinct()?;
13341        if !self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
13342            top = Some(self.parse_top()?);
13343        }
13344
13345        let projection =
13346            if self.dialect.supports_empty_projections() && self.peek_keyword(Keyword::FROM) {
13347                vec![]
13348            } else {
13349                self.parse_projection()?
13350            };
13351
13352        let exclude = if self.dialect.supports_select_exclude() {
13353            self.parse_optional_select_item_exclude()?
13354        } else {
13355            None
13356        };
13357
13358        let into = if self.parse_keyword(Keyword::INTO) {
13359            Some(self.parse_select_into()?)
13360        } else {
13361            None
13362        };
13363
13364        // Note that for keywords to be properly handled here, they need to be
13365        // added to `RESERVED_FOR_COLUMN_ALIAS` / `RESERVED_FOR_TABLE_ALIAS`,
13366        // otherwise they may be parsed as an alias as part of the `projection`
13367        // or `from`.
13368
13369        let (from, from_first) = if let Some(from) = from_first.take() {
13370            (from, true)
13371        } else if self.parse_keyword(Keyword::FROM) {
13372            (self.parse_table_with_joins()?, false)
13373        } else {
13374            (vec![], false)
13375        };
13376
13377        let mut lateral_views = vec![];
13378        loop {
13379            if self.parse_keywords(&[Keyword::LATERAL, Keyword::VIEW]) {
13380                let outer = self.parse_keyword(Keyword::OUTER);
13381                let lateral_view = self.parse_expr()?;
13382                let lateral_view_name = self.parse_object_name(false)?;
13383                let lateral_col_alias = self
13384                    .parse_comma_separated(|parser| {
13385                        parser.parse_optional_alias(&[
13386                            Keyword::WHERE,
13387                            Keyword::GROUP,
13388                            Keyword::CLUSTER,
13389                            Keyword::HAVING,
13390                            Keyword::LATERAL,
13391                        ]) // Keywords that must not be mistaken for column aliases; this ad-hoc list is fragile
13392                    })?
13393                    .into_iter()
13394                    .flatten()
13395                    .collect();
13396
13397                lateral_views.push(LateralView {
13398                    lateral_view,
13399                    lateral_view_name,
13400                    lateral_col_alias,
13401                    outer,
13402                });
13403            } else {
13404                break;
13405            }
13406        }
13407
13408        let prewhere = if dialect_of!(self is ClickHouseDialect|GenericDialect)
13409            && self.parse_keyword(Keyword::PREWHERE)
13410        {
13411            Some(self.parse_expr()?)
13412        } else {
13413            None
13414        };
13415
13416        let selection = if self.parse_keyword(Keyword::WHERE) {
13417            Some(self.parse_expr()?)
13418        } else {
13419            None
13420        };
13421
13422        let group_by = self
13423            .parse_optional_group_by()?
13424            .unwrap_or_else(|| GroupByExpr::Expressions(vec![], vec![]));
13425
13426        let cluster_by = if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
13427            self.parse_comma_separated(Parser::parse_expr)?
13428        } else {
13429            vec![]
13430        };
13431
13432        let distribute_by = if self.parse_keywords(&[Keyword::DISTRIBUTE, Keyword::BY]) {
13433            self.parse_comma_separated(Parser::parse_expr)?
13434        } else {
13435            vec![]
13436        };
13437
13438        let sort_by = if self.parse_keywords(&[Keyword::SORT, Keyword::BY]) {
13439            self.parse_comma_separated(Parser::parse_order_by_expr)?
13440        } else {
13441            vec![]
13442        };
13443
13444        let having = if self.parse_keyword(Keyword::HAVING) {
13445            Some(self.parse_expr()?)
13446        } else {
13447            None
13448        };
13449
13450        // Accept QUALIFY and WINDOW in any order and flag accordingly.
13451        let (named_windows, qualify, window_before_qualify) = if self.parse_keyword(Keyword::WINDOW)
13452        {
13453            let named_windows = self.parse_comma_separated(Parser::parse_named_window)?;
13454            if self.parse_keyword(Keyword::QUALIFY) {
13455                (named_windows, Some(self.parse_expr()?), true)
13456            } else {
13457                (named_windows, None, true)
13458            }
13459        } else if self.parse_keyword(Keyword::QUALIFY) {
13460            let qualify = Some(self.parse_expr()?);
13461            if self.parse_keyword(Keyword::WINDOW) {
13462                (
13463                    self.parse_comma_separated(Parser::parse_named_window)?,
13464                    qualify,
13465                    false,
13466                )
13467            } else {
13468                (Default::default(), qualify, false)
13469            }
13470        } else {
13471            Default::default()
13472        };
13473
13474        let connect_by = if self.dialect.supports_connect_by()
13475            && self
13476                .parse_one_of_keywords(&[Keyword::START, Keyword::CONNECT])
13477                .is_some()
13478        {
13479            self.prev_token();
13480            Some(self.parse_connect_by()?)
13481        } else {
13482            None
13483        };
13484
13485        Ok(Select {
13486            select_token: AttachedToken(select_token),
13487            distinct,
13488            top,
13489            top_before_distinct,
13490            projection,
13491            exclude,
13492            into,
13493            from,
13494            lateral_views,
13495            prewhere,
13496            selection,
13497            group_by,
13498            cluster_by,
13499            distribute_by,
13500            sort_by,
13501            having,
13502            named_window: named_windows,
13503            window_before_qualify,
13504            qualify,
13505            value_table_mode,
13506            connect_by,
13507            flavor: if from_first {
13508                SelectFlavor::FromFirst
13509            } else {
13510                SelectFlavor::Standard
13511            },
13512        })
13513    }
13514
13515    fn parse_value_table_mode(&mut self) -> Result<Option<ValueTableMode>, ParserError> {
13516        if !dialect_of!(self is BigQueryDialect) {
13517            return Ok(None);
13518        }
13519
13520        let mode = if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::VALUE]) {
13521            Some(ValueTableMode::DistinctAsValue)
13522        } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::STRUCT]) {
13523            Some(ValueTableMode::DistinctAsStruct)
13524        } else if self.parse_keywords(&[Keyword::AS, Keyword::VALUE])
13525            || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::VALUE])
13526        {
13527            Some(ValueTableMode::AsValue)
13528        } else if self.parse_keywords(&[Keyword::AS, Keyword::STRUCT])
13529            || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::STRUCT])
13530        {
13531            Some(ValueTableMode::AsStruct)
13532        } else if self.parse_keyword(Keyword::AS) {
13533            self.expected("VALUE or STRUCT", self.peek_token())?
13534        } else {
13535            None
13536        };
13537
13538        Ok(mode)
13539    }
13540
13541    /// Invoke `f` with the parser's `ParserState` temporarily set to `state`.
13542    ///
13543    /// The previous state is restored before this function returns.
13544    fn with_state<T, F>(&mut self, state: ParserState, mut f: F) -> Result<T, ParserError>
13545    where
13546        F: FnMut(&mut Parser) -> Result<T, ParserError>,
13547    {
13548        let current_state = self.state;
13549        self.state = state;
13550        let res = f(self);
13551        self.state = current_state;
13552        res
13553    }
13554
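    /// Parse a hierarchical-query clause. Minimal illustrative example
    /// (Snowflake/Oracle style; either sub-clause may come first, and names are
    /// placeholders):
    /// ```sql
    /// START WITH manager_id IS NULL CONNECT BY PRIOR employee_id = manager_id
    /// ```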
13555    pub fn parse_connect_by(&mut self) -> Result<ConnectBy, ParserError> {
13556        let (condition, relationships) = if self.parse_keywords(&[Keyword::CONNECT, Keyword::BY]) {
13557            let relationships = self.with_state(ParserState::ConnectBy, |parser| {
13558                parser.parse_comma_separated(Parser::parse_expr)
13559            })?;
13560            self.expect_keywords(&[Keyword::START, Keyword::WITH])?;
13561            let condition = self.parse_expr()?;
13562            (condition, relationships)
13563        } else {
13564            self.expect_keywords(&[Keyword::START, Keyword::WITH])?;
13565            let condition = self.parse_expr()?;
13566            self.expect_keywords(&[Keyword::CONNECT, Keyword::BY])?;
13567            let relationships = self.with_state(ParserState::ConnectBy, |parser| {
13568                parser.parse_comma_separated(Parser::parse_expr)
13569            })?;
13570            (condition, relationships)
13571        };
13572        Ok(ConnectBy {
13573            condition,
13574            relationships,
13575        })
13576    }
13577
13578    /// Parse the `[schema_name.]table_name` that follows `TABLE`, as in `CREATE TABLE x AS TABLE y`
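    ///
    /// Illustrative statement; this function parses the trailing `schema_name.t1`
    /// (names are placeholders):
    /// ```sql
    /// CREATE TABLE t2 AS TABLE schema_name.t1
    /// ```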
13579    pub fn parse_as_table(&mut self) -> Result<Table, ParserError> {
13580        let token1 = self.next_token();
13581        let token2 = self.next_token();
13582        let token3 = self.next_token();
13583
13584        let table_name;
13585        let schema_name;
13586        if token2 == Token::Period {
13587            match token1.token {
13588                Token::Word(w) => {
13589                    schema_name = w.value;
13590                }
13591                _ => {
13592                    return self.expected("Schema name", token1);
13593                }
13594            }
13595            match token3.token {
13596                Token::Word(w) => {
13597                    table_name = w.value;
13598                }
13599                _ => {
13600                    return self.expected("Table name", token3);
13601                }
13602            }
13603            Ok(Table {
13604                table_name: Some(table_name),
13605                schema_name: Some(schema_name),
13606            })
13607        } else {
13608            match token1.token {
13609                Token::Word(w) => {
13610                    table_name = w.value;
13611                }
13612                _ => {
13613                    return self.expected("Table name", token1);
13614                }
13615            }
13616            Ok(Table {
13617                table_name: Some(table_name),
13618                schema_name: None,
13619            })
13620        }
13621    }
13622
13623    /// Parse a `SET ROLE` statement. Expects SET to be consumed already.
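    ///
    /// Illustrative statements:
    /// ```sql
    /// SET ROLE NONE
    /// SET SESSION ROLE role_name
    /// ```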
13624    fn parse_set_role(
13625        &mut self,
13626        modifier: Option<ContextModifier>,
13627    ) -> Result<Statement, ParserError> {
13628        self.expect_keyword_is(Keyword::ROLE)?;
13629
13630        let role_name = if self.parse_keyword(Keyword::NONE) {
13631            None
13632        } else {
13633            Some(self.parse_identifier()?)
13634        };
13635        Ok(Statement::Set(Set::SetRole {
13636            context_modifier: modifier,
13637            role_name,
13638        }))
13639    }
13640
13641    fn parse_set_values(
13642        &mut self,
13643        parenthesized_assignment: bool,
13644    ) -> Result<Vec<Expr>, ParserError> {
13645        let mut values = vec![];
13646
13647        if parenthesized_assignment {
13648            self.expect_token(&Token::LParen)?;
13649        }
13650
13651        loop {
13652            let value = if let Some(expr) = self.try_parse_expr_sub_query()? {
13653                expr
13654            } else if let Ok(expr) = self.parse_expr() {
13655                expr
13656            } else {
13657                self.expected("variable value", self.peek_token())?
13658            };
13659
13660            values.push(value);
13661            if self.consume_token(&Token::Comma) {
13662                continue;
13663            }
13664
13665            if parenthesized_assignment {
13666                self.expect_token(&Token::RParen)?;
13667            }
13668            return Ok(values);
13669        }
13670    }
13671
13672    fn parse_context_modifier(&mut self) -> Option<ContextModifier> {
13673        let modifier =
13674            self.parse_one_of_keywords(&[Keyword::SESSION, Keyword::LOCAL, Keyword::GLOBAL])?;
13675
13676        Self::keyword_to_modifier(modifier)
13677    }
13678
13679    /// Parse a single SET statement assignment `var = expr`.
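    ///
    /// Illustrative inputs (what follows `SET`), each with an optional scope
    /// prefix; names and values are placeholders:
    /// ```sql
    /// search_path = 'public'
    /// GLOBAL max_connections TO 100
    /// ```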
13680    fn parse_set_assignment(&mut self) -> Result<SetAssignment, ParserError> {
13681        let scope = self.parse_context_modifier();
13682
13683        let name = if self.dialect.supports_parenthesized_set_variables()
13684            && self.consume_token(&Token::LParen)
13685        {
13686            // Parenthesized assignments are handled in `parse_set` after attempting to
13687            // parse a list of assignments using this function.
13688            // If the dialect supports both forms and we find an `LParen`, exit early here.
13689            self.expected("Unparenthesized assignment", self.peek_token())?
13690        } else {
13691            self.parse_object_name(false)?
13692        };
13693
13694        if !(self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO)) {
13695            return self.expected("assignment operator", self.peek_token());
13696        }
13697
13698        let value = self.parse_expr()?;
13699
13700        Ok(SetAssignment { scope, name, value })
13701    }
13702
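
    /// Parse the remainder of a `SET` statement; expects `SET` to be consumed already.
    /// Illustrative forms handled here (some are dialect-gated):
    /// ```sql
    /// SET TIME ZONE 'UTC'
    /// SET NAMES utf8mb4 COLLATE 'utf8mb4_bin'
    /// SET TRANSACTION ISOLATION LEVEL READ COMMITTED
    /// SET variable = value
    /// ```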
13703    fn parse_set(&mut self) -> Result<Statement, ParserError> {
13704        let hivevar = self.parse_keyword(Keyword::HIVEVAR);
13705
13706        // Modifier is either HIVEVAR: or a ContextModifier (LOCAL, SESSION, etc), not both
13707        let scope = if !hivevar {
13708            self.parse_context_modifier()
13709        } else {
13710            None
13711        };
13712
13713        if hivevar {
13714            self.expect_token(&Token::Colon)?;
13715        }
13716
13717        if let Some(set_role_stmt) = self.maybe_parse(|parser| parser.parse_set_role(scope))? {
13718            return Ok(set_role_stmt);
13719        }
13720
13721        // Handle special cases first
13722        if self.parse_keywords(&[Keyword::TIME, Keyword::ZONE])
13723            || self.parse_keyword(Keyword::TIMEZONE)
13724        {
13725            if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
13726                return Ok(Set::SingleAssignment {
13727                    scope,
13728                    hivevar,
13729                    variable: ObjectName::from(vec!["TIMEZONE".into()]),
13730                    values: self.parse_set_values(false)?,
13731                }
13732                .into());
13733            } else {
13734                // A shorthand alias for SET TIME ZONE that doesn't require
13735                // the assignment operator. It is originally PostgreSQL-specific,
13736                // but we allow it for all dialects.
13737                return Ok(Set::SetTimeZone {
13738                    local: scope == Some(ContextModifier::Local),
13739                    value: self.parse_expr()?,
13740                }
13741                .into());
13742            }
13743        } else if self.dialect.supports_set_names() && self.parse_keyword(Keyword::NAMES) {
13744            if self.parse_keyword(Keyword::DEFAULT) {
13745                return Ok(Set::SetNamesDefault {}.into());
13746            }
13747            let charset_name = self.parse_identifier()?;
13748            let collation_name = if self.parse_one_of_keywords(&[Keyword::COLLATE]).is_some() {
13749                Some(self.parse_literal_string()?)
13750            } else {
13751                None
13752            };
13753
13754            return Ok(Set::SetNames {
13755                charset_name,
13756                collation_name,
13757            }
13758            .into());
13759        } else if self.parse_keyword(Keyword::CHARACTERISTICS) {
13760            self.expect_keywords(&[Keyword::AS, Keyword::TRANSACTION])?;
13761            return Ok(Set::SetTransaction {
13762                modes: self.parse_transaction_modes()?,
13763                snapshot: None,
13764                session: true,
13765            }
13766            .into());
13767        } else if self.parse_keyword(Keyword::TRANSACTION) {
13768            if self.parse_keyword(Keyword::SNAPSHOT) {
13769                let snapshot_id = self.parse_value()?.value;
13770                return Ok(Set::SetTransaction {
13771                    modes: vec![],
13772                    snapshot: Some(snapshot_id),
13773                    session: false,
13774                }
13775                .into());
13776            }
13777            return Ok(Set::SetTransaction {
13778                modes: self.parse_transaction_modes()?,
13779                snapshot: None,
13780                session: false,
13781            }
13782            .into());
13783        } else if self.parse_keyword(Keyword::AUTHORIZATION) {
13784            let auth_value = if self.parse_keyword(Keyword::DEFAULT) {
13785                SetSessionAuthorizationParamKind::Default
13786            } else {
13787                let value = self.parse_identifier()?;
13788                SetSessionAuthorizationParamKind::User(value)
13789            };
13790            return Ok(Set::SetSessionAuthorization(SetSessionAuthorizationParam {
13791                scope: scope.ok_or_else(|| {
                        ParserError::ParserError("SET ... AUTHORIZATION must have a scope".to_string())
                    })?,
13792                kind: auth_value,
13793            })
13794            .into());
13795        }
13796
13797        if self.dialect.supports_comma_separated_set_assignments() {
13798            if scope.is_some() {
13799                self.prev_token();
13800            }
13801
13802            if let Some(assignments) = self
13803                .maybe_parse(|parser| parser.parse_comma_separated(Parser::parse_set_assignment))?
13804            {
13805                return if assignments.len() > 1 {
13806                    Ok(Set::MultipleAssignments { assignments }.into())
13807                } else {
13808                    let SetAssignment { scope, name, value } =
13809                        assignments.into_iter().next().ok_or_else(|| {
13810                            ParserError::ParserError("Expected at least one assignment".to_string())
13811                        })?;
13812
13813                    Ok(Set::SingleAssignment {
13814                        scope,
13815                        hivevar,
13816                        variable: name,
13817                        values: vec![value],
13818                    }
13819                    .into())
13820                };
13821            }
13822        }
13823
13824        let variables = if self.dialect.supports_parenthesized_set_variables()
13825            && self.consume_token(&Token::LParen)
13826        {
13827            let vars = OneOrManyWithParens::Many(
13828                self.parse_comma_separated(|parser: &mut Parser<'a>| parser.parse_identifier())?
13829                    .into_iter()
13830                    .map(|ident| ObjectName::from(vec![ident]))
13831                    .collect(),
13832            );
13833            self.expect_token(&Token::RParen)?;
13834            vars
13835        } else {
13836            OneOrManyWithParens::One(self.parse_object_name(false)?)
13837        };
13838
13839        if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
13840            let stmt = match variables {
13841                OneOrManyWithParens::One(var) => Set::SingleAssignment {
13842                    scope,
13843                    hivevar,
13844                    variable: var,
13845                    values: self.parse_set_values(false)?,
13846                },
13847                OneOrManyWithParens::Many(vars) => Set::ParenthesizedAssignments {
13848                    variables: vars,
13849                    values: self.parse_set_values(true)?,
13850                },
13851            };
13852
13853            return Ok(stmt.into());
13854        }
13855
13856        if self.dialect.supports_set_stmt_without_operator() {
13857            self.prev_token();
13858            return self.parse_set_session_params();
13859        };
13860
13861        self.expected("equals sign or TO", self.peek_token())
13862    }
13863
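    /// Parse `SET` session-parameter forms that have no assignment operator
    /// (MSSQL-style). Illustrative statements; object and parameter names are
    /// placeholders:
    /// ```sql
    /// SET STATISTICS IO ON
    /// SET IDENTITY_INSERT dbo.my_table ON
    /// SET ANSI_NULLS OFF
    /// ```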
13864    pub fn parse_set_session_params(&mut self) -> Result<Statement, ParserError> {
13865        if self.parse_keyword(Keyword::STATISTICS) {
13866            let topic = match self.parse_one_of_keywords(&[
13867                Keyword::IO,
13868                Keyword::PROFILE,
13869                Keyword::TIME,
13870                Keyword::XML,
13871            ]) {
13872                Some(Keyword::IO) => SessionParamStatsTopic::IO,
13873                Some(Keyword::PROFILE) => SessionParamStatsTopic::Profile,
13874                Some(Keyword::TIME) => SessionParamStatsTopic::Time,
13875                Some(Keyword::XML) => SessionParamStatsTopic::Xml,
13876                _ => return self.expected("IO, PROFILE, TIME or XML", self.peek_token()),
13877            };
13878            let value = self.parse_session_param_value()?;
13879            Ok(
13880                Set::SetSessionParam(SetSessionParamKind::Statistics(SetSessionParamStatistics {
13881                    topic,
13882                    value,
13883                }))
13884                .into(),
13885            )
13886        } else if self.parse_keyword(Keyword::IDENTITY_INSERT) {
13887            let obj = self.parse_object_name(false)?;
13888            let value = self.parse_session_param_value()?;
13889            Ok(Set::SetSessionParam(SetSessionParamKind::IdentityInsert(
13890                SetSessionParamIdentityInsert { obj, value },
13891            ))
13892            .into())
13893        } else if self.parse_keyword(Keyword::OFFSETS) {
13894            let keywords = self.parse_comma_separated(|parser| {
13895                let next_token = parser.next_token();
13896                match &next_token.token {
13897                    Token::Word(w) => Ok(w.to_string()),
13898                    _ => parser.expected("SQL keyword", next_token),
13899                }
13900            })?;
13901            let value = self.parse_session_param_value()?;
13902            Ok(
13903                Set::SetSessionParam(SetSessionParamKind::Offsets(SetSessionParamOffsets {
13904                    keywords,
13905                    value,
13906                }))
13907                .into(),
13908            )
13909        } else {
13910            let names = self.parse_comma_separated(|parser| {
13911                let next_token = parser.next_token();
13912                match next_token.token {
13913                    Token::Word(w) => Ok(w.to_string()),
13914                    _ => parser.expected("Session param name", next_token),
13915                }
13916            })?;
13917            let value = self.parse_expr()?.to_string();
13918            Ok(
13919                Set::SetSessionParam(SetSessionParamKind::Generic(SetSessionParamGeneric {
13920                    names,
13921                    value,
13922                }))
13923                .into(),
13924            )
13925        }
13926    }
13927
13928    fn parse_session_param_value(&mut self) -> Result<SessionParamValue, ParserError> {
13929        if self.parse_keyword(Keyword::ON) {
13930            Ok(SessionParamValue::On)
13931        } else if self.parse_keyword(Keyword::OFF) {
13932            Ok(SessionParamValue::Off)
13933        } else {
13934            self.expected("ON or OFF", self.peek_token())
13935        }
13936    }
13937
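    /// Parse a `SHOW` statement; expects `SHOW` to be consumed already.
    /// Illustrative statements (availability varies by dialect; names are
    /// placeholders):
    /// ```sql
    /// SHOW FULL COLUMNS FROM my_table
    /// SHOW TABLES LIKE 'prefix%'
    /// SHOW CREATE TABLE my_table
    /// ```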
13938    pub fn parse_show(&mut self) -> Result<Statement, ParserError> {
13939        let terse = self.parse_keyword(Keyword::TERSE);
13940        let extended = self.parse_keyword(Keyword::EXTENDED);
13941        let full = self.parse_keyword(Keyword::FULL);
13942        let session = self.parse_keyword(Keyword::SESSION);
13943        let global = self.parse_keyword(Keyword::GLOBAL);
13944        let external = self.parse_keyword(Keyword::EXTERNAL);
13945        if self
13946            .parse_one_of_keywords(&[Keyword::COLUMNS, Keyword::FIELDS])
13947            .is_some()
13948        {
13949            Ok(self.parse_show_columns(extended, full)?)
13950        } else if self.parse_keyword(Keyword::TABLES) {
13951            Ok(self.parse_show_tables(terse, extended, full, external)?)
13952        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEWS]) {
13953            Ok(self.parse_show_views(terse, true)?)
13954        } else if self.parse_keyword(Keyword::VIEWS) {
13955            Ok(self.parse_show_views(terse, false)?)
13956        } else if self.parse_keyword(Keyword::FUNCTIONS) {
13957            Ok(self.parse_show_functions()?)
13958        } else if extended || full {
13959            Err(ParserError::ParserError(
13960                "EXTENDED/FULL are not supported with this type of SHOW query".to_string(),
13961            ))
13962        } else if self.parse_one_of_keywords(&[Keyword::CREATE]).is_some() {
13963            Ok(self.parse_show_create()?)
13964        } else if self.parse_keyword(Keyword::COLLATION) {
13965            Ok(self.parse_show_collation()?)
13966        } else if self.parse_keyword(Keyword::VARIABLES)
13967            && dialect_of!(self is MySqlDialect | GenericDialect)
13968        {
13969            Ok(Statement::ShowVariables {
13970                filter: self.parse_show_statement_filter()?,
13971                session,
13972                global,
13973            })
13974        } else if self.parse_keyword(Keyword::STATUS)
13975            && dialect_of!(self is MySqlDialect | GenericDialect)
13976        {
13977            Ok(Statement::ShowStatus {
13978                filter: self.parse_show_statement_filter()?,
13979                session,
13980                global,
13981            })
13982        } else if self.parse_keyword(Keyword::DATABASES) {
13983            self.parse_show_databases(terse)
13984        } else if self.parse_keyword(Keyword::SCHEMAS) {
13985            self.parse_show_schemas(terse)
13986        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
13987            self.parse_show_charset(false)
13988        } else if self.parse_keyword(Keyword::CHARSET) {
13989            self.parse_show_charset(true)
13990        } else {
13991            Ok(Statement::ShowVariable {
13992                variable: self.parse_identifiers()?,
13993            })
13994        }
13995    }
13996
13997    fn parse_show_charset(&mut self, is_shorthand: bool) -> Result<Statement, ParserError> {
13998        // `CHARACTER SET` / `CHARSET` has already been consumed; parse the optional filter
13999        Ok(Statement::ShowCharset(ShowCharset {
14000            is_shorthand,
14001            filter: self.parse_show_statement_filter()?,
14002        }))
14003    }
14004
14005    fn parse_show_databases(&mut self, terse: bool) -> Result<Statement, ParserError> {
14006        let history = self.parse_keyword(Keyword::HISTORY);
14007        let show_options = self.parse_show_stmt_options()?;
14008        Ok(Statement::ShowDatabases {
14009            terse,
14010            history,
14011            show_options,
14012        })
14013    }
14014
14015    fn parse_show_schemas(&mut self, terse: bool) -> Result<Statement, ParserError> {
14016        let history = self.parse_keyword(Keyword::HISTORY);
14017        let show_options = self.parse_show_stmt_options()?;
14018        Ok(Statement::ShowSchemas {
14019            terse,
14020            history,
14021            show_options,
14022        })
14023    }
14024
14025    pub fn parse_show_create(&mut self) -> Result<Statement, ParserError> {
14026        let obj_type = match self.expect_one_of_keywords(&[
14027            Keyword::TABLE,
14028            Keyword::TRIGGER,
14029            Keyword::FUNCTION,
14030            Keyword::PROCEDURE,
14031            Keyword::EVENT,
14032            Keyword::VIEW,
14033        ])? {
14034            Keyword::TABLE => Ok(ShowCreateObject::Table),
14035            Keyword::TRIGGER => Ok(ShowCreateObject::Trigger),
14036            Keyword::FUNCTION => Ok(ShowCreateObject::Function),
14037            Keyword::PROCEDURE => Ok(ShowCreateObject::Procedure),
14038            Keyword::EVENT => Ok(ShowCreateObject::Event),
14039            Keyword::VIEW => Ok(ShowCreateObject::View),
14040            keyword => Err(ParserError::ParserError(format!(
14041                "Unable to map keyword to ShowCreateObject: {keyword:?}"
14042            ))),
14043        }?;
14044
14045        let obj_name = self.parse_object_name(false)?;
14046
14047        Ok(Statement::ShowCreate { obj_type, obj_name })
14048    }
14049
14050    pub fn parse_show_columns(
14051        &mut self,
14052        extended: bool,
14053        full: bool,
14054    ) -> Result<Statement, ParserError> {
14055        let show_options = self.parse_show_stmt_options()?;
14056        Ok(Statement::ShowColumns {
14057            extended,
14058            full,
14059            show_options,
14060        })
14061    }
14062
14063    fn parse_show_tables(
14064        &mut self,
14065        terse: bool,
14066        extended: bool,
14067        full: bool,
14068        external: bool,
14069    ) -> Result<Statement, ParserError> {
14070        let history = !external && self.parse_keyword(Keyword::HISTORY);
14071        let show_options = self.parse_show_stmt_options()?;
14072        Ok(Statement::ShowTables {
14073            terse,
14074            history,
14075            extended,
14076            full,
14077            external,
14078            show_options,
14079        })
14080    }
14081
14082    fn parse_show_views(
14083        &mut self,
14084        terse: bool,
14085        materialized: bool,
14086    ) -> Result<Statement, ParserError> {
14087        let show_options = self.parse_show_stmt_options()?;
14088        Ok(Statement::ShowViews {
14089            materialized,
14090            terse,
14091            show_options,
14092        })
14093    }
14094
14095    pub fn parse_show_functions(&mut self) -> Result<Statement, ParserError> {
14096        let filter = self.parse_show_statement_filter()?;
14097        Ok(Statement::ShowFunctions { filter })
14098    }
14099
14100    pub fn parse_show_collation(&mut self) -> Result<Statement, ParserError> {
14101        let filter = self.parse_show_statement_filter()?;
14102        Ok(Statement::ShowCollation { filter })
14103    }
14104
14105    pub fn parse_show_statement_filter(
14106        &mut self,
14107    ) -> Result<Option<ShowStatementFilter>, ParserError> {
14108        if self.parse_keyword(Keyword::LIKE) {
14109            Ok(Some(ShowStatementFilter::Like(
14110                self.parse_literal_string()?,
14111            )))
14112        } else if self.parse_keyword(Keyword::ILIKE) {
14113            Ok(Some(ShowStatementFilter::ILike(
14114                self.parse_literal_string()?,
14115            )))
14116        } else if self.parse_keyword(Keyword::WHERE) {
14117            Ok(Some(ShowStatementFilter::Where(self.parse_expr()?)))
14118        } else {
14119            self.maybe_parse(|parser| -> Result<String, ParserError> {
14120                parser.parse_literal_string()
14121            })?
14122            .map_or(Ok(None), |filter| {
14123                Ok(Some(ShowStatementFilter::NoKeyword(filter)))
14124            })
14125        }
14126    }
14127
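    /// Parse a `USE` statement; expects `USE` to be consumed already.
    /// Illustrative statements (the accepted object kinds depend on the dialect;
    /// names are placeholders):
    /// ```sql
    /// USE my_db
    /// USE WAREHOUSE my_wh          -- Snowflake
    /// USE CATALOG my_catalog       -- Databricks
    /// ```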
14128    pub fn parse_use(&mut self) -> Result<Statement, ParserError> {
14129        // Determine which keywords are recognized by the current dialect
14130        let parsed_keyword = if dialect_of!(self is HiveDialect) {
14131            // HiveDialect accepts a `USE DEFAULT` statement without any database specified
14132            if self.parse_keyword(Keyword::DEFAULT) {
14133                return Ok(Statement::Use(Use::Default));
14134            }
14135            None // HiveDialect doesn't expect any other specific keyword after `USE`
14136        } else if dialect_of!(self is DatabricksDialect) {
14137            self.parse_one_of_keywords(&[Keyword::CATALOG, Keyword::DATABASE, Keyword::SCHEMA])
14138        } else if dialect_of!(self is SnowflakeDialect) {
14139            self.parse_one_of_keywords(&[
14140                Keyword::DATABASE,
14141                Keyword::SCHEMA,
14142                Keyword::WAREHOUSE,
14143                Keyword::ROLE,
14144                Keyword::SECONDARY,
14145            ])
14146        } else {
14147            None // No specific keywords for other dialects, including GenericDialect
14148        };
14149
14150        let result = if matches!(parsed_keyword, Some(Keyword::SECONDARY)) {
14151            self.parse_secondary_roles()?
14152        } else {
14153            let obj_name = self.parse_object_name(false)?;
14154            match parsed_keyword {
14155                Some(Keyword::CATALOG) => Use::Catalog(obj_name),
14156                Some(Keyword::DATABASE) => Use::Database(obj_name),
14157                Some(Keyword::SCHEMA) => Use::Schema(obj_name),
14158                Some(Keyword::WAREHOUSE) => Use::Warehouse(obj_name),
14159                Some(Keyword::ROLE) => Use::Role(obj_name),
14160                _ => Use::Object(obj_name),
14161            }
14162        };
14163
14164        Ok(Statement::Use(result))
14165    }
14166
14167    fn parse_secondary_roles(&mut self) -> Result<Use, ParserError> {
14168        self.expect_one_of_keywords(&[Keyword::ROLES, Keyword::ROLE])?;
14169        if self.parse_keyword(Keyword::NONE) {
14170            Ok(Use::SecondaryRoles(SecondaryRoles::None))
14171        } else if self.parse_keyword(Keyword::ALL) {
14172            Ok(Use::SecondaryRoles(SecondaryRoles::All))
14173        } else {
14174            let roles = self.parse_comma_separated(|parser| parser.parse_identifier())?;
14175            Ok(Use::SecondaryRoles(SecondaryRoles::List(roles)))
14176        }
14177    }
14178
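    /// Parse a table factor followed by any number of joins, e.g. (illustrative;
    /// names are placeholders):
    /// ```sql
    /// t1 LEFT OUTER JOIN t2 ON t1.id = t2.id CROSS JOIN t3
    /// ```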
14179    pub fn parse_table_and_joins(&mut self) -> Result<TableWithJoins, ParserError> {
14180        let relation = self.parse_table_factor()?;
14181        // Note that for keywords to be properly handled here, they need to be
14182        // added to `RESERVED_FOR_TABLE_ALIAS`, otherwise they may be parsed as
14183        // a table alias.
14184        let joins = self.parse_joins()?;
14185        Ok(TableWithJoins { relation, joins })
14186    }
14187
14188    fn parse_joins(&mut self) -> Result<Vec<Join>, ParserError> {
14189        let mut joins = vec![];
14190        loop {
14191            let global = self.parse_keyword(Keyword::GLOBAL);
14192            let join = if self.parse_keyword(Keyword::CROSS) {
14193                let join_operator = if self.parse_keyword(Keyword::JOIN) {
14194                    JoinOperator::CrossJoin(JoinConstraint::None)
14195                } else if self.parse_keyword(Keyword::APPLY) {
14196                    // MSSQL extension, similar to CROSS JOIN LATERAL
14197                    JoinOperator::CrossApply
14198                } else {
14199                    return self.expected("JOIN or APPLY after CROSS", self.peek_token());
14200                };
14201                let relation = self.parse_table_factor()?;
14202                let join_operator = if matches!(join_operator, JoinOperator::CrossJoin(_))
14203                    && self.dialect.supports_cross_join_constraint()
14204                {
14205                    let constraint = self.parse_join_constraint(false)?;
14206                    JoinOperator::CrossJoin(constraint)
14207                } else {
14208                    join_operator
14209                };
14210                Join {
14211                    relation,
14212                    global,
14213                    join_operator,
14214                }
14215            } else if self.parse_keyword(Keyword::OUTER) {
14216                // MSSQL extension, similar to LEFT JOIN LATERAL .. ON 1=1
14217                self.expect_keyword_is(Keyword::APPLY)?;
14218                Join {
14219                    relation: self.parse_table_factor()?,
14220                    global,
14221                    join_operator: JoinOperator::OuterApply,
14222                }
14223            } else if self.parse_keyword(Keyword::ASOF) {
14224                self.expect_keyword_is(Keyword::JOIN)?;
14225                let relation = self.parse_table_factor()?;
14226                self.expect_keyword_is(Keyword::MATCH_CONDITION)?;
14227                let match_condition = self.parse_parenthesized(Self::parse_expr)?;
14228                Join {
14229                    relation,
14230                    global,
14231                    join_operator: JoinOperator::AsOf {
14232                        match_condition,
14233                        constraint: self.parse_join_constraint(false)?,
14234                    },
14235                }
14236            } else {
14237                let natural = self.parse_keyword(Keyword::NATURAL);
14238                let peek_keyword = if let Token::Word(w) = self.peek_token().token {
14239                    w.keyword
14240                } else {
14241                    Keyword::NoKeyword
14242                };
14243
14244                let join_operator_type = match peek_keyword {
14245                    Keyword::INNER | Keyword::JOIN => {
14246                        let inner = self.parse_keyword(Keyword::INNER); // [ INNER ]
14247                        self.expect_keyword_is(Keyword::JOIN)?;
14248                        if inner {
14249                            JoinOperator::Inner
14250                        } else {
14251                            JoinOperator::Join
14252                        }
14253                    }
14254                    kw @ Keyword::LEFT | kw @ Keyword::RIGHT => {
14255                        let _ = self.next_token(); // consume LEFT/RIGHT
14256                        let is_left = kw == Keyword::LEFT;
14257                        let join_type = self.parse_one_of_keywords(&[
14258                            Keyword::OUTER,
14259                            Keyword::SEMI,
14260                            Keyword::ANTI,
14261                            Keyword::JOIN,
14262                        ]);
14263                        match join_type {
14264                            Some(Keyword::OUTER) => {
14265                                self.expect_keyword_is(Keyword::JOIN)?;
14266                                if is_left {
14267                                    JoinOperator::LeftOuter
14268                                } else {
14269                                    JoinOperator::RightOuter
14270                                }
14271                            }
14272                            Some(Keyword::SEMI) => {
14273                                self.expect_keyword_is(Keyword::JOIN)?;
14274                                if is_left {
14275                                    JoinOperator::LeftSemi
14276                                } else {
14277                                    JoinOperator::RightSemi
14278                                }
14279                            }
14280                            Some(Keyword::ANTI) => {
14281                                self.expect_keyword_is(Keyword::JOIN)?;
14282                                if is_left {
14283                                    JoinOperator::LeftAnti
14284                                } else {
14285                                    JoinOperator::RightAnti
14286                                }
14287                            }
14288                            Some(Keyword::JOIN) => {
14289                                if is_left {
14290                                    JoinOperator::Left
14291                                } else {
14292                                    JoinOperator::Right
14293                                }
14294                            }
14295                            _ => {
14296                                return Err(ParserError::ParserError(format!(
14297                                    "expected OUTER, SEMI, ANTI or JOIN after {kw:?}"
14298                                )))
14299                            }
14300                        }
14301                    }
14302                    Keyword::ANTI => {
14303                        let _ = self.next_token(); // consume ANTI
14304                        self.expect_keyword_is(Keyword::JOIN)?;
14305                        JoinOperator::Anti
14306                    }
14307                    Keyword::SEMI => {
14308                        let _ = self.next_token(); // consume SEMI
14309                        self.expect_keyword_is(Keyword::JOIN)?;
14310                        JoinOperator::Semi
14311                    }
14312                    Keyword::FULL => {
14313                        let _ = self.next_token(); // consume FULL
14314                        let _ = self.parse_keyword(Keyword::OUTER); // [ OUTER ]
14315                        self.expect_keyword_is(Keyword::JOIN)?;
14316                        JoinOperator::FullOuter
14317                    }
14318                    Keyword::OUTER => {
14319                        return self.expected("LEFT, RIGHT, or FULL", self.peek_token());
14320                    }
14321                    Keyword::STRAIGHT_JOIN => {
14322                        let _ = self.next_token(); // consume STRAIGHT_JOIN
14323                        JoinOperator::StraightJoin
14324                    }
14325                    _ if natural => {
14326                        return self.expected("a join type after NATURAL", self.peek_token());
14327                    }
14328                    _ => break,
14329                };
14330                let mut relation = self.parse_table_factor()?;
14331
14332                if !self
14333                    .dialect
14334                    .supports_left_associative_joins_without_parens()
14335                    && self.peek_parens_less_nested_join()
14336                {
14337                    let joins = self.parse_joins()?;
14338                    relation = TableFactor::NestedJoin {
14339                        table_with_joins: Box::new(TableWithJoins { relation, joins }),
14340                        alias: None,
14341                    };
14342                }
14343
14344                let join_constraint = self.parse_join_constraint(natural)?;
14345                Join {
14346                    relation,
14347                    global,
14348                    join_operator: join_operator_type(join_constraint),
14349                }
14350            };
14351            joins.push(join);
14352        }
14353        Ok(joins)
14354    }
14355
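    /// Returns `true` if the next token begins another join specification
    /// (`JOIN`, `INNER`, `LEFT`, `RIGHT` or `FULL`). `parse_joins` uses this to
    /// decide whether the relation it just parsed should be wrapped in a
    /// `TableFactor::NestedJoin` for dialects where
    /// `supports_left_associative_joins_without_parens` returns `false`.
    ///
    /// As a rough illustration (table names are hypothetical), such a dialect
    /// would group
    ///
    /// ```sql
    /// SELECT * FROM a LEFT JOIN b JOIN c ON b.id = c.id ON a.id = b.id
    /// ```
    ///
    /// as `a LEFT JOIN (b JOIN c ON b.id = c.id) ON a.id = b.id`.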
14356    fn peek_parens_less_nested_join(&self) -> bool {
14357        matches!(
14358            self.peek_token_ref().token,
14359            Token::Word(Word {
14360                keyword: Keyword::JOIN
14361                    | Keyword::INNER
14362                    | Keyword::LEFT
14363                    | Keyword::RIGHT
14364                    | Keyword::FULL,
14365                ..
14366            })
14367        )
14368    }
14369
14370    /// A table name or a parenthesized subquery, followed by optional `[AS] alias`
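    ///
    /// A few illustrative forms this accepts, dialect permitting (all
    /// identifiers below are hypothetical):
    ///
    /// ```sql
    /// mytable AS t
    /// (SELECT 1 AS x) AS sub
    /// LATERAL my_table_function(1, 10) AS f
    /// UNNEST(array_col) WITH ORDINALITY AS u
    /// ```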
14371    pub fn parse_table_factor(&mut self) -> Result<TableFactor, ParserError> {
14372        if self.parse_keyword(Keyword::LATERAL) {
14373            // LATERAL must always be followed by a subquery or table function.
14374            if self.consume_token(&Token::LParen) {
14375                self.parse_derived_table_factor(Lateral)
14376            } else {
14377                let name = self.parse_object_name(false)?;
14378                self.expect_token(&Token::LParen)?;
14379                let args = self.parse_optional_args()?;
14380                let alias = self.maybe_parse_table_alias()?;
14381                Ok(TableFactor::Function {
14382                    lateral: true,
14383                    name,
14384                    args,
14385                    alias,
14386                })
14387            }
14388        } else if self.parse_keyword(Keyword::TABLE) {
14389            // parse table function (SELECT * FROM TABLE (<expr>) [ AS <alias> ])
14390            self.expect_token(&Token::LParen)?;
14391            let expr = self.parse_expr()?;
14392            self.expect_token(&Token::RParen)?;
14393            let alias = self.maybe_parse_table_alias()?;
14394            Ok(TableFactor::TableFunction { expr, alias })
14395        } else if self.consume_token(&Token::LParen) {
14396            // A left paren introduces either a derived table (i.e., a subquery)
14397            // or a nested join. It's nearly impossible to determine ahead of
14398            // time which it is... so we just try to parse both.
14399            //
14400            // Here's an example that demonstrates the complexity:
14401            //                     /-------------------------------------------------------\
14402            //                     | /-----------------------------------\                 |
14403            //     SELECT * FROM ( ( ( (SELECT 1) UNION (SELECT 2) ) AS t1 NATURAL JOIN t2 ) )
14404            //                   ^ ^ ^ ^
14405            //                   | | | |
14406            //                   | | | |
14407            //                   | | | (4) belongs to a SetExpr::Query inside the subquery
14408            //                   | | (3) starts a derived table (subquery)
14409            //                   | (2) starts a nested join
14410            //                   (1) an additional set of parens around a nested join
14411            //
14412
14413            // If the recently consumed '(' starts a derived table, the call to
14414            // `parse_derived_table_factor` below will return success after parsing the
14415            // subquery, followed by the closing ')', and the alias of the derived table.
14416            // In the example above this is case (3).
14417            if let Some(mut table) =
14418                self.maybe_parse(|parser| parser.parse_derived_table_factor(NotLateral))?
14419            {
14420                while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT])
14421                {
14422                    table = match kw {
14423                        Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
14424                        Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
14425                        unexpected_keyword => return Err(ParserError::ParserError(
14426                            format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in pivot/unpivot"),
14427                        )),
14428                    }
14429                }
14430                return Ok(table);
14431            }
14432
14433            // A parsing error from `parse_derived_table_factor` indicates that the '(' we've
14434            // recently consumed does not start a derived table (cases 1, 2, or 4).
14435            // `maybe_parse` will ignore such an error and rewind to be after the opening '('.
14436
14437            // Inside the parentheses we expect to find an (A) table factor
14438            // followed by some joins or (B) another level of nesting.
14439            let mut table_and_joins = self.parse_table_and_joins()?;
14440
14441            #[allow(clippy::if_same_then_else)]
14442            if !table_and_joins.joins.is_empty() {
14443                self.expect_token(&Token::RParen)?;
14444                let alias = self.maybe_parse_table_alias()?;
14445                Ok(TableFactor::NestedJoin {
14446                    table_with_joins: Box::new(table_and_joins),
14447                    alias,
14448                }) // (A)
14449            } else if let TableFactor::NestedJoin {
14450                table_with_joins: _,
14451                alias: _,
14452            } = &table_and_joins.relation
14453            {
14454                // (B): `table_and_joins` (what we found inside the parentheses)
14455                // is a nested join `(foo JOIN bar)`, not followed by other joins.
14456                self.expect_token(&Token::RParen)?;
14457                let alias = self.maybe_parse_table_alias()?;
14458                Ok(TableFactor::NestedJoin {
14459                    table_with_joins: Box::new(table_and_joins),
14460                    alias,
14461                })
14462            } else if dialect_of!(self is SnowflakeDialect | GenericDialect) {
14463                // Dialect-specific behavior: Snowflake diverges from the
14464                // standard and from most of the other implementations by
14465                // allowing extra parentheses not only around a join (B), but
14466                // around lone table names (e.g. `FROM (mytable [AS alias])`)
14467                // and around derived tables (e.g. `FROM ((SELECT ...)
14468                // [AS alias])`) as well.
14469                self.expect_token(&Token::RParen)?;
14470
14471                if let Some(outer_alias) = self.maybe_parse_table_alias()? {
14472                    // Snowflake also allows specifying an alias *after* parens
14473                    // e.g. `FROM (mytable) AS alias`
14474                    match &mut table_and_joins.relation {
14475                        TableFactor::Derived { alias, .. }
14476                        | TableFactor::Table { alias, .. }
14477                        | TableFactor::Function { alias, .. }
14478                        | TableFactor::UNNEST { alias, .. }
14479                        | TableFactor::JsonTable { alias, .. }
14480                        | TableFactor::XmlTable { alias, .. }
14481                        | TableFactor::OpenJsonTable { alias, .. }
14482                        | TableFactor::TableFunction { alias, .. }
14483                        | TableFactor::Pivot { alias, .. }
14484                        | TableFactor::Unpivot { alias, .. }
14485                        | TableFactor::MatchRecognize { alias, .. }
14486                        | TableFactor::SemanticView { alias, .. }
14487                        | TableFactor::NestedJoin { alias, .. } => {
14488                            // but not `FROM (mytable AS alias1) AS alias2`.
14489                            if let Some(inner_alias) = alias {
14490                                return Err(ParserError::ParserError(format!(
14491                                    "duplicate alias {inner_alias}"
14492                                )));
14493                            }
14494                            // Act as if the alias was specified normally next
14495                            // to the table name: `(mytable) AS alias` ->
14496                            // `(mytable AS alias)`
14497                            alias.replace(outer_alias);
14498                        }
14499                    };
14500                }
14501                // Do not store the extra set of parens in the AST
14502                Ok(table_and_joins.relation)
14503            } else {
14504                // The SQL spec prohibits derived tables and bare tables from
14505                // appearing alone in parentheses (e.g. `FROM (mytable)`)
14506                self.expected("joined table", self.peek_token())
14507            }
14508        } else if dialect_of!(self is SnowflakeDialect | DatabricksDialect | GenericDialect)
14509            && matches!(
14510                self.peek_tokens(),
14511                [
14512                    Token::Word(Word {
14513                        keyword: Keyword::VALUES,
14514                        ..
14515                    }),
14516                    Token::LParen
14517                ]
14518            )
14519        {
14520            self.expect_keyword_is(Keyword::VALUES)?;
14521
14522            // Snowflake and Databricks allow syntax like below:
14523            // SELECT * FROM VALUES (1, 'a'), (2, 'b') AS t (col1, col2)
14524            // where there are no parentheses around the VALUES clause.
14525            let values = SetExpr::Values(self.parse_values(false, false)?);
14526            let alias = self.maybe_parse_table_alias()?;
14527            Ok(TableFactor::Derived {
14528                lateral: false,
14529                subquery: Box::new(Query {
14530                    with: None,
14531                    body: Box::new(values),
14532                    order_by: None,
14533                    limit_clause: None,
14534                    fetch: None,
14535                    locks: vec![],
14536                    for_clause: None,
14537                    settings: None,
14538                    format_clause: None,
14539                    pipe_operators: vec![],
14540                }),
14541                alias,
14542            })
14543        } else if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
14544            && self.parse_keyword(Keyword::UNNEST)
14545        {
14546            self.expect_token(&Token::LParen)?;
14547            let array_exprs = self.parse_comma_separated(Parser::parse_expr)?;
14548            self.expect_token(&Token::RParen)?;
14549
14550            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
14551            let alias = self.maybe_parse_table_alias()?;
14556
14557            let with_offset = self.expect_keywords(&[Keyword::WITH, Keyword::OFFSET]).is_ok();
14561
14562            let with_offset_alias = if with_offset {
                self.parse_optional_alias(keywords::RESERVED_FOR_COLUMN_ALIAS)?
            } else {
                None
            };
14571
14572            Ok(TableFactor::UNNEST {
14573                alias,
14574                array_exprs,
14575                with_offset,
14576                with_offset_alias,
14577                with_ordinality,
14578            })
14579        } else if self.parse_keyword_with_tokens(Keyword::JSON_TABLE, &[Token::LParen]) {
14580            let json_expr = self.parse_expr()?;
14581            self.expect_token(&Token::Comma)?;
14582            let json_path = self.parse_value()?.value;
14583            self.expect_keyword_is(Keyword::COLUMNS)?;
14584            self.expect_token(&Token::LParen)?;
14585            let columns = self.parse_comma_separated(Parser::parse_json_table_column_def)?;
14586            self.expect_token(&Token::RParen)?;
14587            self.expect_token(&Token::RParen)?;
14588            let alias = self.maybe_parse_table_alias()?;
14589            Ok(TableFactor::JsonTable {
14590                json_expr,
14591                json_path,
14592                columns,
14593                alias,
14594            })
14595        } else if self.parse_keyword_with_tokens(Keyword::OPENJSON, &[Token::LParen]) {
14596            self.prev_token();
14597            self.parse_open_json_table_factor()
14598        } else if self.parse_keyword_with_tokens(Keyword::XMLTABLE, &[Token::LParen]) {
14599            self.prev_token();
14600            self.parse_xml_table_factor()
14601        } else if self.dialect.supports_semantic_view_table_factor()
14602            && self.peek_keyword_with_tokens(Keyword::SEMANTIC_VIEW, &[Token::LParen])
14603        {
14604            self.parse_semantic_view_table_factor()
14605        } else {
14606            let name = self.parse_object_name(true)?;
14607
14608            let json_path = match self.peek_token().token {
14609                Token::LBracket if self.dialect.supports_partiql() => Some(self.parse_json_path()?),
14610                _ => None,
14611            };
14612
14613            let partitions: Vec<Ident> = if dialect_of!(self is MySqlDialect | GenericDialect)
14614                && self.parse_keyword(Keyword::PARTITION)
14615            {
14616                self.parse_parenthesized_identifiers()?
14617            } else {
14618                vec![]
14619            };
14620
14621            // Parse potential version qualifier
14622            let version = self.maybe_parse_table_version()?;
14623
14624            // Postgres, MSSQL, ClickHouse: table-valued functions:
14625            let args = if self.consume_token(&Token::LParen) {
14626                Some(self.parse_table_function_args()?)
14627            } else {
14628                None
14629            };
14630
14631            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
14632
14633            let mut sample = None;
14634            if self.dialect.supports_table_sample_before_alias() {
14635                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
14636                    sample = Some(TableSampleKind::BeforeTableAlias(parsed_sample));
14637                }
14638            }
14639
14640            let alias = self.maybe_parse_table_alias()?;
14641
14642            // MySQL-specific table hints:
14643            let index_hints = if self.dialect.supports_table_hints() {
14644                self.maybe_parse(|p| p.parse_table_index_hints())?
14645                    .unwrap_or(vec![])
14646            } else {
14647                vec![]
14648            };
14649
14650            // MSSQL-specific table hints:
14651            let mut with_hints = vec![];
14652            if self.parse_keyword(Keyword::WITH) {
14653                if self.consume_token(&Token::LParen) {
14654                    with_hints = self.parse_comma_separated(Parser::parse_expr)?;
14655                    self.expect_token(&Token::RParen)?;
14656                } else {
14657                    // rewind, as WITH may belong to the next statement's CTE
14658                    self.prev_token();
14659                }
14660            };
14661
14662            if !self.dialect.supports_table_sample_before_alias() {
14663                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
14664                    sample = Some(TableSampleKind::AfterTableAlias(parsed_sample));
14665                }
14666            }
14667
14668            let mut table = TableFactor::Table {
14669                name,
14670                alias,
14671                args,
14672                with_hints,
14673                version,
14674                partitions,
14675                with_ordinality,
14676                json_path,
14677                sample,
14678                index_hints,
14679            };
14680
14681            while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT]) {
14682                table = match kw {
14683                    Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
14684                    Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
14685                    unexpected_keyword => return Err(ParserError::ParserError(
14686                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in pivot/unpivot"),
14687                    )),
14688                }
14689            }
14690
14691            if self.dialect.supports_match_recognize()
14692                && self.parse_keyword(Keyword::MATCH_RECOGNIZE)
14693            {
14694                table = self.parse_match_recognize(table)?;
14695            }
14696
14697            Ok(table)
14698        }
14699    }
14700
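    /// Parses a trailing `TABLESAMPLE` / `SAMPLE` clause if one is present,
    /// returning `Ok(None)` when neither keyword follows.
    ///
    /// Illustrative examples (table and column names are hypothetical):
    ///
    /// ```sql
    /// SELECT * FROM t TABLESAMPLE BERNOULLI (10 PERCENT) REPEATABLE (42)
    /// SELECT * FROM t TABLESAMPLE (BUCKET 4 OUT OF 16 ON id)
    /// SELECT * FROM t SAMPLE (50 ROWS)
    /// ```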
14701    fn maybe_parse_table_sample(&mut self) -> Result<Option<Box<TableSample>>, ParserError> {
14702        let modifier = if self.parse_keyword(Keyword::TABLESAMPLE) {
14703            TableSampleModifier::TableSample
14704        } else if self.parse_keyword(Keyword::SAMPLE) {
14705            TableSampleModifier::Sample
14706        } else {
14707            return Ok(None);
14708        };
14709        self.parse_table_sample(modifier).map(Some)
14710    }
14711
14712    fn parse_table_sample(
14713        &mut self,
14714        modifier: TableSampleModifier,
14715    ) -> Result<Box<TableSample>, ParserError> {
14716        let name = match self.parse_one_of_keywords(&[
14717            Keyword::BERNOULLI,
14718            Keyword::ROW,
14719            Keyword::SYSTEM,
14720            Keyword::BLOCK,
14721        ]) {
14722            Some(Keyword::BERNOULLI) => Some(TableSampleMethod::Bernoulli),
14723            Some(Keyword::ROW) => Some(TableSampleMethod::Row),
14724            Some(Keyword::SYSTEM) => Some(TableSampleMethod::System),
14725            Some(Keyword::BLOCK) => Some(TableSampleMethod::Block),
14726            _ => None,
14727        };
14728
14729        let parenthesized = self.consume_token(&Token::LParen);
14730
14731        let (quantity, bucket) = if parenthesized && self.parse_keyword(Keyword::BUCKET) {
14732            let selected_bucket = self.parse_number_value()?.value;
14733            self.expect_keywords(&[Keyword::OUT, Keyword::OF])?;
14734            let total = self.parse_number_value()?.value;
14735            let on = if self.parse_keyword(Keyword::ON) {
14736                Some(self.parse_expr()?)
14737            } else {
14738                None
14739            };
14740            (
14741                None,
14742                Some(TableSampleBucket {
14743                    bucket: selected_bucket,
14744                    total,
14745                    on,
14746                }),
14747            )
14748        } else {
14749            let value = match self.maybe_parse(|p| p.parse_expr())? {
14750                Some(num) => num,
14751                None => {
14752                    let next_token = self.next_token();
14753                    if let Token::Word(w) = next_token.token {
14754                        Expr::Value(Value::Placeholder(w.value).with_span(next_token.span))
14755                    } else {
14756                        return parser_err!(
14757                            "Expecting number or byte length e.g. 100M",
14758                            self.peek_token().span.start
14759                        );
14760                    }
14761                }
14762            };
14763            let unit = if self.parse_keyword(Keyword::ROWS) {
14764                Some(TableSampleUnit::Rows)
14765            } else if self.parse_keyword(Keyword::PERCENT) {
14766                Some(TableSampleUnit::Percent)
14767            } else {
14768                None
14769            };
14770            (
14771                Some(TableSampleQuantity {
14772                    parenthesized,
14773                    value,
14774                    unit,
14775                }),
14776                None,
14777            )
14778        };
14779        if parenthesized {
14780            self.expect_token(&Token::RParen)?;
14781        }
14782
14783        let seed = if self.parse_keyword(Keyword::REPEATABLE) {
14784            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Repeatable)?)
14785        } else if self.parse_keyword(Keyword::SEED) {
14786            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Seed)?)
14787        } else {
14788            None
14789        };
14790
14791        let offset = if self.parse_keyword(Keyword::OFFSET) {
14792            Some(self.parse_expr()?)
14793        } else {
14794            None
14795        };
14796
14797        Ok(Box::new(TableSample {
14798            modifier,
14799            name,
14800            quantity,
14801            seed,
14802            bucket,
14803            offset,
14804        }))
14805    }
14806
14807    fn parse_table_sample_seed(
14808        &mut self,
14809        modifier: TableSampleSeedModifier,
14810    ) -> Result<TableSampleSeed, ParserError> {
14811        self.expect_token(&Token::LParen)?;
14812        let value = self.parse_number_value()?.value;
14813        self.expect_token(&Token::RParen)?;
14814        Ok(TableSampleSeed { modifier, value })
14815    }
14816
14817    /// Parses `OPENJSON( jsonExpression [ , path ] )  [ <with_clause> ]` clause,
14818    /// assuming the `OPENJSON` keyword was already consumed.
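    ///
    /// An illustrative example (identifiers and JSON paths are hypothetical):
    ///
    /// ```sql
    /// OPENJSON(@json, '$.orders') WITH (id INT '$.id', name NVARCHAR(50) '$.name') AS j
    /// ```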
14819    fn parse_open_json_table_factor(&mut self) -> Result<TableFactor, ParserError> {
14820        self.expect_token(&Token::LParen)?;
14821        let json_expr = self.parse_expr()?;
14822        let json_path = if self.consume_token(&Token::Comma) {
14823            Some(self.parse_value()?.value)
14824        } else {
14825            None
14826        };
14827        self.expect_token(&Token::RParen)?;
14828        let columns = if self.parse_keyword(Keyword::WITH) {
14829            self.expect_token(&Token::LParen)?;
14830            let columns = self.parse_comma_separated(Parser::parse_openjson_table_column_def)?;
14831            self.expect_token(&Token::RParen)?;
14832            columns
14833        } else {
14834            Vec::new()
14835        };
14836        let alias = self.maybe_parse_table_alias()?;
14837        Ok(TableFactor::OpenJsonTable {
14838            json_expr,
14839            json_path,
14840            columns,
14841            alias,
14842        })
14843    }
14844
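    /// Parses an `XMLTABLE( ... )` table factor, assuming the `XMLTABLE` keyword
    /// was already consumed by the caller.
    ///
    /// An illustrative example (identifiers and XPath expressions are hypothetical):
    ///
    /// ```sql
    /// XMLTABLE('//rows/row'
    ///     PASSING xml_data
    ///     COLUMNS id INT PATH '@id',
    ///             name TEXT PATH 'name' DEFAULT 'unknown',
    ///             ord FOR ORDINALITY
    /// ) AS x
    /// ```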
14845    fn parse_xml_table_factor(&mut self) -> Result<TableFactor, ParserError> {
14846        self.expect_token(&Token::LParen)?;
14847        let namespaces = if self.parse_keyword(Keyword::XMLNAMESPACES) {
14848            self.expect_token(&Token::LParen)?;
14849            let namespaces = self.parse_comma_separated(Parser::parse_xml_namespace_definition)?;
14850            self.expect_token(&Token::RParen)?;
14851            self.expect_token(&Token::Comma)?;
14852            namespaces
14853        } else {
14854            vec![]
14855        };
14856        let row_expression = self.parse_expr()?;
14857        let passing = self.parse_xml_passing_clause()?;
14858        self.expect_keyword_is(Keyword::COLUMNS)?;
14859        let columns = self.parse_comma_separated(Parser::parse_xml_table_column)?;
14860        self.expect_token(&Token::RParen)?;
14861        let alias = self.maybe_parse_table_alias()?;
14862        Ok(TableFactor::XmlTable {
14863            namespaces,
14864            row_expression,
14865            passing,
14866            columns,
14867            alias,
14868        })
14869    }
14870
14871    fn parse_xml_namespace_definition(&mut self) -> Result<XmlNamespaceDefinition, ParserError> {
14872        let uri = self.parse_expr()?;
14873        self.expect_keyword_is(Keyword::AS)?;
14874        let name = self.parse_identifier()?;
14875        Ok(XmlNamespaceDefinition { uri, name })
14876    }
14877
14878    fn parse_xml_table_column(&mut self) -> Result<XmlTableColumn, ParserError> {
14879        let name = self.parse_identifier()?;
14880
14881        let option = if self.parse_keyword(Keyword::FOR) {
14882            self.expect_keyword(Keyword::ORDINALITY)?;
14883            XmlTableColumnOption::ForOrdinality
14884        } else {
14885            let r#type = self.parse_data_type()?;
14886            let mut path = None;
14887            let mut default = None;
14888
14889            if self.parse_keyword(Keyword::PATH) {
14890                path = Some(self.parse_expr()?);
14891            }
14892
14893            if self.parse_keyword(Keyword::DEFAULT) {
14894                default = Some(self.parse_expr()?);
14895            }
14896
14897            let not_null = self.parse_keywords(&[Keyword::NOT, Keyword::NULL]);
14898            if !not_null {
14899                // NULL is the default but can be specified explicitly
14900                let _ = self.parse_keyword(Keyword::NULL);
14901            }
14902
14903            XmlTableColumnOption::NamedInfo {
14904                r#type,
14905                path,
14906                default,
14907                nullable: !not_null,
14908            }
14909        };
14910        Ok(XmlTableColumn { name, option })
14911    }
14912
14913    fn parse_xml_passing_clause(&mut self) -> Result<XmlPassingClause, ParserError> {
14914        let mut arguments = vec![];
14915        if self.parse_keyword(Keyword::PASSING) {
14916            loop {
14917                let by_value =
14918                    self.parse_keyword(Keyword::BY) && self.expect_keyword(Keyword::VALUE).is_ok();
14919                let expr = self.parse_expr()?;
14920                let alias = if self.parse_keyword(Keyword::AS) {
14921                    Some(self.parse_identifier()?)
14922                } else {
14923                    None
14924                };
14925                arguments.push(XmlPassingArgument {
14926                    expr,
14927                    alias,
14928                    by_value,
14929                });
14930                if !self.consume_token(&Token::Comma) {
14931                    break;
14932                }
14933            }
14934        }
14935        Ok(XmlPassingClause { arguments })
14936    }
14937
14938    /// Parse a [TableFactor::SemanticView]
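    ///
    /// An illustrative example (view, dimension and metric names are hypothetical):
    ///
    /// ```sql
    /// SEMANTIC_VIEW(
    ///     my_semantic_view
    ///     DIMENSIONS customer.region, customer.segment
    ///     METRICS orders.total_revenue
    ///     WHERE customer.region = 'EMEA'
    /// ) AS sv
    /// ```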
14939    fn parse_semantic_view_table_factor(&mut self) -> Result<TableFactor, ParserError> {
14940        self.expect_keyword(Keyword::SEMANTIC_VIEW)?;
14941        self.expect_token(&Token::LParen)?;
14942
14943        let name = self.parse_object_name(true)?;
14944
14945        // Parse DIMENSIONS, METRICS, FACTS and WHERE clauses in flexible order
14946        let mut dimensions = Vec::new();
14947        let mut metrics = Vec::new();
14948        let mut facts = Vec::new();
14949        let mut where_clause = None;
14950
14951        while self.peek_token().token != Token::RParen {
14952            if self.parse_keyword(Keyword::DIMENSIONS) {
14953                if !dimensions.is_empty() {
14954                    return Err(ParserError::ParserError(
14955                        "DIMENSIONS clause can only be specified once".to_string(),
14956                    ));
14957                }
14958                dimensions = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
14959            } else if self.parse_keyword(Keyword::METRICS) {
14960                if !metrics.is_empty() {
14961                    return Err(ParserError::ParserError(
14962                        "METRICS clause can only be specified once".to_string(),
14963                    ));
14964                }
14965                metrics = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
14966            } else if self.parse_keyword(Keyword::FACTS) {
14967                if !facts.is_empty() {
14968                    return Err(ParserError::ParserError(
14969                        "FACTS clause can only be specified once".to_string(),
14970                    ));
14971                }
14972                facts = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
14973            } else if self.parse_keyword(Keyword::WHERE) {
14974                if where_clause.is_some() {
14975                    return Err(ParserError::ParserError(
14976                        "WHERE clause can only be specified once".to_string(),
14977                    ));
14978                }
14979                where_clause = Some(self.parse_expr()?);
14980            } else {
14981                return parser_err!(
14982                    format!(
14983                        "Expected one of DIMENSIONS, METRICS, FACTS or WHERE, got {}",
14984                        self.peek_token().token
14985                    ),
14986                    self.peek_token().span.start
14987                )?;
14988            }
14989        }
14990
14991        self.expect_token(&Token::RParen)?;
14992
14993        let alias = self.maybe_parse_table_alias()?;
14994
14995        Ok(TableFactor::SemanticView {
14996            name,
14997            dimensions,
14998            metrics,
14999            facts,
15000            where_clause,
15001            alias,
15002        })
15003    }
15004
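    /// Parses the parenthesized body of a `MATCH_RECOGNIZE` clause and attaches
    /// it to `table`; the `MATCH_RECOGNIZE` keyword itself is consumed by the caller.
    ///
    /// An illustrative example of the overall clause (identifiers are hypothetical):
    ///
    /// ```sql
    /// SELECT * FROM trades MATCH_RECOGNIZE(
    ///     PARTITION BY symbol
    ///     ORDER BY trade_ts
    ///     MEASURES MAX(price) AS max_price
    ///     ONE ROW PER MATCH
    ///     AFTER MATCH SKIP PAST LAST ROW
    ///     PATTERN (A B+)
    ///     DEFINE B AS price > PREV(price)
    /// ) AS mr
    /// ```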
15005    fn parse_match_recognize(&mut self, table: TableFactor) -> Result<TableFactor, ParserError> {
15006        self.expect_token(&Token::LParen)?;
15007
15008        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
15009            self.parse_comma_separated(Parser::parse_expr)?
15010        } else {
15011            vec![]
15012        };
15013
15014        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
15015            self.parse_comma_separated(Parser::parse_order_by_expr)?
15016        } else {
15017            vec![]
15018        };
15019
15020        let measures = if self.parse_keyword(Keyword::MEASURES) {
15021            self.parse_comma_separated(|p| {
15022                let expr = p.parse_expr()?;
15023                let _ = p.parse_keyword(Keyword::AS);
15024                let alias = p.parse_identifier()?;
15025                Ok(Measure { expr, alias })
15026            })?
15027        } else {
15028            vec![]
15029        };
15030
15031        let rows_per_match =
15032            if self.parse_keywords(&[Keyword::ONE, Keyword::ROW, Keyword::PER, Keyword::MATCH]) {
15033                Some(RowsPerMatch::OneRow)
15034            } else if self.parse_keywords(&[
15035                Keyword::ALL,
15036                Keyword::ROWS,
15037                Keyword::PER,
15038                Keyword::MATCH,
15039            ]) {
15040                Some(RowsPerMatch::AllRows(
15041                    if self.parse_keywords(&[Keyword::SHOW, Keyword::EMPTY, Keyword::MATCHES]) {
15042                        Some(EmptyMatchesMode::Show)
15043                    } else if self.parse_keywords(&[
15044                        Keyword::OMIT,
15045                        Keyword::EMPTY,
15046                        Keyword::MATCHES,
15047                    ]) {
15048                        Some(EmptyMatchesMode::Omit)
15049                    } else if self.parse_keywords(&[
15050                        Keyword::WITH,
15051                        Keyword::UNMATCHED,
15052                        Keyword::ROWS,
15053                    ]) {
15054                        Some(EmptyMatchesMode::WithUnmatched)
15055                    } else {
15056                        None
15057                    },
15058                ))
15059            } else {
15060                None
15061            };
15062
15063        let after_match_skip =
15064            if self.parse_keywords(&[Keyword::AFTER, Keyword::MATCH, Keyword::SKIP]) {
15065                if self.parse_keywords(&[Keyword::PAST, Keyword::LAST, Keyword::ROW]) {
15066                    Some(AfterMatchSkip::PastLastRow)
15067                } else if self.parse_keywords(&[Keyword::TO, Keyword::NEXT, Keyword::ROW]) {
15068                    Some(AfterMatchSkip::ToNextRow)
15069                } else if self.parse_keywords(&[Keyword::TO, Keyword::FIRST]) {
15070                    Some(AfterMatchSkip::ToFirst(self.parse_identifier()?))
15071                } else if self.parse_keywords(&[Keyword::TO, Keyword::LAST]) {
15072                    Some(AfterMatchSkip::ToLast(self.parse_identifier()?))
15073                } else {
15074                    let found = self.next_token();
15075                    return self.expected("after match skip option", found);
15076                }
15077            } else {
15078                None
15079            };
15080
15081        self.expect_keyword_is(Keyword::PATTERN)?;
15082        let pattern = self.parse_parenthesized(Self::parse_pattern)?;
15083
15084        self.expect_keyword_is(Keyword::DEFINE)?;
15085
15086        let symbols = self.parse_comma_separated(|p| {
15087            let symbol = p.parse_identifier()?;
15088            p.expect_keyword_is(Keyword::AS)?;
15089            let definition = p.parse_expr()?;
15090            Ok(SymbolDefinition { symbol, definition })
15091        })?;
15092
15093        self.expect_token(&Token::RParen)?;
15094
15095        let alias = self.maybe_parse_table_alias()?;
15096
15097        Ok(TableFactor::MatchRecognize {
15098            table: Box::new(table),
15099            partition_by,
15100            order_by,
15101            measures,
15102            rows_per_match,
15103            after_match_skip,
15104            pattern,
15105            symbols,
15106            alias,
15107        })
15108    }
15109
15110    fn parse_base_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
15111        match self.next_token().token {
15112            Token::Caret => Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::Start)),
15113            Token::Placeholder(s) if s == "$" => {
15114                Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::End))
15115            }
15116            Token::LBrace => {
15117                self.expect_token(&Token::Minus)?;
15118                let symbol = self.parse_identifier().map(MatchRecognizeSymbol::Named)?;
15119                self.expect_token(&Token::Minus)?;
15120                self.expect_token(&Token::RBrace)?;
15121                Ok(MatchRecognizePattern::Exclude(symbol))
15122            }
15123            Token::Word(Word {
15124                value,
15125                quote_style: None,
15126                ..
15127            }) if value == "PERMUTE" => {
15128                self.expect_token(&Token::LParen)?;
15129                let symbols = self.parse_comma_separated(|p| {
15130                    p.parse_identifier().map(MatchRecognizeSymbol::Named)
15131                })?;
15132                self.expect_token(&Token::RParen)?;
15133                Ok(MatchRecognizePattern::Permute(symbols))
15134            }
15135            Token::LParen => {
15136                let pattern = self.parse_pattern()?;
15137                self.expect_token(&Token::RParen)?;
15138                Ok(MatchRecognizePattern::Group(Box::new(pattern)))
15139            }
15140            _ => {
15141                self.prev_token();
15142                self.parse_identifier()
15143                    .map(MatchRecognizeSymbol::Named)
15144                    .map(MatchRecognizePattern::Symbol)
15145            }
15146        }
15147    }
15148
15149    fn parse_repetition_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
15150        let mut pattern = self.parse_base_pattern()?;
15151        loop {
15152            let token = self.next_token();
15153            let quantifier = match token.token {
15154                Token::Mul => RepetitionQuantifier::ZeroOrMore,
15155                Token::Plus => RepetitionQuantifier::OneOrMore,
15156                Token::Placeholder(s) if s == "?" => RepetitionQuantifier::AtMostOne,
15157                Token::LBrace => {
15158                    // quantifier is a range like {n} or {n,} or {,m} or {n,m}
15159                    let token = self.next_token();
15160                    match token.token {
15161                        Token::Comma => {
15162                            let next_token = self.next_token();
15163                            let Token::Number(n, _) = next_token.token else {
15164                                return self.expected("literal number", next_token);
15165                            };
15166                            self.expect_token(&Token::RBrace)?;
15167                            RepetitionQuantifier::AtMost(Self::parse(n, token.span.start)?)
15168                        }
15169                        Token::Number(n, _) if self.consume_token(&Token::Comma) => {
15170                            let next_token = self.next_token();
15171                            match next_token.token {
15172                                Token::Number(m, _) => {
15173                                    self.expect_token(&Token::RBrace)?;
15174                                    RepetitionQuantifier::Range(
15175                                        Self::parse(n, token.span.start)?,
15176                                        Self::parse(m, token.span.start)?,
15177                                    )
15178                                }
15179                                Token::RBrace => {
15180                                    RepetitionQuantifier::AtLeast(Self::parse(n, token.span.start)?)
15181                                }
15182                                _ => {
15183                                    return self.expected("} or upper bound", next_token);
15184                                }
15185                            }
15186                        }
15187                        Token::Number(n, _) => {
15188                            self.expect_token(&Token::RBrace)?;
15189                            RepetitionQuantifier::Exactly(Self::parse(n, token.span.start)?)
15190                        }
15191                        _ => return self.expected("quantifier range", token),
15192                    }
15193                }
15194                _ => {
15195                    self.prev_token();
15196                    break;
15197                }
15198            };
15199            pattern = MatchRecognizePattern::Repetition(Box::new(pattern), quantifier);
15200        }
15201        Ok(pattern)
15202    }
15203
15204    fn parse_concat_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
15205        let mut patterns = vec![self.parse_repetition_pattern()?];
15206        while !matches!(self.peek_token().token, Token::RParen | Token::Pipe) {
15207            patterns.push(self.parse_repetition_pattern()?);
15208        }
15209        match <[MatchRecognizePattern; 1]>::try_from(patterns) {
15210            Ok([pattern]) => Ok(pattern),
15211            Err(patterns) => Ok(MatchRecognizePattern::Concat(patterns)),
15212        }
15213    }
15214
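    /// Parses a complete `MATCH_RECOGNIZE` row pattern: an alternation (`|`) of
    /// concatenations of optionally quantified base patterns.
    ///
    /// Illustrative patterns accepted by this grammar (symbol names are hypothetical):
    ///
    /// ```sql
    /// A B+ C{2,4}
    /// PERMUTE(X, Y) | {- Z -} W?
    /// ^ A* $
    /// ```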
15215    fn parse_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
15216        let pattern = self.parse_concat_pattern()?;
15217        if self.consume_token(&Token::Pipe) {
15218            match self.parse_pattern()? {
15219                // flatten nested alternations
15220                MatchRecognizePattern::Alternation(mut patterns) => {
15221                    patterns.insert(0, pattern);
15222                    Ok(MatchRecognizePattern::Alternation(patterns))
15223                }
15224                next => Ok(MatchRecognizePattern::Alternation(vec![pattern, next])),
15225            }
15226        } else {
15227            Ok(pattern)
15228        }
15229    }
15230
15231    /// Parses the timestamp version specifier (i.e. to query historical data)
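    ///
    /// Illustrative examples (table names and timestamps are hypothetical):
    ///
    /// ```sql
    /// SELECT * FROM orders FOR SYSTEM_TIME AS OF '2024-01-01 00:00:00'
    /// SELECT * FROM orders AT(TIMESTAMP => '2024-01-01 00:00:00'::TIMESTAMP)
    /// ```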
15232    pub fn maybe_parse_table_version(&mut self) -> Result<Option<TableVersion>, ParserError> {
15233        if self.dialect.supports_timestamp_versioning() {
15234            if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
15235            {
15236                let expr = self.parse_expr()?;
15237                return Ok(Some(TableVersion::ForSystemTimeAsOf(expr)));
15238            } else if self.peek_keyword(Keyword::AT) || self.peek_keyword(Keyword::BEFORE) {
15239                let func_name = self.parse_object_name(true)?;
15240                let func = self.parse_function(func_name)?;
15241                return Ok(Some(TableVersion::Function(func)));
15242            }
15243        }
15244        Ok(None)
15245    }
15246
15247    /// Parses MySQL's JSON_TABLE column definition.
15248    /// For example: `id INT EXISTS PATH '$' DEFAULT '0' ON EMPTY ERROR ON ERROR`
15249    pub fn parse_json_table_column_def(&mut self) -> Result<JsonTableColumn, ParserError> {
15250        if self.parse_keyword(Keyword::NESTED) {
15251            let _has_path_keyword = self.parse_keyword(Keyword::PATH);
15252            let path = self.parse_value()?.value;
15253            self.expect_keyword_is(Keyword::COLUMNS)?;
15254            let columns = self.parse_parenthesized(|p| {
15255                p.parse_comma_separated(Self::parse_json_table_column_def)
15256            })?;
15257            return Ok(JsonTableColumn::Nested(JsonTableNestedColumn {
15258                path,
15259                columns,
15260            }));
15261        }
15262        let name = self.parse_identifier()?;
15263        if self.parse_keyword(Keyword::FOR) {
15264            self.expect_keyword_is(Keyword::ORDINALITY)?;
15265            return Ok(JsonTableColumn::ForOrdinality(name));
15266        }
15267        let r#type = self.parse_data_type()?;
15268        let exists = self.parse_keyword(Keyword::EXISTS);
15269        self.expect_keyword_is(Keyword::PATH)?;
15270        let path = self.parse_value()?.value;
15271        let mut on_empty = None;
15272        let mut on_error = None;
15273        while let Some(error_handling) = self.parse_json_table_column_error_handling()? {
15274            if self.parse_keyword(Keyword::EMPTY) {
15275                on_empty = Some(error_handling);
15276            } else {
15277                self.expect_keyword_is(Keyword::ERROR)?;
15278                on_error = Some(error_handling);
15279            }
15280        }
15281        Ok(JsonTableColumn::Named(JsonTableNamedColumn {
15282            name,
15283            r#type,
15284            path,
15285            exists,
15286            on_empty,
15287            on_error,
15288        }))
15289    }
15290
15291    /// Parses MSSQL's `OPENJSON WITH` column definition.
15292    ///
15293    /// ```sql
15294    /// colName type [ column_path ] [ AS JSON ]
15295    /// ```
15296    ///
15297    /// Reference: <https://learn.microsoft.com/en-us/sql/t-sql/functions/openjson-transact-sql?view=sql-server-ver16#syntax>
15298    pub fn parse_openjson_table_column_def(&mut self) -> Result<OpenJsonTableColumn, ParserError> {
15299        let name = self.parse_identifier()?;
15300        let r#type = self.parse_data_type()?;
15301        let path = if let Token::SingleQuotedString(path) = self.peek_token().token {
15302            self.next_token();
15303            Some(path)
15304        } else {
15305            None
15306        };
15307        let as_json = self.parse_keyword(Keyword::AS);
15308        if as_json {
15309            self.expect_keyword_is(Keyword::JSON)?;
15310        }
15311        Ok(OpenJsonTableColumn {
15312            name,
15313            r#type,
15314            path,
15315            as_json,
15316        })
15317    }
15318
15319    fn parse_json_table_column_error_handling(
15320        &mut self,
15321    ) -> Result<Option<JsonTableColumnErrorHandling>, ParserError> {
15322        let res = if self.parse_keyword(Keyword::NULL) {
15323            JsonTableColumnErrorHandling::Null
15324        } else if self.parse_keyword(Keyword::ERROR) {
15325            JsonTableColumnErrorHandling::Error
15326        } else if self.parse_keyword(Keyword::DEFAULT) {
15327            JsonTableColumnErrorHandling::Default(self.parse_value()?.value)
15328        } else {
15329            return Ok(None);
15330        };
15331        self.expect_keyword_is(Keyword::ON)?;
15332        Ok(Some(res))
15333    }
15334
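    /// Parses the remainder of a derived table: a subquery followed by the
    /// closing `)` and an optional alias, assuming the opening `(` was already
    /// consumed.
    ///
    /// For example, given the input below (identifiers are hypothetical), the
    /// caller consumes the `(` and this method parses everything from `SELECT`
    /// onward:
    ///
    /// ```sql
    /// (SELECT a, b FROM t WHERE a > 1) AS sub
    /// ```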
15335    pub fn parse_derived_table_factor(
15336        &mut self,
15337        lateral: IsLateral,
15338    ) -> Result<TableFactor, ParserError> {
15339        let subquery = self.parse_query()?;
15340        self.expect_token(&Token::RParen)?;
15341        let alias = self.maybe_parse_table_alias()?;
15342        Ok(TableFactor::Derived {
15343            lateral: match lateral {
15344                Lateral => true,
15345                NotLateral => false,
15346            },
15347            subquery,
15348            alias,
15349        })
15350    }
15351
15352    fn parse_aliased_function_call(&mut self) -> Result<ExprWithAlias, ParserError> {
15353        let function_name = match self.next_token().token {
15354            Token::Word(w) => Ok(w.value),
15355            _ => self.expected("a function identifier", self.peek_token()),
15356        }?;
15357        let expr = self.parse_function(ObjectName::from(vec![Ident::new(function_name)]))?;
15358        let alias = if self.parse_keyword(Keyword::AS) {
15359            Some(self.parse_identifier()?)
15360        } else {
15361            None
15362        };
15363
15364        Ok(ExprWithAlias { expr, alias })
15365    }
15366    /// Parses an expression with an optional alias
15367    ///
15368    /// Examples:
15369    ///
15370    /// ```sql
15371    /// SUM(price) AS total_price
15372    /// ```
15373    /// ```sql
15374    /// SUM(price)
15375    /// ```
15376    ///
15377    /// Example
15378    /// ```
15379    /// # use sqlparser::parser::{Parser, ParserError};
15380    /// # use sqlparser::dialect::GenericDialect;
15381    /// # fn main() ->Result<(), ParserError> {
15382    /// let sql = r#"SUM("a") as "b""#;
15383    /// let mut parser = Parser::new(&GenericDialect).try_with_sql(sql)?;
15384    /// let expr_with_alias = parser.parse_expr_with_alias()?;
15385    /// assert_eq!(Some("b".to_string()), expr_with_alias.alias.map(|x|x.value));
15386    /// # Ok(())
15387    /// # }
15388    pub fn parse_expr_with_alias(&mut self) -> Result<ExprWithAlias, ParserError> {
15389        let expr = self.parse_expr()?;
15390        let alias = if self.parse_keyword(Keyword::AS) {
15391            Some(self.parse_identifier()?)
15392        } else {
15393            None
15394        };
15395
15396        Ok(ExprWithAlias { expr, alias })
15397    }
15398
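    /// Parses a `PIVOT(...)` clause applied to `table`, assuming the `PIVOT`
    /// keyword was already consumed.
    ///
    /// An illustrative example (identifiers are hypothetical):
    ///
    /// ```sql
    /// SELECT * FROM monthly_sales
    ///     PIVOT(SUM(amount) FOR month IN ('JAN', 'FEB', 'MAR') DEFAULT ON NULL (0)) AS p
    /// ```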
15399    pub fn parse_pivot_table_factor(
15400        &mut self,
15401        table: TableFactor,
15402    ) -> Result<TableFactor, ParserError> {
15403        self.expect_token(&Token::LParen)?;
15404        let aggregate_functions = self.parse_comma_separated(Self::parse_aliased_function_call)?;
15405        self.expect_keyword_is(Keyword::FOR)?;
15406        let value_column = if self.peek_token_ref().token == Token::LParen {
15407            self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
15408                p.parse_subexpr(self.dialect.prec_value(Precedence::Between))
15409            })?
15410        } else {
15411            vec![self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?]
15412        };
15413        self.expect_keyword_is(Keyword::IN)?;
15414
15415        self.expect_token(&Token::LParen)?;
15416        let value_source = if self.parse_keyword(Keyword::ANY) {
15417            let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
15418                self.parse_comma_separated(Parser::parse_order_by_expr)?
15419            } else {
15420                vec![]
15421            };
15422            PivotValueSource::Any(order_by)
15423        } else if self.peek_sub_query() {
15424            PivotValueSource::Subquery(self.parse_query()?)
15425        } else {
15426            PivotValueSource::List(self.parse_comma_separated(Self::parse_expr_with_alias)?)
15427        };
15428        self.expect_token(&Token::RParen)?;
15429
15430        let default_on_null =
15431            if self.parse_keywords(&[Keyword::DEFAULT, Keyword::ON, Keyword::NULL]) {
15432                self.expect_token(&Token::LParen)?;
15433                let expr = self.parse_expr()?;
15434                self.expect_token(&Token::RParen)?;
15435                Some(expr)
15436            } else {
15437                None
15438            };
15439
15440        self.expect_token(&Token::RParen)?;
15441        let alias = self.maybe_parse_table_alias()?;
15442        Ok(TableFactor::Pivot {
15443            table: Box::new(table),
15444            aggregate_functions,
15445            value_column,
15446            value_source,
15447            default_on_null,
15448            alias,
15449        })
15450    }
15451
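    /// Parses an `UNPIVOT(...)` clause applied to `table`, assuming the `UNPIVOT`
    /// keyword was already consumed.
    ///
    /// An illustrative example (identifiers are hypothetical):
    ///
    /// ```sql
    /// SELECT * FROM sales
    ///     UNPIVOT EXCLUDE NULLS (quantity FOR quarter IN (q1, q2, q3, q4)) AS u
    /// ```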
15452    pub fn parse_unpivot_table_factor(
15453        &mut self,
15454        table: TableFactor,
15455    ) -> Result<TableFactor, ParserError> {
15456        let null_inclusion = if self.parse_keyword(Keyword::INCLUDE) {
15457            self.expect_keyword_is(Keyword::NULLS)?;
15458            Some(NullInclusion::IncludeNulls)
15459        } else if self.parse_keyword(Keyword::EXCLUDE) {
15460            self.expect_keyword_is(Keyword::NULLS)?;
15461            Some(NullInclusion::ExcludeNulls)
15462        } else {
15463            None
15464        };
15465        self.expect_token(&Token::LParen)?;
15466        let value = self.parse_expr()?;
15467        self.expect_keyword_is(Keyword::FOR)?;
15468        let name = self.parse_identifier()?;
15469        self.expect_keyword_is(Keyword::IN)?;
15470        let columns = self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
15471            p.parse_expr_with_alias()
15472        })?;
15473        self.expect_token(&Token::RParen)?;
15474        let alias = self.maybe_parse_table_alias()?;
15475        Ok(TableFactor::Unpivot {
15476            table: Box::new(table),
15477            value,
15478            null_inclusion,
15479            name,
15480            columns,
15481            alias,
15482        })
15483    }
15484
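    /// Parses the constraint following a joined table: `ON <expr>`,
    /// `USING (<columns>)`, or nothing (`JoinConstraint::None`); when the caller
    /// already saw `NATURAL`, returns `JoinConstraint::Natural` without consuming
    /// any tokens.
    ///
    /// Illustrative examples (identifiers are hypothetical):
    ///
    /// ```sql
    /// SELECT * FROM customers JOIN orders ON orders.customer_id = customers.id
    /// SELECT * FROM customers JOIN orders USING (customer_id)
    /// ```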
15485    pub fn parse_join_constraint(&mut self, natural: bool) -> Result<JoinConstraint, ParserError> {
15486        if natural {
15487            Ok(JoinConstraint::Natural)
15488        } else if self.parse_keyword(Keyword::ON) {
15489            let constraint = self.parse_expr()?;
15490            Ok(JoinConstraint::On(constraint))
15491        } else if self.parse_keyword(Keyword::USING) {
15492            let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
15493            Ok(JoinConstraint::Using(columns))
15494        } else {
15495            Ok(JoinConstraint::None)
15496            //self.expected("ON, or USING after JOIN", self.peek_token())
15497        }
15498    }
15499
15500    /// Parse a GRANT statement.
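    ///
    /// An illustrative example (role, user and object names are hypothetical):
    ///
    /// ```sql
    /// GRANT SELECT, INSERT ON orders TO ROLE analyst WITH GRANT OPTION GRANTED BY admin
    /// ```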
15501    pub fn parse_grant(&mut self) -> Result<Statement, ParserError> {
15502        let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
15503
15504        self.expect_keyword_is(Keyword::TO)?;
15505        let grantees = self.parse_grantees()?;
15506
15507        let with_grant_option =
15508            self.parse_keywords(&[Keyword::WITH, Keyword::GRANT, Keyword::OPTION]);
15509
15510        let current_grants =
15511            if self.parse_keywords(&[Keyword::COPY, Keyword::CURRENT, Keyword::GRANTS]) {
15512                Some(CurrentGrantsKind::CopyCurrentGrants)
15513            } else if self.parse_keywords(&[Keyword::REVOKE, Keyword::CURRENT, Keyword::GRANTS]) {
15514                Some(CurrentGrantsKind::RevokeCurrentGrants)
15515            } else {
15516                None
15517            };
15518
15519        let as_grantor = if self.parse_keywords(&[Keyword::AS]) {
15520            Some(self.parse_identifier()?)
15521        } else {
15522            None
15523        };
15524
15525        let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
15526            Some(self.parse_identifier()?)
15527        } else {
15528            None
15529        };
15530
15531        Ok(Statement::Grant {
15532            privileges,
15533            objects,
15534            grantees,
15535            with_grant_option,
15536            as_grantor,
15537            granted_by,
15538            current_grants,
15539        })
15540    }
15541
15542    fn parse_grantees(&mut self) -> Result<Vec<Grantee>, ParserError> {
15543        let mut values = vec![];
15544        let mut grantee_type = GranteesType::None;
15545        loop {
15546            let new_grantee_type = if self.parse_keyword(Keyword::ROLE) {
15547                GranteesType::Role
15548            } else if self.parse_keyword(Keyword::USER) {
15549                GranteesType::User
15550            } else if self.parse_keyword(Keyword::SHARE) {
15551                GranteesType::Share
15552            } else if self.parse_keyword(Keyword::GROUP) {
15553                GranteesType::Group
15554            } else if self.parse_keyword(Keyword::PUBLIC) {
15555                GranteesType::Public
15556            } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
15557                GranteesType::DatabaseRole
15558            } else if self.parse_keywords(&[Keyword::APPLICATION, Keyword::ROLE]) {
15559                GranteesType::ApplicationRole
15560            } else if self.parse_keyword(Keyword::APPLICATION) {
15561                GranteesType::Application
15562            } else {
15563                grantee_type.clone() // keep from previous iteration, if not specified
15564            };
15565
15566            if self
15567                .dialect
15568                .get_reserved_grantees_types()
15569                .contains(&new_grantee_type)
15570            {
15571                self.prev_token();
15572            } else {
15573                grantee_type = new_grantee_type;
15574            }
15575
15576            let grantee = if grantee_type == GranteesType::Public {
15577                Grantee {
15578                    grantee_type: grantee_type.clone(),
15579                    name: None,
15580                }
15581            } else {
15582                let mut name = self.parse_grantee_name()?;
15583                if self.consume_token(&Token::Colon) {
15584                    // Redshift supports namespace prefix for external users and groups:
15585                    // <Namespace>:<GroupName> or <Namespace>:<UserName>
15586                    // https://docs.aws.amazon.com/redshift/latest/mgmt/redshift-iam-access-control-native-idp.html
15587                    let ident = self.parse_identifier()?;
15588                    if let GranteeName::ObjectName(namespace) = name {
15589                        name = GranteeName::ObjectName(ObjectName::from(vec![Ident::new(
15590                            format!("{namespace}:{ident}"),
15591                        )]));
15592                    };
15593                }
15594                Grantee {
15595                    grantee_type: grantee_type.clone(),
15596                    name: Some(name),
15597                }
15598            };
15599
15600            values.push(grantee);
15601
15602            if !self.consume_token(&Token::Comma) {
15603                break;
15604            }
15605        }
15606
15607        Ok(values)
15608    }
15609
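    /// Parse the `<privileges> [ON <objects>]` portion shared by `GRANT`, `DENY`
    /// and `REVOKE`. A couple of the object forms recognized here (illustrative):
    ///
    /// ```sql
    /// GRANT SELECT ON ALL TABLES IN SCHEMA s1, s2 TO ROLE r1
    /// GRANT USAGE ON FUTURE SCHEMAS IN DATABASE db1 TO ROLE r1
    /// ```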
15610    pub fn parse_grant_deny_revoke_privileges_objects(
15611        &mut self,
15612    ) -> Result<(Privileges, Option<GrantObjects>), ParserError> {
15613        let privileges = if self.parse_keyword(Keyword::ALL) {
15614            Privileges::All {
15615                with_privileges_keyword: self.parse_keyword(Keyword::PRIVILEGES),
15616            }
15617        } else {
15618            let actions = self.parse_actions_list()?;
15619            Privileges::Actions(actions)
15620        };
15621
15622        let objects = if self.parse_keyword(Keyword::ON) {
15623            if self.parse_keywords(&[Keyword::ALL, Keyword::TABLES, Keyword::IN, Keyword::SCHEMA]) {
15624                Some(GrantObjects::AllTablesInSchema {
15625                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15626                })
15627            } else if self.parse_keywords(&[
15628                Keyword::ALL,
15629                Keyword::EXTERNAL,
15630                Keyword::TABLES,
15631                Keyword::IN,
15632                Keyword::SCHEMA,
15633            ]) {
15634                Some(GrantObjects::AllExternalTablesInSchema {
15635                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15636                })
15637            } else if self.parse_keywords(&[
15638                Keyword::ALL,
15639                Keyword::VIEWS,
15640                Keyword::IN,
15641                Keyword::SCHEMA,
15642            ]) {
15643                Some(GrantObjects::AllViewsInSchema {
15644                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15645                })
15646            } else if self.parse_keywords(&[
15647                Keyword::ALL,
15648                Keyword::MATERIALIZED,
15649                Keyword::VIEWS,
15650                Keyword::IN,
15651                Keyword::SCHEMA,
15652            ]) {
15653                Some(GrantObjects::AllMaterializedViewsInSchema {
15654                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15655                })
15656            } else if self.parse_keywords(&[
15657                Keyword::ALL,
15658                Keyword::FUNCTIONS,
15659                Keyword::IN,
15660                Keyword::SCHEMA,
15661            ]) {
15662                Some(GrantObjects::AllFunctionsInSchema {
15663                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15664                })
15665            } else if self.parse_keywords(&[
15666                Keyword::FUTURE,
15667                Keyword::SCHEMAS,
15668                Keyword::IN,
15669                Keyword::DATABASE,
15670            ]) {
15671                Some(GrantObjects::FutureSchemasInDatabase {
15672                    databases: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15673                })
15674            } else if self.parse_keywords(&[
15675                Keyword::FUTURE,
15676                Keyword::TABLES,
15677                Keyword::IN,
15678                Keyword::SCHEMA,
15679            ]) {
15680                Some(GrantObjects::FutureTablesInSchema {
15681                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15682                })
15683            } else if self.parse_keywords(&[
15684                Keyword::FUTURE,
15685                Keyword::EXTERNAL,
15686                Keyword::TABLES,
15687                Keyword::IN,
15688                Keyword::SCHEMA,
15689            ]) {
15690                Some(GrantObjects::FutureExternalTablesInSchema {
15691                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15692                })
15693            } else if self.parse_keywords(&[
15694                Keyword::FUTURE,
15695                Keyword::VIEWS,
15696                Keyword::IN,
15697                Keyword::SCHEMA,
15698            ]) {
15699                Some(GrantObjects::FutureViewsInSchema {
15700                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15701                })
15702            } else if self.parse_keywords(&[
15703                Keyword::FUTURE,
15704                Keyword::MATERIALIZED,
15705                Keyword::VIEWS,
15706                Keyword::IN,
15707                Keyword::SCHEMA,
15708            ]) {
15709                Some(GrantObjects::FutureMaterializedViewsInSchema {
15710                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15711                })
15712            } else if self.parse_keywords(&[
15713                Keyword::ALL,
15714                Keyword::SEQUENCES,
15715                Keyword::IN,
15716                Keyword::SCHEMA,
15717            ]) {
15718                Some(GrantObjects::AllSequencesInSchema {
15719                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15720                })
15721            } else if self.parse_keywords(&[
15722                Keyword::FUTURE,
15723                Keyword::SEQUENCES,
15724                Keyword::IN,
15725                Keyword::SCHEMA,
15726            ]) {
15727                Some(GrantObjects::FutureSequencesInSchema {
15728                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15729                })
15730            } else if self.parse_keywords(&[Keyword::RESOURCE, Keyword::MONITOR]) {
15731                Some(GrantObjects::ResourceMonitors(
15732                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
15733                ))
15734            } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
15735                Some(GrantObjects::ComputePools(
15736                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
15737                ))
15738            } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
15739                Some(GrantObjects::FailoverGroup(
15740                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
15741                ))
15742            } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
15743                Some(GrantObjects::ReplicationGroup(
15744                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
15745                ))
15746            } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
15747                Some(GrantObjects::ExternalVolumes(
15748                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
15749                ))
15750            } else {
15751                let object_type = self.parse_one_of_keywords(&[
15752                    Keyword::SEQUENCE,
15753                    Keyword::DATABASE,
15754                    Keyword::SCHEMA,
15755                    Keyword::TABLE,
15756                    Keyword::VIEW,
15757                    Keyword::WAREHOUSE,
15758                    Keyword::INTEGRATION,
15762                    Keyword::USER,
15763                    Keyword::CONNECTION,
15764                    Keyword::PROCEDURE,
15765                    Keyword::FUNCTION,
15766                ]);
15767                let objects =
15768                    self.parse_comma_separated(|p| p.parse_object_name_inner(false, true));
15769                match object_type {
15770                    Some(Keyword::DATABASE) => Some(GrantObjects::Databases(objects?)),
15771                    Some(Keyword::SCHEMA) => Some(GrantObjects::Schemas(objects?)),
15772                    Some(Keyword::SEQUENCE) => Some(GrantObjects::Sequences(objects?)),
15773                    Some(Keyword::WAREHOUSE) => Some(GrantObjects::Warehouses(objects?)),
15774                    Some(Keyword::INTEGRATION) => Some(GrantObjects::Integrations(objects?)),
15775                    Some(Keyword::VIEW) => Some(GrantObjects::Views(objects?)),
15776                    Some(Keyword::USER) => Some(GrantObjects::Users(objects?)),
15777                    Some(Keyword::CONNECTION) => Some(GrantObjects::Connections(objects?)),
15778                    kw @ (Some(Keyword::PROCEDURE) | Some(Keyword::FUNCTION)) => {
15779                        if let Some(name) = objects?.first() {
15780                            self.parse_grant_procedure_or_function(name, &kw)?
15781                        } else {
15782                            self.expected("procedure or function name", self.peek_token())?
15783                        }
15784                    }
15785                    Some(Keyword::TABLE) | None => Some(GrantObjects::Tables(objects?)),
15786                    Some(unexpected_keyword) => return Err(ParserError::ParserError(
15787                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in grant objects"),
15788                    )),
15789                }
15790            }
15791        } else {
15792            None
15793        };
15794
15795        Ok((privileges, objects))
15796    }
15797
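    /// Parse the optional parenthesized argument-type list used when granting on
    /// a procedure or function, e.g. (illustrative; `my_udf` is a placeholder):
    ///
    /// ```sql
    /// GRANT EXECUTE ON FUNCTION my_udf(INT, VARCHAR) TO ROLE r1
    /// ```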
15798    fn parse_grant_procedure_or_function(
15799        &mut self,
15800        name: &ObjectName,
15801        kw: &Option<Keyword>,
15802    ) -> Result<Option<GrantObjects>, ParserError> {
15803        let arg_types = if self.consume_token(&Token::LParen) {
15804            let list = self.parse_comma_separated0(Self::parse_data_type, Token::RParen)?;
15805            self.expect_token(&Token::RParen)?;
15806            list
15807        } else {
15808            vec![]
15809        };
15810        match kw {
15811            Some(Keyword::PROCEDURE) => Ok(Some(GrantObjects::Procedure {
15812                name: name.clone(),
15813                arg_types,
15814            })),
15815            Some(Keyword::FUNCTION) => Ok(Some(GrantObjects::Function {
15816                name: name.clone(),
15817                arg_types,
15818            })),
15819            _ => self.expected("procedure or function keywords", self.peek_token())?,
15820        }
15821    }
15822
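    /// Parse a single privilege in a `GRANT`/`DENY`/`REVOKE` action list.
    /// Column-level privileges may carry a parenthesized column list, e.g.
    /// (illustrative):
    ///
    /// ```sql
    /// GRANT SELECT (id, name), UPDATE (name) ON customers TO USER alice
    /// ```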
15823    pub fn parse_grant_permission(&mut self) -> Result<Action, ParserError> {
15824        fn parse_columns(parser: &mut Parser) -> Result<Option<Vec<Ident>>, ParserError> {
15825            let columns = parser.parse_parenthesized_column_list(Optional, false)?;
15826            if columns.is_empty() {
15827                Ok(None)
15828            } else {
15829                Ok(Some(columns))
15830            }
15831        }
15832
15833        // Multi-word privileges
15834        if self.parse_keywords(&[Keyword::IMPORTED, Keyword::PRIVILEGES]) {
15835            Ok(Action::ImportedPrivileges)
15836        } else if self.parse_keywords(&[Keyword::ADD, Keyword::SEARCH, Keyword::OPTIMIZATION]) {
15837            Ok(Action::AddSearchOptimization)
15838        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::LISTING]) {
15839            Ok(Action::AttachListing)
15840        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::POLICY]) {
15841            Ok(Action::AttachPolicy)
15842        } else if self.parse_keywords(&[Keyword::BIND, Keyword::SERVICE, Keyword::ENDPOINT]) {
15843            Ok(Action::BindServiceEndpoint)
15844        } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
15845            let role = self.parse_object_name(false)?;
15846            Ok(Action::DatabaseRole { role })
15847        } else if self.parse_keywords(&[Keyword::EVOLVE, Keyword::SCHEMA]) {
15848            Ok(Action::EvolveSchema)
15849        } else if self.parse_keywords(&[Keyword::IMPORT, Keyword::SHARE]) {
15850            Ok(Action::ImportShare)
15851        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::VERSIONS]) {
15852            Ok(Action::ManageVersions)
15853        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::RELEASES]) {
15854            Ok(Action::ManageReleases)
15855        } else if self.parse_keywords(&[Keyword::OVERRIDE, Keyword::SHARE, Keyword::RESTRICTIONS]) {
15856            Ok(Action::OverrideShareRestrictions)
15857        } else if self.parse_keywords(&[
15858            Keyword::PURCHASE,
15859            Keyword::DATA,
15860            Keyword::EXCHANGE,
15861            Keyword::LISTING,
15862        ]) {
15863            Ok(Action::PurchaseDataExchangeListing)
15864        } else if self.parse_keywords(&[Keyword::RESOLVE, Keyword::ALL]) {
15865            Ok(Action::ResolveAll)
15866        } else if self.parse_keywords(&[Keyword::READ, Keyword::SESSION]) {
15867            Ok(Action::ReadSession)
15868
15869        // Single-word privileges
15870        } else if self.parse_keyword(Keyword::APPLY) {
15871            let apply_type = self.parse_action_apply_type()?;
15872            Ok(Action::Apply { apply_type })
15873        } else if self.parse_keyword(Keyword::APPLYBUDGET) {
15874            Ok(Action::ApplyBudget)
15875        } else if self.parse_keyword(Keyword::AUDIT) {
15876            Ok(Action::Audit)
15877        } else if self.parse_keyword(Keyword::CONNECT) {
15878            Ok(Action::Connect)
15879        } else if self.parse_keyword(Keyword::CREATE) {
15880            let obj_type = self.maybe_parse_action_create_object_type();
15881            Ok(Action::Create { obj_type })
15882        } else if self.parse_keyword(Keyword::DELETE) {
15883            Ok(Action::Delete)
15884        } else if self.parse_keyword(Keyword::EXEC) {
15885            let obj_type = self.maybe_parse_action_execute_obj_type();
15886            Ok(Action::Exec { obj_type })
15887        } else if self.parse_keyword(Keyword::EXECUTE) {
15888            let obj_type = self.maybe_parse_action_execute_obj_type();
15889            Ok(Action::Execute { obj_type })
15890        } else if self.parse_keyword(Keyword::FAILOVER) {
15891            Ok(Action::Failover)
15892        } else if self.parse_keyword(Keyword::INSERT) {
15893            Ok(Action::Insert {
15894                columns: parse_columns(self)?,
15895            })
15896        } else if self.parse_keyword(Keyword::MANAGE) {
15897            let manage_type = self.parse_action_manage_type()?;
15898            Ok(Action::Manage { manage_type })
15899        } else if self.parse_keyword(Keyword::MODIFY) {
15900            let modify_type = self.parse_action_modify_type();
15901            Ok(Action::Modify { modify_type })
15902        } else if self.parse_keyword(Keyword::MONITOR) {
15903            let monitor_type = self.parse_action_monitor_type();
15904            Ok(Action::Monitor { monitor_type })
15905        } else if self.parse_keyword(Keyword::OPERATE) {
15906            Ok(Action::Operate)
15907        } else if self.parse_keyword(Keyword::REFERENCES) {
15908            Ok(Action::References {
15909                columns: parse_columns(self)?,
15910            })
15911        } else if self.parse_keyword(Keyword::READ) {
15912            Ok(Action::Read)
15913        } else if self.parse_keyword(Keyword::REPLICATE) {
15914            Ok(Action::Replicate)
15915        } else if self.parse_keyword(Keyword::ROLE) {
15916            let role = self.parse_object_name(false)?;
15917            Ok(Action::Role { role })
15918        } else if self.parse_keyword(Keyword::SELECT) {
15919            Ok(Action::Select {
15920                columns: parse_columns(self)?,
15921            })
15922        } else if self.parse_keyword(Keyword::TEMPORARY) {
15923            Ok(Action::Temporary)
15924        } else if self.parse_keyword(Keyword::TRIGGER) {
15925            Ok(Action::Trigger)
15926        } else if self.parse_keyword(Keyword::TRUNCATE) {
15927            Ok(Action::Truncate)
15928        } else if self.parse_keyword(Keyword::UPDATE) {
15929            Ok(Action::Update {
15930                columns: parse_columns(self)?,
15931            })
15932        } else if self.parse_keyword(Keyword::USAGE) {
15933            Ok(Action::Usage)
15934        } else if self.parse_keyword(Keyword::OWNERSHIP) {
15935            Ok(Action::Ownership)
15936        } else if self.parse_keyword(Keyword::DROP) {
15937            Ok(Action::Drop)
15938        } else {
15939            self.expected("a privilege keyword", self.peek_token())?
15940        }
15941    }
15942
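    /// Parse the optional object type that may follow the `CREATE` privilege,
    /// e.g. (Snowflake-style; illustrative only):
    ///
    /// ```sql
    /// GRANT CREATE SCHEMA ON DATABASE db1 TO ROLE r1
    /// ```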
15943    fn maybe_parse_action_create_object_type(&mut self) -> Option<ActionCreateObjectType> {
15944        // Multi-word object types
15945        if self.parse_keywords(&[Keyword::APPLICATION, Keyword::PACKAGE]) {
15946            Some(ActionCreateObjectType::ApplicationPackage)
15947        } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
15948            Some(ActionCreateObjectType::ComputePool)
15949        } else if self.parse_keywords(&[Keyword::DATA, Keyword::EXCHANGE, Keyword::LISTING]) {
15950            Some(ActionCreateObjectType::DataExchangeListing)
15951        } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
15952            Some(ActionCreateObjectType::ExternalVolume)
15953        } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
15954            Some(ActionCreateObjectType::FailoverGroup)
15955        } else if self.parse_keywords(&[Keyword::NETWORK, Keyword::POLICY]) {
15956            Some(ActionCreateObjectType::NetworkPolicy)
15957        } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::LISTING]) {
15958            Some(ActionCreateObjectType::OrganiationListing)
15959        } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
15960            Some(ActionCreateObjectType::ReplicationGroup)
15961        }
15962        // Single-word object types
15963        else if self.parse_keyword(Keyword::ACCOUNT) {
15964            Some(ActionCreateObjectType::Account)
15965        } else if self.parse_keyword(Keyword::APPLICATION) {
15966            Some(ActionCreateObjectType::Application)
15967        } else if self.parse_keyword(Keyword::DATABASE) {
15968            Some(ActionCreateObjectType::Database)
15969        } else if self.parse_keyword(Keyword::INTEGRATION) {
15970            Some(ActionCreateObjectType::Integration)
15971        } else if self.parse_keyword(Keyword::ROLE) {
15972            Some(ActionCreateObjectType::Role)
15973        } else if self.parse_keyword(Keyword::SCHEMA) {
15974            Some(ActionCreateObjectType::Schema)
15975        } else if self.parse_keyword(Keyword::SHARE) {
15976            Some(ActionCreateObjectType::Share)
15977        } else if self.parse_keyword(Keyword::USER) {
15978            Some(ActionCreateObjectType::User)
15979        } else if self.parse_keyword(Keyword::WAREHOUSE) {
15980            Some(ActionCreateObjectType::Warehouse)
15981        } else {
15982            None
15983        }
15984    }
15985
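    /// Parse the policy or tag type that must follow the `APPLY` privilege,
    /// e.g. (illustrative):
    ///
    /// ```sql
    /// GRANT APPLY MASKING POLICY TO ROLE r1
    /// ```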
15986    fn parse_action_apply_type(&mut self) -> Result<ActionApplyType, ParserError> {
15987        if self.parse_keywords(&[Keyword::AGGREGATION, Keyword::POLICY]) {
15988            Ok(ActionApplyType::AggregationPolicy)
15989        } else if self.parse_keywords(&[Keyword::AUTHENTICATION, Keyword::POLICY]) {
15990            Ok(ActionApplyType::AuthenticationPolicy)
15991        } else if self.parse_keywords(&[Keyword::JOIN, Keyword::POLICY]) {
15992            Ok(ActionApplyType::JoinPolicy)
15993        } else if self.parse_keywords(&[Keyword::MASKING, Keyword::POLICY]) {
15994            Ok(ActionApplyType::MaskingPolicy)
15995        } else if self.parse_keywords(&[Keyword::PACKAGES, Keyword::POLICY]) {
15996            Ok(ActionApplyType::PackagesPolicy)
15997        } else if self.parse_keywords(&[Keyword::PASSWORD, Keyword::POLICY]) {
15998            Ok(ActionApplyType::PasswordPolicy)
15999        } else if self.parse_keywords(&[Keyword::PROJECTION, Keyword::POLICY]) {
16000            Ok(ActionApplyType::ProjectionPolicy)
16001        } else if self.parse_keywords(&[Keyword::ROW, Keyword::ACCESS, Keyword::POLICY]) {
16002            Ok(ActionApplyType::RowAccessPolicy)
16003        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::POLICY]) {
16004            Ok(ActionApplyType::SessionPolicy)
16005        } else if self.parse_keyword(Keyword::TAG) {
16006            Ok(ActionApplyType::Tag)
16007        } else {
16008            self.expected("GRANT APPLY type", self.peek_token())
16009        }
16010    }
16011
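    /// Parse the optional object type that may follow `EXECUTE`/`EXEC`,
    /// e.g. (illustrative):
    ///
    /// ```sql
    /// GRANT EXECUTE TASK TO ROLE r1
    /// ```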
16012    fn maybe_parse_action_execute_obj_type(&mut self) -> Option<ActionExecuteObjectType> {
16013        if self.parse_keywords(&[Keyword::DATA, Keyword::METRIC, Keyword::FUNCTION]) {
16014            Some(ActionExecuteObjectType::DataMetricFunction)
16015        } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::ALERT]) {
16016            Some(ActionExecuteObjectType::ManagedAlert)
16017        } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::TASK]) {
16018            Some(ActionExecuteObjectType::ManagedTask)
16019        } else if self.parse_keyword(Keyword::ALERT) {
16020            Some(ActionExecuteObjectType::Alert)
16021        } else if self.parse_keyword(Keyword::TASK) {
16022            Some(ActionExecuteObjectType::Task)
16023        } else {
16024            None
16025        }
16026    }
16027
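    /// Parse the object type that must follow the `MANAGE` privilege,
    /// e.g. (illustrative):
    ///
    /// ```sql
    /// GRANT MANAGE GRANTS TO ROLE r1
    /// ```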
16028    fn parse_action_manage_type(&mut self) -> Result<ActionManageType, ParserError> {
16029        if self.parse_keywords(&[Keyword::ACCOUNT, Keyword::SUPPORT, Keyword::CASES]) {
16030            Ok(ActionManageType::AccountSupportCases)
16031        } else if self.parse_keywords(&[Keyword::EVENT, Keyword::SHARING]) {
16032            Ok(ActionManageType::EventSharing)
16033        } else if self.parse_keywords(&[Keyword::LISTING, Keyword::AUTO, Keyword::FULFILLMENT]) {
16034            Ok(ActionManageType::ListingAutoFulfillment)
16035        } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::SUPPORT, Keyword::CASES]) {
16036            Ok(ActionManageType::OrganizationSupportCases)
16037        } else if self.parse_keywords(&[Keyword::USER, Keyword::SUPPORT, Keyword::CASES]) {
16038            Ok(ActionManageType::UserSupportCases)
16039        } else if self.parse_keyword(Keyword::GRANTS) {
16040            Ok(ActionManageType::Grants)
16041        } else if self.parse_keyword(Keyword::WAREHOUSES) {
16042            Ok(ActionManageType::Warehouses)
16043        } else {
16044            self.expected("GRANT MANAGE type", self.peek_token())
16045        }
16046    }
16047
16048    fn parse_action_modify_type(&mut self) -> Option<ActionModifyType> {
16049        if self.parse_keywords(&[Keyword::LOG, Keyword::LEVEL]) {
16050            Some(ActionModifyType::LogLevel)
16051        } else if self.parse_keywords(&[Keyword::TRACE, Keyword::LEVEL]) {
16052            Some(ActionModifyType::TraceLevel)
16053        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::LOG, Keyword::LEVEL]) {
16054            Some(ActionModifyType::SessionLogLevel)
16055        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::TRACE, Keyword::LEVEL]) {
16056            Some(ActionModifyType::SessionTraceLevel)
16057        } else {
16058            None
16059        }
16060    }
16061
16062    fn parse_action_monitor_type(&mut self) -> Option<ActionMonitorType> {
16063        if self.parse_keyword(Keyword::EXECUTION) {
16064            Some(ActionMonitorType::Execution)
16065        } else if self.parse_keyword(Keyword::SECURITY) {
16066            Some(ActionMonitorType::Security)
16067        } else if self.parse_keyword(Keyword::USAGE) {
16068            Some(ActionMonitorType::Usage)
16069        } else {
16070            None
16071        }
16072    }
16073
16074    pub fn parse_grantee_name(&mut self) -> Result<GranteeName, ParserError> {
16075        let mut name = self.parse_object_name(false)?;
16076        if self.dialect.supports_user_host_grantee()
16077            && name.0.len() == 1
16078            && name.0[0].as_ident().is_some()
16079            && self.consume_token(&Token::AtSign)
16080        {
16081            let user = name.0.pop().unwrap().as_ident().unwrap().clone();
16082            let host = self.parse_identifier()?;
16083            Ok(GranteeName::UserHost { user, host })
16084        } else {
16085            Ok(GranteeName::ObjectName(name))
16086        }
16087    }
16088
16089    /// Parse [`Statement::Deny`]
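    ///
    /// For example (illustrative; unlike GRANT, an object clause is required):
    ///
    /// ```sql
    /// DENY SELECT ON customers TO USER alice
    /// ```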
16090    pub fn parse_deny(&mut self) -> Result<Statement, ParserError> {
16091        self.expect_keyword(Keyword::DENY)?;
16092
16093        let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
16094        let objects = match objects {
16095            Some(o) => o,
16096            None => {
16097                return parser_err!(
16098                    "DENY statements must specify an object",
16099                    self.peek_token().span.start
16100                )
16101            }
16102        };
16103
16104        self.expect_keyword_is(Keyword::TO)?;
16105        let grantees = self.parse_grantees()?;
16106        let cascade = self.parse_cascade_option();
16107        let granted_by = if self.parse_keywords(&[Keyword::AS]) {
16108            Some(self.parse_identifier()?)
16109        } else {
16110            None
16111        };
16112
16113        Ok(Statement::Deny(DenyStatement {
16114            privileges,
16115            objects,
16116            grantees,
16117            cascade,
16118            granted_by,
16119        }))
16120    }
16121
16122    /// Parse a REVOKE statement
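    ///
    /// For example (illustrative):
    ///
    /// ```sql
    /// REVOKE SELECT ON customers FROM ROLE analyst GRANTED BY admin CASCADE
    /// ```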
16123    pub fn parse_revoke(&mut self) -> Result<Statement, ParserError> {
16124        let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
16125
16126        self.expect_keyword_is(Keyword::FROM)?;
16127        let grantees = self.parse_grantees()?;
16128
16129        let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
16130            Some(self.parse_identifier()?)
16131        } else {
16132            None
16133        };
16134
16135        let cascade = self.parse_cascade_option();
16136
16137        Ok(Statement::Revoke {
16138            privileges,
16139            objects,
16140            grantees,
16141            granted_by,
16142            cascade,
16143        })
16144    }
16145
16146    /// Parse a REPLACE statement
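    ///
    /// Only the MySQL-style form is supported; a minimal example (illustrative):
    ///
    /// ```sql
    /// REPLACE INTO t (a, b) VALUES (1, 2)
    /// ```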
16147    pub fn parse_replace(
16148        &mut self,
16149        replace_token: TokenWithSpan,
16150    ) -> Result<Statement, ParserError> {
16151        if !dialect_of!(self is MySqlDialect | GenericDialect) {
16152            return parser_err!(
16153                "Unsupported statement REPLACE",
16154                self.peek_token().span.start
16155            );
16156        }
16157
16158        let mut insert = self.parse_insert(replace_token)?;
16159        if let Statement::Insert(Insert { replace_into, .. }) = &mut insert {
16160            *replace_into = true;
16161        }
16162
16163        Ok(insert)
16164    }
16165
16166    /// Parse an INSERT statement, returning a `Box`ed SetExpr
16167    ///
16168    /// This is used to reduce the size of the stack frames in debug builds
16169    fn parse_insert_setexpr_boxed(
16170        &mut self,
16171        insert_token: TokenWithSpan,
16172    ) -> Result<Box<SetExpr>, ParserError> {
16173        Ok(Box::new(SetExpr::Insert(self.parse_insert(insert_token)?)))
16174    }
16175
16176    /// Parse an INSERT statement
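    ///
    /// Among others, forms along these lines are handled (illustrative; the
    /// `ON CONFLICT` and `INSERT ... SET` variants are dialect-dependent):
    ///
    /// ```sql
    /// INSERT INTO t (a, b) VALUES (1, 2) ON CONFLICT (a) DO UPDATE SET b = 3 RETURNING a
    /// INSERT INTO t SET a = 1, b = 2
    /// ```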
16177    pub fn parse_insert(&mut self, insert_token: TokenWithSpan) -> Result<Statement, ParserError> {
16178        let or = self.parse_conflict_clause();
16179        let priority = if !dialect_of!(self is MySqlDialect | GenericDialect) {
16180            None
16181        } else if self.parse_keyword(Keyword::LOW_PRIORITY) {
16182            Some(MysqlInsertPriority::LowPriority)
16183        } else if self.parse_keyword(Keyword::DELAYED) {
16184            Some(MysqlInsertPriority::Delayed)
16185        } else if self.parse_keyword(Keyword::HIGH_PRIORITY) {
16186            Some(MysqlInsertPriority::HighPriority)
16187        } else {
16188            None
16189        };
16190
16191        let ignore = dialect_of!(self is MySqlDialect | GenericDialect)
16192            && self.parse_keyword(Keyword::IGNORE);
16193
16194        let replace_into = false;
16195
16196        let overwrite = self.parse_keyword(Keyword::OVERWRITE);
16197        let into = self.parse_keyword(Keyword::INTO);
16198
16199        let local = self.parse_keyword(Keyword::LOCAL);
16200
16201        if self.parse_keyword(Keyword::DIRECTORY) {
16202            let path = self.parse_literal_string()?;
16203            let file_format = if self.parse_keywords(&[Keyword::STORED, Keyword::AS]) {
16204                Some(self.parse_file_format()?)
16205            } else {
16206                None
16207            };
16208            let source = self.parse_query()?;
16209            Ok(Statement::Directory {
16210                local,
16211                path,
16212                overwrite,
16213                file_format,
16214                source,
16215            })
16216        } else {
16217            // Hive lets you put the TABLE keyword here regardless
16218            let table = self.parse_keyword(Keyword::TABLE);
16219            let table_object = self.parse_table_object()?;
16220
16221            let table_alias =
16222                if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::AS) {
16223                    Some(self.parse_identifier()?)
16224                } else {
16225                    None
16226                };
16227
16228            let is_mysql = dialect_of!(self is MySqlDialect);
16229
16230            let (columns, partitioned, after_columns, source, assignments) = if self
16231                .parse_keywords(&[Keyword::DEFAULT, Keyword::VALUES])
16232            {
16233                (vec![], None, vec![], None, vec![])
16234            } else {
16235                let (columns, partitioned, after_columns) = if !self.peek_subquery_start() {
16236                    let columns = self.parse_parenthesized_column_list(Optional, is_mysql)?;
16237
16238                    let partitioned = self.parse_insert_partition()?;
16239                    // Hive allows you to specify columns after partitions as well if you want.
16240                    let after_columns = if dialect_of!(self is HiveDialect) {
16241                        self.parse_parenthesized_column_list(Optional, false)?
16242                    } else {
16243                        vec![]
16244                    };
16245                    (columns, partitioned, after_columns)
16246                } else {
16247                    Default::default()
16248                };
16249
16250                let (source, assignments) = if self.peek_keyword(Keyword::FORMAT)
16251                    || self.peek_keyword(Keyword::SETTINGS)
16252                {
16253                    (None, vec![])
16254                } else if self.dialect.supports_insert_set() && self.parse_keyword(Keyword::SET) {
16255                    (None, self.parse_comma_separated(Parser::parse_assignment)?)
16256                } else {
16257                    (Some(self.parse_query()?), vec![])
16258                };
16259
16260                (columns, partitioned, after_columns, source, assignments)
16261            };
16262
16263            let (format_clause, settings) = if self.dialect.supports_insert_format() {
16264                // The SETTINGS clause always comes before `FORMAT` for ClickHouse:
16265                // <https://clickhouse.com/docs/en/sql-reference/statements/insert-into>
16266                let settings = self.parse_settings()?;
16267
16268                let format = if self.parse_keyword(Keyword::FORMAT) {
16269                    Some(self.parse_input_format_clause()?)
16270                } else {
16271                    None
16272                };
16273
16274                (format, settings)
16275            } else {
16276                Default::default()
16277            };
16278
16279            let insert_alias = if dialect_of!(self is MySqlDialect | GenericDialect)
16280                && self.parse_keyword(Keyword::AS)
16281            {
16282                let row_alias = self.parse_object_name(false)?;
16283                let col_aliases = Some(self.parse_parenthesized_column_list(Optional, false)?);
16284                Some(InsertAliases {
16285                    row_alias,
16286                    col_aliases,
16287                })
16288            } else {
16289                None
16290            };
16291
16292            let on = if self.parse_keyword(Keyword::ON) {
16293                if self.parse_keyword(Keyword::CONFLICT) {
16294                    let conflict_target =
16295                        if self.parse_keywords(&[Keyword::ON, Keyword::CONSTRAINT]) {
16296                            Some(ConflictTarget::OnConstraint(self.parse_object_name(false)?))
16297                        } else if self.peek_token() == Token::LParen {
16298                            Some(ConflictTarget::Columns(
16299                                self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
16300                            ))
16301                        } else {
16302                            None
16303                        };
16304
16305                    self.expect_keyword_is(Keyword::DO)?;
16306                    let action = if self.parse_keyword(Keyword::NOTHING) {
16307                        OnConflictAction::DoNothing
16308                    } else {
16309                        self.expect_keyword_is(Keyword::UPDATE)?;
16310                        self.expect_keyword_is(Keyword::SET)?;
16311                        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
16312                        let selection = if self.parse_keyword(Keyword::WHERE) {
16313                            Some(self.parse_expr()?)
16314                        } else {
16315                            None
16316                        };
16317                        OnConflictAction::DoUpdate(DoUpdate {
16318                            assignments,
16319                            selection,
16320                        })
16321                    };
16322
16323                    Some(OnInsert::OnConflict(OnConflict {
16324                        conflict_target,
16325                        action,
16326                    }))
16327                } else {
16328                    self.expect_keyword_is(Keyword::DUPLICATE)?;
16329                    self.expect_keyword_is(Keyword::KEY)?;
16330                    self.expect_keyword_is(Keyword::UPDATE)?;
16331                    let l = self.parse_comma_separated(Parser::parse_assignment)?;
16332
16333                    Some(OnInsert::DuplicateKeyUpdate(l))
16334                }
16335            } else {
16336                None
16337            };
16338
16339            let returning = if self.parse_keyword(Keyword::RETURNING) {
16340                Some(self.parse_comma_separated(Parser::parse_select_item)?)
16341            } else {
16342                None
16343            };
16344
16345            Ok(Statement::Insert(Insert {
16346                insert_token: insert_token.into(),
16347                or,
16348                table: table_object,
16349                table_alias,
16350                ignore,
16351                into,
16352                overwrite,
16353                partitioned,
16354                columns,
16355                after_columns,
16356                source,
16357                assignments,
16358                has_table_keyword: table,
16359                on,
16360                returning,
16361                replace_into,
16362                priority,
16363                insert_alias,
16364                settings,
16365                format_clause,
16366            }))
16367        }
16368    }
16369
16370    // Parses the input format clause used by [ClickHouse].
16371    //
16372    // <https://clickhouse.com/docs/en/interfaces/formats>
16373    pub fn parse_input_format_clause(&mut self) -> Result<InputFormatClause, ParserError> {
16374        let ident = self.parse_identifier()?;
16375        let values = self
16376            .maybe_parse(|p| p.parse_comma_separated(|p| p.parse_expr()))?
16377            .unwrap_or_default();
16378
16379        Ok(InputFormatClause { ident, values })
16380    }
16381
16382    /// Returns true if the immediate tokens look like the
16383    /// beginning of a subquery. `(SELECT ...`
16384    fn peek_subquery_start(&mut self) -> bool {
16385        let [maybe_lparen, maybe_select] = self.peek_tokens();
16386        Token::LParen == maybe_lparen
16387            && matches!(maybe_select, Token::Word(w) if w.keyword == Keyword::SELECT)
16388    }
16389
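    /// Parse an optional SQLite-style `OR <resolution>` conflict clause,
    /// e.g. (illustrative):
    ///
    /// ```sql
    /// INSERT OR IGNORE INTO t (a) VALUES (1)
    /// ```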
16390    fn parse_conflict_clause(&mut self) -> Option<SqliteOnConflict> {
16391        if self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]) {
16392            Some(SqliteOnConflict::Replace)
16393        } else if self.parse_keywords(&[Keyword::OR, Keyword::ROLLBACK]) {
16394            Some(SqliteOnConflict::Rollback)
16395        } else if self.parse_keywords(&[Keyword::OR, Keyword::ABORT]) {
16396            Some(SqliteOnConflict::Abort)
16397        } else if self.parse_keywords(&[Keyword::OR, Keyword::FAIL]) {
16398            Some(SqliteOnConflict::Fail)
16399        } else if self.parse_keywords(&[Keyword::OR, Keyword::IGNORE]) {
16400            Some(SqliteOnConflict::Ignore)
16401        } else if self.parse_keyword(Keyword::REPLACE) {
16402            Some(SqliteOnConflict::Replace)
16403        } else {
16404            None
16405        }
16406    }
16407
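    /// Parse an optional Hive-style `PARTITION (...)` clause in an INSERT,
    /// e.g. (illustrative):
    ///
    /// ```sql
    /// INSERT INTO TABLE t PARTITION (dt = '2024-01-01') SELECT * FROM src
    /// ```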
16408    pub fn parse_insert_partition(&mut self) -> Result<Option<Vec<Expr>>, ParserError> {
16409        if self.parse_keyword(Keyword::PARTITION) {
16410            self.expect_token(&Token::LParen)?;
16411            let partition_cols = Some(self.parse_comma_separated(Parser::parse_expr)?);
16412            self.expect_token(&Token::RParen)?;
16413            Ok(partition_cols)
16414        } else {
16415            Ok(None)
16416        }
16417    }
16418
16419    pub fn parse_load_data_table_format(
16420        &mut self,
16421    ) -> Result<Option<HiveLoadDataFormat>, ParserError> {
16422        if self.parse_keyword(Keyword::INPUTFORMAT) {
16423            let input_format = self.parse_expr()?;
16424            self.expect_keyword_is(Keyword::SERDE)?;
16425            let serde = self.parse_expr()?;
16426            Ok(Some(HiveLoadDataFormat {
16427                input_format,
16428                serde,
16429            }))
16430        } else {
16431            Ok(None)
16432        }
16433    }
16434
16435    /// Parse an UPDATE statement, returning a `Box`ed SetExpr
16436    ///
16437    /// This is used to reduce the size of the stack frames in debug builds
16438    fn parse_update_setexpr_boxed(
16439        &mut self,
16440        update_token: TokenWithSpan,
16441    ) -> Result<Box<SetExpr>, ParserError> {
16442        Ok(Box::new(SetExpr::Update(self.parse_update(update_token)?)))
16443    }
16444
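    /// Parse an UPDATE statement, e.g. (illustrative; `FROM` and `RETURNING`
    /// support is dialect-dependent):
    ///
    /// ```sql
    /// UPDATE t SET a = 1 FROM u WHERE t.id = u.id RETURNING t.id
    /// ```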
16445    pub fn parse_update(&mut self, update_token: TokenWithSpan) -> Result<Statement, ParserError> {
16446        let or = self.parse_conflict_clause();
16447        let table = self.parse_table_and_joins()?;
16448        let from_before_set = if self.parse_keyword(Keyword::FROM) {
16449            Some(UpdateTableFromKind::BeforeSet(
16450                self.parse_table_with_joins()?,
16451            ))
16452        } else {
16453            None
16454        };
16455        self.expect_keyword(Keyword::SET)?;
16456        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
16457        let from = if from_before_set.is_none() && self.parse_keyword(Keyword::FROM) {
16458            Some(UpdateTableFromKind::AfterSet(
16459                self.parse_table_with_joins()?,
16460            ))
16461        } else {
16462            from_before_set
16463        };
16464        let selection = if self.parse_keyword(Keyword::WHERE) {
16465            Some(self.parse_expr()?)
16466        } else {
16467            None
16468        };
16469        let returning = if self.parse_keyword(Keyword::RETURNING) {
16470            Some(self.parse_comma_separated(Parser::parse_select_item)?)
16471        } else {
16472            None
16473        };
16474        let limit = if self.parse_keyword(Keyword::LIMIT) {
16475            Some(self.parse_expr()?)
16476        } else {
16477            None
16478        };
16479        Ok(Update {
16480            update_token: update_token.into(),
16481            table,
16482            assignments,
16483            from,
16484            selection,
16485            returning,
16486            or,
16487            limit,
16488        }
16489        .into())
16490    }
16491
16492    /// Parse a `var = expr` assignment, used in an UPDATE statement
16493    pub fn parse_assignment(&mut self) -> Result<Assignment, ParserError> {
16494        let target = self.parse_assignment_target()?;
16495        self.expect_token(&Token::Eq)?;
16496        let value = self.parse_expr()?;
16497        Ok(Assignment { target, value })
16498    }
16499
16500    /// Parse the left-hand side of an assignment, used in an UPDATE statement
16501    pub fn parse_assignment_target(&mut self) -> Result<AssignmentTarget, ParserError> {
16502        if self.consume_token(&Token::LParen) {
16503            let columns = self.parse_comma_separated(|p| p.parse_object_name(false))?;
16504            self.expect_token(&Token::RParen)?;
16505            Ok(AssignmentTarget::Tuple(columns))
16506        } else {
16507            let column = self.parse_object_name(false)?;
16508            Ok(AssignmentTarget::ColumnName(column))
16509        }
16510    }
16511
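    /// Parse a single function argument, which may be named when the dialect
    /// supports it, e.g. with the `=>` operator (illustrative; `my_func` is a
    /// placeholder):
    ///
    /// ```sql
    /// SELECT my_func(a => 1, b => 2)
    /// ```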
16512    pub fn parse_function_args(&mut self) -> Result<FunctionArg, ParserError> {
16513        let arg = if self.dialect.supports_named_fn_args_with_expr_name() {
16514            self.maybe_parse(|p| {
16515                let name = p.parse_expr()?;
16516                let operator = p.parse_function_named_arg_operator()?;
16517                let arg = p.parse_wildcard_expr()?.into();
16518                Ok(FunctionArg::ExprNamed {
16519                    name,
16520                    arg,
16521                    operator,
16522                })
16523            })?
16524        } else {
16525            self.maybe_parse(|p| {
16526                let name = p.parse_identifier()?;
16527                let operator = p.parse_function_named_arg_operator()?;
16528                let arg = p.parse_wildcard_expr()?.into();
16529                Ok(FunctionArg::Named {
16530                    name,
16531                    arg,
16532                    operator,
16533                })
16534            })?
16535        };
16536        if let Some(arg) = arg {
16537            return Ok(arg);
16538        }
16539        Ok(FunctionArg::Unnamed(self.parse_wildcard_expr()?.into()))
16540    }
16541
16542    fn parse_function_named_arg_operator(&mut self) -> Result<FunctionArgOperator, ParserError> {
16543        if self.parse_keyword(Keyword::VALUE) {
16544            return Ok(FunctionArgOperator::Value);
16545        }
16546        let tok = self.next_token();
16547        match tok.token {
16548            Token::RArrow if self.dialect.supports_named_fn_args_with_rarrow_operator() => {
16549                Ok(FunctionArgOperator::RightArrow)
16550            }
16551            Token::Eq if self.dialect.supports_named_fn_args_with_eq_operator() => {
16552                Ok(FunctionArgOperator::Equals)
16553            }
16554            Token::Assignment
16555                if self
16556                    .dialect
16557                    .supports_named_fn_args_with_assignment_operator() =>
16558            {
16559                Ok(FunctionArgOperator::Assignment)
16560            }
16561            Token::Colon if self.dialect.supports_named_fn_args_with_colon_operator() => {
16562                Ok(FunctionArgOperator::Colon)
16563            }
16564            _ => {
16565                self.prev_token();
16566                self.expected("argument operator", tok)
16567            }
16568        }
16569    }
16570
16571    pub fn parse_optional_args(&mut self) -> Result<Vec<FunctionArg>, ParserError> {
16572        if self.consume_token(&Token::RParen) {
16573            Ok(vec![])
16574        } else {
16575            let args = self.parse_comma_separated(Parser::parse_function_args)?;
16576            self.expect_token(&Token::RParen)?;
16577            Ok(args)
16578        }
16579    }
16580
16581    fn parse_table_function_args(&mut self) -> Result<TableFunctionArgs, ParserError> {
16582        if self.consume_token(&Token::RParen) {
16583            return Ok(TableFunctionArgs {
16584                args: vec![],
16585                settings: None,
16586            });
16587        }
16588        let mut args = vec![];
16589        let settings = loop {
16590            if let Some(settings) = self.parse_settings()? {
16591                break Some(settings);
16592            }
16593            args.push(self.parse_function_args()?);
16594            if self.is_parse_comma_separated_end() {
16595                break None;
16596            }
16597        };
16598        self.expect_token(&Token::RParen)?;
16599        Ok(TableFunctionArgs { args, settings })
16600    }
16601
16602    /// Parses a potentially empty list of arguments to a function
16603    /// (including the closing parenthesis).
16604    ///
16605    /// Examples:
16606    /// ```sql
16607    /// FIRST_VALUE(x ORDER BY 1,2,3);
16608    /// FIRST_VALUE(x IGNORE NULLS);
16609    /// ```
16610    fn parse_function_argument_list(&mut self) -> Result<FunctionArgumentList, ParserError> {
16611        let mut clauses = vec![];
16612
16613        // Handle clauses that may exist with an empty argument list
16614
16615        if let Some(null_clause) = self.parse_json_null_clause() {
16616            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
16617        }
16618
16619        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
16620            clauses.push(FunctionArgumentClause::JsonReturningClause(
16621                json_returning_clause,
16622            ));
16623        }
16624
16625        if self.consume_token(&Token::RParen) {
16626            return Ok(FunctionArgumentList {
16627                duplicate_treatment: None,
16628                args: vec![],
16629                clauses,
16630            });
16631        }
16632
16633        let duplicate_treatment = self.parse_duplicate_treatment()?;
16634        let args = self.parse_comma_separated(Parser::parse_function_args)?;
16635
16636        if self.dialect.supports_window_function_null_treatment_arg() {
16637            if let Some(null_treatment) = self.parse_null_treatment()? {
16638                clauses.push(FunctionArgumentClause::IgnoreOrRespectNulls(null_treatment));
16639            }
16640        }
16641
16642        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
16643            clauses.push(FunctionArgumentClause::OrderBy(
16644                self.parse_comma_separated(Parser::parse_order_by_expr)?,
16645            ));
16646        }
16647
16648        if self.parse_keyword(Keyword::LIMIT) {
16649            clauses.push(FunctionArgumentClause::Limit(self.parse_expr()?));
16650        }
16651
16652        if dialect_of!(self is GenericDialect | BigQueryDialect)
16653            && self.parse_keyword(Keyword::HAVING)
16654        {
16655            let kind = match self.expect_one_of_keywords(&[Keyword::MIN, Keyword::MAX])? {
16656                Keyword::MIN => HavingBoundKind::Min,
16657                Keyword::MAX => HavingBoundKind::Max,
16658                unexpected_keyword => return Err(ParserError::ParserError(
16659                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in having bound"),
16660                )),
16661            };
16662            clauses.push(FunctionArgumentClause::Having(HavingBound(
16663                kind,
16664                self.parse_expr()?,
16665            )))
16666        }
16667
16668        if dialect_of!(self is GenericDialect | MySqlDialect)
16669            && self.parse_keyword(Keyword::SEPARATOR)
16670        {
16671            clauses.push(FunctionArgumentClause::Separator(self.parse_value()?.value));
16672        }
16673
16674        if let Some(on_overflow) = self.parse_listagg_on_overflow()? {
16675            clauses.push(FunctionArgumentClause::OnOverflow(on_overflow));
16676        }
16677
16678        if let Some(null_clause) = self.parse_json_null_clause() {
16679            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
16680        }
16681
16682        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
16683            clauses.push(FunctionArgumentClause::JsonReturningClause(
16684                json_returning_clause,
16685            ));
16686        }
16687
16688        self.expect_token(&Token::RParen)?;
16689        Ok(FunctionArgumentList {
16690            duplicate_treatment,
16691            args,
16692            clauses,
16693        })
16694    }
16695
16696    fn parse_json_null_clause(&mut self) -> Option<JsonNullClause> {
16697        if self.parse_keywords(&[Keyword::ABSENT, Keyword::ON, Keyword::NULL]) {
16698            Some(JsonNullClause::AbsentOnNull)
16699        } else if self.parse_keywords(&[Keyword::NULL, Keyword::ON, Keyword::NULL]) {
16700            Some(JsonNullClause::NullOnNull)
16701        } else {
16702            None
16703        }
16704    }
16705
16706    fn maybe_parse_json_returning_clause(
16707        &mut self,
16708    ) -> Result<Option<JsonReturningClause>, ParserError> {
16709        if self.parse_keyword(Keyword::RETURNING) {
16710            let data_type = self.parse_data_type()?;
16711            Ok(Some(JsonReturningClause { data_type }))
16712        } else {
16713            Ok(None)
16714        }
16715    }
16716
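    /// Parse an optional leading `ALL`/`DISTINCT` in a function argument list
    /// (specifying both is an error), e.g. (illustrative):
    ///
    /// ```sql
    /// SELECT COUNT(DISTINCT user_id) FROM events
    /// ```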
16717    fn parse_duplicate_treatment(&mut self) -> Result<Option<DuplicateTreatment>, ParserError> {
16718        let loc = self.peek_token().span.start;
16719        match (
16720            self.parse_keyword(Keyword::ALL),
16721            self.parse_keyword(Keyword::DISTINCT),
16722        ) {
16723            (true, false) => Ok(Some(DuplicateTreatment::All)),
16724            (false, true) => Ok(Some(DuplicateTreatment::Distinct)),
16725            (false, false) => Ok(None),
16726            (true, true) => parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc),
16727        }
16728    }
16729
16730    /// Parse a single projection (select item) from the comma-delimited list after SELECT
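    ///
    /// Besides plain expressions and wildcards, this also covers dialect-specific
    /// forms such as the `alias = expr` assignment style (illustrative):
    ///
    /// ```sql
    /// SELECT total = price * quantity FROM orders
    /// ```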
16731    pub fn parse_select_item(&mut self) -> Result<SelectItem, ParserError> {
16732        let prefix = self
16733            .parse_one_of_keywords(
16734                self.dialect
16735                    .get_reserved_keywords_for_select_item_operator(),
16736            )
16737            .map(|keyword| Ident::new(format!("{keyword:?}")));
16738
16739        match self.parse_wildcard_expr()? {
16740            Expr::QualifiedWildcard(prefix, token) => Ok(SelectItem::QualifiedWildcard(
16741                SelectItemQualifiedWildcardKind::ObjectName(prefix),
16742                self.parse_wildcard_additional_options(token.0)?,
16743            )),
16744            Expr::Wildcard(token) => Ok(SelectItem::Wildcard(
16745                self.parse_wildcard_additional_options(token.0)?,
16746            )),
16747            Expr::Identifier(v) if v.value.to_lowercase() == "from" && v.quote_style.is_none() => {
16748                parser_err!(
16749                    format!("Expected an expression, found: {}", v),
16750                    self.peek_token().span.start
16751                )
16752            }
16753            Expr::BinaryOp {
16754                left,
16755                op: BinaryOperator::Eq,
16756                right,
16757            } if self.dialect.supports_eq_alias_assignment()
16758                && matches!(left.as_ref(), Expr::Identifier(_)) =>
16759            {
16760                let Expr::Identifier(alias) = *left else {
16761                    return parser_err!(
16762                        "BUG: expected identifier expression as alias",
16763                        self.peek_token().span.start
16764                    );
16765                };
16766                Ok(SelectItem::ExprWithAlias {
16767                    expr: *right,
16768                    alias,
16769                })
16770            }
16771            expr if self.dialect.supports_select_expr_star()
16772                && self.consume_tokens(&[Token::Period, Token::Mul]) =>
16773            {
16774                let wildcard_token = self.get_previous_token().clone();
16775                Ok(SelectItem::QualifiedWildcard(
16776                    SelectItemQualifiedWildcardKind::Expr(expr),
16777                    self.parse_wildcard_additional_options(wildcard_token)?,
16778                ))
16779            }
16780            expr => self
16781                .maybe_parse_select_item_alias()
16782                .map(|alias| match alias {
16783                    Some(alias) => SelectItem::ExprWithAlias {
16784                        expr: maybe_prefixed_expr(expr, prefix),
16785                        alias,
16786                    },
16787                    None => SelectItem::UnnamedExpr(maybe_prefixed_expr(expr, prefix)),
16788                }),
16789        }
16790    }
16791
16792    /// Parse the [`WildcardAdditionalOptions`] that may follow a wildcard in a select item.
16793    ///
16794    /// Options that are not present (or not supported by the dialect) are returned as `None`.
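    ///
    /// Illustrative example of the kind of input this accepts (support for each
    /// option is dialect-dependent; table and column names are made up):
    ///
    /// ```sql
    /// SELECT * EXCLUDE (department_id) REPLACE (salary / 100 AS salary) FROM employees
    /// ```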
16795    pub fn parse_wildcard_additional_options(
16796        &mut self,
16797        wildcard_token: TokenWithSpan,
16798    ) -> Result<WildcardAdditionalOptions, ParserError> {
16799        let opt_ilike = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
16800            self.parse_optional_select_item_ilike()?
16801        } else {
16802            None
16803        };
16804        let opt_exclude = if opt_ilike.is_none() && self.dialect.supports_select_wildcard_exclude()
16805        {
16806            self.parse_optional_select_item_exclude()?
16807        } else {
16808            None
16809        };
16810        let opt_except = if self.dialect.supports_select_wildcard_except() {
16811            self.parse_optional_select_item_except()?
16812        } else {
16813            None
16814        };
16815        let opt_replace = if dialect_of!(self is GenericDialect | BigQueryDialect | ClickHouseDialect | DuckDbDialect | SnowflakeDialect)
16816        {
16817            self.parse_optional_select_item_replace()?
16818        } else {
16819            None
16820        };
16821        let opt_rename = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
16822            self.parse_optional_select_item_rename()?
16823        } else {
16824            None
16825        };
16826
16827        Ok(WildcardAdditionalOptions {
16828            wildcard_token: wildcard_token.into(),
16829            opt_ilike,
16830            opt_exclude,
16831            opt_except,
16832            opt_rename,
16833            opt_replace,
16834        })
16835    }
16836
16837    /// Parse an optional [`Ilike`](IlikeSelectItem) clause for wildcard select items.
16838    ///
16839    /// Returns `None` if the `ILIKE` keyword is not present.
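    ///
    /// Illustrative example (Snowflake-style syntax; the pattern must be a
    /// single-quoted string, names are made up):
    ///
    /// ```sql
    /// SELECT * ILIKE '%id%' FROM employees
    /// ```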
16840    pub fn parse_optional_select_item_ilike(
16841        &mut self,
16842    ) -> Result<Option<IlikeSelectItem>, ParserError> {
16843        let opt_ilike = if self.parse_keyword(Keyword::ILIKE) {
16844            let next_token = self.next_token();
16845            let pattern = match next_token.token {
16846                Token::SingleQuotedString(s) => s,
16847                _ => return self.expected("ilike pattern", next_token),
16848            };
16849            Some(IlikeSelectItem { pattern })
16850        } else {
16851            None
16852        };
16853        Ok(opt_ilike)
16854    }
16855
16856    /// Parse an optional [`Exclude`](ExcludeSelectItem) clause for wildcard select items.
16857    ///
16858    /// Returns `None` if the `EXCLUDE` keyword is not present.
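    ///
    /// Illustrative example (dialect-dependent; names are made up):
    ///
    /// ```sql
    /// SELECT * EXCLUDE (department_id, salary) FROM employees
    /// ```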
16859    pub fn parse_optional_select_item_exclude(
16860        &mut self,
16861    ) -> Result<Option<ExcludeSelectItem>, ParserError> {
16862        let opt_exclude = if self.parse_keyword(Keyword::EXCLUDE) {
16863            if self.consume_token(&Token::LParen) {
16864                let columns = self.parse_comma_separated(|parser| parser.parse_identifier())?;
16865                self.expect_token(&Token::RParen)?;
16866                Some(ExcludeSelectItem::Multiple(columns))
16867            } else {
16868                let column = self.parse_identifier()?;
16869                Some(ExcludeSelectItem::Single(column))
16870            }
16871        } else {
16872            None
16873        };
16874
16875        Ok(opt_exclude)
16876    }
16877
16878    /// Parse an optional [`Except`](ExceptSelectItem) clause for wildcard select items.
16879    ///
16880    /// Returns `None` if the `EXCEPT` keyword is not present.
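    ///
    /// Illustrative examples (names are made up); the parenthesized form is
    /// BigQuery-style, the bare identifier form is ClickHouse-style:
    ///
    /// ```sql
    /// SELECT * EXCEPT (department_id, salary) FROM employees
    /// SELECT * EXCEPT department_id FROM employees
    /// ```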
16881    pub fn parse_optional_select_item_except(
16882        &mut self,
16883    ) -> Result<Option<ExceptSelectItem>, ParserError> {
16884        let opt_except = if self.parse_keyword(Keyword::EXCEPT) {
16885            if self.peek_token().token == Token::LParen {
16886                let idents = self.parse_parenthesized_column_list(Mandatory, false)?;
16887                match &idents[..] {
16888                    [] => {
16889                        return self.expected(
16890                            "at least one column should be parsed by the EXCEPT clause",
16891                            self.peek_token(),
16892                        )?;
16893                    }
16894                    [first, idents @ ..] => Some(ExceptSelectItem {
16895                        first_element: first.clone(),
16896                        additional_elements: idents.to_vec(),
16897                    }),
16898                }
16899            } else {
16900                // ClickHouse allows EXCEPT column_name
16901                let ident = self.parse_identifier()?;
16902                Some(ExceptSelectItem {
16903                    first_element: ident,
16904                    additional_elements: vec![],
16905                })
16906            }
16907        } else {
16908            None
16909        };
16910
16911        Ok(opt_except)
16912    }
16913
16914    /// Parse an optional [`Rename`](RenameSelectItem) clause for wildcard select items.
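    ///
    /// Illustrative example (Snowflake-style syntax; names are made up):
    ///
    /// ```sql
    /// SELECT * RENAME (department_id AS dept_id) FROM employees
    /// ```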
16915    pub fn parse_optional_select_item_rename(
16916        &mut self,
16917    ) -> Result<Option<RenameSelectItem>, ParserError> {
16918        let opt_rename = if self.parse_keyword(Keyword::RENAME) {
16919            if self.consume_token(&Token::LParen) {
16920                let idents =
16921                    self.parse_comma_separated(|parser| parser.parse_identifier_with_alias())?;
16922                self.expect_token(&Token::RParen)?;
16923                Some(RenameSelectItem::Multiple(idents))
16924            } else {
16925                let ident = self.parse_identifier_with_alias()?;
16926                Some(RenameSelectItem::Single(ident))
16927            }
16928        } else {
16929            None
16930        };
16931
16932        Ok(opt_rename)
16933    }
16934
16935    /// Parse an optional [`Replace`](ReplaceSelectItem) clause for wildcard select items.
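    ///
    /// Illustrative example (dialect-dependent; names are made up):
    ///
    /// ```sql
    /// SELECT * REPLACE (amount / 100 AS amount) FROM payments
    /// ```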
16936    pub fn parse_optional_select_item_replace(
16937        &mut self,
16938    ) -> Result<Option<ReplaceSelectItem>, ParserError> {
16939        let opt_replace = if self.parse_keyword(Keyword::REPLACE) {
16940            if self.consume_token(&Token::LParen) {
16941                let items = self.parse_comma_separated(|parser| {
16942                    Ok(Box::new(parser.parse_replace_elements()?))
16943                })?;
16944                self.expect_token(&Token::RParen)?;
16945                Some(ReplaceSelectItem { items })
16946            } else {
16947                let tok = self.next_token();
16948                return self.expected("( after REPLACE", tok);
16949            }
16950        } else {
16951            None
16952        };
16953
16954        Ok(opt_replace)
16955    }
16956    pub fn parse_replace_elements(&mut self) -> Result<ReplaceSelectElement, ParserError> {
16957        let expr = self.parse_expr()?;
16958        let as_keyword = self.parse_keyword(Keyword::AS);
16959        let ident = self.parse_identifier()?;
16960        Ok(ReplaceSelectElement {
16961            expr,
16962            column_name: ident,
16963            as_keyword,
16964        })
16965    }
16966
16967    /// Parse ASC or DESC. Returns `Some(true)` for ASC, `Some(false)` for DESC, or `None` if
16968    /// neither keyword is present.
16969    pub fn parse_asc_desc(&mut self) -> Option<bool> {
16970        if self.parse_keyword(Keyword::ASC) {
16971            Some(true)
16972        } else if self.parse_keyword(Keyword::DESC) {
16973            Some(false)
16974        } else {
16975            None
16976        }
16977    }
16978
16979    /// Parse an [OrderByExpr] expression.
16980    pub fn parse_order_by_expr(&mut self) -> Result<OrderByExpr, ParserError> {
16981        self.parse_order_by_expr_inner(false)
16982            .map(|(order_by, _)| order_by)
16983    }
16984
16985    /// Parse an [IndexColumn].
16986    pub fn parse_create_index_expr(&mut self) -> Result<IndexColumn, ParserError> {
16987        self.parse_order_by_expr_inner(true)
16988            .map(|(column, operator_class)| IndexColumn {
16989                column,
16990                operator_class,
16991            })
16992    }
16993
16994    fn parse_order_by_expr_inner(
16995        &mut self,
16996        with_operator_class: bool,
16997    ) -> Result<(OrderByExpr, Option<Ident>), ParserError> {
16998        let expr = self.parse_expr()?;
16999
17000        let operator_class: Option<Ident> = if with_operator_class {
17001            // If none of the following keywords is present, parse an identifier as the
17002            // operator class.
17003            if self
17004                .peek_one_of_keywords(&[Keyword::ASC, Keyword::DESC, Keyword::NULLS, Keyword::WITH])
17005                .is_some()
17006            {
17007                None
17008            } else {
17009                self.maybe_parse(|parser| parser.parse_identifier())?
17010            }
17011        } else {
17012            None
17013        };
17014
17015        let options = self.parse_order_by_options()?;
17016
17017        let with_fill = if dialect_of!(self is ClickHouseDialect | GenericDialect)
17018            && self.parse_keywords(&[Keyword::WITH, Keyword::FILL])
17019        {
17020            Some(self.parse_with_fill()?)
17021        } else {
17022            None
17023        };
17024
17025        Ok((
17026            OrderByExpr {
17027                expr,
17028                options,
17029                with_fill,
17030            },
17031            operator_class,
17032        ))
17033    }
17034
17035    fn parse_order_by_options(&mut self) -> Result<OrderByOptions, ParserError> {
17036        let asc = self.parse_asc_desc();
17037
17038        let nulls_first = if self.parse_keywords(&[Keyword::NULLS, Keyword::FIRST]) {
17039            Some(true)
17040        } else if self.parse_keywords(&[Keyword::NULLS, Keyword::LAST]) {
17041            Some(false)
17042        } else {
17043            None
17044        };
17045
17046        Ok(OrderByOptions { asc, nulls_first })
17047    }
17048
17049    // Parse a WITH FILL clause (ClickHouse dialect)
17050    // that follows the WITH FILL keywords in an ORDER BY clause
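    //
    // Illustrative example (ClickHouse syntax; values are made up):
    //   ORDER BY n WITH FILL FROM 1 TO 10 STEP 2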
17051    pub fn parse_with_fill(&mut self) -> Result<WithFill, ParserError> {
17052        let from = if self.parse_keyword(Keyword::FROM) {
17053            Some(self.parse_expr()?)
17054        } else {
17055            None
17056        };
17057
17058        let to = if self.parse_keyword(Keyword::TO) {
17059            Some(self.parse_expr()?)
17060        } else {
17061            None
17062        };
17063
17064        let step = if self.parse_keyword(Keyword::STEP) {
17065            Some(self.parse_expr()?)
17066        } else {
17067            None
17068        };
17069
17070        Ok(WithFill { from, to, step })
17071    }
17072
17073    // Parse a set of comma-separated INTERPOLATE expressions (ClickHouse dialect)
17074    // that follow the INTERPOLATE keyword in an ORDER BY clause with the WITH FILL modifier
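    //
    // Illustrative example (ClickHouse syntax; names are made up):
    //   ORDER BY n WITH FILL INTERPOLATE (total AS total + 1, label)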
17075    pub fn parse_interpolations(&mut self) -> Result<Option<Interpolate>, ParserError> {
17076        if !self.parse_keyword(Keyword::INTERPOLATE) {
17077            return Ok(None);
17078        }
17079
17080        if self.consume_token(&Token::LParen) {
17081            let interpolations =
17082                self.parse_comma_separated0(|p| p.parse_interpolation(), Token::RParen)?;
17083            self.expect_token(&Token::RParen)?;
17084            // INTERPOLATE () and INTERPOLATE ( ... ) variants
17085            return Ok(Some(Interpolate {
17086                exprs: Some(interpolations),
17087            }));
17088        }
17089
17090        // INTERPOLATE
17091        Ok(Some(Interpolate { exprs: None }))
17092    }
17093
17094    // Parse an INTERPOLATE expression (ClickHouse dialect)
17095    pub fn parse_interpolation(&mut self) -> Result<InterpolateExpr, ParserError> {
17096        let column = self.parse_identifier()?;
17097        let expr = if self.parse_keyword(Keyword::AS) {
17098            Some(self.parse_expr()?)
17099        } else {
17100            None
17101        };
17102        Ok(InterpolateExpr { column, expr })
17103    }
17104
17105    /// Parse a TOP clause, MSSQL equivalent of LIMIT,
17106    /// that follows `SELECT [DISTINCT]`.
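    ///
    /// Illustrative examples (values and names are made up):
    ///
    /// ```sql
    /// SELECT TOP 10 name FROM employees
    /// SELECT TOP (10) PERCENT WITH TIES name FROM employees ORDER BY salary
    /// ```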
17107    pub fn parse_top(&mut self) -> Result<Top, ParserError> {
17108        let quantity = if self.consume_token(&Token::LParen) {
17109            let quantity = self.parse_expr()?;
17110            self.expect_token(&Token::RParen)?;
17111            Some(TopQuantity::Expr(quantity))
17112        } else {
17113            let next_token = self.next_token();
17114            let quantity = match next_token.token {
17115                Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start)?,
17116                _ => self.expected("literal int", next_token)?,
17117            };
17118            Some(TopQuantity::Constant(quantity))
17119        };
17120
17121        let percent = self.parse_keyword(Keyword::PERCENT);
17122
17123        let with_ties = self.parse_keywords(&[Keyword::WITH, Keyword::TIES]);
17124
17125        Ok(Top {
17126            with_ties,
17127            percent,
17128            quantity,
17129        })
17130    }
17131
17132    /// Parse a LIMIT clause
17133    pub fn parse_limit(&mut self) -> Result<Option<Expr>, ParserError> {
17134        if self.parse_keyword(Keyword::ALL) {
17135            Ok(None)
17136        } else {
17137            Ok(Some(self.parse_expr()?))
17138        }
17139    }
17140
17141    /// Parse an OFFSET clause
17142    pub fn parse_offset(&mut self) -> Result<Offset, ParserError> {
17143        let value = self.parse_expr()?;
17144        let rows = if self.parse_keyword(Keyword::ROW) {
17145            OffsetRows::Row
17146        } else if self.parse_keyword(Keyword::ROWS) {
17147            OffsetRows::Rows
17148        } else {
17149            OffsetRows::None
17150        };
17151        Ok(Offset { value, rows })
17152    }
17153
17154    /// Parse a FETCH clause
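    ///
    /// Illustrative examples (values are made up):
    ///
    /// ```sql
    /// FETCH FIRST 10 ROWS ONLY
    /// FETCH NEXT 5 PERCENT ROWS WITH TIES
    /// ```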
17155    pub fn parse_fetch(&mut self) -> Result<Fetch, ParserError> {
17156        let _ = self.parse_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT]);
17157
17158        let (quantity, percent) = if self
17159            .parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])
17160            .is_some()
17161        {
17162            (None, false)
17163        } else {
17164            let quantity = Expr::Value(self.parse_value()?);
17165            let percent = self.parse_keyword(Keyword::PERCENT);
17166            let _ = self.parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS]);
17167            (Some(quantity), percent)
17168        };
17169
17170        let with_ties = if self.parse_keyword(Keyword::ONLY) {
17171            false
17172        } else {
17173            self.parse_keywords(&[Keyword::WITH, Keyword::TIES])
17174        };
17175
17176        Ok(Fetch {
17177            with_ties,
17178            percent,
17179            quantity,
17180        })
17181    }
17182
17183    /// Parse a FOR UPDATE/FOR SHARE clause
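    ///
    /// Illustrative examples (table name is made up):
    ///
    /// ```sql
    /// FOR UPDATE OF employees NOWAIT
    /// FOR SHARE OF employees SKIP LOCKED
    /// ```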
17184    pub fn parse_lock(&mut self) -> Result<LockClause, ParserError> {
17185        let lock_type = match self.expect_one_of_keywords(&[Keyword::UPDATE, Keyword::SHARE])? {
17186            Keyword::UPDATE => LockType::Update,
17187            Keyword::SHARE => LockType::Share,
17188            unexpected_keyword => return Err(ParserError::ParserError(
17189                format!("Internal parser error: expected any of {{UPDATE, SHARE}}, got {unexpected_keyword:?}"),
17190            )),
17191        };
17192        let of = if self.parse_keyword(Keyword::OF) {
17193            Some(self.parse_object_name(false)?)
17194        } else {
17195            None
17196        };
17197        let nonblock = if self.parse_keyword(Keyword::NOWAIT) {
17198            Some(NonBlock::Nowait)
17199        } else if self.parse_keywords(&[Keyword::SKIP, Keyword::LOCKED]) {
17200            Some(NonBlock::SkipLocked)
17201        } else {
17202            None
17203        };
17204        Ok(LockClause {
17205            lock_type,
17206            of,
17207            nonblock,
17208        })
17209    }
17210
17211    pub fn parse_values(
17212        &mut self,
17213        allow_empty: bool,
17214        value_keyword: bool,
17215    ) -> Result<Values, ParserError> {
17216        let mut explicit_row = false;
17217
17218        let rows = self.parse_comma_separated(|parser| {
17219            if parser.parse_keyword(Keyword::ROW) {
17220                explicit_row = true;
17221            }
17222
17223            parser.expect_token(&Token::LParen)?;
17224            if allow_empty && parser.peek_token().token == Token::RParen {
17225                parser.next_token();
17226                Ok(vec![])
17227            } else {
17228                let exprs = parser.parse_comma_separated(Parser::parse_expr)?;
17229                parser.expect_token(&Token::RParen)?;
17230                Ok(exprs)
17231            }
17232        })?;
17233        Ok(Values {
17234            explicit_row,
17235            rows,
17236            value_keyword,
17237        })
17238    }
17239
17240    pub fn parse_start_transaction(&mut self) -> Result<Statement, ParserError> {
17241        self.expect_keyword_is(Keyword::TRANSACTION)?;
17242        Ok(Statement::StartTransaction {
17243            modes: self.parse_transaction_modes()?,
17244            begin: false,
17245            transaction: Some(BeginTransactionKind::Transaction),
17246            modifier: None,
17247            statements: vec![],
17248            exception: None,
17249            has_end_keyword: false,
17250        })
17251    }
17252
17253    pub fn parse_begin(&mut self) -> Result<Statement, ParserError> {
17254        let modifier = if !self.dialect.supports_start_transaction_modifier() {
17255            None
17256        } else if self.parse_keyword(Keyword::DEFERRED) {
17257            Some(TransactionModifier::Deferred)
17258        } else if self.parse_keyword(Keyword::IMMEDIATE) {
17259            Some(TransactionModifier::Immediate)
17260        } else if self.parse_keyword(Keyword::EXCLUSIVE) {
17261            Some(TransactionModifier::Exclusive)
17262        } else if self.parse_keyword(Keyword::TRY) {
17263            Some(TransactionModifier::Try)
17264        } else if self.parse_keyword(Keyword::CATCH) {
17265            Some(TransactionModifier::Catch)
17266        } else {
17267            None
17268        };
17269        let transaction = match self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]) {
17270            Some(Keyword::TRANSACTION) => Some(BeginTransactionKind::Transaction),
17271            Some(Keyword::WORK) => Some(BeginTransactionKind::Work),
17272            _ => None,
17273        };
17274        Ok(Statement::StartTransaction {
17275            modes: self.parse_transaction_modes()?,
17276            begin: true,
17277            transaction,
17278            modifier,
17279            statements: vec![],
17280            exception: None,
17281            has_end_keyword: false,
17282        })
17283    }
17284
17285    pub fn parse_begin_exception_end(&mut self) -> Result<Statement, ParserError> {
17286        let statements = self.parse_statement_list(&[Keyword::EXCEPTION, Keyword::END])?;
17287
17288        let exception = if self.parse_keyword(Keyword::EXCEPTION) {
17289            let mut when = Vec::new();
17290
17291            // We can have multiple `WHEN` arms so we consume all cases until `END`
17292            while !self.peek_keyword(Keyword::END) {
17293                self.expect_keyword(Keyword::WHEN)?;
17294
17295                // Each `WHEN` case can have one or more conditions, e.g.
17296                // WHEN EXCEPTION_1 [OR EXCEPTION_2] THEN
17297                // So we parse identifiers until the `THEN` keyword.
17298                let mut idents = Vec::new();
17299
17300                while !self.parse_keyword(Keyword::THEN) {
17301                    let ident = self.parse_identifier()?;
17302                    idents.push(ident);
17303
17304                    self.maybe_parse(|p| p.expect_keyword(Keyword::OR))?;
17305                }
17306
17307                let statements = self.parse_statement_list(&[Keyword::WHEN, Keyword::END])?;
17308
17309                when.push(ExceptionWhen { idents, statements });
17310            }
17311
17312            Some(when)
17313        } else {
17314            None
17315        };
17316
17317        self.expect_keyword(Keyword::END)?;
17318
17319        Ok(Statement::StartTransaction {
17320            begin: true,
17321            statements,
17322            exception,
17323            has_end_keyword: true,
17324            transaction: None,
17325            modifier: None,
17326            modes: Default::default(),
17327        })
17328    }
17329
17330    pub fn parse_end(&mut self) -> Result<Statement, ParserError> {
17331        let modifier = if !self.dialect.supports_end_transaction_modifier() {
17332            None
17333        } else if self.parse_keyword(Keyword::TRY) {
17334            Some(TransactionModifier::Try)
17335        } else if self.parse_keyword(Keyword::CATCH) {
17336            Some(TransactionModifier::Catch)
17337        } else {
17338            None
17339        };
17340        Ok(Statement::Commit {
17341            chain: self.parse_commit_rollback_chain()?,
17342            end: true,
17343            modifier,
17344        })
17345    }
17346
17347    pub fn parse_transaction_modes(&mut self) -> Result<Vec<TransactionMode>, ParserError> {
17348        let mut modes = vec![];
17349        let mut required = false;
17350        loop {
17351            let mode = if self.parse_keywords(&[Keyword::ISOLATION, Keyword::LEVEL]) {
17352                let iso_level = if self.parse_keywords(&[Keyword::READ, Keyword::UNCOMMITTED]) {
17353                    TransactionIsolationLevel::ReadUncommitted
17354                } else if self.parse_keywords(&[Keyword::READ, Keyword::COMMITTED]) {
17355                    TransactionIsolationLevel::ReadCommitted
17356                } else if self.parse_keywords(&[Keyword::REPEATABLE, Keyword::READ]) {
17357                    TransactionIsolationLevel::RepeatableRead
17358                } else if self.parse_keyword(Keyword::SERIALIZABLE) {
17359                    TransactionIsolationLevel::Serializable
17360                } else if self.parse_keyword(Keyword::SNAPSHOT) {
17361                    TransactionIsolationLevel::Snapshot
17362                } else {
17363                    self.expected("isolation level", self.peek_token())?
17364                };
17365                TransactionMode::IsolationLevel(iso_level)
17366            } else if self.parse_keywords(&[Keyword::READ, Keyword::ONLY]) {
17367                TransactionMode::AccessMode(TransactionAccessMode::ReadOnly)
17368            } else if self.parse_keywords(&[Keyword::READ, Keyword::WRITE]) {
17369                TransactionMode::AccessMode(TransactionAccessMode::ReadWrite)
17370            } else if required {
17371                self.expected("transaction mode", self.peek_token())?
17372            } else {
17373                break;
17374            };
17375            modes.push(mode);
17376            // ANSI requires a comma after each transaction mode, but
17377            // PostgreSQL, for historical reasons, does not. We follow
17378            // PostgreSQL in making the comma optional, since that is strictly
17379            // more general.
17380            required = self.consume_token(&Token::Comma);
17381        }
17382        Ok(modes)
17383    }
17384
17385    pub fn parse_commit(&mut self) -> Result<Statement, ParserError> {
17386        Ok(Statement::Commit {
17387            chain: self.parse_commit_rollback_chain()?,
17388            end: false,
17389            modifier: None,
17390        })
17391    }
17392
17393    pub fn parse_rollback(&mut self) -> Result<Statement, ParserError> {
17394        let chain = self.parse_commit_rollback_chain()?;
17395        let savepoint = self.parse_rollback_savepoint()?;
17396
17397        Ok(Statement::Rollback { chain, savepoint })
17398    }
17399
17400    pub fn parse_commit_rollback_chain(&mut self) -> Result<bool, ParserError> {
17401        let _ = self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]);
17402        if self.parse_keyword(Keyword::AND) {
17403            let chain = !self.parse_keyword(Keyword::NO);
17404            self.expect_keyword_is(Keyword::CHAIN)?;
17405            Ok(chain)
17406        } else {
17407            Ok(false)
17408        }
17409    }
17410
17411    pub fn parse_rollback_savepoint(&mut self) -> Result<Option<Ident>, ParserError> {
17412        if self.parse_keyword(Keyword::TO) {
17413            let _ = self.parse_keyword(Keyword::SAVEPOINT);
17414            let savepoint = self.parse_identifier()?;
17415
17416            Ok(Some(savepoint))
17417        } else {
17418            Ok(None)
17419        }
17420    }
17421
17422    /// Parse a 'RAISERROR' statement
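    ///
    /// Illustrative example (T-SQL style; message, severity and state values are made up):
    ///
    /// ```sql
    /// RAISERROR('Transfer failed', 16, 1) WITH NOWAIT
    /// ```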
17423    pub fn parse_raiserror(&mut self) -> Result<Statement, ParserError> {
17424        self.expect_token(&Token::LParen)?;
17425        let message = Box::new(self.parse_expr()?);
17426        self.expect_token(&Token::Comma)?;
17427        let severity = Box::new(self.parse_expr()?);
17428        self.expect_token(&Token::Comma)?;
17429        let state = Box::new(self.parse_expr()?);
17430        let arguments = if self.consume_token(&Token::Comma) {
17431            self.parse_comma_separated(Parser::parse_expr)?
17432        } else {
17433            vec![]
17434        };
17435        self.expect_token(&Token::RParen)?;
17436        let options = if self.parse_keyword(Keyword::WITH) {
17437            self.parse_comma_separated(Parser::parse_raiserror_option)?
17438        } else {
17439            vec![]
17440        };
17441        Ok(Statement::RaisError {
17442            message,
17443            severity,
17444            state,
17445            arguments,
17446            options,
17447        })
17448    }
17449
17450    pub fn parse_raiserror_option(&mut self) -> Result<RaisErrorOption, ParserError> {
17451        match self.expect_one_of_keywords(&[Keyword::LOG, Keyword::NOWAIT, Keyword::SETERROR])? {
17452            Keyword::LOG => Ok(RaisErrorOption::Log),
17453            Keyword::NOWAIT => Ok(RaisErrorOption::NoWait),
17454            Keyword::SETERROR => Ok(RaisErrorOption::SetError),
17455            _ => self.expected(
17456                "LOG, NOWAIT OR SETERROR raiserror option",
17457                self.peek_token(),
17458            ),
17459        }
17460    }
17461
17462    pub fn parse_deallocate(&mut self) -> Result<Statement, ParserError> {
17463        let prepare = self.parse_keyword(Keyword::PREPARE);
17464        let name = self.parse_identifier()?;
17465        Ok(Statement::Deallocate { name, prepare })
17466    }
17467
17468    pub fn parse_execute(&mut self) -> Result<Statement, ParserError> {
17469        let name = if self.dialect.supports_execute_immediate()
17470            && self.parse_keyword(Keyword::IMMEDIATE)
17471        {
17472            None
17473        } else {
17474            let name = self.parse_object_name(false)?;
17475            Some(name)
17476        };
17477
17478        let has_parentheses = self.consume_token(&Token::LParen);
17479
17480        let end_kws = &[Keyword::USING, Keyword::OUTPUT, Keyword::DEFAULT];
17481        let end_token = match (has_parentheses, self.peek_token().token) {
17482            (true, _) => Token::RParen,
17483            (false, Token::EOF) => Token::EOF,
17484            (false, Token::Word(w)) if end_kws.contains(&w.keyword) => Token::Word(w),
17485            (false, _) => Token::SemiColon,
17486        };
17487
17488        let parameters = self.parse_comma_separated0(Parser::parse_expr, end_token)?;
17489
17490        if has_parentheses {
17491            self.expect_token(&Token::RParen)?;
17492        }
17493
17494        let into = if self.parse_keyword(Keyword::INTO) {
17495            self.parse_comma_separated(Self::parse_identifier)?
17496        } else {
17497            vec![]
17498        };
17499
17500        let using = if self.parse_keyword(Keyword::USING) {
17501            self.parse_comma_separated(Self::parse_expr_with_alias)?
17502        } else {
17503            vec![]
17504        };
17505
17506        let output = self.parse_keyword(Keyword::OUTPUT);
17507
17508        let default = self.parse_keyword(Keyword::DEFAULT);
17509
17510        Ok(Statement::Execute {
17511            immediate: name.is_none(),
17512            name,
17513            parameters,
17514            has_parentheses,
17515            into,
17516            using,
17517            output,
17518            default,
17519        })
17520    }
17521
17522    pub fn parse_prepare(&mut self) -> Result<Statement, ParserError> {
17523        let name = self.parse_identifier()?;
17524
17525        let mut data_types = vec![];
17526        if self.consume_token(&Token::LParen) {
17527            data_types = self.parse_comma_separated(Parser::parse_data_type)?;
17528            self.expect_token(&Token::RParen)?;
17529        }
17530
17531        self.expect_keyword_is(Keyword::AS)?;
17532        let statement = Box::new(self.parse_statement()?);
17533        Ok(Statement::Prepare {
17534            name,
17535            data_types,
17536            statement,
17537        })
17538    }
17539
17540    pub fn parse_unload(&mut self) -> Result<Statement, ParserError> {
17541        self.expect_keyword(Keyword::UNLOAD)?;
17542        self.expect_token(&Token::LParen)?;
17543        let (query, query_text) = if matches!(self.peek_token().token, Token::SingleQuotedString(_))
17544        {
17545            (None, Some(self.parse_literal_string()?))
17546        } else {
17547            (Some(self.parse_query()?), None)
17548        };
17549        self.expect_token(&Token::RParen)?;
17550
17551        self.expect_keyword_is(Keyword::TO)?;
17552        let to = self.parse_identifier()?;
17553        let auth = if self.parse_keyword(Keyword::IAM_ROLE) {
17554            Some(self.parse_iam_role_kind()?)
17555        } else {
17556            None
17557        };
17558        let with = self.parse_options(Keyword::WITH)?;
17559        let mut options = vec![];
17560        while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
17561            options.push(opt);
17562        }
17563        Ok(Statement::Unload {
17564            query,
17565            query_text,
17566            to,
17567            auth,
17568            with,
17569            options,
17570        })
17571    }
17572
17573    fn parse_select_into(&mut self) -> Result<SelectInto, ParserError> {
17574        let temporary = self
17575            .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
17576            .is_some();
17577        let unlogged = self.parse_keyword(Keyword::UNLOGGED);
17578        let table = self.parse_keyword(Keyword::TABLE);
17579        let name = self.parse_object_name(false)?;
17580
17581        Ok(SelectInto {
17582            temporary,
17583            unlogged,
17584            table,
17585            name,
17586        })
17587    }
17588
17589    fn parse_pragma_value(&mut self) -> Result<Value, ParserError> {
17590        match self.parse_value()?.value {
17591            v @ Value::SingleQuotedString(_) => Ok(v),
17592            v @ Value::DoubleQuotedString(_) => Ok(v),
17593            v @ Value::Number(_, _) => Ok(v),
17594            v @ Value::Placeholder(_) => Ok(v),
17595            _ => {
17596                self.prev_token();
17597                self.expected("number or string or ? placeholder", self.peek_token())
17598            }
17599        }
17600    }
17601
17602    // PRAGMA [schema-name '.'] pragma-name [('=' pragma-value) | '(' pragma-value ')']
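    //
    // Illustrative examples (pragma names are illustrative only):
    //   PRAGMA schema.cache_size = 10000
    //   PRAGMA optimize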
17603    pub fn parse_pragma(&mut self) -> Result<Statement, ParserError> {
17604        let name = self.parse_object_name(false)?;
17605        if self.consume_token(&Token::LParen) {
17606            let value = self.parse_pragma_value()?;
17607            self.expect_token(&Token::RParen)?;
17608            Ok(Statement::Pragma {
17609                name,
17610                value: Some(value),
17611                is_eq: false,
17612            })
17613        } else if self.consume_token(&Token::Eq) {
17614            Ok(Statement::Pragma {
17615                name,
17616                value: Some(self.parse_pragma_value()?),
17617                is_eq: true,
17618            })
17619        } else {
17620            Ok(Statement::Pragma {
17621                name,
17622                value: None,
17623                is_eq: false,
17624            })
17625        }
17626    }
17627
17628    /// `INSTALL [extension_name]`
17629    pub fn parse_install(&mut self) -> Result<Statement, ParserError> {
17630        let extension_name = self.parse_identifier()?;
17631
17632        Ok(Statement::Install { extension_name })
17633    }
17634
17635    /// Parse a SQL LOAD statement
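    ///
    /// Illustrative examples (the first form loads a DuckDB-style extension, the
    /// second is Hive-style `LOAD DATA`; names and paths are made up):
    ///
    /// ```sql
    /// LOAD spatial
    /// LOAD DATA LOCAL INPATH '/data/users.csv' OVERWRITE INTO TABLE users
    /// ```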
17636    pub fn parse_load(&mut self) -> Result<Statement, ParserError> {
17637        if self.dialect.supports_load_extension() {
17638            let extension_name = self.parse_identifier()?;
17639            Ok(Statement::Load { extension_name })
17640        } else if self.parse_keyword(Keyword::DATA) && self.dialect.supports_load_data() {
17641            let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
17642            self.expect_keyword_is(Keyword::INPATH)?;
17643            let inpath = self.parse_literal_string()?;
17644            let overwrite = self.parse_one_of_keywords(&[Keyword::OVERWRITE]).is_some();
17645            self.expect_keyword_is(Keyword::INTO)?;
17646            self.expect_keyword_is(Keyword::TABLE)?;
17647            let table_name = self.parse_object_name(false)?;
17648            let partitioned = self.parse_insert_partition()?;
17649            let table_format = self.parse_load_data_table_format()?;
17650            Ok(Statement::LoadData {
17651                local,
17652                inpath,
17653                overwrite,
17654                table_name,
17655                partitioned,
17656                table_format,
17657            })
17658        } else {
17659            self.expected(
17660                "`DATA` or an extension name after `LOAD`",
17661                self.peek_token(),
17662            )
17663        }
17664    }
17665
17666    /// ```sql
17667    /// OPTIMIZE TABLE [db.]name [ON CLUSTER cluster] [PARTITION partition | PARTITION ID 'partition_id'] [FINAL] [DEDUPLICATE [BY expression]]
17668    /// ```
17669    /// [ClickHouse](https://clickhouse.com/docs/en/sql-reference/statements/optimize)
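    ///
    /// Illustrative example (cluster, table and column names are made up):
    ///
    /// ```sql
    /// OPTIMIZE TABLE db.hits ON CLUSTER main PARTITION 202401 FINAL DEDUPLICATE BY id
    /// ```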
17670    pub fn parse_optimize_table(&mut self) -> Result<Statement, ParserError> {
17671        self.expect_keyword_is(Keyword::TABLE)?;
17672        let name = self.parse_object_name(false)?;
17673        let on_cluster = self.parse_optional_on_cluster()?;
17674
17675        let partition = if self.parse_keyword(Keyword::PARTITION) {
17676            if self.parse_keyword(Keyword::ID) {
17677                Some(Partition::Identifier(self.parse_identifier()?))
17678            } else {
17679                Some(Partition::Expr(self.parse_expr()?))
17680            }
17681        } else {
17682            None
17683        };
17684
17685        let include_final = self.parse_keyword(Keyword::FINAL);
17686        let deduplicate = if self.parse_keyword(Keyword::DEDUPLICATE) {
17687            if self.parse_keyword(Keyword::BY) {
17688                Some(Deduplicate::ByExpression(self.parse_expr()?))
17689            } else {
17690                Some(Deduplicate::All)
17691            }
17692        } else {
17693            None
17694        };
17695
17696        Ok(Statement::OptimizeTable {
17697            name,
17698            on_cluster,
17699            partition,
17700            include_final,
17701            deduplicate,
17702        })
17703    }
17704
17705    /// ```sql
17706    /// CREATE [ { TEMPORARY | TEMP } ] SEQUENCE [ IF NOT EXISTS ] <sequence_name>
17707    /// ```
17708    ///
17709    /// See [Postgres docs](https://www.postgresql.org/docs/current/sql-createsequence.html) for more details.
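    ///
    /// Illustrative example (sequence and table names are made up):
    ///
    /// ```sql
    /// CREATE TEMPORARY SEQUENCE IF NOT EXISTS order_seq AS BIGINT
    ///     INCREMENT BY 2 MINVALUE 1 START WITH 10 CACHE 5 NO CYCLE OWNED BY orders.id
    /// ```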
17710    pub fn parse_create_sequence(&mut self, temporary: bool) -> Result<Statement, ParserError> {
17711        //[ IF NOT EXISTS ]
17712        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
17713        //name
17714        let name = self.parse_object_name(false)?;
17715        //[ AS data_type ]
17716        let mut data_type: Option<DataType> = None;
17717        if self.parse_keywords(&[Keyword::AS]) {
17718            data_type = Some(self.parse_data_type()?)
17719        }
17720        let sequence_options = self.parse_create_sequence_options()?;
17721        // [ OWNED BY { table_name.column_name | NONE } ]
17722        let owned_by = if self.parse_keywords(&[Keyword::OWNED, Keyword::BY]) {
17723            if self.parse_keywords(&[Keyword::NONE]) {
17724                Some(ObjectName::from(vec![Ident::new("NONE")]))
17725            } else {
17726                Some(self.parse_object_name(false)?)
17727            }
17728        } else {
17729            None
17730        };
17731        Ok(Statement::CreateSequence {
17732            temporary,
17733            if_not_exists,
17734            name,
17735            data_type,
17736            sequence_options,
17737            owned_by,
17738        })
17739    }
17740
17741    fn parse_create_sequence_options(&mut self) -> Result<Vec<SequenceOptions>, ParserError> {
17742        let mut sequence_options = vec![];
17743        //[ INCREMENT [ BY ] increment ]
17744        if self.parse_keywords(&[Keyword::INCREMENT]) {
17745            if self.parse_keywords(&[Keyword::BY]) {
17746                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, true));
17747            } else {
17748                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, false));
17749            }
17750        }
17751        //[ MINVALUE minvalue | NO MINVALUE ]
17752        if self.parse_keyword(Keyword::MINVALUE) {
17753            sequence_options.push(SequenceOptions::MinValue(Some(self.parse_number()?)));
17754        } else if self.parse_keywords(&[Keyword::NO, Keyword::MINVALUE]) {
17755            sequence_options.push(SequenceOptions::MinValue(None));
17756        }
17757        //[ MAXVALUE maxvalue | NO MAXVALUE ]
17758        if self.parse_keywords(&[Keyword::MAXVALUE]) {
17759            sequence_options.push(SequenceOptions::MaxValue(Some(self.parse_number()?)));
17760        } else if self.parse_keywords(&[Keyword::NO, Keyword::MAXVALUE]) {
17761            sequence_options.push(SequenceOptions::MaxValue(None));
17762        }
17763
17764        //[ START [ WITH ] start ]
17765        if self.parse_keywords(&[Keyword::START]) {
17766            if self.parse_keywords(&[Keyword::WITH]) {
17767                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, true));
17768            } else {
17769                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, false));
17770            }
17771        }
17772        //[ CACHE cache ]
17773        if self.parse_keywords(&[Keyword::CACHE]) {
17774            sequence_options.push(SequenceOptions::Cache(self.parse_number()?));
17775        }
17776        // [ [ NO ] CYCLE ]
17777        if self.parse_keywords(&[Keyword::NO, Keyword::CYCLE]) {
17778            sequence_options.push(SequenceOptions::Cycle(true));
17779        } else if self.parse_keywords(&[Keyword::CYCLE]) {
17780            sequence_options.push(SequenceOptions::Cycle(false));
17781        }
17782
17783        Ok(sequence_options)
17784    }
17785
17786    /// Parse a `CREATE SERVER` statement.
17787    ///
17788    /// See [Statement::CreateServer]
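    ///
    /// Illustrative example (server and wrapper names are made up):
    ///
    /// ```sql
    /// CREATE SERVER IF NOT EXISTS film_server FOREIGN DATA WRAPPER postgres_fdw
    /// ```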
17789    pub fn parse_pg_create_server(&mut self) -> Result<Statement, ParserError> {
17790        let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
17791        let name = self.parse_object_name(false)?;
17792
17793        let server_type = if self.parse_keyword(Keyword::TYPE) {
17794            Some(self.parse_identifier()?)
17795        } else {
17796            None
17797        };
17798
17799        let version = if self.parse_keyword(Keyword::VERSION) {
17800            Some(self.parse_identifier()?)
17801        } else {
17802            None
17803        };
17804
17805        self.expect_keywords(&[Keyword::FOREIGN, Keyword::DATA, Keyword::WRAPPER])?;
17806        let foreign_data_wrapper = self.parse_object_name(false)?;
17807
17808        let mut options = None;
17809        if self.parse_keyword(Keyword::OPTIONS) {
17810            self.expect_token(&Token::LParen)?;
17811            options = Some(self.parse_comma_separated(|p| {
17812                let key = p.parse_identifier()?;
17813                let value = p.parse_identifier()?;
17814                Ok(CreateServerOption { key, value })
17815            })?);
17816            self.expect_token(&Token::RParen)?;
17817        }
17818
17819        Ok(Statement::CreateServer(CreateServerStatement {
17820            name,
17821            if_not_exists: ine,
17822            server_type,
17823            version,
17824            foreign_data_wrapper,
17825            options,
17826        }))
17827    }
17828
17829    /// The index of the first unprocessed token.
17830    pub fn index(&self) -> usize {
17831        self.index
17832    }
17833
17834    pub fn parse_named_window(&mut self) -> Result<NamedWindowDefinition, ParserError> {
17835        let ident = self.parse_identifier()?;
17836        self.expect_keyword_is(Keyword::AS)?;
17837
17838        let window_expr = if self.consume_token(&Token::LParen) {
17839            NamedWindowExpr::WindowSpec(self.parse_window_spec()?)
17840        } else if self.dialect.supports_window_clause_named_window_reference() {
17841            NamedWindowExpr::NamedWindow(self.parse_identifier()?)
17842        } else {
17843            return self.expected("(", self.peek_token());
17844        };
17845
17846        Ok(NamedWindowDefinition(ident, window_expr))
17847    }
17848
17849    pub fn parse_create_procedure(&mut self, or_alter: bool) -> Result<Statement, ParserError> {
17850        let name = self.parse_object_name(false)?;
17851        let params = self.parse_optional_procedure_parameters()?;
17852
17853        let language = if self.parse_keyword(Keyword::LANGUAGE) {
17854            Some(self.parse_identifier()?)
17855        } else {
17856            None
17857        };
17858
17859        self.expect_keyword_is(Keyword::AS)?;
17860
17861        let body = self.parse_conditional_statements(&[Keyword::END])?;
17862
17863        Ok(Statement::CreateProcedure {
17864            name,
17865            or_alter,
17866            params,
17867            language,
17868            body,
17869        })
17870    }
17871
17872    pub fn parse_window_spec(&mut self) -> Result<WindowSpec, ParserError> {
17873        let window_name = match self.peek_token().token {
17874            Token::Word(word) if word.keyword == Keyword::NoKeyword => {
17875                self.parse_optional_ident()?
17876            }
17877            _ => None,
17878        };
17879
17880        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
17881            self.parse_comma_separated(Parser::parse_expr)?
17882        } else {
17883            vec![]
17884        };
17885        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
17886            self.parse_comma_separated(Parser::parse_order_by_expr)?
17887        } else {
17888            vec![]
17889        };
17890
17891        let window_frame = if !self.consume_token(&Token::RParen) {
17892            let window_frame = self.parse_window_frame()?;
17893            self.expect_token(&Token::RParen)?;
17894            Some(window_frame)
17895        } else {
17896            None
17897        };
17898        Ok(WindowSpec {
17899            window_name,
17900            partition_by,
17901            order_by,
17902            window_frame,
17903        })
17904    }
17905
17906    pub fn parse_create_type(&mut self) -> Result<Statement, ParserError> {
17907        let name = self.parse_object_name(false)?;
17908
17909        // Check if we have AS keyword
17910        let has_as = self.parse_keyword(Keyword::AS);
17911
17912        if !has_as {
17913            // Two cases: CREATE TYPE name; or CREATE TYPE name (options);
17914            if self.consume_token(&Token::LParen) {
17915                // CREATE TYPE name (options) - SQL definition without AS
17916                let options = self.parse_create_type_sql_definition_options()?;
17917                self.expect_token(&Token::RParen)?;
17918                return Ok(Statement::CreateType {
17919                    name,
17920                    representation: Some(UserDefinedTypeRepresentation::SqlDefinition { options }),
17921                });
17922            }
17923
17924            // CREATE TYPE name; - no representation
17925            return Ok(Statement::CreateType {
17926                name,
17927                representation: None,
17928            });
17929        }
17930
17931        // We have AS keyword
17932        if self.parse_keyword(Keyword::ENUM) {
17933            // CREATE TYPE name AS ENUM (labels)
17934            self.parse_create_type_enum(name)
17935        } else if self.parse_keyword(Keyword::RANGE) {
17936            // CREATE TYPE name AS RANGE (options)
17937            self.parse_create_type_range(name)
17938        } else if self.consume_token(&Token::LParen) {
17939            // CREATE TYPE name AS (attributes) - Composite
17940            self.parse_create_type_composite(name)
17941        } else {
17942            self.expected("ENUM, RANGE, or '(' after AS", self.peek_token())
17943        }
17944    }
17945
17946    /// Parse remainder of `CREATE TYPE AS (attributes)` statement (composite type)
17947    ///
17948    /// See [PostgreSQL](https://www.postgresql.org/docs/current/sql-createtype.html)
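    ///
    /// Illustrative example (type and attribute names are made up):
    ///
    /// ```sql
    /// CREATE TYPE complex AS (re DOUBLE PRECISION, im DOUBLE PRECISION)
    /// ```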
17949    fn parse_create_type_composite(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
17950        if self.consume_token(&Token::RParen) {
17951            // Empty composite type
17952            return Ok(Statement::CreateType {
17953                name,
17954                representation: Some(UserDefinedTypeRepresentation::Composite {
17955                    attributes: vec![],
17956                }),
17957            });
17958        }
17959
17960        let mut attributes = vec![];
17961        loop {
17962            let attr_name = self.parse_identifier()?;
17963            let attr_data_type = self.parse_data_type()?;
17964            let attr_collation = if self.parse_keyword(Keyword::COLLATE) {
17965                Some(self.parse_object_name(false)?)
17966            } else {
17967                None
17968            };
17969            attributes.push(UserDefinedTypeCompositeAttributeDef {
17970                name: attr_name,
17971                data_type: attr_data_type,
17972                collation: attr_collation,
17973            });
17974
17975            if !self.consume_token(&Token::Comma) {
17976                break;
17977            }
17978        }
17979        self.expect_token(&Token::RParen)?;
17980
17981        Ok(Statement::CreateType {
17982            name,
17983            representation: Some(UserDefinedTypeRepresentation::Composite { attributes }),
17984        })
17985    }
17986
17987    /// Parse remainder of `CREATE TYPE AS ENUM` statement (see [Statement::CreateType] and [Self::parse_create_type])
17988    ///
17989    /// See [PostgreSQL](https://www.postgresql.org/docs/current/sql-createtype.html)
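    ///
    /// Illustrative example (type name and labels are made up):
    ///
    /// ```sql
    /// CREATE TYPE mood AS ENUM ('sad', 'ok', 'happy')
    /// ```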
17990    pub fn parse_create_type_enum(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
17991        self.expect_token(&Token::LParen)?;
17992        let labels = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
17993        self.expect_token(&Token::RParen)?;
17994
17995        Ok(Statement::CreateType {
17996            name,
17997            representation: Some(UserDefinedTypeRepresentation::Enum { labels }),
17998        })
17999    }
18000
18001    /// Parse remainder of `CREATE TYPE AS RANGE` statement
18002    ///
18003    /// See [PostgreSQL](https://www.postgresql.org/docs/current/sql-createtype.html)
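    ///
    /// Illustrative example (type, subtype and function names are made up):
    ///
    /// ```sql
    /// CREATE TYPE float_range AS RANGE (SUBTYPE = FLOAT8, SUBTYPE_DIFF = float8mi)
    /// ```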
18004    fn parse_create_type_range(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
18005        self.expect_token(&Token::LParen)?;
18006        let options = self.parse_comma_separated0(|p| p.parse_range_option(), Token::RParen)?;
18007        self.expect_token(&Token::RParen)?;
18008
18009        Ok(Statement::CreateType {
18010            name,
18011            representation: Some(UserDefinedTypeRepresentation::Range { options }),
18012        })
18013    }
18014
18015    /// Parse a single range option for a `CREATE TYPE AS RANGE` statement
18016    fn parse_range_option(&mut self) -> Result<UserDefinedTypeRangeOption, ParserError> {
18017        let keyword = self.parse_one_of_keywords(&[
18018            Keyword::SUBTYPE,
18019            Keyword::SUBTYPE_OPCLASS,
18020            Keyword::COLLATION,
18021            Keyword::CANONICAL,
18022            Keyword::SUBTYPE_DIFF,
18023            Keyword::MULTIRANGE_TYPE_NAME,
18024        ]);
18025
18026        match keyword {
18027            Some(Keyword::SUBTYPE) => {
18028                self.expect_token(&Token::Eq)?;
18029                let data_type = self.parse_data_type()?;
18030                Ok(UserDefinedTypeRangeOption::Subtype(data_type))
18031            }
18032            Some(Keyword::SUBTYPE_OPCLASS) => {
18033                self.expect_token(&Token::Eq)?;
18034                let name = self.parse_object_name(false)?;
18035                Ok(UserDefinedTypeRangeOption::SubtypeOpClass(name))
18036            }
18037            Some(Keyword::COLLATION) => {
18038                self.expect_token(&Token::Eq)?;
18039                let name = self.parse_object_name(false)?;
18040                Ok(UserDefinedTypeRangeOption::Collation(name))
18041            }
18042            Some(Keyword::CANONICAL) => {
18043                self.expect_token(&Token::Eq)?;
18044                let name = self.parse_object_name(false)?;
18045                Ok(UserDefinedTypeRangeOption::Canonical(name))
18046            }
18047            Some(Keyword::SUBTYPE_DIFF) => {
18048                self.expect_token(&Token::Eq)?;
18049                let name = self.parse_object_name(false)?;
18050                Ok(UserDefinedTypeRangeOption::SubtypeDiff(name))
18051            }
18052            Some(Keyword::MULTIRANGE_TYPE_NAME) => {
18053                self.expect_token(&Token::Eq)?;
18054                let name = self.parse_object_name(false)?;
18055                Ok(UserDefinedTypeRangeOption::MultirangeTypeName(name))
18056            }
18057            _ => self.expected("range option keyword", self.peek_token()),
18058        }
18059    }
18060
18061    /// Parse SQL definition options for CREATE TYPE (options)
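    ///
    /// Illustrative example (type and function names are made up):
    ///
    /// ```sql
    /// CREATE TYPE box (INTERNALLENGTH = 16, INPUT = my_box_in, OUTPUT = my_box_out)
    /// ```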
18062    fn parse_create_type_sql_definition_options(
18063        &mut self,
18064    ) -> Result<Vec<UserDefinedTypeSqlDefinitionOption>, ParserError> {
18065        self.parse_comma_separated0(|p| p.parse_sql_definition_option(), Token::RParen)
18066    }
18067
18068    /// Parse a single SQL definition option for CREATE TYPE (options)
18069    fn parse_sql_definition_option(
18070        &mut self,
18071    ) -> Result<UserDefinedTypeSqlDefinitionOption, ParserError> {
18072        let keyword = self.parse_one_of_keywords(&[
18073            Keyword::INPUT,
18074            Keyword::OUTPUT,
18075            Keyword::RECEIVE,
18076            Keyword::SEND,
18077            Keyword::TYPMOD_IN,
18078            Keyword::TYPMOD_OUT,
18079            Keyword::ANALYZE,
18080            Keyword::SUBSCRIPT,
18081            Keyword::INTERNALLENGTH,
18082            Keyword::PASSEDBYVALUE,
18083            Keyword::ALIGNMENT,
18084            Keyword::STORAGE,
18085            Keyword::LIKE,
18086            Keyword::CATEGORY,
18087            Keyword::PREFERRED,
18088            Keyword::DEFAULT,
18089            Keyword::ELEMENT,
18090            Keyword::DELIMITER,
18091            Keyword::COLLATABLE,
18092        ]);
18093
18094        match keyword {
18095            Some(Keyword::INPUT) => {
18096                self.expect_token(&Token::Eq)?;
18097                let name = self.parse_object_name(false)?;
18098                Ok(UserDefinedTypeSqlDefinitionOption::Input(name))
18099            }
18100            Some(Keyword::OUTPUT) => {
18101                self.expect_token(&Token::Eq)?;
18102                let name = self.parse_object_name(false)?;
18103                Ok(UserDefinedTypeSqlDefinitionOption::Output(name))
18104            }
18105            Some(Keyword::RECEIVE) => {
18106                self.expect_token(&Token::Eq)?;
18107                let name = self.parse_object_name(false)?;
18108                Ok(UserDefinedTypeSqlDefinitionOption::Receive(name))
18109            }
18110            Some(Keyword::SEND) => {
18111                self.expect_token(&Token::Eq)?;
18112                let name = self.parse_object_name(false)?;
18113                Ok(UserDefinedTypeSqlDefinitionOption::Send(name))
18114            }
18115            Some(Keyword::TYPMOD_IN) => {
18116                self.expect_token(&Token::Eq)?;
18117                let name = self.parse_object_name(false)?;
18118                Ok(UserDefinedTypeSqlDefinitionOption::TypmodIn(name))
18119            }
18120            Some(Keyword::TYPMOD_OUT) => {
18121                self.expect_token(&Token::Eq)?;
18122                let name = self.parse_object_name(false)?;
18123                Ok(UserDefinedTypeSqlDefinitionOption::TypmodOut(name))
18124            }
18125            Some(Keyword::ANALYZE) => {
18126                self.expect_token(&Token::Eq)?;
18127                let name = self.parse_object_name(false)?;
18128                Ok(UserDefinedTypeSqlDefinitionOption::Analyze(name))
18129            }
18130            Some(Keyword::SUBSCRIPT) => {
18131                self.expect_token(&Token::Eq)?;
18132                let name = self.parse_object_name(false)?;
18133                Ok(UserDefinedTypeSqlDefinitionOption::Subscript(name))
18134            }
18135            Some(Keyword::INTERNALLENGTH) => {
18136                self.expect_token(&Token::Eq)?;
18137                if self.parse_keyword(Keyword::VARIABLE) {
18138                    Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
18139                        UserDefinedTypeInternalLength::Variable,
18140                    ))
18141                } else {
18142                    let value = self.parse_literal_uint()?;
18143                    Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
18144                        UserDefinedTypeInternalLength::Fixed(value),
18145                    ))
18146                }
18147            }
18148            Some(Keyword::PASSEDBYVALUE) => Ok(UserDefinedTypeSqlDefinitionOption::PassedByValue),
18149            Some(Keyword::ALIGNMENT) => {
18150                self.expect_token(&Token::Eq)?;
18151                let align_keyword = self.parse_one_of_keywords(&[
18152                    Keyword::CHAR,
18153                    Keyword::INT2,
18154                    Keyword::INT4,
18155                    Keyword::DOUBLE,
18156                ]);
18157                match align_keyword {
18158                    Some(Keyword::CHAR) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
18159                        Alignment::Char,
18160                    )),
18161                    Some(Keyword::INT2) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
18162                        Alignment::Int2,
18163                    )),
18164                    Some(Keyword::INT4) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
18165                        Alignment::Int4,
18166                    )),
18167                    Some(Keyword::DOUBLE) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
18168                        Alignment::Double,
18169                    )),
18170                    _ => self.expected(
18171                        "alignment value (char, int2, int4, or double)",
18172                        self.peek_token(),
18173                    ),
18174                }
18175            }
18176            Some(Keyword::STORAGE) => {
18177                self.expect_token(&Token::Eq)?;
18178                let storage_keyword = self.parse_one_of_keywords(&[
18179                    Keyword::PLAIN,
18180                    Keyword::EXTERNAL,
18181                    Keyword::EXTENDED,
18182                    Keyword::MAIN,
18183                ]);
18184                match storage_keyword {
18185                    Some(Keyword::PLAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
18186                        UserDefinedTypeStorage::Plain,
18187                    )),
18188                    Some(Keyword::EXTERNAL) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
18189                        UserDefinedTypeStorage::External,
18190                    )),
18191                    Some(Keyword::EXTENDED) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
18192                        UserDefinedTypeStorage::Extended,
18193                    )),
18194                    Some(Keyword::MAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
18195                        UserDefinedTypeStorage::Main,
18196                    )),
18197                    _ => self.expected(
18198                        "storage value (plain, external, extended, or main)",
18199                        self.peek_token(),
18200                    ),
18201                }
18202            }
18203            Some(Keyword::LIKE) => {
18204                self.expect_token(&Token::Eq)?;
18205                let name = self.parse_object_name(false)?;
18206                Ok(UserDefinedTypeSqlDefinitionOption::Like(name))
18207            }
18208            Some(Keyword::CATEGORY) => {
18209                self.expect_token(&Token::Eq)?;
18210                let category_str = self.parse_literal_string()?;
18211                let category_char = category_str.chars().next().ok_or_else(|| {
18212                    ParserError::ParserError(
18213                        "CATEGORY value must be a single character".to_string(),
18214                    )
18215                })?;
18216                Ok(UserDefinedTypeSqlDefinitionOption::Category(category_char))
18217            }
18218            Some(Keyword::PREFERRED) => {
18219                self.expect_token(&Token::Eq)?;
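                // Note: the expression below yields true for an explicit TRUE and for
                // anything other than an explicit FALSE; only a literal FALSE yields false.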
18220                let value =
18221                    self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
18222                Ok(UserDefinedTypeSqlDefinitionOption::Preferred(value))
18223            }
18224            Some(Keyword::DEFAULT) => {
18225                self.expect_token(&Token::Eq)?;
18226                let expr = self.parse_expr()?;
18227                Ok(UserDefinedTypeSqlDefinitionOption::Default(expr))
18228            }
18229            Some(Keyword::ELEMENT) => {
18230                self.expect_token(&Token::Eq)?;
18231                let data_type = self.parse_data_type()?;
18232                Ok(UserDefinedTypeSqlDefinitionOption::Element(data_type))
18233            }
18234            Some(Keyword::DELIMITER) => {
18235                self.expect_token(&Token::Eq)?;
18236                let delimiter = self.parse_literal_string()?;
18237                Ok(UserDefinedTypeSqlDefinitionOption::Delimiter(delimiter))
18238            }
18239            Some(Keyword::COLLATABLE) => {
18240                self.expect_token(&Token::Eq)?;
18241                let value =
18242                    self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
18243                Ok(UserDefinedTypeSqlDefinitionOption::Collatable(value))
18244            }
18245            _ => self.expected("SQL definition option keyword", self.peek_token()),
18246        }
18247    }
18248
18249    fn parse_parenthesized_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
18250        self.expect_token(&Token::LParen)?;
18251        let idents = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
18252        self.expect_token(&Token::RParen)?;
18253        Ok(idents)
18254    }
18255
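    /// Parses an optional MySQL-style column position (`FIRST` or `AFTER <column>`),
    /// e.g. the tail of `ALTER TABLE t ADD COLUMN c INT AFTER other_col` (illustrative
    /// input, not from the test suite). Returns `None` for other dialects or when
    /// neither keyword is present.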
18256    fn parse_column_position(&mut self) -> Result<Option<MySQLColumnPosition>, ParserError> {
18257        if dialect_of!(self is MySqlDialect | GenericDialect) {
18258            if self.parse_keyword(Keyword::FIRST) {
18259                Ok(Some(MySQLColumnPosition::First))
18260            } else if self.parse_keyword(Keyword::AFTER) {
18261                let ident = self.parse_identifier()?;
18262                Ok(Some(MySQLColumnPosition::After(ident)))
18263            } else {
18264                Ok(None)
18265            }
18266        } else {
18267            Ok(None)
18268        }
18269    }
18270
18271    /// Parse [Statement::Print]
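    ///
    /// The message is parsed as an expression, e.g. a T-SQL style `PRINT 'hello'`
    /// (illustrative input).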
18272    fn parse_print(&mut self) -> Result<Statement, ParserError> {
18273        Ok(Statement::Print(PrintStatement {
18274            message: Box::new(self.parse_expr()?),
18275        }))
18276    }
18277
18278    /// Parse [Statement::Return]
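    ///
    /// The return value is optional, so both a bare `RETURN` and e.g. `RETURN 1`
    /// (illustrative input) are accepted.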
18279    fn parse_return(&mut self) -> Result<Statement, ParserError> {
18280        match self.maybe_parse(|p| p.parse_expr())? {
18281            Some(expr) => Ok(Statement::Return(ReturnStatement {
18282                value: Some(ReturnStatementValue::Expr(expr)),
18283            })),
18284            None => Ok(Statement::Return(ReturnStatement { value: None })),
18285        }
18286    }
18287
18288    /// Parse an `EXPORT DATA` statement.
18289    ///
18290    /// See [Statement::ExportData]
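    ///
    /// Based on the grammar handled below, an input along these lines should be accepted
    /// (illustrative example; the option names are placeholders, not taken from the source):
    /// `EXPORT DATA WITH CONNECTION my_conn OPTIONS (uri = 'gs://bucket/file-*.csv') AS SELECT * FROM t`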
18291    fn parse_export_data(&mut self) -> Result<Statement, ParserError> {
18292        self.expect_keywords(&[Keyword::EXPORT, Keyword::DATA])?;
18293
18294        let connection = if self.parse_keywords(&[Keyword::WITH, Keyword::CONNECTION]) {
18295            Some(self.parse_object_name(false)?)
18296        } else {
18297            None
18298        };
18299        self.expect_keyword(Keyword::OPTIONS)?;
18300        self.expect_token(&Token::LParen)?;
18301        let options = self.parse_comma_separated(|p| p.parse_sql_option())?;
18302        self.expect_token(&Token::RParen)?;
18303        self.expect_keyword(Keyword::AS)?;
18304        let query = self.parse_query()?;
18305        Ok(Statement::ExportData(ExportData {
18306            options,
18307            query,
18308            connection,
18309        }))
18310    }
18311
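    /// Parses a `VACUUM` statement, including Redshift-style modifiers such as `FULL`,
    /// `SORT ONLY`, `DELETE ONLY`, `REINDEX`, `RECLUSTER`, an optional table name, a
    /// `TO <n> PERCENT` threshold, and `BOOST` (derived from the options parsed below).
    /// For example, `VACUUM SORT ONLY my_table TO 75 PERCENT BOOST` (illustrative input).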
18312    fn parse_vacuum(&mut self) -> Result<Statement, ParserError> {
18313        self.expect_keyword(Keyword::VACUUM)?;
18314        let full = self.parse_keyword(Keyword::FULL);
18315        let sort_only = self.parse_keywords(&[Keyword::SORT, Keyword::ONLY]);
18316        let delete_only = self.parse_keywords(&[Keyword::DELETE, Keyword::ONLY]);
18317        let reindex = self.parse_keyword(Keyword::REINDEX);
18318        let recluster = self.parse_keyword(Keyword::RECLUSTER);
18319        let (table_name, threshold, boost) =
18320            match self.maybe_parse(|p| p.parse_object_name(false))? {
18321                Some(table_name) => {
18322                    let threshold = if self.parse_keyword(Keyword::TO) {
18323                        let value = self.parse_value()?;
18324                        self.expect_keyword(Keyword::PERCENT)?;
18325                        Some(value.value)
18326                    } else {
18327                        None
18328                    };
18329                    let boost = self.parse_keyword(Keyword::BOOST);
18330                    (Some(table_name), threshold, boost)
18331                }
18332                _ => (None, None, false),
18333            };
18334        Ok(Statement::Vacuum(VacuumStatement {
18335            full,
18336            sort_only,
18337            delete_only,
18338            reindex,
18339            recluster,
18340            table_name,
18341            threshold,
18342            boost,
18343        }))
18344    }
18345
18346    /// Consume the parser and return its underlying token buffer
18347    pub fn into_tokens(self) -> Vec<TokenWithSpan> {
18348        self.tokens
18349    }
18350
18351    /// Returns true if the next keyword indicates a subquery, i.e. SELECT or WITH
18352    fn peek_sub_query(&mut self) -> bool {
18353        if self
18354            .parse_one_of_keywords(&[Keyword::SELECT, Keyword::WITH])
18355            .is_some()
18356        {
18357            self.prev_token();
18358            return true;
18359        }
18360        false
18361    }
18362
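    /// Parses the trailing options of a `SHOW` statement: an optional filter (e.g. `LIKE`),
    /// an `IN`/`FROM` clause, and optional `STARTS WITH`, `LIMIT`, and `FROM` parts.
    /// Depending on the dialect, the filter may appear before the `IN` clause, as in a
    /// Snowflake-style `SHOW TABLES LIKE '%x%' IN my_db` (illustrative input), or after it.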
18363    pub(crate) fn parse_show_stmt_options(&mut self) -> Result<ShowStatementOptions, ParserError> {
18364        let show_in;
18365        let mut filter_position = None;
18366        if self.dialect.supports_show_like_before_in() {
18367            if let Some(filter) = self.parse_show_statement_filter()? {
18368                filter_position = Some(ShowStatementFilterPosition::Infix(filter));
18369            }
18370            show_in = self.maybe_parse_show_stmt_in()?;
18371        } else {
18372            show_in = self.maybe_parse_show_stmt_in()?;
18373            if let Some(filter) = self.parse_show_statement_filter()? {
18374                filter_position = Some(ShowStatementFilterPosition::Suffix(filter));
18375            }
18376        }
18377        let starts_with = self.maybe_parse_show_stmt_starts_with()?;
18378        let limit = self.maybe_parse_show_stmt_limit()?;
18379        let from = self.maybe_parse_show_stmt_from()?;
18380        Ok(ShowStatementOptions {
18381            filter_position,
18382            show_in,
18383            starts_with,
18384            limit,
18385            limit_from: from,
18386        })
18387    }
18388
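    /// Parses an optional `IN`/`FROM` clause of a `SHOW` statement, e.g.
    /// `SHOW TABLES IN DATABASE my_db` or the MySQL-style
    /// `SHOW COLUMNS FROM tbl_name FROM db_name` (illustrative inputs).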
18389    fn maybe_parse_show_stmt_in(&mut self) -> Result<Option<ShowStatementIn>, ParserError> {
18390        let clause = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) {
18391            Some(Keyword::FROM) => ShowStatementInClause::FROM,
18392            Some(Keyword::IN) => ShowStatementInClause::IN,
18393            None => return Ok(None),
18394            _ => return self.expected("FROM or IN", self.peek_token()),
18395        };
18396
18397        let (parent_type, parent_name) = match self.parse_one_of_keywords(&[
18398            Keyword::ACCOUNT,
18399            Keyword::DATABASE,
18400            Keyword::SCHEMA,
18401            Keyword::TABLE,
18402            Keyword::VIEW,
18403        ]) {
18404            // If we see one of these keywords next, it means we don't have a parent name
18405            Some(Keyword::DATABASE)
18406                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
18407                    | self.peek_keyword(Keyword::LIMIT) =>
18408            {
18409                (Some(ShowStatementInParentType::Database), None)
18410            }
18411            Some(Keyword::SCHEMA)
18412                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
18413                    | self.peek_keyword(Keyword::LIMIT) =>
18414            {
18415                (Some(ShowStatementInParentType::Schema), None)
18416            }
18417            Some(parent_kw) => {
18418                // The parent name here is still optional, for example:
18419                // SHOW TABLES IN ACCOUNT, so parsing the object name
18420                // may fail because the statement ends.
18421                let parent_name = self.maybe_parse(|p| p.parse_object_name(false))?;
18422                match parent_kw {
18423                    Keyword::ACCOUNT => (Some(ShowStatementInParentType::Account), parent_name),
18424                    Keyword::DATABASE => (Some(ShowStatementInParentType::Database), parent_name),
18425                    Keyword::SCHEMA => (Some(ShowStatementInParentType::Schema), parent_name),
18426                    Keyword::TABLE => (Some(ShowStatementInParentType::Table), parent_name),
18427                    Keyword::VIEW => (Some(ShowStatementInParentType::View), parent_name),
18428                    _ => {
18429                        return self.expected(
18430                            "one of ACCOUNT, DATABASE, SCHEMA, TABLE or VIEW",
18431                            self.peek_token(),
18432                        )
18433                    }
18434                }
18435            }
18436            None => {
18437                // Parsing MySQL style FROM tbl_name FROM db_name
18438                // which is equivalent to FROM db_name.tbl_name
18439                let mut parent_name = self.parse_object_name(false)?;
18440                if self
18441                    .parse_one_of_keywords(&[Keyword::FROM, Keyword::IN])
18442                    .is_some()
18443                {
18444                    parent_name
18445                        .0
18446                        .insert(0, ObjectNamePart::Identifier(self.parse_identifier()?));
18447                }
18448                (None, Some(parent_name))
18449            }
18450        };
18451
18452        Ok(Some(ShowStatementIn {
18453            clause,
18454            parent_type,
18455            parent_name,
18456        }))
18457    }
18458
18459    fn maybe_parse_show_stmt_starts_with(&mut self) -> Result<Option<Value>, ParserError> {
18460        if self.parse_keywords(&[Keyword::STARTS, Keyword::WITH]) {
18461            Ok(Some(self.parse_value()?.value))
18462        } else {
18463            Ok(None)
18464        }
18465    }
18466
18467    fn maybe_parse_show_stmt_limit(&mut self) -> Result<Option<Expr>, ParserError> {
18468        if self.parse_keyword(Keyword::LIMIT) {
18469            Ok(self.parse_limit()?)
18470        } else {
18471            Ok(None)
18472        }
18473    }
18474
18475    fn maybe_parse_show_stmt_from(&mut self) -> Result<Option<Value>, ParserError> {
18476        if self.parse_keyword(Keyword::FROM) {
18477            Ok(Some(self.parse_value()?.value))
18478        } else {
18479            Ok(None)
18480        }
18481    }
18482
18483    pub(crate) fn in_column_definition_state(&self) -> bool {
18484        matches!(self.state, ColumnDefinition)
18485    }
18486
18487    /// Parses options provided in key-value format.
18488    ///
18489    /// * `parenthesized` - true if the options are enclosed in parentheses
18490    /// * `end_words` - a list of keywords, any of which marks the end of the options section
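    ///
    /// For example, a Snowflake-style list such as `TYPE = CSV FIELD_DELIMITER = ',' SKIP_HEADER = 1`
    /// (illustrative; the option names are placeholders) produces space-delimited options,
    /// while separating the entries with commas records a comma delimiter instead.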
18491    pub(crate) fn parse_key_value_options(
18492        &mut self,
18493        parenthesized: bool,
18494        end_words: &[Keyword],
18495    ) -> Result<KeyValueOptions, ParserError> {
18496        let mut options: Vec<KeyValueOption> = Vec::new();
18497        let mut delimiter = KeyValueOptionsDelimiter::Space;
18498        if parenthesized {
18499            self.expect_token(&Token::LParen)?;
18500        }
18501        loop {
18502            match self.next_token().token {
18503                Token::RParen => {
18504                    if parenthesized {
18505                        break;
18506                    } else {
18507                        return self.expected("another option or EOF", self.peek_token());
18508                    }
18509                }
18510                Token::EOF => break,
18511                Token::Comma => {
18512                    delimiter = KeyValueOptionsDelimiter::Comma;
18513                    continue;
18514                }
18515                Token::Word(w) if !end_words.contains(&w.keyword) => {
18516                    options.push(self.parse_key_value_option(&w)?)
18517                }
18518                Token::Word(w) if end_words.contains(&w.keyword) => {
18519                    self.prev_token();
18520                    break;
18521                }
18522                _ => return self.expected("another option, EOF, Comma or ')'", self.peek_token()),
18523            };
18524        }
18525
18526        Ok(KeyValueOptions { delimiter, options })
18527    }
18528
18529    /// Parses a `KEY = VALUE` construct based on the specified key
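    ///
    /// The value may be a quoted string, a number, `TRUE`/`FALSE`, a bare word, or a
    /// parenthesized list of values or nested key-value options (see the match below).
    /// For example `SIZE_LIMIT = 5` or `CREDENTIALS = (AWS_KEY_ID = '...')`
    /// (illustrative inputs; the key names are placeholders).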
18530    pub(crate) fn parse_key_value_option(
18531        &mut self,
18532        key: &Word,
18533    ) -> Result<KeyValueOption, ParserError> {
18534        self.expect_token(&Token::Eq)?;
18535        match self.peek_token().token {
18536            Token::SingleQuotedString(_) => Ok(KeyValueOption {
18537                option_name: key.value.clone(),
18538                option_value: KeyValueOptionKind::Single(self.parse_value()?.into()),
18539            }),
18540            Token::Word(word)
18541                if word.keyword == Keyword::TRUE || word.keyword == Keyword::FALSE =>
18542            {
18543                Ok(KeyValueOption {
18544                    option_name: key.value.clone(),
18545                    option_value: KeyValueOptionKind::Single(self.parse_value()?.into()),
18546                })
18547            }
18548            Token::Number(..) => Ok(KeyValueOption {
18549                option_name: key.value.clone(),
18550                option_value: KeyValueOptionKind::Single(self.parse_value()?.into()),
18551            }),
18552            Token::Word(word) => {
18553                self.next_token();
18554                Ok(KeyValueOption {
18555                    option_name: key.value.clone(),
18556                    option_value: KeyValueOptionKind::Single(Value::Placeholder(
18557                        word.value.clone(),
18558                    )),
18559                })
18560            }
18561            Token::LParen => {
18562                // Can be a list of values or a list of key-value properties.
18563                // Try to parse a list of values first; if that fails, fall back to
18564                // parsing a list of key-value properties.
18565                match self.maybe_parse(|parser| {
18566                    parser.expect_token(&Token::LParen)?;
18567                    let values = parser.parse_comma_separated0(|p| p.parse_value(), Token::RParen);
18568                    parser.expect_token(&Token::RParen)?;
18569                    values
18570                })? {
18571                    Some(values) => {
18572                        let values = values.into_iter().map(|v| v.value).collect();
18573                        Ok(KeyValueOption {
18574                            option_name: key.value.clone(),
18575                            option_value: KeyValueOptionKind::Multi(values),
18576                        })
18577                    }
18578                    None => Ok(KeyValueOption {
18579                        option_name: key.value.clone(),
18580                        option_value: KeyValueOptionKind::KeyValueOptions(Box::new(
18581                            self.parse_key_value_options(true, &[])?,
18582                        )),
18583                    }),
18584                }
18585            }
18586            _ => self.expected("option value", self.peek_token()),
18587        }
18588    }
18589
18590    /// Parses a RESET statement
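    ///
    /// Accepts either `RESET ALL` or `RESET <configuration_parameter>`, e.g. the
    /// PostgreSQL-style `RESET search_path` (illustrative input).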
18591    fn parse_reset(&mut self) -> Result<Statement, ParserError> {
18592        if self.parse_keyword(Keyword::ALL) {
18593            return Ok(Statement::Reset(ResetStatement { reset: Reset::ALL }));
18594        }
18595
18596        let obj = self.parse_object_name(false)?;
18597        Ok(Statement::Reset(ResetStatement {
18598            reset: Reset::ConfigurationParameter(obj),
18599        }))
18600    }
18601}
18602
18603fn maybe_prefixed_expr(expr: Expr, prefix: Option<Ident>) -> Expr {
18604    if let Some(prefix) = prefix {
18605        Expr::Prefixed {
18606            prefix,
18607            value: Box::new(expr),
18608        }
18609    } else {
18610        expr
18611    }
18612}
18613
18614impl Word {
18615    #[deprecated(since = "0.54.0", note = "please use `into_ident` instead")]
18616    pub fn to_ident(&self, span: Span) -> Ident {
18617        Ident {
18618            value: self.value.clone(),
18619            quote_style: self.quote_style,
18620            span,
18621        }
18622    }
18623
18624    /// Convert this word into an [`Ident`] identifier
18625    pub fn into_ident(self, span: Span) -> Ident {
18626        Ident {
18627            value: self.value,
18628            quote_style: self.quote_style,
18629            span,
18630        }
18631    }
18632}
18633
18634#[cfg(test)]
18635mod tests {
18636    use crate::test_utils::{all_dialects, TestedDialects};
18637
18638    use super::*;
18639
18640    #[test]
18641    fn test_prev_index() {
18642        let sql = "SELECT version";
18643        all_dialects().run_parser_method(sql, |parser| {
18644            assert_eq!(parser.peek_token(), Token::make_keyword("SELECT"));
18645            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
18646            parser.prev_token();
18647            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
18648            assert_eq!(parser.next_token(), Token::make_word("version", None));
18649            parser.prev_token();
18650            assert_eq!(parser.peek_token(), Token::make_word("version", None));
18651            assert_eq!(parser.next_token(), Token::make_word("version", None));
18652            assert_eq!(parser.peek_token(), Token::EOF);
18653            parser.prev_token();
18654            assert_eq!(parser.next_token(), Token::make_word("version", None));
18655            assert_eq!(parser.next_token(), Token::EOF);
18656            assert_eq!(parser.next_token(), Token::EOF);
18657            parser.prev_token();
18658        });
18659    }
18660
18661    #[test]
18662    fn test_peek_tokens() {
18663        all_dialects().run_parser_method("SELECT foo AS bar FROM baz", |parser| {
18664            assert!(matches!(
18665                parser.peek_tokens(),
18666                [Token::Word(Word {
18667                    keyword: Keyword::SELECT,
18668                    ..
18669                })]
18670            ));
18671
18672            assert!(matches!(
18673                parser.peek_tokens(),
18674                [
18675                    Token::Word(Word {
18676                        keyword: Keyword::SELECT,
18677                        ..
18678                    }),
18679                    Token::Word(_),
18680                    Token::Word(Word {
18681                        keyword: Keyword::AS,
18682                        ..
18683                    }),
18684                ]
18685            ));
18686
18687            for _ in 0..4 {
18688                parser.next_token();
18689            }
18690
18691            assert!(matches!(
18692                parser.peek_tokens(),
18693                [
18694                    Token::Word(Word {
18695                        keyword: Keyword::FROM,
18696                        ..
18697                    }),
18698                    Token::Word(_),
18699                    Token::EOF,
18700                    Token::EOF,
18701                ]
18702            ))
18703        })
18704    }
18705
18706    #[cfg(test)]
18707    mod test_parse_data_type {
18708        use crate::ast::{
18709            CharLengthUnits, CharacterLength, DataType, ExactNumberInfo, ObjectName, TimezoneInfo,
18710        };
18711        use crate::dialect::{AnsiDialect, GenericDialect, PostgreSqlDialect};
18712        use crate::test_utils::TestedDialects;
18713
18714        macro_rules! test_parse_data_type {
18715            ($dialect:expr, $input:expr, $expected_type:expr $(,)?) => {{
18716                $dialect.run_parser_method(&*$input, |parser| {
18717                    let data_type = parser.parse_data_type().unwrap();
18718                    assert_eq!($expected_type, data_type);
18719                    assert_eq!($input.to_string(), data_type.to_string());
18720                });
18721            }};
18722        }
18723
18724        #[test]
18725        fn test_ansii_character_string_types() {
18726            // Character string types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#character-string-type>
18727            let dialect =
18728                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
18729
18730            test_parse_data_type!(dialect, "CHARACTER", DataType::Character(None));
18731
18732            test_parse_data_type!(
18733                dialect,
18734                "CHARACTER(20)",
18735                DataType::Character(Some(CharacterLength::IntegerLength {
18736                    length: 20,
18737                    unit: None
18738                }))
18739            );
18740
18741            test_parse_data_type!(
18742                dialect,
18743                "CHARACTER(20 CHARACTERS)",
18744                DataType::Character(Some(CharacterLength::IntegerLength {
18745                    length: 20,
18746                    unit: Some(CharLengthUnits::Characters)
18747                }))
18748            );
18749
18750            test_parse_data_type!(
18751                dialect,
18752                "CHARACTER(20 OCTETS)",
18753                DataType::Character(Some(CharacterLength::IntegerLength {
18754                    length: 20,
18755                    unit: Some(CharLengthUnits::Octets)
18756                }))
18757            );
18758
18759            test_parse_data_type!(dialect, "CHAR", DataType::Char(None));
18760
18761            test_parse_data_type!(
18762                dialect,
18763                "CHAR(20)",
18764                DataType::Char(Some(CharacterLength::IntegerLength {
18765                    length: 20,
18766                    unit: None
18767                }))
18768            );
18769
18770            test_parse_data_type!(
18771                dialect,
18772                "CHAR(20 CHARACTERS)",
18773                DataType::Char(Some(CharacterLength::IntegerLength {
18774                    length: 20,
18775                    unit: Some(CharLengthUnits::Characters)
18776                }))
18777            );
18778
18779            test_parse_data_type!(
18780                dialect,
18781                "CHAR(20 OCTETS)",
18782                DataType::Char(Some(CharacterLength::IntegerLength {
18783                    length: 20,
18784                    unit: Some(CharLengthUnits::Octets)
18785                }))
18786            );
18787
18788            test_parse_data_type!(
18789                dialect,
18790                "CHARACTER VARYING(20)",
18791                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
18792                    length: 20,
18793                    unit: None
18794                }))
18795            );
18796
18797            test_parse_data_type!(
18798                dialect,
18799                "CHARACTER VARYING(20 CHARACTERS)",
18800                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
18801                    length: 20,
18802                    unit: Some(CharLengthUnits::Characters)
18803                }))
18804            );
18805
18806            test_parse_data_type!(
18807                dialect,
18808                "CHARACTER VARYING(20 OCTETS)",
18809                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
18810                    length: 20,
18811                    unit: Some(CharLengthUnits::Octets)
18812                }))
18813            );
18814
18815            test_parse_data_type!(
18816                dialect,
18817                "CHAR VARYING(20)",
18818                DataType::CharVarying(Some(CharacterLength::IntegerLength {
18819                    length: 20,
18820                    unit: None
18821                }))
18822            );
18823
18824            test_parse_data_type!(
18825                dialect,
18826                "CHAR VARYING(20 CHARACTERS)",
18827                DataType::CharVarying(Some(CharacterLength::IntegerLength {
18828                    length: 20,
18829                    unit: Some(CharLengthUnits::Characters)
18830                }))
18831            );
18832
18833            test_parse_data_type!(
18834                dialect,
18835                "CHAR VARYING(20 OCTETS)",
18836                DataType::CharVarying(Some(CharacterLength::IntegerLength {
18837                    length: 20,
18838                    unit: Some(CharLengthUnits::Octets)
18839                }))
18840            );
18841
18842            test_parse_data_type!(
18843                dialect,
18844                "VARCHAR(20)",
18845                DataType::Varchar(Some(CharacterLength::IntegerLength {
18846                    length: 20,
18847                    unit: None
18848                }))
18849            );
18850        }
18851
18852        #[test]
18853        fn test_ansii_character_large_object_types() {
18854            // Character large object types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#character-large-object-length>
18855            let dialect =
18856                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
18857
18858            test_parse_data_type!(
18859                dialect,
18860                "CHARACTER LARGE OBJECT",
18861                DataType::CharacterLargeObject(None)
18862            );
18863            test_parse_data_type!(
18864                dialect,
18865                "CHARACTER LARGE OBJECT(20)",
18866                DataType::CharacterLargeObject(Some(20))
18867            );
18868
18869            test_parse_data_type!(
18870                dialect,
18871                "CHAR LARGE OBJECT",
18872                DataType::CharLargeObject(None)
18873            );
18874            test_parse_data_type!(
18875                dialect,
18876                "CHAR LARGE OBJECT(20)",
18877                DataType::CharLargeObject(Some(20))
18878            );
18879
18880            test_parse_data_type!(dialect, "CLOB", DataType::Clob(None));
18881            test_parse_data_type!(dialect, "CLOB(20)", DataType::Clob(Some(20)));
18882        }
18883
18884        #[test]
18885        fn test_parse_custom_types() {
18886            let dialect =
18887                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
18888
18889            test_parse_data_type!(
18890                dialect,
18891                "GEOMETRY",
18892                DataType::Custom(ObjectName::from(vec!["GEOMETRY".into()]), vec![])
18893            );
18894
18895            test_parse_data_type!(
18896                dialect,
18897                "GEOMETRY(POINT)",
18898                DataType::Custom(
18899                    ObjectName::from(vec!["GEOMETRY".into()]),
18900                    vec!["POINT".to_string()]
18901                )
18902            );
18903
18904            test_parse_data_type!(
18905                dialect,
18906                "GEOMETRY(POINT, 4326)",
18907                DataType::Custom(
18908                    ObjectName::from(vec!["GEOMETRY".into()]),
18909                    vec!["POINT".to_string(), "4326".to_string()]
18910                )
18911            );
18912        }
18913
18914        #[test]
18915        fn test_ansii_exact_numeric_types() {
18916            // Exact numeric types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#exact-numeric-type>
18917            let dialect = TestedDialects::new(vec![
18918                Box::new(GenericDialect {}),
18919                Box::new(AnsiDialect {}),
18920                Box::new(PostgreSqlDialect {}),
18921            ]);
18922
18923            test_parse_data_type!(dialect, "NUMERIC", DataType::Numeric(ExactNumberInfo::None));
18924
18925            test_parse_data_type!(
18926                dialect,
18927                "NUMERIC(2)",
18928                DataType::Numeric(ExactNumberInfo::Precision(2))
18929            );
18930
18931            test_parse_data_type!(
18932                dialect,
18933                "NUMERIC(2,10)",
18934                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(2, 10))
18935            );
18936
18937            test_parse_data_type!(dialect, "DECIMAL", DataType::Decimal(ExactNumberInfo::None));
18938
18939            test_parse_data_type!(
18940                dialect,
18941                "DECIMAL(2)",
18942                DataType::Decimal(ExactNumberInfo::Precision(2))
18943            );
18944
18945            test_parse_data_type!(
18946                dialect,
18947                "DECIMAL(2,10)",
18948                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(2, 10))
18949            );
18950
18951            test_parse_data_type!(dialect, "DEC", DataType::Dec(ExactNumberInfo::None));
18952
18953            test_parse_data_type!(
18954                dialect,
18955                "DEC(2)",
18956                DataType::Dec(ExactNumberInfo::Precision(2))
18957            );
18958
18959            test_parse_data_type!(
18960                dialect,
18961                "DEC(2,10)",
18962                DataType::Dec(ExactNumberInfo::PrecisionAndScale(2, 10))
18963            );
18964
18965            // Test negative scale values.
18966            test_parse_data_type!(
18967                dialect,
18968                "NUMERIC(10,-2)",
18969                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -2))
18970            );
18971
18972            test_parse_data_type!(
18973                dialect,
18974                "DECIMAL(1000,-10)",
18975                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(1000, -10))
18976            );
18977
18978            test_parse_data_type!(
18979                dialect,
18980                "DEC(5,-1000)",
18981                DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -1000))
18982            );
18983
18984            test_parse_data_type!(
18985                dialect,
18986                "NUMERIC(10,-5)",
18987                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -5))
18988            );
18989
18990            test_parse_data_type!(
18991                dialect,
18992                "DECIMAL(20,-10)",
18993                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(20, -10))
18994            );
18995
18996            test_parse_data_type!(
18997                dialect,
18998                "DEC(5,-2)",
18999                DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -2))
19000            );
19001
19002            dialect.run_parser_method("NUMERIC(10,+5)", |parser| {
19003                let data_type = parser.parse_data_type().unwrap();
19004                assert_eq!(
19005                    DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, 5)),
19006                    data_type
19007                );
19008                // Note: Explicit '+' sign is not preserved in output, which is correct
19009                assert_eq!("NUMERIC(10,5)", data_type.to_string());
19010            });
19011        }
19012
19013        #[test]
19014        fn test_ansii_date_type() {
19015            // Datetime types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#datetime-type>
19016            let dialect =
19017                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
19018
19019            test_parse_data_type!(dialect, "DATE", DataType::Date);
19020
19021            test_parse_data_type!(dialect, "TIME", DataType::Time(None, TimezoneInfo::None));
19022
19023            test_parse_data_type!(
19024                dialect,
19025                "TIME(6)",
19026                DataType::Time(Some(6), TimezoneInfo::None)
19027            );
19028
19029            test_parse_data_type!(
19030                dialect,
19031                "TIME WITH TIME ZONE",
19032                DataType::Time(None, TimezoneInfo::WithTimeZone)
19033            );
19034
19035            test_parse_data_type!(
19036                dialect,
19037                "TIME(6) WITH TIME ZONE",
19038                DataType::Time(Some(6), TimezoneInfo::WithTimeZone)
19039            );
19040
19041            test_parse_data_type!(
19042                dialect,
19043                "TIME WITHOUT TIME ZONE",
19044                DataType::Time(None, TimezoneInfo::WithoutTimeZone)
19045            );
19046
19047            test_parse_data_type!(
19048                dialect,
19049                "TIME(6) WITHOUT TIME ZONE",
19050                DataType::Time(Some(6), TimezoneInfo::WithoutTimeZone)
19051            );
19052
19053            test_parse_data_type!(
19054                dialect,
19055                "TIMESTAMP",
19056                DataType::Timestamp(None, TimezoneInfo::None)
19057            );
19058
19059            test_parse_data_type!(
19060                dialect,
19061                "TIMESTAMP(22)",
19062                DataType::Timestamp(Some(22), TimezoneInfo::None)
19063            );
19064
19065            test_parse_data_type!(
19066                dialect,
19067                "TIMESTAMP(22) WITH TIME ZONE",
19068                DataType::Timestamp(Some(22), TimezoneInfo::WithTimeZone)
19069            );
19070
19071            test_parse_data_type!(
19072                dialect,
19073                "TIMESTAMP(33) WITHOUT TIME ZONE",
19074                DataType::Timestamp(Some(33), TimezoneInfo::WithoutTimeZone)
19075            );
19076        }
19077    }
19078
19079    #[test]
19080    fn test_parse_schema_name() {
19081        // The expected name should be identical to the input name, which is why the macro doesn't take both separately
19082        macro_rules! test_parse_schema_name {
19083            ($input:expr, $expected_name:expr $(,)?) => {{
19084                all_dialects().run_parser_method(&*$input, |parser| {
19085                    let schema_name = parser.parse_schema_name().unwrap();
19086                    // Validate that the structure is the same as expected
19087                    assert_eq!(schema_name, $expected_name);
19088                    // Validate that the input and the expected structure serialization are the same
19089                    assert_eq!(schema_name.to_string(), $input.to_string());
19090                });
19091            }};
19092        }
19093
19094        let dummy_name = ObjectName::from(vec![Ident::new("dummy_name")]);
19095        let dummy_authorization = Ident::new("dummy_authorization");
19096
19097        test_parse_schema_name!(
19098            format!("{dummy_name}"),
19099            SchemaName::Simple(dummy_name.clone())
19100        );
19101
19102        test_parse_schema_name!(
19103            format!("AUTHORIZATION {dummy_authorization}"),
19104            SchemaName::UnnamedAuthorization(dummy_authorization.clone()),
19105        );
19106        test_parse_schema_name!(
19107            format!("{dummy_name} AUTHORIZATION {dummy_authorization}"),
19108            SchemaName::NamedAuthorization(dummy_name.clone(), dummy_authorization.clone()),
19109        );
19110    }
19111
19112    #[test]
19113    fn mysql_parse_index_table_constraint() {
19114        macro_rules! test_parse_table_constraint {
19115            ($dialect:expr, $input:expr, $expected:expr $(,)?) => {{
19116                $dialect.run_parser_method(&*$input, |parser| {
19117                    let constraint = parser.parse_optional_table_constraint().unwrap().unwrap();
19118                    // Validate that the structure is the same as expected
19119                    assert_eq!(constraint, $expected);
19120                    // Validate that the input and the expected structure serialization are the same
19121                    assert_eq!(constraint.to_string(), $input.to_string());
19122                });
19123            }};
19124        }
19125
19126        fn mk_expected_col(name: &str) -> IndexColumn {
19127            IndexColumn {
19128                column: OrderByExpr {
19129                    expr: Expr::Identifier(name.into()),
19130                    options: OrderByOptions {
19131                        asc: None,
19132                        nulls_first: None,
19133                    },
19134                    with_fill: None,
19135                },
19136                operator_class: None,
19137            }
19138        }
19139
19140        let dialect =
19141            TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(MySqlDialect {})]);
19142
19143        test_parse_table_constraint!(
19144            dialect,
19145            "INDEX (c1)",
19146            IndexConstraint {
19147                display_as_key: false,
19148                name: None,
19149                index_type: None,
19150                columns: vec![mk_expected_col("c1")],
19151                index_options: vec![],
19152            }
19153            .into()
19154        );
19155
19156        test_parse_table_constraint!(
19157            dialect,
19158            "KEY (c1)",
19159            IndexConstraint {
19160                display_as_key: true,
19161                name: None,
19162                index_type: None,
19163                columns: vec![mk_expected_col("c1")],
19164                index_options: vec![],
19165            }
19166            .into()
19167        );
19168
19169        test_parse_table_constraint!(
19170            dialect,
19171            "INDEX 'index' (c1, c2)",
19172            TableConstraint::Index(IndexConstraint {
19173                display_as_key: false,
19174                name: Some(Ident::with_quote('\'', "index")),
19175                index_type: None,
19176                columns: vec![mk_expected_col("c1"), mk_expected_col("c2")],
19177                index_options: vec![],
19178            })
19179        );
19180
19181        test_parse_table_constraint!(
19182            dialect,
19183            "INDEX USING BTREE (c1)",
19184            IndexConstraint {
19185                display_as_key: false,
19186                name: None,
19187                index_type: Some(IndexType::BTree),
19188                columns: vec![mk_expected_col("c1")],
19189                index_options: vec![],
19190            }
19191            .into()
19192        );
19193
19194        test_parse_table_constraint!(
19195            dialect,
19196            "INDEX USING HASH (c1)",
19197            IndexConstraint {
19198                display_as_key: false,
19199                name: None,
19200                index_type: Some(IndexType::Hash),
19201                columns: vec![mk_expected_col("c1")],
19202                index_options: vec![],
19203            }
19204            .into()
19205        );
19206
19207        test_parse_table_constraint!(
19208            dialect,
19209            "INDEX idx_name USING BTREE (c1)",
19210            IndexConstraint {
19211                display_as_key: false,
19212                name: Some(Ident::new("idx_name")),
19213                index_type: Some(IndexType::BTree),
19214                columns: vec![mk_expected_col("c1")],
19215                index_options: vec![],
19216            }
19217            .into()
19218        );
19219
19220        test_parse_table_constraint!(
19221            dialect,
19222            "INDEX idx_name USING HASH (c1)",
19223            IndexConstraint {
19224                display_as_key: false,
19225                name: Some(Ident::new("idx_name")),
19226                index_type: Some(IndexType::Hash),
19227                columns: vec![mk_expected_col("c1")],
19228                index_options: vec![],
19229            }
19230            .into()
19231        );
19232    }
19233
19234    #[test]
19235    fn test_tokenizer_error_loc() {
19236        let sql = "foo '";
19237        let ast = Parser::parse_sql(&GenericDialect, sql);
19238        assert_eq!(
19239            ast,
19240            Err(ParserError::TokenizerError(
19241                "Unterminated string literal at Line: 1, Column: 5".to_string()
19242            ))
19243        );
19244    }
19245
19246    #[test]
19247    fn test_parser_error_loc() {
19248        let sql = "SELECT this is a syntax error";
19249        let ast = Parser::parse_sql(&GenericDialect, sql);
19250        assert_eq!(
19251            ast,
19252            Err(ParserError::ParserError(
19253                "Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS, found: a at Line: 1, Column: 16"
19254                    .to_string()
19255            ))
19256        );
19257    }
19258
19259    #[test]
19260    fn test_nested_explain_error() {
19261        let sql = "EXPLAIN EXPLAIN SELECT 1";
19262        let ast = Parser::parse_sql(&GenericDialect, sql);
19263        assert_eq!(
19264            ast,
19265            Err(ParserError::ParserError(
19266                "Explain must be root of the plan".to_string()
19267            ))
19268        );
19269    }
19270
19271    #[test]
19272    fn test_parse_multipart_identifier_positive() {
19273        let dialect = TestedDialects::new(vec![Box::new(GenericDialect {})]);
19274
19275        // parse multipart with quotes
19276        let expected = vec![
19277            Ident {
19278                value: "CATALOG".to_string(),
19279                quote_style: None,
19280                span: Span::empty(),
19281            },
19282            Ident {
19283                value: "F(o)o. \"bar".to_string(),
19284                quote_style: Some('"'),
19285                span: Span::empty(),
19286            },
19287            Ident {
19288                value: "table".to_string(),
19289                quote_style: None,
19290                span: Span::empty(),
19291            },
19292        ];
19293        dialect.run_parser_method(r#"CATALOG."F(o)o. ""bar".table"#, |parser| {
19294            let actual = parser.parse_multipart_identifier().unwrap();
19295            assert_eq!(expected, actual);
19296        });
19297
19298        // allow whitespace between ident parts
19299        let expected = vec![
19300            Ident {
19301                value: "CATALOG".to_string(),
19302                quote_style: None,
19303                span: Span::empty(),
19304            },
19305            Ident {
19306                value: "table".to_string(),
19307                quote_style: None,
19308                span: Span::empty(),
19309            },
19310        ];
19311        dialect.run_parser_method("CATALOG . table", |parser| {
19312            let actual = parser.parse_multipart_identifier().unwrap();
19313            assert_eq!(expected, actual);
19314        });
19315    }
19316
19317    #[test]
19318    fn test_parse_multipart_identifier_negative() {
19319        macro_rules! test_parse_multipart_identifier_error {
19320            ($input:expr, $expected_err:expr $(,)?) => {{
19321                all_dialects().run_parser_method(&*$input, |parser| {
19322                    let actual_err = parser.parse_multipart_identifier().unwrap_err();
19323                    assert_eq!(actual_err.to_string(), $expected_err);
19324                });
19325            }};
19326        }
19327
19328        test_parse_multipart_identifier_error!(
19329            "",
19330            "sql parser error: Empty input when parsing identifier",
19331        );
19332
19333        test_parse_multipart_identifier_error!(
19334            "*schema.table",
19335            "sql parser error: Unexpected token in identifier: *",
19336        );
19337
19338        test_parse_multipart_identifier_error!(
19339            "schema.table*",
19340            "sql parser error: Unexpected token in identifier: *",
19341        );
19342
19343        test_parse_multipart_identifier_error!(
19344            "schema.table.",
19345            "sql parser error: Trailing period in identifier",
19346        );
19347
19348        test_parse_multipart_identifier_error!(
19349            "schema.*",
19350            "sql parser error: Unexpected token following period in identifier: *",
19351        );
19352    }
19353
19354    #[test]
19355    fn test_mysql_partition_selection() {
19356        let sql = "SELECT * FROM employees PARTITION (p0, p2)";
19357        let expected = vec!["p0", "p2"];
19358
19359        let ast: Vec<Statement> = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
19360        assert_eq!(ast.len(), 1);
19361        if let Statement::Query(v) = &ast[0] {
19362            if let SetExpr::Select(select) = &*v.body {
19363                assert_eq!(select.from.len(), 1);
19364                let from: &TableWithJoins = &select.from[0];
19365                let table_factor = &from.relation;
19366                if let TableFactor::Table { partitions, .. } = table_factor {
19367                    let actual: Vec<&str> = partitions
19368                        .iter()
19369                        .map(|ident| ident.value.as_str())
19370                        .collect();
19371                    assert_eq!(expected, actual);
19372                }
19373            }
19374        } else {
19375            panic!("fail to parse mysql partition selection");
19376        }
19377    }
19378
19379    #[test]
19380    fn test_replace_into_placeholders() {
19381        let sql = "REPLACE INTO t (a) VALUES (&a)";
19382
19383        assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
19384    }
19385
19386    #[test]
19387    fn test_replace_into_set_placeholder() {
19388        let sql = "REPLACE INTO t SET ?";
19389
19390        assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
19391    }
19392
19393    #[test]
19394    fn test_replace_incomplete() {
19395        let sql = r#"REPLACE"#;
19396
19397        assert!(Parser::parse_sql(&MySqlDialect {}, sql).is_err());
19398    }
19399
19400    #[test]
19401    fn test_placeholder_invalid_whitespace() {
19402        for w in ["  ", "/*invalid*/"] {
19403            let sql = format!("\nSELECT\n  :{w}fooBar");
19404            assert!(Parser::parse_sql(&GenericDialect, &sql).is_err());
19405        }
19406    }
19407}