sqlparser/parser/mod.rs

1// Licensed under the Apache License, Version 2.0 (the "License");
2// you may not use this file except in compliance with the License.
3// You may obtain a copy of the License at
4//
5// http://www.apache.org/licenses/LICENSE-2.0
6//
7// Unless required by applicable law or agreed to in writing, software
8// distributed under the License is distributed on an "AS IS" BASIS,
9// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
10// See the License for the specific language governing permissions and
11// limitations under the License.
12
13//! SQL Parser
14
15#[cfg(not(feature = "std"))]
16use alloc::{
17    boxed::Box,
18    format,
19    string::{String, ToString},
20    vec,
21    vec::Vec,
22};
23use core::{
24    fmt::{self, Display},
25    str::FromStr,
26};
27use helpers::attached_token::AttachedToken;
28
29use log::debug;
30
31use recursion::RecursionCounter;
32use IsLateral::*;
33use IsOptional::*;
34
35use crate::ast::helpers::{
36    key_value_options::{
37        KeyValueOption, KeyValueOptionKind, KeyValueOptions, KeyValueOptionsDelimiter,
38    },
39    stmt_create_table::{CreateTableBuilder, CreateTableConfiguration},
40};
41use crate::ast::Statement::CreatePolicy;
42use crate::ast::*;
43use crate::dialect::*;
44use crate::keywords::{Keyword, ALL_KEYWORDS};
45use crate::tokenizer::*;
use crate::parser::ParserState::ColumnDefinition;
47
48#[derive(Debug, Clone, PartialEq, Eq)]
49pub enum ParserError {
50    TokenizerError(String),
51    ParserError(String),
52    RecursionLimitExceeded,
53}
54
55// Use `Parser::expected` instead, if possible
56macro_rules! parser_err {
57    ($MSG:expr, $loc:expr) => {
58        Err(ParserError::ParserError(format!("{}{}", $MSG, $loc)))
59    };
60}
61
62mod alter;
63mod merge;
64
65#[cfg(feature = "std")]
/// Implementation of [`RecursionCounter`] when std is available
67mod recursion {
68    use std::cell::Cell;
69    use std::rc::Rc;
70
71    use super::ParserError;
72
73    /// Tracks remaining recursion depth. This value is decremented on
74    /// each call to [`RecursionCounter::try_decrease()`], when it reaches 0 an error will
75    /// be returned.
76    ///
    /// Note: Uses an [`std::rc::Rc`] and [`std::cell::Cell`] in order to satisfy the Rust
    /// borrow checker, so the automatic [`DepthGuard`] can hold a reference to the counter
    /// and restore the remaining depth when it is dropped.
80    ///
81    /// Note: when "recursive-protection" feature is enabled, this crate uses additional stack overflow protection
82    /// for some of its recursive methods. See [`recursive::recursive`] for more information.
83    pub(crate) struct RecursionCounter {
84        remaining_depth: Rc<Cell<usize>>,
85    }
86
87    impl RecursionCounter {
88        /// Creates a [`RecursionCounter`] with the specified maximum
89        /// depth
90        pub fn new(remaining_depth: usize) -> Self {
91            Self {
92                remaining_depth: Rc::new(remaining_depth.into()),
93            }
94        }
95
96        /// Decreases the remaining depth by 1.
97        ///
98        /// Returns [`Err`] if the remaining depth falls to 0.
99        ///
        /// Returns a [`DepthGuard`] which adds 1 to the
        /// remaining depth upon drop.
102        pub fn try_decrease(&self) -> Result<DepthGuard, ParserError> {
103            let old_value = self.remaining_depth.get();
104            // ran out of space
105            if old_value == 0 {
106                Err(ParserError::RecursionLimitExceeded)
107            } else {
108                self.remaining_depth.set(old_value - 1);
109                Ok(DepthGuard::new(Rc::clone(&self.remaining_depth)))
110            }
111        }
112    }
113
114    /// Guard that increases the remaining depth by 1 on drop
115    pub struct DepthGuard {
116        remaining_depth: Rc<Cell<usize>>,
117    }
118
119    impl DepthGuard {
120        fn new(remaining_depth: Rc<Cell<usize>>) -> Self {
121            Self { remaining_depth }
122        }
123    }
124    impl Drop for DepthGuard {
125        fn drop(&mut self) {
126            let old_value = self.remaining_depth.get();
127            self.remaining_depth.set(old_value + 1);
128        }
129    }
130}
131
132#[cfg(not(feature = "std"))]
133mod recursion {
    /// Implementation of [`RecursionCounter`] when std is NOT available (and which
    /// therefore does not guard against stack overflow).
136    ///
137    /// Has the same API as the std [`RecursionCounter`] implementation
138    /// but does not actually limit stack depth.
139    pub(crate) struct RecursionCounter {}
140
141    impl RecursionCounter {
142        pub fn new(_remaining_depth: usize) -> Self {
143            Self {}
144        }
145        pub fn try_decrease(&self) -> Result<DepthGuard, super::ParserError> {
146            Ok(DepthGuard {})
147        }
148    }
149
150    pub struct DepthGuard {}
151}
152
153#[derive(PartialEq, Eq)]
154pub enum IsOptional {
155    Optional,
156    Mandatory,
157}
158
159pub enum IsLateral {
160    Lateral,
161    NotLateral,
162}
163
164pub enum WildcardExpr {
165    Expr(Expr),
166    QualifiedWildcard(ObjectName),
167    Wildcard,
168}
169
170impl From<TokenizerError> for ParserError {
171    fn from(e: TokenizerError) -> Self {
172        ParserError::TokenizerError(e.to_string())
173    }
174}
175
176impl fmt::Display for ParserError {
177    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
178        write!(
179            f,
180            "sql parser error: {}",
181            match self {
182                ParserError::TokenizerError(s) => s,
183                ParserError::ParserError(s) => s,
184                ParserError::RecursionLimitExceeded => "recursion limit exceeded",
185            }
186        )
187    }
188}
189
190#[cfg(feature = "std")]
191impl std::error::Error for ParserError {}
192
193// By default, allow expressions up to this deep before erroring
194const DEFAULT_REMAINING_DEPTH: usize = 50;
195
196// A constant EOF token that can be referenced.
197const EOF_TOKEN: TokenWithSpan = TokenWithSpan {
198    token: Token::EOF,
199    span: Span {
200        start: Location { line: 0, column: 0 },
201        end: Location { line: 0, column: 0 },
202    },
203};
204
/// Composite type declarations using angle-bracket syntax can be arbitrarily
/// nested, such that the following declaration is possible:
207///      `ARRAY<ARRAY<INT>>`
208/// But the tokenizer recognizes the `>>` as a ShiftRight token.
209/// We work around that limitation when parsing a data type by accepting
210/// either a `>` or `>>` token in such cases, remembering which variant we
211/// matched.
/// In the latter case, having matched a `>>`, the parent type will not look for
/// its own closing `>`, since that token was already consumed while parsing the
/// child type.
215///
216/// See [Parser::parse_data_type] for details
217struct MatchedTrailingBracket(bool);
218
219impl From<bool> for MatchedTrailingBracket {
220    fn from(value: bool) -> Self {
221        Self(value)
222    }
223}
224
225/// Options that control how the [`Parser`] parses SQL text
226#[derive(Debug, Clone, PartialEq, Eq)]
227pub struct ParserOptions {
228    pub trailing_commas: bool,
229    /// Controls how literal values are unescaped. See
230    /// [`Tokenizer::with_unescape`] for more details.
231    pub unescape: bool,
232    /// Controls if the parser expects a semi-colon token
233    /// between statements. Default is `true`.
234    pub require_semicolon_stmt_delimiter: bool,
235}
236
237impl Default for ParserOptions {
238    fn default() -> Self {
239        Self {
240            trailing_commas: false,
241            unescape: true,
242            require_semicolon_stmt_delimiter: true,
243        }
244    }
245}
246
247impl ParserOptions {
248    /// Create a new [`ParserOptions`]
249    pub fn new() -> Self {
250        Default::default()
251    }
252
253    /// Set if trailing commas are allowed.
254    ///
255    /// If this option is `false` (the default), the following SQL will
256    /// not parse. If the option is `true`, the SQL will parse.
257    ///
258    /// ```sql
259    ///  SELECT
260    ///   foo,
261    ///   bar,
262    ///  FROM baz
263    /// ```
264    pub fn with_trailing_commas(mut self, trailing_commas: bool) -> Self {
265        self.trailing_commas = trailing_commas;
266        self
267    }
268
269    /// Set if literal values are unescaped. Defaults to true. See
270    /// [`Tokenizer::with_unescape`] for more details.
271    pub fn with_unescape(mut self, unescape: bool) -> Self {
272        self.unescape = unescape;
273        self
274    }
275}
276
277#[derive(Copy, Clone)]
278enum ParserState {
279    /// The default state of the parser.
280    Normal,
281    /// The state when parsing a CONNECT BY expression. This allows parsing
282    /// PRIOR expressions while still allowing prior as an identifier name
283    /// in other contexts.
284    ConnectBy,
285    /// The state when parsing column definitions.  This state prohibits
286    /// NOT NULL as an alias for IS NOT NULL.  For example:
287    /// ```sql
288    /// CREATE TABLE foo (abc BIGINT NOT NULL);
289    /// ```
290    ColumnDefinition,
291}
292
293/// A SQL Parser
294///
295/// This struct is the main entry point for parsing SQL queries.
296///
297/// # Functionality:
298/// * Parsing SQL: see examples on [`Parser::new`] and [`Parser::parse_sql`]
299/// * Controlling recursion: See [`Parser::with_recursion_limit`]
300/// * Controlling parser options: See [`Parser::with_options`]
301/// * Providing your own tokens: See [`Parser::with_tokens`]
302///
303/// # Internals
304///
305/// The parser uses a [`Tokenizer`] to tokenize the input SQL string into a
306/// `Vec` of [`TokenWithSpan`]s and maintains an `index` to the current token
307/// being processed. The token vec may contain multiple SQL statements.
308///
309/// * The "current" token is the token at `index - 1`
310/// * The "next" token is the token at `index`
311/// * The "previous" token is the token at `index - 2`
312///
313/// If `index` is equal to the length of the token stream, the 'next' token is
314/// [`Token::EOF`].
315///
316/// For example, the SQL string "SELECT * FROM foo" will be tokenized into
/// the following tokens:
318/// ```text
319///  [
320///    "SELECT", // token index 0
321///    " ",      // whitespace
322///    "*",
323///    " ",
324///    "FROM",
325///    " ",
326///    "foo"
327///   ]
328/// ```
329///
330///
331pub struct Parser<'a> {
332    /// The tokens
333    tokens: Vec<TokenWithSpan>,
334    /// The index of the first unprocessed token in [`Parser::tokens`].
335    index: usize,
336    /// The current state of the parser.
337    state: ParserState,
338    /// The SQL dialect to use.
339    dialect: &'a dyn Dialect,
340    /// Additional options that allow you to mix & match behavior
341    /// otherwise constrained to certain dialects (e.g. trailing
342    /// commas) and/or format of parse (e.g. unescaping).
343    options: ParserOptions,
344    /// Ensures the stack does not overflow by limiting recursion depth.
345    recursion_counter: RecursionCounter,
346}
347
348impl<'a> Parser<'a> {
349    /// Create a parser for a [`Dialect`]
350    ///
351    /// See also [`Parser::parse_sql`]
352    ///
353    /// Example:
354    /// ```
355    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
356    /// # fn main() -> Result<(), ParserError> {
357    /// let dialect = GenericDialect{};
358    /// let statements = Parser::new(&dialect)
359    ///   .try_with_sql("SELECT * FROM foo")?
360    ///   .parse_statements()?;
361    /// # Ok(())
362    /// # }
363    /// ```
364    pub fn new(dialect: &'a dyn Dialect) -> Self {
365        Self {
366            tokens: vec![],
367            index: 0,
368            state: ParserState::Normal,
369            dialect,
370            recursion_counter: RecursionCounter::new(DEFAULT_REMAINING_DEPTH),
371            options: ParserOptions::new().with_trailing_commas(dialect.supports_trailing_commas()),
372        }
373    }
374
375    /// Specify the maximum recursion limit while parsing.
376    ///
377    /// [`Parser`] prevents stack overflows by returning
378    /// [`ParserError::RecursionLimitExceeded`] if the parser exceeds
379    /// this depth while processing the query.
380    ///
381    /// Example:
382    /// ```
383    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
384    /// # fn main() -> Result<(), ParserError> {
385    /// let dialect = GenericDialect{};
386    /// let result = Parser::new(&dialect)
387    ///   .with_recursion_limit(1)
388    ///   .try_with_sql("SELECT * FROM foo WHERE (a OR (b OR (c OR d)))")?
389    ///   .parse_statements();
390    ///   assert_eq!(result, Err(ParserError::RecursionLimitExceeded));
391    /// # Ok(())
392    /// # }
393    /// ```
394    ///
395    /// Note: when "recursive-protection" feature is enabled, this crate uses additional stack overflow protection
    /// for some of its recursive methods. See [`recursive::recursive`] for more information.
397    pub fn with_recursion_limit(mut self, recursion_limit: usize) -> Self {
398        self.recursion_counter = RecursionCounter::new(recursion_limit);
399        self
400    }
401
402    /// Specify additional parser options
403    ///
404    /// [`Parser`] supports additional options ([`ParserOptions`])
405    /// that allow you to mix & match behavior otherwise constrained
406    /// to certain dialects (e.g. trailing commas).
407    ///
408    /// Example:
409    /// ```
410    /// # use sqlparser::{parser::{Parser, ParserError, ParserOptions}, dialect::GenericDialect};
411    /// # fn main() -> Result<(), ParserError> {
412    /// let dialect = GenericDialect{};
413    /// let options = ParserOptions::new()
414    ///    .with_trailing_commas(true)
415    ///    .with_unescape(false);
416    /// let result = Parser::new(&dialect)
417    ///   .with_options(options)
418    ///   .try_with_sql("SELECT a, b, COUNT(*), FROM foo GROUP BY a, b,")?
419    ///   .parse_statements();
420    ///   assert!(matches!(result, Ok(_)));
421    /// # Ok(())
422    /// # }
423    /// ```
424    pub fn with_options(mut self, options: ParserOptions) -> Self {
425        self.options = options;
426        self
427    }
428
429    /// Reset this parser to parse the specified token stream
430    pub fn with_tokens_with_locations(mut self, tokens: Vec<TokenWithSpan>) -> Self {
431        self.tokens = tokens;
432        self.index = 0;
433        self
434    }
435
436    /// Reset this parser state to parse the specified tokens
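    ///
    /// A minimal sketch of providing your own tokens (assuming tokens built with
    /// [`Token::make_keyword`] and [`Token::Number`]):
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect, tokenizer::Token};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let statements = Parser::new(&dialect)
    ///   .with_tokens(vec![
    ///     Token::make_keyword("SELECT"),
    ///     Token::Number("1".to_string(), false),
    ///   ])
    ///   .parse_statements()?;
    /// assert_eq!(statements.len(), 1);
    /// # Ok(())
    /// # }
    /// ```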
437    pub fn with_tokens(self, tokens: Vec<Token>) -> Self {
438        // Put in dummy locations
439        let tokens_with_locations: Vec<TokenWithSpan> = tokens
440            .into_iter()
441            .map(|token| TokenWithSpan {
442                token,
443                span: Span::empty(),
444            })
445            .collect();
446        self.with_tokens_with_locations(tokens_with_locations)
447    }
448
    /// Tokenize the SQL string and set this [`Parser`]'s state to
    /// parse the resulting tokens
451    ///
452    /// Returns an error if there was an error tokenizing the SQL string.
453    ///
454    /// See example on [`Parser::new()`] for an example
455    pub fn try_with_sql(self, sql: &str) -> Result<Self, ParserError> {
456        debug!("Parsing sql '{sql}'...");
457        let tokens = Tokenizer::new(self.dialect, sql)
458            .with_unescape(self.options.unescape)
459            .tokenize_with_location()?;
460        Ok(self.with_tokens_with_locations(tokens))
461    }
462
463    /// Parse potentially multiple statements
464    ///
465    /// Example
466    /// ```
467    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
468    /// # fn main() -> Result<(), ParserError> {
469    /// let dialect = GenericDialect{};
470    /// let statements = Parser::new(&dialect)
471    ///   // Parse a SQL string with 2 separate statements
472    ///   .try_with_sql("SELECT * FROM foo; SELECT * FROM bar;")?
473    ///   .parse_statements()?;
474    /// assert_eq!(statements.len(), 2);
475    /// # Ok(())
476    /// # }
477    /// ```
478    pub fn parse_statements(&mut self) -> Result<Vec<Statement>, ParserError> {
479        let mut stmts = Vec::new();
480        let mut expecting_statement_delimiter = false;
481        loop {
482            // ignore empty statements (between successive statement delimiters)
483            while self.consume_token(&Token::SemiColon) {
484                expecting_statement_delimiter = false;
485            }
486
487            if !self.options.require_semicolon_stmt_delimiter {
488                expecting_statement_delimiter = false;
489            }
490
491            match self.peek_token().token {
492                Token::EOF => break,
493
                // an `END` keyword terminates the statement list (e.g. inside a
                // `BEGIN ... END` block)
495                Token::Word(word) => {
496                    if expecting_statement_delimiter && word.keyword == Keyword::END {
497                        break;
498                    }
499                }
500                _ => {}
501            }
502
503            if expecting_statement_delimiter {
504                return self.expected("end of statement", self.peek_token());
505            }
506
507            let statement = self.parse_statement()?;
508            stmts.push(statement);
509            expecting_statement_delimiter = true;
510        }
511        Ok(stmts)
512    }
513
    /// Convenience method to parse a string containing one or more SQL
    /// statements and produce an Abstract Syntax Tree (AST).
516    ///
517    /// Example
518    /// ```
519    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
520    /// # fn main() -> Result<(), ParserError> {
521    /// let dialect = GenericDialect{};
522    /// let statements = Parser::parse_sql(
523    ///   &dialect, "SELECT * FROM foo"
524    /// )?;
525    /// assert_eq!(statements.len(), 1);
526    /// # Ok(())
527    /// # }
528    /// ```
529    pub fn parse_sql(dialect: &dyn Dialect, sql: &str) -> Result<Vec<Statement>, ParserError> {
530        Parser::new(dialect).try_with_sql(sql)?.parse_statements()
531    }
532
533    /// Parse a single top-level statement (such as SELECT, INSERT, CREATE, etc.),
534    /// stopping before the statement separator, if any.
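    ///
    /// A minimal illustrative example:
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let statement = Parser::new(&dialect)
    ///   .try_with_sql("SELECT * FROM foo")?
    ///   .parse_statement()?;
    /// # Ok(())
    /// # }
    /// ```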
535    pub fn parse_statement(&mut self) -> Result<Statement, ParserError> {
536        let _guard = self.recursion_counter.try_decrease()?;
537
538        // allow the dialect to override statement parsing
539        if let Some(statement) = self.dialect.parse_statement(self) {
540            return statement;
541        }
542
543        let next_token = self.next_token();
544        match &next_token.token {
545            Token::Word(w) => match w.keyword {
546                Keyword::KILL => self.parse_kill(),
547                Keyword::FLUSH => self.parse_flush(),
548                Keyword::DESC => self.parse_explain(DescribeAlias::Desc),
549                Keyword::DESCRIBE => self.parse_explain(DescribeAlias::Describe),
550                Keyword::EXPLAIN => self.parse_explain(DescribeAlias::Explain),
551                Keyword::ANALYZE => self.parse_analyze(),
552                Keyword::CASE => {
553                    self.prev_token();
554                    self.parse_case_stmt()
555                }
556                Keyword::IF => {
557                    self.prev_token();
558                    self.parse_if_stmt()
559                }
560                Keyword::WHILE => {
561                    self.prev_token();
562                    self.parse_while()
563                }
564                Keyword::RAISE => {
565                    self.prev_token();
566                    self.parse_raise_stmt()
567                }
568                Keyword::SELECT | Keyword::WITH | Keyword::VALUES | Keyword::FROM => {
569                    self.prev_token();
570                    self.parse_query().map(Statement::Query)
571                }
572                Keyword::TRUNCATE => self.parse_truncate(),
573                Keyword::ATTACH => {
574                    if dialect_of!(self is DuckDbDialect) {
575                        self.parse_attach_duckdb_database()
576                    } else {
577                        self.parse_attach_database()
578                    }
579                }
580                Keyword::DETACH if dialect_of!(self is DuckDbDialect | GenericDialect) => {
581                    self.parse_detach_duckdb_database()
582                }
583                Keyword::MSCK => self.parse_msck(),
584                Keyword::CREATE => self.parse_create(),
585                Keyword::CACHE => self.parse_cache_table(),
586                Keyword::DROP => self.parse_drop(),
587                Keyword::DISCARD => self.parse_discard(),
588                Keyword::DECLARE => self.parse_declare(),
589                Keyword::FETCH => self.parse_fetch_statement(),
590                Keyword::DELETE => self.parse_delete(next_token),
591                Keyword::INSERT => self.parse_insert(next_token),
592                Keyword::REPLACE => self.parse_replace(next_token),
593                Keyword::UNCACHE => self.parse_uncache_table(),
594                Keyword::UPDATE => self.parse_update(next_token),
595                Keyword::ALTER => self.parse_alter(),
596                Keyword::CALL => self.parse_call(),
597                Keyword::COPY => self.parse_copy(),
598                Keyword::OPEN => {
599                    self.prev_token();
600                    self.parse_open()
601                }
602                Keyword::CLOSE => self.parse_close(),
603                Keyword::SET => self.parse_set(),
604                Keyword::SHOW => self.parse_show(),
605                Keyword::USE => self.parse_use(),
606                Keyword::GRANT => self.parse_grant(),
607                Keyword::DENY => {
608                    self.prev_token();
609                    self.parse_deny()
610                }
611                Keyword::REVOKE => self.parse_revoke(),
612                Keyword::START => self.parse_start_transaction(),
613                Keyword::BEGIN => self.parse_begin(),
614                Keyword::END => self.parse_end(),
615                Keyword::SAVEPOINT => self.parse_savepoint(),
616                Keyword::RELEASE => self.parse_release(),
617                Keyword::COMMIT => self.parse_commit(),
618                Keyword::RAISERROR => Ok(self.parse_raiserror()?),
619                Keyword::ROLLBACK => self.parse_rollback(),
620                Keyword::ASSERT => self.parse_assert(),
621                // `PREPARE`, `EXECUTE` and `DEALLOCATE` are Postgres-specific
                // syntaxes. They are used for Postgres prepared statements.
623                Keyword::DEALLOCATE => self.parse_deallocate(),
624                Keyword::EXECUTE | Keyword::EXEC => self.parse_execute(),
625                Keyword::PREPARE => self.parse_prepare(),
626                Keyword::MERGE => self.parse_merge(next_token),
627                // `LISTEN`, `UNLISTEN` and `NOTIFY` are Postgres-specific
                // syntaxes. They are used for Postgres asynchronous notifications.
629                Keyword::LISTEN if self.dialect.supports_listen_notify() => self.parse_listen(),
630                Keyword::UNLISTEN if self.dialect.supports_listen_notify() => self.parse_unlisten(),
631                Keyword::NOTIFY if self.dialect.supports_listen_notify() => self.parse_notify(),
632                // `PRAGMA` is sqlite specific https://www.sqlite.org/pragma.html
633                Keyword::PRAGMA => self.parse_pragma(),
634                Keyword::UNLOAD => {
635                    self.prev_token();
636                    self.parse_unload()
637                }
638                Keyword::RENAME => self.parse_rename(),
639                // `INSTALL` is duckdb specific https://duckdb.org/docs/extensions/overview
640                Keyword::INSTALL if dialect_of!(self is DuckDbDialect | GenericDialect) => {
641                    self.parse_install()
642                }
643                Keyword::LOAD => self.parse_load(),
644                // `OPTIMIZE` is clickhouse specific https://clickhouse.tech/docs/en/sql-reference/statements/optimize/
645                Keyword::OPTIMIZE if dialect_of!(self is ClickHouseDialect | GenericDialect) => {
646                    self.parse_optimize_table()
647                }
648                // `COMMENT` is snowflake specific https://docs.snowflake.com/en/sql-reference/sql/comment
649                Keyword::COMMENT if self.dialect.supports_comment_on() => self.parse_comment(),
650                Keyword::PRINT => self.parse_print(),
651                Keyword::RETURN => self.parse_return(),
652                Keyword::EXPORT => {
653                    self.prev_token();
654                    self.parse_export_data()
655                }
656                Keyword::VACUUM => {
657                    self.prev_token();
658                    self.parse_vacuum()
659                }
660                Keyword::RESET => self.parse_reset(),
661                _ => self.expected("an SQL statement", next_token),
662            },
663            Token::LParen => {
664                self.prev_token();
665                self.parse_query().map(Statement::Query)
666            }
667            _ => self.expected("an SQL statement", next_token),
668        }
669    }
670
671    /// Parse a `CASE` statement.
672    ///
673    /// See [Statement::Case]
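    ///
    /// An illustrative sketch of the accepted syntax (identifiers are placeholders):
    /// ```sql
    /// CASE
    ///     WHEN a = 1 THEN SELECT 1;
    ///     WHEN a = 2 THEN SELECT 2;
    ///     ELSE SELECT 3;
    /// END CASE;
    /// ```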
674    pub fn parse_case_stmt(&mut self) -> Result<Statement, ParserError> {
675        let case_token = self.expect_keyword(Keyword::CASE)?;
676
677        let match_expr = if self.peek_keyword(Keyword::WHEN) {
678            None
679        } else {
680            Some(self.parse_expr()?)
681        };
682
683        self.expect_keyword_is(Keyword::WHEN)?;
684        let when_blocks = self.parse_keyword_separated(Keyword::WHEN, |parser| {
685            parser.parse_conditional_statement_block(&[Keyword::WHEN, Keyword::ELSE, Keyword::END])
686        })?;
687
688        let else_block = if self.parse_keyword(Keyword::ELSE) {
689            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
690        } else {
691            None
692        };
693
694        let mut end_case_token = self.expect_keyword(Keyword::END)?;
695        if self.peek_keyword(Keyword::CASE) {
696            end_case_token = self.expect_keyword(Keyword::CASE)?;
697        }
698
699        Ok(Statement::Case(CaseStatement {
700            case_token: AttachedToken(case_token),
701            match_expr,
702            when_blocks,
703            else_block,
704            end_case_token: AttachedToken(end_case_token),
705        }))
706    }
707
708    /// Parse an `IF` statement.
709    ///
710    /// See [Statement::If]
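    ///
    /// An illustrative sketch of the accepted syntax (identifiers are placeholders):
    /// ```sql
    /// IF a = 1 THEN
    ///     SELECT 1;
    /// ELSEIF a = 2 THEN
    ///     SELECT 2;
    /// ELSE
    ///     SELECT 3;
    /// END IF;
    /// ```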
711    pub fn parse_if_stmt(&mut self) -> Result<Statement, ParserError> {
712        self.expect_keyword_is(Keyword::IF)?;
713        let if_block = self.parse_conditional_statement_block(&[
714            Keyword::ELSE,
715            Keyword::ELSEIF,
716            Keyword::END,
717        ])?;
718
719        let elseif_blocks = if self.parse_keyword(Keyword::ELSEIF) {
720            self.parse_keyword_separated(Keyword::ELSEIF, |parser| {
721                parser.parse_conditional_statement_block(&[
722                    Keyword::ELSEIF,
723                    Keyword::ELSE,
724                    Keyword::END,
725                ])
726            })?
727        } else {
728            vec![]
729        };
730
731        let else_block = if self.parse_keyword(Keyword::ELSE) {
732            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
733        } else {
734            None
735        };
736
737        self.expect_keyword_is(Keyword::END)?;
738        let end_token = self.expect_keyword(Keyword::IF)?;
739
740        Ok(Statement::If(IfStatement {
741            if_block,
742            elseif_blocks,
743            else_block,
744            end_token: Some(AttachedToken(end_token)),
745        }))
746    }
747
748    /// Parse a `WHILE` statement.
749    ///
750    /// See [Statement::While]
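    ///
    /// An illustrative sketch of the accepted syntax:
    /// ```sql
    /// WHILE 1 = 1
    /// BEGIN
    ///     SELECT 1;
    /// END
    /// ```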
751    fn parse_while(&mut self) -> Result<Statement, ParserError> {
752        self.expect_keyword_is(Keyword::WHILE)?;
753        let while_block = self.parse_conditional_statement_block(&[Keyword::END])?;
754
755        Ok(Statement::While(WhileStatement { while_block }))
756    }
757
758    /// Parses an expression and associated list of statements
759    /// belonging to a conditional statement like `IF` or `WHEN` or `WHILE`.
760    ///
761    /// Example:
762    /// ```sql
763    /// IF condition THEN statement1; statement2;
764    /// ```
765    fn parse_conditional_statement_block(
766        &mut self,
767        terminal_keywords: &[Keyword],
768    ) -> Result<ConditionalStatementBlock, ParserError> {
769        let start_token = self.get_current_token().clone(); // self.expect_keyword(keyword)?;
770        let mut then_token = None;
771
772        let condition = match &start_token.token {
773            Token::Word(w) if w.keyword == Keyword::ELSE => None,
774            Token::Word(w) if w.keyword == Keyword::WHILE => {
775                let expr = self.parse_expr()?;
776                Some(expr)
777            }
778            _ => {
779                let expr = self.parse_expr()?;
780                then_token = Some(AttachedToken(self.expect_keyword(Keyword::THEN)?));
781                Some(expr)
782            }
783        };
784
785        let conditional_statements = self.parse_conditional_statements(terminal_keywords)?;
786
787        Ok(ConditionalStatementBlock {
788            start_token: AttachedToken(start_token),
789            condition,
790            then_token,
791            conditional_statements,
792        })
793    }
794
    /// Parse a BEGIN/END block or a sequence of statements.
    /// This could be inside a conditional (IF, CASE, WHILE, etc.) or an object body
    /// that optionally wraps one or more statements in BEGIN/END.
797    pub(crate) fn parse_conditional_statements(
798        &mut self,
799        terminal_keywords: &[Keyword],
800    ) -> Result<ConditionalStatements, ParserError> {
801        let conditional_statements = if self.peek_keyword(Keyword::BEGIN) {
802            let begin_token = self.expect_keyword(Keyword::BEGIN)?;
803            let statements = self.parse_statement_list(terminal_keywords)?;
804            let end_token = self.expect_keyword(Keyword::END)?;
805
806            ConditionalStatements::BeginEnd(BeginEndStatements {
807                begin_token: AttachedToken(begin_token),
808                statements,
809                end_token: AttachedToken(end_token),
810            })
811        } else {
812            ConditionalStatements::Sequence {
813                statements: self.parse_statement_list(terminal_keywords)?,
814            }
815        };
816        Ok(conditional_statements)
817    }
818
819    /// Parse a `RAISE` statement.
820    ///
821    /// See [Statement::Raise]
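    ///
    /// An illustrative sketch of the accepted syntax (the message text is a placeholder):
    /// ```sql
    /// RAISE USING MESSAGE = 'Something went wrong';
    /// ```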
822    pub fn parse_raise_stmt(&mut self) -> Result<Statement, ParserError> {
823        self.expect_keyword_is(Keyword::RAISE)?;
824
825        let value = if self.parse_keywords(&[Keyword::USING, Keyword::MESSAGE]) {
826            self.expect_token(&Token::Eq)?;
827            Some(RaiseStatementValue::UsingMessage(self.parse_expr()?))
828        } else {
829            self.maybe_parse(|parser| parser.parse_expr().map(RaiseStatementValue::Expr))?
830        };
831
832        Ok(Statement::Raise(RaiseStatement { value }))
833    }
834
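    /// Parse a `COMMENT ON ... IS ...` statement; the `COMMENT` keyword has already
    /// been consumed by the caller. An illustrative sketch of the accepted syntax
    /// (names are placeholders):
    /// ```sql
    /// COMMENT ON TABLE my_table IS 'This is my table';
    /// ```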
835    pub fn parse_comment(&mut self) -> Result<Statement, ParserError> {
836        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
837
838        self.expect_keyword_is(Keyword::ON)?;
839        let token = self.next_token();
840
841        let (object_type, object_name) = match token.token {
842            Token::Word(w) if w.keyword == Keyword::COLUMN => {
843                (CommentObject::Column, self.parse_object_name(false)?)
844            }
845            Token::Word(w) if w.keyword == Keyword::TABLE => {
846                (CommentObject::Table, self.parse_object_name(false)?)
847            }
848            Token::Word(w) if w.keyword == Keyword::EXTENSION => {
849                (CommentObject::Extension, self.parse_object_name(false)?)
850            }
851            Token::Word(w) if w.keyword == Keyword::SCHEMA => {
852                (CommentObject::Schema, self.parse_object_name(false)?)
853            }
854            Token::Word(w) if w.keyword == Keyword::DATABASE => {
855                (CommentObject::Database, self.parse_object_name(false)?)
856            }
857            Token::Word(w) if w.keyword == Keyword::USER => {
858                (CommentObject::User, self.parse_object_name(false)?)
859            }
860            Token::Word(w) if w.keyword == Keyword::ROLE => {
861                (CommentObject::Role, self.parse_object_name(false)?)
862            }
863            _ => self.expected("comment object_type", token)?,
864        };
865
866        self.expect_keyword_is(Keyword::IS)?;
867        let comment = if self.parse_keyword(Keyword::NULL) {
868            None
869        } else {
870            Some(self.parse_literal_string()?)
871        };
872        Ok(Statement::Comment {
873            object_type,
874            object_name,
875            comment,
876            if_exists,
877        })
878    }
879
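    /// Parse a MySQL `FLUSH` statement; the `FLUSH` keyword has already been consumed
    /// by the caller. An illustrative sketch (table names are placeholders):
    /// ```sql
    /// FLUSH TABLES t1, t2 WITH READ LOCK;
    /// ```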
880    pub fn parse_flush(&mut self) -> Result<Statement, ParserError> {
881        let mut channel = None;
882        let mut tables: Vec<ObjectName> = vec![];
883        let mut read_lock = false;
884        let mut export = false;
885
886        if !dialect_of!(self is MySqlDialect | GenericDialect) {
887            return parser_err!("Unsupported statement FLUSH", self.peek_token().span.start);
888        }
889
890        let location = if self.parse_keyword(Keyword::NO_WRITE_TO_BINLOG) {
891            Some(FlushLocation::NoWriteToBinlog)
892        } else if self.parse_keyword(Keyword::LOCAL) {
893            Some(FlushLocation::Local)
894        } else {
895            None
896        };
897
898        let object_type = if self.parse_keywords(&[Keyword::BINARY, Keyword::LOGS]) {
899            FlushType::BinaryLogs
900        } else if self.parse_keywords(&[Keyword::ENGINE, Keyword::LOGS]) {
901            FlushType::EngineLogs
902        } else if self.parse_keywords(&[Keyword::ERROR, Keyword::LOGS]) {
903            FlushType::ErrorLogs
904        } else if self.parse_keywords(&[Keyword::GENERAL, Keyword::LOGS]) {
905            FlushType::GeneralLogs
906        } else if self.parse_keywords(&[Keyword::HOSTS]) {
907            FlushType::Hosts
908        } else if self.parse_keyword(Keyword::PRIVILEGES) {
909            FlushType::Privileges
910        } else if self.parse_keyword(Keyword::OPTIMIZER_COSTS) {
911            FlushType::OptimizerCosts
912        } else if self.parse_keywords(&[Keyword::RELAY, Keyword::LOGS]) {
913            if self.parse_keywords(&[Keyword::FOR, Keyword::CHANNEL]) {
914                channel = Some(self.parse_object_name(false).unwrap().to_string());
915            }
916            FlushType::RelayLogs
917        } else if self.parse_keywords(&[Keyword::SLOW, Keyword::LOGS]) {
918            FlushType::SlowLogs
919        } else if self.parse_keyword(Keyword::STATUS) {
920            FlushType::Status
921        } else if self.parse_keyword(Keyword::USER_RESOURCES) {
922            FlushType::UserResources
923        } else if self.parse_keywords(&[Keyword::LOGS]) {
924            FlushType::Logs
925        } else if self.parse_keywords(&[Keyword::TABLES]) {
926            loop {
927                let next_token = self.next_token();
928                match &next_token.token {
929                    Token::Word(w) => match w.keyword {
930                        Keyword::WITH => {
931                            read_lock = self.parse_keywords(&[Keyword::READ, Keyword::LOCK]);
932                        }
933                        Keyword::FOR => {
934                            export = self.parse_keyword(Keyword::EXPORT);
935                        }
936                        Keyword::NoKeyword => {
937                            self.prev_token();
938                            tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
939                        }
940                        _ => {}
941                    },
942                    _ => {
943                        break;
944                    }
945                }
946            }
947
948            FlushType::Tables
949        } else {
950            return self.expected(
951                "BINARY LOGS, ENGINE LOGS, ERROR LOGS, GENERAL LOGS, HOSTS, LOGS, PRIVILEGES, OPTIMIZER_COSTS,\
952                 RELAY LOGS [FOR CHANNEL channel], SLOW LOGS, STATUS, USER_RESOURCES",
953                self.peek_token(),
954            );
955        };
956
957        Ok(Statement::Flush {
958            object_type,
959            location,
960            channel,
961            read_lock,
962            export,
963            tables,
964        })
965    }
966
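    /// Parse a Hive `MSCK` statement; the `MSCK` keyword has already been consumed
    /// by the caller. An illustrative sketch (names are placeholders):
    /// ```sql
    /// MSCK REPAIR TABLE db_name.table_name SYNC PARTITIONS;
    /// ```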
967    pub fn parse_msck(&mut self) -> Result<Statement, ParserError> {
968        let repair = self.parse_keyword(Keyword::REPAIR);
969        self.expect_keyword_is(Keyword::TABLE)?;
970        let table_name = self.parse_object_name(false)?;
971        let partition_action = self
972            .maybe_parse(|parser| {
973                let pa = match parser.parse_one_of_keywords(&[
974                    Keyword::ADD,
975                    Keyword::DROP,
976                    Keyword::SYNC,
977                ]) {
978                    Some(Keyword::ADD) => Some(AddDropSync::ADD),
979                    Some(Keyword::DROP) => Some(AddDropSync::DROP),
980                    Some(Keyword::SYNC) => Some(AddDropSync::SYNC),
981                    _ => None,
982                };
983                parser.expect_keyword_is(Keyword::PARTITIONS)?;
984                Ok(pa)
985            })?
986            .unwrap_or_default();
987        Ok(Msck {
988            repair,
989            table_name,
990            partition_action,
991        }
992        .into())
993    }
994
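    /// Parse a `TRUNCATE` statement; the `TRUNCATE` keyword has already been consumed
    /// by the caller. An illustrative, PostgreSQL-style sketch (table names are placeholders):
    /// ```sql
    /// TRUNCATE TABLE t1, t2 RESTART IDENTITY CASCADE;
    /// ```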
995    pub fn parse_truncate(&mut self) -> Result<Statement, ParserError> {
996        let table = self.parse_keyword(Keyword::TABLE);
997
998        let table_names = self
999            .parse_comma_separated(|p| {
1000                Ok((p.parse_keyword(Keyword::ONLY), p.parse_object_name(false)?))
1001            })?
1002            .into_iter()
1003            .map(|(only, name)| TruncateTableTarget { name, only })
1004            .collect();
1005
1006        let mut partitions = None;
1007        if self.parse_keyword(Keyword::PARTITION) {
1008            self.expect_token(&Token::LParen)?;
1009            partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
1010            self.expect_token(&Token::RParen)?;
1011        }
1012
1013        let mut identity = None;
1014        let mut cascade = None;
1015
1016        if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
1017            identity = if self.parse_keywords(&[Keyword::RESTART, Keyword::IDENTITY]) {
1018                Some(TruncateIdentityOption::Restart)
1019            } else if self.parse_keywords(&[Keyword::CONTINUE, Keyword::IDENTITY]) {
1020                Some(TruncateIdentityOption::Continue)
1021            } else {
1022                None
1023            };
1024
1025            cascade = self.parse_cascade_option();
1026        };
1027
1028        let on_cluster = self.parse_optional_on_cluster()?;
1029
1030        Ok(Truncate {
1031            table_names,
1032            partitions,
1033            table,
1034            identity,
1035            cascade,
1036            on_cluster,
1037        }
1038        .into())
1039    }
1040
1041    fn parse_cascade_option(&mut self) -> Option<CascadeOption> {
1042        if self.parse_keyword(Keyword::CASCADE) {
1043            Some(CascadeOption::Cascade)
1044        } else if self.parse_keyword(Keyword::RESTRICT) {
1045            Some(CascadeOption::Restrict)
1046        } else {
1047            None
1048        }
1049    }
1050
1051    pub fn parse_attach_duckdb_database_options(
1052        &mut self,
1053    ) -> Result<Vec<AttachDuckDBDatabaseOption>, ParserError> {
1054        if !self.consume_token(&Token::LParen) {
1055            return Ok(vec![]);
1056        }
1057
1058        let mut options = vec![];
1059        loop {
1060            if self.parse_keyword(Keyword::READ_ONLY) {
1061                let boolean = if self.parse_keyword(Keyword::TRUE) {
1062                    Some(true)
1063                } else if self.parse_keyword(Keyword::FALSE) {
1064                    Some(false)
1065                } else {
1066                    None
1067                };
1068                options.push(AttachDuckDBDatabaseOption::ReadOnly(boolean));
1069            } else if self.parse_keyword(Keyword::TYPE) {
1070                let ident = self.parse_identifier()?;
1071                options.push(AttachDuckDBDatabaseOption::Type(ident));
1072            } else {
1073                return self.expected("expected one of: ), READ_ONLY, TYPE", self.peek_token());
1074            };
1075
1076            if self.consume_token(&Token::RParen) {
1077                return Ok(options);
1078            } else if self.consume_token(&Token::Comma) {
1079                continue;
1080            } else {
1081                return self.expected("expected one of: ')', ','", self.peek_token());
1082            }
1083        }
1084    }
1085
1086    pub fn parse_attach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
1087        let database = self.parse_keyword(Keyword::DATABASE);
1088        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
1089        let database_path = self.parse_identifier()?;
1090        let database_alias = if self.parse_keyword(Keyword::AS) {
1091            Some(self.parse_identifier()?)
1092        } else {
1093            None
1094        };
1095
1096        let attach_options = self.parse_attach_duckdb_database_options()?;
1097        Ok(Statement::AttachDuckDBDatabase {
1098            if_not_exists,
1099            database,
1100            database_path,
1101            database_alias,
1102            attach_options,
1103        })
1104    }
1105
1106    pub fn parse_detach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
1107        let database = self.parse_keyword(Keyword::DATABASE);
1108        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
1109        let database_alias = self.parse_identifier()?;
1110        Ok(Statement::DetachDuckDBDatabase {
1111            if_exists,
1112            database,
1113            database_alias,
1114        })
1115    }
1116
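    /// Parse a SQLite-style `ATTACH DATABASE` statement; the `ATTACH` keyword has already
    /// been consumed by the caller. An illustrative sketch (the path and alias are placeholders):
    /// ```sql
    /// ATTACH DATABASE 'path/to/file.db' AS my_db;
    /// ```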
1117    pub fn parse_attach_database(&mut self) -> Result<Statement, ParserError> {
1118        let database = self.parse_keyword(Keyword::DATABASE);
1119        let database_file_name = self.parse_expr()?;
1120        self.expect_keyword_is(Keyword::AS)?;
1121        let schema_name = self.parse_identifier()?;
1122        Ok(Statement::AttachDatabase {
1123            database,
1124            schema_name,
1125            database_file_name,
1126        })
1127    }
1128
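    /// Parse an `ANALYZE` statement; the `ANALYZE` keyword has already been consumed
    /// by the caller. An illustrative, Hive-style sketch (names and values are placeholders):
    /// ```sql
    /// ANALYZE TABLE t PARTITION (ds = '2024-01-01') COMPUTE STATISTICS NOSCAN;
    /// ```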
1129    pub fn parse_analyze(&mut self) -> Result<Statement, ParserError> {
1130        let has_table_keyword = self.parse_keyword(Keyword::TABLE);
1131        let table_name = self.parse_object_name(false)?;
1132        let mut for_columns = false;
1133        let mut cache_metadata = false;
1134        let mut noscan = false;
1135        let mut partitions = None;
1136        let mut compute_statistics = false;
1137        let mut columns = vec![];
1138        loop {
1139            match self.parse_one_of_keywords(&[
1140                Keyword::PARTITION,
1141                Keyword::FOR,
1142                Keyword::CACHE,
1143                Keyword::NOSCAN,
1144                Keyword::COMPUTE,
1145            ]) {
1146                Some(Keyword::PARTITION) => {
1147                    self.expect_token(&Token::LParen)?;
1148                    partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
1149                    self.expect_token(&Token::RParen)?;
1150                }
1151                Some(Keyword::NOSCAN) => noscan = true,
1152                Some(Keyword::FOR) => {
1153                    self.expect_keyword_is(Keyword::COLUMNS)?;
1154
1155                    columns = self
1156                        .maybe_parse(|parser| {
1157                            parser.parse_comma_separated(|p| p.parse_identifier())
1158                        })?
1159                        .unwrap_or_default();
1160                    for_columns = true
1161                }
1162                Some(Keyword::CACHE) => {
1163                    self.expect_keyword_is(Keyword::METADATA)?;
1164                    cache_metadata = true
1165                }
1166                Some(Keyword::COMPUTE) => {
1167                    self.expect_keyword_is(Keyword::STATISTICS)?;
1168                    compute_statistics = true
1169                }
1170                _ => break,
1171            }
1172        }
1173
1174        Ok(Analyze {
1175            has_table_keyword,
1176            table_name,
1177            for_columns,
1178            columns,
1179            partitions,
1180            cache_metadata,
1181            noscan,
1182            compute_statistics,
1183        }
1184        .into())
1185    }
1186
1187    /// Parse a new expression including wildcard & qualified wildcard.
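    /// For example: `*`, `mytable.*`, or any other [`Expr`].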
1188    pub fn parse_wildcard_expr(&mut self) -> Result<Expr, ParserError> {
1189        let index = self.index;
1190
1191        let next_token = self.next_token();
1192        match next_token.token {
1193            t @ (Token::Word(_) | Token::SingleQuotedString(_)) => {
1194                if self.peek_token().token == Token::Period {
1195                    let mut id_parts: Vec<Ident> = vec![match t {
1196                        Token::Word(w) => w.into_ident(next_token.span),
1197                        Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
1198                        _ => {
1199                            return Err(ParserError::ParserError(
1200                                "Internal parser error: unexpected token type".to_string(),
1201                            ))
1202                        }
1203                    }];
1204
1205                    while self.consume_token(&Token::Period) {
1206                        let next_token = self.next_token();
1207                        match next_token.token {
1208                            Token::Word(w) => id_parts.push(w.into_ident(next_token.span)),
1209                            Token::SingleQuotedString(s) => {
1210                                // SQLite has single-quoted identifiers
1211                                id_parts.push(Ident::with_quote('\'', s))
1212                            }
1213                            Token::Mul => {
1214                                return Ok(Expr::QualifiedWildcard(
1215                                    ObjectName::from(id_parts),
1216                                    AttachedToken(next_token),
1217                                ));
1218                            }
1219                            _ => {
1220                                return self
1221                                    .expected("an identifier or a '*' after '.'", next_token);
1222                            }
1223                        }
1224                    }
1225                }
1226            }
1227            Token::Mul => {
1228                return Ok(Expr::Wildcard(AttachedToken(next_token)));
1229            }
1230            _ => (),
1231        };
1232
1233        self.index = index;
1234        self.parse_expr()
1235    }
1236
1237    /// Parse a new expression.
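    ///
    /// A minimal illustrative example:
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let expr = Parser::new(&dialect)
    ///   .try_with_sql("1 + 2")?
    ///   .parse_expr()?;
    /// # Ok(())
    /// # }
    /// ```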
1238    pub fn parse_expr(&mut self) -> Result<Expr, ParserError> {
1239        self.parse_subexpr(self.dialect.prec_unknown())
1240    }
1241
1242    pub fn parse_expr_with_alias_and_order_by(
1243        &mut self,
1244    ) -> Result<ExprWithAliasAndOrderBy, ParserError> {
1245        let expr = self.parse_expr()?;
1246
1247        fn validator(explicit: bool, kw: &Keyword, _parser: &mut Parser) -> bool {
1248            explicit || !&[Keyword::ASC, Keyword::DESC, Keyword::GROUP].contains(kw)
1249        }
1250        let alias = self.parse_optional_alias_inner(None, validator)?;
1251        let order_by = OrderByOptions {
1252            asc: self.parse_asc_desc(),
1253            nulls_first: None,
1254        };
1255        Ok(ExprWithAliasAndOrderBy {
1256            expr: ExprWithAlias { expr, alias },
1257            order_by,
1258        })
1259    }
1260
1261    /// Parse tokens until the precedence changes.
1262    pub fn parse_subexpr(&mut self, precedence: u8) -> Result<Expr, ParserError> {
1263        let _guard = self.recursion_counter.try_decrease()?;
1264        debug!("parsing expr");
1265        let mut expr = self.parse_prefix()?;
1266
1267        expr = self.parse_compound_expr(expr, vec![])?;
1268
1269        debug!("prefix: {expr:?}");
1270        loop {
1271            let next_precedence = self.get_next_precedence()?;
1272            debug!("next precedence: {next_precedence:?}");
1273
1274            if precedence >= next_precedence {
1275                break;
1276            }
1277
1278            // The period operator is handled exclusively by the
1279            // compound field access parsing.
1280            if Token::Period == self.peek_token_ref().token {
1281                break;
1282            }
1283
1284            expr = self.parse_infix(expr, next_precedence)?;
1285        }
1286        Ok(expr)
1287    }
1288
1289    pub fn parse_assert(&mut self) -> Result<Statement, ParserError> {
1290        let condition = self.parse_expr()?;
1291        let message = if self.parse_keyword(Keyword::AS) {
1292            Some(self.parse_expr()?)
1293        } else {
1294            None
1295        };
1296
1297        Ok(Statement::Assert { condition, message })
1298    }
1299
1300    pub fn parse_savepoint(&mut self) -> Result<Statement, ParserError> {
1301        let name = self.parse_identifier()?;
1302        Ok(Statement::Savepoint { name })
1303    }
1304
1305    pub fn parse_release(&mut self) -> Result<Statement, ParserError> {
1306        let _ = self.parse_keyword(Keyword::SAVEPOINT);
1307        let name = self.parse_identifier()?;
1308
1309        Ok(Statement::ReleaseSavepoint { name })
1310    }
1311
1312    pub fn parse_listen(&mut self) -> Result<Statement, ParserError> {
1313        let channel = self.parse_identifier()?;
1314        Ok(Statement::LISTEN { channel })
1315    }
1316
1317    pub fn parse_unlisten(&mut self) -> Result<Statement, ParserError> {
1318        let channel = if self.consume_token(&Token::Mul) {
1319            Ident::new(Expr::Wildcard(AttachedToken::empty()).to_string())
1320        } else {
1321            match self.parse_identifier() {
1322                Ok(expr) => expr,
1323                _ => {
1324                    self.prev_token();
1325                    return self.expected("wildcard or identifier", self.peek_token());
1326                }
1327            }
1328        };
1329        Ok(Statement::UNLISTEN { channel })
1330    }
1331
1332    pub fn parse_notify(&mut self) -> Result<Statement, ParserError> {
1333        let channel = self.parse_identifier()?;
1334        let payload = if self.consume_token(&Token::Comma) {
1335            Some(self.parse_literal_string()?)
1336        } else {
1337            None
1338        };
1339        Ok(Statement::NOTIFY { channel, payload })
1340    }
1341
1342    /// Parses a `RENAME TABLE` statement. See [Statement::RenameTable]
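    ///
    /// An illustrative, MySQL-style sketch (table names are placeholders):
    /// ```sql
    /// RENAME TABLE old_name TO new_name;
    /// ```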
1343    pub fn parse_rename(&mut self) -> Result<Statement, ParserError> {
1344        if self.peek_keyword(Keyword::TABLE) {
1345            self.expect_keyword(Keyword::TABLE)?;
1346            let rename_tables = self.parse_comma_separated(|parser| {
1347                let old_name = parser.parse_object_name(false)?;
1348                parser.expect_keyword(Keyword::TO)?;
1349                let new_name = parser.parse_object_name(false)?;
1350
1351                Ok(RenameTable { old_name, new_name })
1352            })?;
1353            Ok(Statement::RenameTable(rename_tables))
1354        } else {
1355            self.expected("KEYWORD `TABLE` after RENAME", self.peek_token())
1356        }
1357    }
1358
1359    /// Tries to parse an expression by matching the specified word to known keywords that have a special meaning in the dialect.
    /// Returns `None` if no match is found.
1361    fn parse_expr_prefix_by_reserved_word(
1362        &mut self,
1363        w: &Word,
1364        w_span: Span,
1365    ) -> Result<Option<Expr>, ParserError> {
1366        match w.keyword {
1367            Keyword::TRUE | Keyword::FALSE if self.dialect.supports_boolean_literals() => {
1368                self.prev_token();
1369                Ok(Some(Expr::Value(self.parse_value()?)))
1370            }
1371            Keyword::NULL => {
1372                self.prev_token();
1373                Ok(Some(Expr::Value(self.parse_value()?)))
1374            }
1375            Keyword::CURRENT_CATALOG
1376            | Keyword::CURRENT_USER
1377            | Keyword::SESSION_USER
1378            | Keyword::USER
1379            if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
1380                {
1381                    Ok(Some(Expr::Function(Function {
1382                        name: ObjectName::from(vec![w.clone().into_ident(w_span)]),
1383                        uses_odbc_syntax: false,
1384                        parameters: FunctionArguments::None,
1385                        args: FunctionArguments::None,
1386                        null_treatment: None,
1387                        filter: None,
1388                        over: None,
1389                        within_group: vec![],
1390                    })))
1391                }
1392            Keyword::CURRENT_TIMESTAMP
1393            | Keyword::CURRENT_TIME
1394            | Keyword::CURRENT_DATE
1395            | Keyword::LOCALTIME
1396            | Keyword::LOCALTIMESTAMP => {
1397                Ok(Some(self.parse_time_functions(ObjectName::from(vec![w.clone().into_ident(w_span)]))?))
1398            }
1399            Keyword::CASE => Ok(Some(self.parse_case_expr()?)),
1400            Keyword::CONVERT => Ok(Some(self.parse_convert_expr(false)?)),
1401            Keyword::TRY_CONVERT if self.dialect.supports_try_convert() => Ok(Some(self.parse_convert_expr(true)?)),
1402            Keyword::CAST => Ok(Some(self.parse_cast_expr(CastKind::Cast)?)),
1403            Keyword::TRY_CAST => Ok(Some(self.parse_cast_expr(CastKind::TryCast)?)),
1404            Keyword::SAFE_CAST => Ok(Some(self.parse_cast_expr(CastKind::SafeCast)?)),
1405            Keyword::EXISTS
            // Databricks has a function named `exists`, so only parse `EXISTS` as an
            // EXISTS-subquery expression when it is clearly followed by one.
1407            if !dialect_of!(self is DatabricksDialect)
1408                || matches!(
1409                        self.peek_nth_token_ref(1).token,
1410                        Token::Word(Word {
1411                            keyword: Keyword::SELECT | Keyword::WITH,
1412                            ..
1413                        })
1414                    ) =>
1415                {
1416                    Ok(Some(self.parse_exists_expr(false)?))
1417                }
1418            Keyword::EXTRACT => Ok(Some(self.parse_extract_expr()?)),
1419            Keyword::CEIL => Ok(Some(self.parse_ceil_floor_expr(true)?)),
1420            Keyword::FLOOR => Ok(Some(self.parse_ceil_floor_expr(false)?)),
1421            Keyword::POSITION if self.peek_token_ref().token == Token::LParen => {
1422                Ok(Some(self.parse_position_expr(w.clone().into_ident(w_span))?))
1423            }
1424            Keyword::SUBSTR | Keyword::SUBSTRING => {
1425                self.prev_token();
1426                Ok(Some(self.parse_substring()?))
1427            }
1428            Keyword::OVERLAY => Ok(Some(self.parse_overlay_expr()?)),
1429            Keyword::TRIM => Ok(Some(self.parse_trim_expr()?)),
1430            Keyword::INTERVAL => Ok(Some(self.parse_interval()?)),
1431            // Treat ARRAY[1,2,3] as an array literal [1,2,3]; otherwise try `ARRAY(...)` as a subquery or a function call
1432            Keyword::ARRAY if *self.peek_token_ref() == Token::LBracket => {
1433                self.expect_token(&Token::LBracket)?;
1434                Ok(Some(self.parse_array_expr(true)?))
1435            }
1436            Keyword::ARRAY
1437            if self.peek_token() == Token::LParen
1438                && !dialect_of!(self is ClickHouseDialect | DatabricksDialect) =>
1439                {
1440                    self.expect_token(&Token::LParen)?;
1441                    let query = self.parse_query()?;
1442                    self.expect_token(&Token::RParen)?;
1443                    Ok(Some(Expr::Function(Function {
1444                        name: ObjectName::from(vec![w.clone().into_ident(w_span)]),
1445                        uses_odbc_syntax: false,
1446                        parameters: FunctionArguments::None,
1447                        args: FunctionArguments::Subquery(query),
1448                        filter: None,
1449                        null_treatment: None,
1450                        over: None,
1451                        within_group: vec![],
1452                    })))
1453                }
1454            Keyword::NOT => Ok(Some(self.parse_not()?)),
1455            Keyword::MATCH if self.dialect.supports_match_against() => {
1456                Ok(Some(self.parse_match_against()?))
1457            }
1458            Keyword::STRUCT if self.dialect.supports_struct_literal() => {
1459                let struct_expr = self.parse_struct_literal()?;
1460                Ok(Some(struct_expr))
1461            }
1462            Keyword::PRIOR if matches!(self.state, ParserState::ConnectBy) => {
1463                let expr = self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?;
1464                Ok(Some(Expr::Prior(Box::new(expr))))
1465            }
1466            Keyword::MAP if *self.peek_token_ref() == Token::LBrace && self.dialect.support_map_literal_syntax() => {
1467                Ok(Some(self.parse_duckdb_map_literal()?))
1468            }
1469            _ if self.dialect.supports_geometric_types() => match w.keyword {
1470                Keyword::CIRCLE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Circle)?)),
1471                Keyword::BOX => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricBox)?)),
1472                Keyword::PATH => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricPath)?)),
1473                Keyword::LINE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Line)?)),
1474                Keyword::LSEG => Ok(Some(self.parse_geometric_type(GeometricTypeKind::LineSegment)?)),
1475                Keyword::POINT => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Point)?)),
1476                Keyword::POLYGON => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Polygon)?)),
1477                _ => Ok(None),
1478            },
1479            _ => Ok(None),
1480        }
1481    }
1482
1483    /// Tries to parse an expression by a word that is not known to have a special meaning in the dialect.
1484    fn parse_expr_prefix_by_unreserved_word(
1485        &mut self,
1486        w: &Word,
1487        w_span: Span,
1488    ) -> Result<Expr, ParserError> {
1489        match self.peek_token().token {
1490            Token::LParen if !self.peek_outer_join_operator() => {
1491                let id_parts = vec![w.clone().into_ident(w_span)];
1492                self.parse_function(ObjectName::from(id_parts))
1493            }
1494            // string introducer https://dev.mysql.com/doc/refman/8.0/en/charset-introducer.html
1495            Token::SingleQuotedString(_)
1496            | Token::DoubleQuotedString(_)
1497            | Token::HexStringLiteral(_)
1498                if w.value.starts_with('_') =>
1499            {
1500                Ok(Expr::Prefixed {
1501                    prefix: w.clone().into_ident(w_span),
1502                    value: self.parse_introduced_string_expr()?.into(),
1503                })
1504            }
1516            Token::Arrow if self.dialect.supports_lambda_functions() => {
1517                self.expect_token(&Token::Arrow)?;
1518                Ok(Expr::Lambda(LambdaFunction {
1519                    params: OneOrManyWithParens::One(w.clone().into_ident(w_span)),
1520                    body: Box::new(self.parse_expr()?),
1521                }))
1522            }
1523            _ => Ok(Expr::Identifier(w.clone().into_ident(w_span))),
1524        }
1525    }
1526
1527    /// Parse an expression prefix.
1528    pub fn parse_prefix(&mut self) -> Result<Expr, ParserError> {
1529        // allow the dialect to override prefix parsing
1530        if let Some(prefix) = self.dialect.parse_prefix(self) {
1531            return prefix;
1532        }
1533
1534        // PostgreSQL allows any string literal to be preceded by a type name, indicating that the
1535        // string literal represents a literal of that type. Some examples:
1536        //
1537        //      DATE '2020-05-20'
1538        //      TIMESTAMP WITH TIME ZONE '2020-05-20 7:43:54'
1539        //      BOOL 'true'
1540        //
1541        // The first two are standard SQL, while the last is a PostgreSQL extension. Complicating
1542        // matters is the fact that INTERVAL string literals may optionally be followed by special
1543        // keywords, e.g.:
1544        //
1545        //      INTERVAL '7' DAY
1546        //
1547        // Note also that naively `SELECT date` looks like a syntax error because the `date` type
1548        // name is not followed by a string literal, but in fact in PostgreSQL it is a valid
1549        // expression that should parse as the column name "date".
1550        let loc = self.peek_token_ref().span.start;
1551        let opt_expr = self.maybe_parse(|parser| {
1552            match parser.parse_data_type()? {
1553                DataType::Interval { .. } => parser.parse_interval(),
1554                // PostgreSQL allows almost any identifier to be used as a custom data type name,
1555                // and we support that in `parse_data_type()`. But unlike Postgres we don't
1556                // have a list of globally reserved keywords (since they vary across dialects),
1557                // so given `NOT 'a' LIKE 'b'`, we'd accept `NOT` as a possible custom data type
1558                // name, resulting in `NOT 'a'` being recognized as a `TypedString` instead of
1559                // a unary negation `NOT ('a' LIKE 'b')`. To solve this, we don't accept the
1560                // `type 'string'` syntax for the custom data types at all.
1561                DataType::Custom(..) => parser_err!("dummy", loc),
1562                data_type => Ok(Expr::TypedString(TypedString {
1563                    data_type,
1564                    value: parser.parse_value()?,
1565                    uses_odbc_syntax: false,
1566                })),
1567            }
1568        })?;
1569
1570        if let Some(expr) = opt_expr {
1571            return Ok(expr);
1572        }
1573
1574        // Cache some dialect properties to avoid lifetime issues with the
1575        // next_token reference.
1576
1577        let dialect = self.dialect;
1578
1579        self.advance_token();
1580        let next_token_index = self.get_current_index();
1581        let next_token = self.get_current_token();
1582        let span = next_token.span;
1583        let expr = match &next_token.token {
1584            Token::Word(w) => {
1585                // The word we consumed may fall into one of two cases: it has a special meaning, or not.
1586                // For example, in Snowflake, the word `interval` may have two meanings depending on the context:
1587                // `SELECT CURRENT_DATE() + INTERVAL '1 DAY', MAX(interval) FROM tbl;`
1588                //                          ^^^^^^^^^^^^^^^^      ^^^^^^^^
1589                //                         interval expression   identifier
1590                //
1591                // We first try to parse the word and following tokens as a special expression, and if that fails,
1592                // we roll back and try to parse it as an identifier.
1593                let w = w.clone();
1594                match self.try_parse(|parser| parser.parse_expr_prefix_by_reserved_word(&w, span)) {
1595                    // This word indicated an expression prefix and parsing was successful
1596                    Ok(Some(expr)) => Ok(expr),
1597
1598                    // No expression prefix associated with this word
1599                    Ok(None) => Ok(self.parse_expr_prefix_by_unreserved_word(&w, span)?),
1600
1601                    // If parsing of the word as a special expression failed, we are facing two options:
1602                    // 1. The statement is malformed, e.g. `SELECT INTERVAL '1 DAI` (`DAI` instead of `DAY`)
1603                    // 2. The word is used as an identifier, e.g. `SELECT MAX(interval) FROM tbl`
1604                    // We first try to parse the word as an identifier and if that fails
1605                    // we roll back and return the parsing error we got from trying to parse a
1606                    // special expression (to maintain backwards compatibility of parsing errors).
1607                    Err(e) => {
1608                        if !self.dialect.is_reserved_for_identifier(w.keyword) {
1609                            if let Ok(Some(expr)) = self.maybe_parse(|parser| {
1610                                parser.parse_expr_prefix_by_unreserved_word(&w, span)
1611                            }) {
1612                                return Ok(expr);
1613                            }
1614                        }
1615                        return Err(e);
1616                    }
1617                }
1618            } // End of Token::Word
1619            // array `[1, 2, 3]`
1620            Token::LBracket => self.parse_array_expr(false),
1621            tok @ Token::Minus | tok @ Token::Plus => {
1622                let op = if *tok == Token::Plus {
1623                    UnaryOperator::Plus
1624                } else {
1625                    UnaryOperator::Minus
1626                };
1627                Ok(Expr::UnaryOp {
1628                    op,
1629                    expr: Box::new(
1630                        self.parse_subexpr(self.dialect.prec_value(Precedence::MulDivModOp))?,
1631                    ),
1632                })
1633            }
1634            Token::ExclamationMark if dialect.supports_bang_not_operator() => Ok(Expr::UnaryOp {
1635                op: UnaryOperator::BangNot,
1636                expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
1637            }),
1638            tok @ Token::DoubleExclamationMark
1639            | tok @ Token::PGSquareRoot
1640            | tok @ Token::PGCubeRoot
1641            | tok @ Token::AtSign
1642                if dialect_is!(dialect is PostgreSqlDialect) =>
1643            {
1644                let op = match tok {
1645                    Token::DoubleExclamationMark => UnaryOperator::PGPrefixFactorial,
1646                    Token::PGSquareRoot => UnaryOperator::PGSquareRoot,
1647                    Token::PGCubeRoot => UnaryOperator::PGCubeRoot,
1648                    Token::AtSign => UnaryOperator::PGAbs,
1649                    _ => {
1650                        return Err(ParserError::ParserError(
1651                            "Internal parser error: unexpected unary operator token".to_string(),
1652                        ))
1653                    }
1654                };
1655                Ok(Expr::UnaryOp {
1656                    op,
1657                    expr: Box::new(
1658                        self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1659                    ),
1660                })
1661            }
1662            Token::Tilde => Ok(Expr::UnaryOp {
1663                op: UnaryOperator::BitwiseNot,
1664                expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?),
1665            }),
1666            tok @ Token::Sharp
1667            | tok @ Token::AtDashAt
1668            | tok @ Token::AtAt
1669            | tok @ Token::QuestionMarkDash
1670            | tok @ Token::QuestionPipe
1671                if self.dialect.supports_geometric_types() =>
1672            {
1673                let op = match tok {
1674                    Token::Sharp => UnaryOperator::Hash,
1675                    Token::AtDashAt => UnaryOperator::AtDashAt,
1676                    Token::AtAt => UnaryOperator::DoubleAt,
1677                    Token::QuestionMarkDash => UnaryOperator::QuestionDash,
1678                    Token::QuestionPipe => UnaryOperator::QuestionPipe,
1679                    _ => {
1680                        return Err(ParserError::ParserError(format!(
1681                            "Unexpected token in unary operator parsing: {tok:?}"
1682                        )))
1683                    }
1684                };
1685                Ok(Expr::UnaryOp {
1686                    op,
1687                    expr: Box::new(
1688                        self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1689                    ),
1690                })
1691            }
1692            Token::EscapedStringLiteral(_) if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) =>
1693            {
1694                self.prev_token();
1695                Ok(Expr::Value(self.parse_value()?))
1696            }
1697            Token::UnicodeStringLiteral(_) => {
1698                self.prev_token();
1699                Ok(Expr::Value(self.parse_value()?))
1700            }
1701            Token::Number(_, _)
1702            | Token::SingleQuotedString(_)
1703            | Token::DoubleQuotedString(_)
1704            | Token::TripleSingleQuotedString(_)
1705            | Token::TripleDoubleQuotedString(_)
1706            | Token::DollarQuotedString(_)
1707            | Token::SingleQuotedByteStringLiteral(_)
1708            | Token::DoubleQuotedByteStringLiteral(_)
1709            | Token::TripleSingleQuotedByteStringLiteral(_)
1710            | Token::TripleDoubleQuotedByteStringLiteral(_)
1711            | Token::SingleQuotedRawStringLiteral(_)
1712            | Token::DoubleQuotedRawStringLiteral(_)
1713            | Token::TripleSingleQuotedRawStringLiteral(_)
1714            | Token::TripleDoubleQuotedRawStringLiteral(_)
1715            | Token::NationalStringLiteral(_)
1716            | Token::HexStringLiteral(_) => {
1717                self.prev_token();
1718                Ok(Expr::Value(self.parse_value()?))
1719            }
1720            Token::LParen => {
1721                let expr =
1722                    if let Some(expr) = self.try_parse_expr_sub_query()? {
1723                        expr
1724                    } else if let Some(lambda) = self.try_parse_lambda()? {
1725                        return Ok(lambda);
1726                    } else {
1727                        let exprs = self.parse_comma_separated(Parser::parse_expr)?;
1728                        match exprs.len() {
1729                            0 => return Err(ParserError::ParserError(
1730                                "Internal parser error: parse_comma_separated returned empty list"
1731                                    .to_string(),
1732                            )),
1733                            1 => Expr::Nested(Box::new(exprs.into_iter().next().unwrap())),
1734                            _ => Expr::Tuple(exprs),
1735                        }
1736                    };
1737                self.expect_token(&Token::RParen)?;
1738                Ok(expr)
1739            }
1740            Token::Placeholder(_) | Token::Colon | Token::AtSign => {
1741                self.prev_token();
1742                Ok(Expr::Value(self.parse_value()?))
1743            }
1744            Token::LBrace => {
1745                self.prev_token();
1746                self.parse_lbrace_expr()
1747            }
1748            _ => self.expected_at("an expression", next_token_index),
1749        }?;
1750
1751        if !self.in_column_definition_state() && self.parse_keyword(Keyword::COLLATE) {
1752            Ok(Expr::Collate {
1753                expr: Box::new(expr),
1754                collation: self.parse_object_name(false)?,
1755            })
1756        } else {
1757            Ok(expr)
1758        }
1759    }
1760
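    /// Parses the quoted value that follows a geometric type keyword such as
    /// `POINT` or `CIRCLE` into an [Expr::TypedString]. The string itself is
    /// kept uninterpreted; an illustrative PostgreSQL-style literal:
    ///
    /// ```sql
    /// POINT '(1, 2)'
    /// ```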
1761    fn parse_geometric_type(&mut self, kind: GeometricTypeKind) -> Result<Expr, ParserError> {
1762        Ok(Expr::TypedString(TypedString {
1763            data_type: DataType::GeometricType(kind),
1764            value: self.parse_value()?,
1765            uses_odbc_syntax: false,
1766        }))
1767    }
1768
1769    /// Try to parse an [Expr::CompoundFieldAccess] like `a.b.c` or `a.b[1].c`.
1770    /// If all the fields are `Expr::Identifier`s, return an [Expr::CompoundIdentifier] instead.
1771    /// If only the root exists, return the root.
1772    /// Parses compound expressions which may be delimited by period
1773    /// or bracket notation.
1774    /// For example: `a.b.c`, `a.b[1]`.
1775    pub fn parse_compound_expr(
1776        &mut self,
1777        root: Expr,
1778        mut chain: Vec<AccessExpr>,
1779    ) -> Result<Expr, ParserError> {
1780        let mut ending_wildcard: Option<TokenWithSpan> = None;
1781        loop {
1782            if self.consume_token(&Token::Period) {
1783                let next_token = self.peek_token_ref();
1784                match &next_token.token {
1785                    Token::Mul => {
1786                        // Postgres explicitly allows funcnm(tablenm.*); for example,
1787                        // calls like array_agg(tablenm.*) take this code path.
1788                        if dialect_of!(self is PostgreSqlDialect) {
1789                            ending_wildcard = Some(self.next_token());
1790                        } else {
1791                            // Put back the consumed `.` tokens before exiting.
1792                            // If this expression is being parsed in the
1793                            // context of a projection, then the `.*` could imply
1794                            // a wildcard expansion. For example:
1795                            // `SELECT STRUCT('foo').* FROM T`
1796                            self.prev_token(); // .
1797                        }
1798
1799                        break;
1800                    }
1801                    Token::SingleQuotedString(s) => {
1802                        let expr =
1803                            Expr::Identifier(Ident::with_quote_and_span('\'', next_token.span, s));
1804                        chain.push(AccessExpr::Dot(expr));
1805                        self.advance_token(); // The consumed string
1806                    }
1807                    // Fallback to parsing an arbitrary expression.
1808                    _ => match self.parse_subexpr(self.dialect.prec_value(Precedence::Period))? {
1809                        // If we get back a compound field access or identifier,
1810                        // we flatten the nested expression.
1811                        // For example if the current root is `foo`
1812                        // and we get back a compound identifier expression `bar.baz`
1813                        // The full expression should be `foo.bar.baz` (i.e.
1814                        // a root with an access chain with 2 entries) and not
1815                        // `foo.(bar.baz)` (i.e. a root with an access chain with
1816                        // 1 entry).
1817                        Expr::CompoundFieldAccess { root, access_chain } => {
1818                            chain.push(AccessExpr::Dot(*root));
1819                            chain.extend(access_chain);
1820                        }
1821                        Expr::CompoundIdentifier(parts) => chain
1822                            .extend(parts.into_iter().map(Expr::Identifier).map(AccessExpr::Dot)),
1823                        expr => {
1824                            chain.push(AccessExpr::Dot(expr));
1825                        }
1826                    },
1827                }
1828            } else if !self.dialect.supports_partiql()
1829                && self.peek_token_ref().token == Token::LBracket
1830            {
1831                self.parse_multi_dim_subscript(&mut chain)?;
1832            } else {
1833                break;
1834            }
1835        }
1836
1837        let tok_index = self.get_current_index();
1838        if let Some(wildcard_token) = ending_wildcard {
1839            if !Self::is_all_ident(&root, &chain) {
1840                return self.expected("an identifier or a '*' after '.'", self.peek_token());
1841            };
1842            Ok(Expr::QualifiedWildcard(
1843                ObjectName::from(Self::exprs_to_idents(root, chain)?),
1844                AttachedToken(wildcard_token),
1845            ))
1846        } else if self.maybe_parse_outer_join_operator() {
1847            if !Self::is_all_ident(&root, &chain) {
1848                return self.expected_at("column identifier before (+)", tok_index);
1849            };
1850            let expr = if chain.is_empty() {
1851                root
1852            } else {
1853                Expr::CompoundIdentifier(Self::exprs_to_idents(root, chain)?)
1854            };
1855            Ok(Expr::OuterJoin(expr.into()))
1856        } else {
1857            Self::build_compound_expr(root, chain)
1858        }
1859    }
1860
1861    /// Combines a root expression and access chain to form
1862    /// a compound expression, which may be an [Expr::CompoundFieldAccess]
1863    /// or another special-cased expression such as [Expr::CompoundIdentifier]
1864    /// or [Expr::OuterJoin].
1865    fn build_compound_expr(
1866        root: Expr,
1867        mut access_chain: Vec<AccessExpr>,
1868    ) -> Result<Expr, ParserError> {
1869        if access_chain.is_empty() {
1870            return Ok(root);
1871        }
1872
1873        if Self::is_all_ident(&root, &access_chain) {
1874            return Ok(Expr::CompoundIdentifier(Self::exprs_to_idents(
1875                root,
1876                access_chain,
1877            )?));
1878        }
1879
1880        // Flatten qualified function calls.
1881        // For example, the expression `a.b.c.foo(1,2,3)` should
1882        // represent a function called `a.b.c.foo`, rather than
1883        // a composite expression.
1884        if matches!(root, Expr::Identifier(_))
1885            && matches!(
1886                access_chain.last(),
1887                Some(AccessExpr::Dot(Expr::Function(_)))
1888            )
1889            && access_chain
1890                .iter()
1891                .rev()
1892                .skip(1) // All except the Function
1893                .all(|access| matches!(access, AccessExpr::Dot(Expr::Identifier(_))))
1894        {
1895            let Some(AccessExpr::Dot(Expr::Function(mut func))) = access_chain.pop() else {
1896                return parser_err!("expected function expression", root.span().start);
1897            };
1898
1899            let compound_func_name = [root]
1900                .into_iter()
1901                .chain(access_chain.into_iter().flat_map(|access| match access {
1902                    AccessExpr::Dot(expr) => Some(expr),
1903                    _ => None,
1904                }))
1905                .flat_map(|expr| match expr {
1906                    Expr::Identifier(ident) => Some(ident),
1907                    _ => None,
1908                })
1909                .map(ObjectNamePart::Identifier)
1910                .chain(func.name.0)
1911                .collect::<Vec<_>>();
1912            func.name = ObjectName(compound_func_name);
1913
1914            return Ok(Expr::Function(func));
1915        }
1916
1917        // Flatten qualified outer join expressions.
1918        // For example, the expression `T.foo(+)` should
1919        // represent an outer join on the column name `T.foo`
1920        // rather than a composite expression.
1921        if access_chain.len() == 1
1922            && matches!(
1923                access_chain.last(),
1924                Some(AccessExpr::Dot(Expr::OuterJoin(_)))
1925            )
1926        {
1927            let Some(AccessExpr::Dot(Expr::OuterJoin(inner_expr))) = access_chain.pop() else {
1928                return parser_err!("expected (+) expression", root.span().start);
1929            };
1930
1931            if !Self::is_all_ident(&root, &[]) {
1932                return parser_err!("column identifier before (+)", root.span().start);
1933            };
1934
1935            let token_start = root.span().start;
1936            let mut idents = Self::exprs_to_idents(root, vec![])?;
1937            match *inner_expr {
1938                Expr::CompoundIdentifier(suffix) => idents.extend(suffix),
1939                Expr::Identifier(suffix) => idents.push(suffix),
1940                _ => {
1941                    return parser_err!("column identifier before (+)", token_start);
1942                }
1943            }
1944
1945            return Ok(Expr::OuterJoin(Expr::CompoundIdentifier(idents).into()));
1946        }
1947
1948        Ok(Expr::CompoundFieldAccess {
1949            root: Box::new(root),
1950            access_chain,
1951        })
1952    }
1953
1954    fn keyword_to_modifier(k: Keyword) -> Option<ContextModifier> {
1955        match k {
1956            Keyword::LOCAL => Some(ContextModifier::Local),
1957            Keyword::GLOBAL => Some(ContextModifier::Global),
1958            Keyword::SESSION => Some(ContextModifier::Session),
1959            _ => None,
1960        }
1961    }
1962
1963    /// Check if the root is an identifier and all fields are identifiers.
1964    fn is_all_ident(root: &Expr, fields: &[AccessExpr]) -> bool {
1965        if !matches!(root, Expr::Identifier(_)) {
1966            return false;
1967        }
1968        fields
1969            .iter()
1970            .all(|x| matches!(x, AccessExpr::Dot(Expr::Identifier(_))))
1971    }
1972
1973    /// Convert a root and a list of fields to a list of identifiers.
1974    fn exprs_to_idents(root: Expr, fields: Vec<AccessExpr>) -> Result<Vec<Ident>, ParserError> {
1975        let mut idents = vec![];
1976        if let Expr::Identifier(root) = root {
1977            idents.push(root);
1978            for x in fields {
1979                if let AccessExpr::Dot(Expr::Identifier(ident)) = x {
1980                    idents.push(ident);
1981                } else {
1982                    return parser_err!(
1983                        format!("Expected identifier, found: {}", x),
1984                        x.span().start
1985                    );
1986                }
1987            }
1988            Ok(idents)
1989        } else {
1990            parser_err!(
1991                format!("Expected identifier, found: {}", root),
1992                root.span().start
1993            )
1994        }
1995    }
1996
1997    /// Returns true if the next tokens indicate the outer join operator `(+)`.
1998    fn peek_outer_join_operator(&mut self) -> bool {
1999        if !self.dialect.supports_outer_join_operator() {
2000            return false;
2001        }
2002
2003        let [maybe_lparen, maybe_plus, maybe_rparen] = self.peek_tokens_ref();
2004        Token::LParen == maybe_lparen.token
2005            && Token::Plus == maybe_plus.token
2006            && Token::RParen == maybe_rparen.token
2007    }
2008
2009    /// If the next tokens indicate the outer join operator `(+)`, consume
2010    /// the tokens and return true.
2011    fn maybe_parse_outer_join_operator(&mut self) -> bool {
2012        self.dialect.supports_outer_join_operator()
2013            && self.consume_tokens(&[Token::LParen, Token::Plus, Token::RParen])
2014    }
2015
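    /// Parses a parenthesized, comma-separated list of utility options, such as
    /// (illustrative) the option list of a PostgreSQL-style `EXPLAIN`:
    ///
    /// ```sql
    /// (ANALYZE, VERBOSE TRUE)
    /// ```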
2016    pub fn parse_utility_options(&mut self) -> Result<Vec<UtilityOption>, ParserError> {
2017        self.expect_token(&Token::LParen)?;
2018        let options = self.parse_comma_separated(Self::parse_utility_option)?;
2019        self.expect_token(&Token::RParen)?;
2020
2021        Ok(options)
2022    }
2023
2024    fn parse_utility_option(&mut self) -> Result<UtilityOption, ParserError> {
2025        let name = self.parse_identifier()?;
2026
2027        let next_token = self.peek_token();
2028        if next_token == Token::Comma || next_token == Token::RParen {
2029            return Ok(UtilityOption { name, arg: None });
2030        }
2031        let arg = self.parse_expr()?;
2032
2033        Ok(UtilityOption {
2034            name,
2035            arg: Some(arg),
2036        })
2037    }
2038
2039    fn try_parse_expr_sub_query(&mut self) -> Result<Option<Expr>, ParserError> {
2040        if !self.peek_sub_query() {
2041            return Ok(None);
2042        }
2043
2044        Ok(Some(Expr::Subquery(self.parse_query()?)))
2045    }
2046
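    /// Tries to parse a parenthesized lambda; the leading `(` is expected to
    /// have already been consumed by the caller. An illustrative form:
    ///
    /// ```sql
    /// (x, y) -> x + y
    /// ```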
2047    fn try_parse_lambda(&mut self) -> Result<Option<Expr>, ParserError> {
2048        if !self.dialect.supports_lambda_functions() {
2049            return Ok(None);
2050        }
2051        self.maybe_parse(|p| {
2052            let params = p.parse_comma_separated(|p| p.parse_identifier())?;
2053            p.expect_token(&Token::RParen)?;
2054            p.expect_token(&Token::Arrow)?;
2055            let expr = p.parse_expr()?;
2056            Ok(Expr::Lambda(LambdaFunction {
2057                params: OneOrManyWithParens::Many(params),
2058                body: Box::new(expr),
2059            }))
2060        })
2061    }
2062
2063    /// Tries to parse the body of an ODBC escape sequence,
2064    /// i.e. without the enclosing braces. Currently implemented:
2065    /// - Scalar function calls
2066    /// - Date, time, and timestamp literals
2067    ///
2068    /// See <https://learn.microsoft.com/en-us/sql/odbc/reference/develop-app/escape-sequences-in-odbc?view=sql-server-2017>
2069    fn maybe_parse_odbc_body(&mut self) -> Result<Option<Expr>, ParserError> {
2070        // Attempt 1: Try to parse it as a function.
2071        if let Some(expr) = self.maybe_parse_odbc_fn_body()? {
2072            return Ok(Some(expr));
2073        }
2074        // Attempt 2: Try to parse it as a Date, Time or Timestamp Literal
2075        self.maybe_parse_odbc_body_datetime()
2076    }
2077
2078    /// Tries to parse the body of an [ODBC Date, Time, and Timestamp Literals] call.
2079    ///
2080    /// ```sql
2081    /// {d '2025-07-17'}
2082    /// {t '14:12:01'}
2083    /// {ts '2025-07-17 14:12:01'}
2084    /// ```
2085    ///
2086    /// [ODBC Date, Time, and Timestamp Literals]:
2087    /// https://learn.microsoft.com/en-us/sql/odbc/reference/develop-app/date-time-and-timestamp-literals?view=sql-server-2017
2088    fn maybe_parse_odbc_body_datetime(&mut self) -> Result<Option<Expr>, ParserError> {
2089        self.maybe_parse(|p| {
2090            let token = p.next_token().clone();
2091            let word_string = token.token.to_string();
2092            let data_type = match word_string.as_str() {
2093                "t" => DataType::Time(None, TimezoneInfo::None),
2094                "d" => DataType::Date,
2095                "ts" => DataType::Timestamp(None, TimezoneInfo::None),
2096                _ => return p.expected("ODBC datetime keyword (t, d, or ts)", token),
2097            };
2098            let value = p.parse_value()?;
2099            Ok(Expr::TypedString(TypedString {
2100                data_type,
2101                value,
2102                uses_odbc_syntax: true,
2103            }))
2104        })
2105    }
2106
2107    /// Tries to parse the body of an [ODBC function] call,
2108    /// i.e. without the enclosing braces.
2109    ///
2110    /// ```sql
2111    /// fn myfunc(1,2,3)
2112    /// ```
2113    ///
2114    /// [ODBC function]: https://learn.microsoft.com/en-us/sql/odbc/reference/develop-app/scalar-function-calls?view=sql-server-2017
2115    fn maybe_parse_odbc_fn_body(&mut self) -> Result<Option<Expr>, ParserError> {
2116        self.maybe_parse(|p| {
2117            p.expect_keyword(Keyword::FN)?;
2118            let fn_name = p.parse_object_name(false)?;
2119            let mut fn_call = p.parse_function_call(fn_name)?;
2120            fn_call.uses_odbc_syntax = true;
2121            Ok(Expr::Function(fn_call))
2122        })
2123    }
2124
2125    pub fn parse_function(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2126        self.parse_function_call(name).map(Expr::Function)
2127    }
2128
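    /// Parses a function call after its name has been consumed, including the
    /// argument list and any trailing clauses. A few illustrative forms
    /// (support varies by dialect):
    ///
    /// ```sql
    /// count(*) FILTER (WHERE x > 0)
    /// percentile_cont(0.5) WITHIN GROUP (ORDER BY x)
    /// row_number() OVER (PARTITION BY a ORDER BY b)
    /// ```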
2129    fn parse_function_call(&mut self, name: ObjectName) -> Result<Function, ParserError> {
2130        self.expect_token(&Token::LParen)?;
2131
2132        // Snowflake permits a subquery to be passed as an argument without
2133        // an enclosing set of parens if it's the only argument.
2134        if dialect_of!(self is SnowflakeDialect) && self.peek_sub_query() {
2135            let subquery = self.parse_query()?;
2136            self.expect_token(&Token::RParen)?;
2137            return Ok(Function {
2138                name,
2139                uses_odbc_syntax: false,
2140                parameters: FunctionArguments::None,
2141                args: FunctionArguments::Subquery(subquery),
2142                filter: None,
2143                null_treatment: None,
2144                over: None,
2145                within_group: vec![],
2146            });
2147        }
2148
2149        let mut args = self.parse_function_argument_list()?;
2150        let mut parameters = FunctionArguments::None;
2151        // ClickHouse aggregations support parametric functions like `HISTOGRAM(0.5, 0.6)(x, y)`
2152        // in which `(0.5, 0.6)` are the parameters to the function.
2153        if dialect_of!(self is ClickHouseDialect | GenericDialect)
2154            && self.consume_token(&Token::LParen)
2155        {
2156            parameters = FunctionArguments::List(args);
2157            args = self.parse_function_argument_list()?;
2158        }
2159
2160        let within_group = if self.parse_keywords(&[Keyword::WITHIN, Keyword::GROUP]) {
2161            self.expect_token(&Token::LParen)?;
2162            self.expect_keywords(&[Keyword::ORDER, Keyword::BY])?;
2163            let order_by = self.parse_comma_separated(Parser::parse_order_by_expr)?;
2164            self.expect_token(&Token::RParen)?;
2165            order_by
2166        } else {
2167            vec![]
2168        };
2169
2170        let filter = if self.dialect.supports_filter_during_aggregation()
2171            && self.parse_keyword(Keyword::FILTER)
2172            && self.consume_token(&Token::LParen)
2173            && self.parse_keyword(Keyword::WHERE)
2174        {
2175            let filter = Some(Box::new(self.parse_expr()?));
2176            self.expect_token(&Token::RParen)?;
2177            filter
2178        } else {
2179            None
2180        };
2181
2182        // Syntax for null treatment shows up either in the args list
2183        // or after the function call, but not both.
2184        let null_treatment = if args
2185            .clauses
2186            .iter()
2187            .all(|clause| !matches!(clause, FunctionArgumentClause::IgnoreOrRespectNulls(_)))
2188        {
2189            self.parse_null_treatment()?
2190        } else {
2191            None
2192        };
2193
2194        let over = if self.parse_keyword(Keyword::OVER) {
2195            if self.consume_token(&Token::LParen) {
2196                let window_spec = self.parse_window_spec()?;
2197                Some(WindowType::WindowSpec(window_spec))
2198            } else {
2199                Some(WindowType::NamedWindow(self.parse_identifier()?))
2200            }
2201        } else {
2202            None
2203        };
2204
2205        Ok(Function {
2206            name,
2207            uses_odbc_syntax: false,
2208            parameters,
2209            args: FunctionArguments::List(args),
2210            null_treatment,
2211            filter,
2212            over,
2213            within_group,
2214        })
2215    }
2216
2217    /// Optionally parses a null treatment clause.
2218    fn parse_null_treatment(&mut self) -> Result<Option<NullTreatment>, ParserError> {
2219        match self.parse_one_of_keywords(&[Keyword::RESPECT, Keyword::IGNORE]) {
2220            Some(keyword) => {
2221                self.expect_keyword_is(Keyword::NULLS)?;
2222
2223                Ok(match keyword {
2224                    Keyword::RESPECT => Some(NullTreatment::RespectNulls),
2225                    Keyword::IGNORE => Some(NullTreatment::IgnoreNulls),
2226                    _ => None,
2227                })
2228            }
2229            None => Ok(None),
2230        }
2231    }
2232
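    /// Parses the optional argument list of a datetime function such as
    /// `CURRENT_TIMESTAMP`; the keyword itself has already been consumed and is
    /// passed in as `name`. Illustrative forms:
    ///
    /// ```sql
    /// CURRENT_TIMESTAMP
    /// CURRENT_TIME(3)
    /// ```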
2233    pub fn parse_time_functions(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2234        let args = if self.consume_token(&Token::LParen) {
2235            FunctionArguments::List(self.parse_function_argument_list()?)
2236        } else {
2237            FunctionArguments::None
2238        };
2239        Ok(Expr::Function(Function {
2240            name,
2241            uses_odbc_syntax: false,
2242            parameters: FunctionArguments::None,
2243            args,
2244            filter: None,
2245            over: None,
2246            null_treatment: None,
2247            within_group: vec![],
2248        }))
2249    }
2250
2251    pub fn parse_window_frame_units(&mut self) -> Result<WindowFrameUnits, ParserError> {
2252        let next_token = self.next_token();
2253        match &next_token.token {
2254            Token::Word(w) => match w.keyword {
2255                Keyword::ROWS => Ok(WindowFrameUnits::Rows),
2256                Keyword::RANGE => Ok(WindowFrameUnits::Range),
2257                Keyword::GROUPS => Ok(WindowFrameUnits::Groups),
2258                _ => self.expected("ROWS, RANGE, GROUPS", next_token)?,
2259            },
2260            _ => self.expected("ROWS, RANGE, GROUPS", next_token),
2261        }
2262    }
2263
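    /// Parses a window frame clause, e.g. (illustrative):
    ///
    /// ```sql
    /// ROWS BETWEEN 1 PRECEDING AND CURRENT ROW
    /// RANGE UNBOUNDED PRECEDING
    /// ```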
2264    pub fn parse_window_frame(&mut self) -> Result<WindowFrame, ParserError> {
2265        let units = self.parse_window_frame_units()?;
2266        let (start_bound, end_bound) = if self.parse_keyword(Keyword::BETWEEN) {
2267            let start_bound = self.parse_window_frame_bound()?;
2268            self.expect_keyword_is(Keyword::AND)?;
2269            let end_bound = Some(self.parse_window_frame_bound()?);
2270            (start_bound, end_bound)
2271        } else {
2272            (self.parse_window_frame_bound()?, None)
2273        };
2274        Ok(WindowFrame {
2275            units,
2276            start_bound,
2277            end_bound,
2278        })
2279    }
2280
2281    /// Parse `CURRENT ROW` or `{ <positive number> | UNBOUNDED } { PRECEDING | FOLLOWING }`
2282    pub fn parse_window_frame_bound(&mut self) -> Result<WindowFrameBound, ParserError> {
2283        if self.parse_keywords(&[Keyword::CURRENT, Keyword::ROW]) {
2284            Ok(WindowFrameBound::CurrentRow)
2285        } else {
2286            let rows = if self.parse_keyword(Keyword::UNBOUNDED) {
2287                None
2288            } else {
2289                Some(Box::new(match self.peek_token().token {
2290                    Token::SingleQuotedString(_) => self.parse_interval()?,
2291                    _ => self.parse_expr()?,
2292                }))
2293            };
2294            if self.parse_keyword(Keyword::PRECEDING) {
2295                Ok(WindowFrameBound::Preceding(rows))
2296            } else if self.parse_keyword(Keyword::FOLLOWING) {
2297                Ok(WindowFrameBound::Following(rows))
2298            } else {
2299                self.expected("PRECEDING or FOLLOWING", self.peek_token())
2300            }
2301        }
2302    }
2303
2304    /// Parses a GROUP BY expression, which can be a grouping set, `ROLLUP`, `CUBE`, or a simple expression.
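    ///
    /// A few illustrative forms (support varies by dialect):
    ///
    /// ```sql
    /// GROUPING SETS ((a), (a, b), ())
    /// ROLLUP (a, b)
    /// CUBE (a, b)
    /// ```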
2305    fn parse_group_by_expr(&mut self) -> Result<Expr, ParserError> {
2306        if self.dialect.supports_group_by_expr() {
2307            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
2308                self.expect_token(&Token::LParen)?;
2309                let result = self.parse_comma_separated(|p| p.parse_tuple(false, true))?;
2310                self.expect_token(&Token::RParen)?;
2311                Ok(Expr::GroupingSets(result))
2312            } else if self.parse_keyword(Keyword::CUBE) {
2313                self.expect_token(&Token::LParen)?;
2314                let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2315                self.expect_token(&Token::RParen)?;
2316                Ok(Expr::Cube(result))
2317            } else if self.parse_keyword(Keyword::ROLLUP) {
2318                self.expect_token(&Token::LParen)?;
2319                let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2320                self.expect_token(&Token::RParen)?;
2321                Ok(Expr::Rollup(result))
2322            } else if self.consume_tokens(&[Token::LParen, Token::RParen]) {
2323                // PostgreSQL allows an empty tuple as a group by expression,
2324                // e.g. `GROUP BY (), name`. See the GROUP BY Clause section in
2325                // [PostgreSQL](https://www.postgresql.org/docs/16/sql-select.html)
2326                Ok(Expr::Tuple(vec![]))
2327            } else {
2328                self.parse_expr()
2329            }
2330        } else {
2331            // TODO parse rollup for other dialects
2332            self.parse_expr()
2333        }
2334    }
2335
2336    /// Parse a tuple with `(` and `)`.
2337    /// If `lift_singleton` is true, a bare expression without parentheses is lifted into a tuple of length 1; otherwise the parentheses are required.
2338    /// If `allow_empty` is true, then an empty tuple is allowed.
2339    fn parse_tuple(
2340        &mut self,
2341        lift_singleton: bool,
2342        allow_empty: bool,
2343    ) -> Result<Vec<Expr>, ParserError> {
2344        if lift_singleton {
2345            if self.consume_token(&Token::LParen) {
2346                let result = if allow_empty && self.consume_token(&Token::RParen) {
2347                    vec![]
2348                } else {
2349                    let result = self.parse_comma_separated(Parser::parse_expr)?;
2350                    self.expect_token(&Token::RParen)?;
2351                    result
2352                };
2353                Ok(result)
2354            } else {
2355                Ok(vec![self.parse_expr()?])
2356            }
2357        } else {
2358            self.expect_token(&Token::LParen)?;
2359            let result = if allow_empty && self.consume_token(&Token::RParen) {
2360                vec![]
2361            } else {
2362                let result = self.parse_comma_separated(Parser::parse_expr)?;
2363                self.expect_token(&Token::RParen)?;
2364                result
2365            };
2366            Ok(result)
2367        }
2368    }
2369
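    /// Parses a `CASE` expression after the `CASE` keyword has been consumed,
    /// e.g. (illustrative):
    ///
    /// ```sql
    /// CASE WHEN x > 0 THEN 'pos' ELSE 'non-pos' END
    /// CASE x WHEN 1 THEN 'one' WHEN 2 THEN 'two' END
    /// ```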
2370    pub fn parse_case_expr(&mut self) -> Result<Expr, ParserError> {
2371        let case_token = AttachedToken(self.get_current_token().clone());
2372        let mut operand = None;
2373        if !self.parse_keyword(Keyword::WHEN) {
2374            operand = Some(Box::new(self.parse_expr()?));
2375            self.expect_keyword_is(Keyword::WHEN)?;
2376        }
2377        let mut conditions = vec![];
2378        loop {
2379            let condition = self.parse_expr()?;
2380            self.expect_keyword_is(Keyword::THEN)?;
2381            let result = self.parse_expr()?;
2382            conditions.push(CaseWhen { condition, result });
2383            if !self.parse_keyword(Keyword::WHEN) {
2384                break;
2385            }
2386        }
2387        let else_result = if self.parse_keyword(Keyword::ELSE) {
2388            Some(Box::new(self.parse_expr()?))
2389        } else {
2390            None
2391        };
2392        let end_token = AttachedToken(self.expect_keyword(Keyword::END)?);
2393        Ok(Expr::Case {
2394            case_token,
2395            end_token,
2396            operand,
2397            conditions,
2398            else_result,
2399        })
2400    }
2401
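    /// Parses an optional `FORMAT` clause inside `CAST`, as supported by
    /// BigQuery-style casts; an optional `AT TIME ZONE` may follow the format
    /// string. An illustrative form:
    ///
    /// ```sql
    /// CAST('11-01-2020' AS DATE FORMAT 'MM-DD-YYYY')
    /// ```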
2402    pub fn parse_optional_cast_format(&mut self) -> Result<Option<CastFormat>, ParserError> {
2403        if self.parse_keyword(Keyword::FORMAT) {
2404            let value = self.parse_value()?.value;
2405            match self.parse_optional_time_zone()? {
2406                Some(tz) => Ok(Some(CastFormat::ValueAtTimeZone(value, tz))),
2407                None => Ok(Some(CastFormat::Value(value))),
2408            }
2409        } else {
2410            Ok(None)
2411        }
2412    }
2413
2414    pub fn parse_optional_time_zone(&mut self) -> Result<Option<Value>, ParserError> {
2415        if self.parse_keywords(&[Keyword::AT, Keyword::TIME, Keyword::ZONE]) {
2416            self.parse_value().map(|v| Some(v.value))
2417        } else {
2418            Ok(None)
2419        }
2420    }
2421
2422    /// Parses an MSSQL-style `CONVERT` function, where the target data type precedes the value.
2423    fn parse_mssql_convert(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2424        self.expect_token(&Token::LParen)?;
2425        let data_type = self.parse_data_type()?;
2426        self.expect_token(&Token::Comma)?;
2427        let expr = self.parse_expr()?;
2428        let styles = if self.consume_token(&Token::Comma) {
2429            self.parse_comma_separated(Parser::parse_expr)?
2430        } else {
2431            Default::default()
2432        };
2433        self.expect_token(&Token::RParen)?;
2434        Ok(Expr::Convert {
2435            is_try,
2436            expr: Box::new(expr),
2437            data_type: Some(data_type),
2438            charset: None,
2439            target_before_value: true,
2440            styles,
2441        })
2442    }
2443
2444    /// Parse a SQL CONVERT function:
2445    ///  - `CONVERT('héhé' USING utf8mb4)` (MySQL)
2446    ///  - `CONVERT('héhé', CHAR CHARACTER SET utf8mb4)` (MySQL)
2447    ///  - `CONVERT(DECIMAL(10, 5), 42)` (MSSQL) - the type comes first
2448    pub fn parse_convert_expr(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2449        if self.dialect.convert_type_before_value() {
2450            return self.parse_mssql_convert(is_try);
2451        }
2452        self.expect_token(&Token::LParen)?;
2453        let expr = self.parse_expr()?;
2454        if self.parse_keyword(Keyword::USING) {
2455            let charset = self.parse_object_name(false)?;
2456            self.expect_token(&Token::RParen)?;
2457            return Ok(Expr::Convert {
2458                is_try,
2459                expr: Box::new(expr),
2460                data_type: None,
2461                charset: Some(charset),
2462                target_before_value: false,
2463                styles: vec![],
2464            });
2465        }
2466        self.expect_token(&Token::Comma)?;
2467        let data_type = self.parse_data_type()?;
2468        let charset = if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
2469            Some(self.parse_object_name(false)?)
2470        } else {
2471            None
2472        };
2473        self.expect_token(&Token::RParen)?;
2474        Ok(Expr::Convert {
2475            is_try,
2476            expr: Box::new(expr),
2477            data_type: Some(data_type),
2478            charset,
2479            target_before_value: false,
2480            styles: vec![],
2481        })
2482    }
2483
2484    /// Parse a SQL CAST function e.g. `CAST(expr AS FLOAT)`
2485    pub fn parse_cast_expr(&mut self, kind: CastKind) -> Result<Expr, ParserError> {
2486        self.expect_token(&Token::LParen)?;
2487        let expr = self.parse_expr()?;
2488        self.expect_keyword_is(Keyword::AS)?;
2489        let data_type = self.parse_data_type()?;
2490        let format = self.parse_optional_cast_format()?;
2491        self.expect_token(&Token::RParen)?;
2492        Ok(Expr::Cast {
2493            kind,
2494            expr: Box::new(expr),
2495            data_type,
2496            format,
2497        })
2498    }
2499
2500    /// Parse a SQL EXISTS expression e.g. `WHERE EXISTS(SELECT ...)`.
2501    pub fn parse_exists_expr(&mut self, negated: bool) -> Result<Expr, ParserError> {
2502        self.expect_token(&Token::LParen)?;
2503        let exists_node = Expr::Exists {
2504            negated,
2505            subquery: self.parse_query()?,
2506        };
2507        self.expect_token(&Token::RParen)?;
2508        Ok(exists_node)
2509    }
2510
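    /// Parses an `EXTRACT` expression after the `EXTRACT` keyword has been
    /// consumed, e.g. (illustrative; the comma form is Snowflake-style):
    ///
    /// ```sql
    /// EXTRACT(YEAR FROM order_date)
    /// EXTRACT(year, order_date)
    /// ```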
2511    pub fn parse_extract_expr(&mut self) -> Result<Expr, ParserError> {
2512        self.expect_token(&Token::LParen)?;
2513        let field = self.parse_date_time_field()?;
2514
2515        let syntax = if self.parse_keyword(Keyword::FROM) {
2516            ExtractSyntax::From
2517        } else if self.consume_token(&Token::Comma)
2518            && dialect_of!(self is SnowflakeDialect | GenericDialect)
2519        {
2520            ExtractSyntax::Comma
2521        } else {
2522            return Err(ParserError::ParserError(
2523                "Expected 'FROM' or ','".to_string(),
2524            ));
2525        };
2526
2527        let expr = self.parse_expr()?;
2528        self.expect_token(&Token::RParen)?;
2529        Ok(Expr::Extract {
2530            field,
2531            expr: Box::new(expr),
2532            syntax,
2533        })
2534    }
2535
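    /// Parses the parenthesized part of a `CEIL`/`FLOOR` expression after the
    /// keyword has been consumed, e.g. (illustrative):
    ///
    /// ```sql
    /// CEIL(ts TO DAY)
    /// FLOOR(1.2345, 2)
    /// ```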
2536    pub fn parse_ceil_floor_expr(&mut self, is_ceil: bool) -> Result<Expr, ParserError> {
2537        self.expect_token(&Token::LParen)?;
2538        let expr = self.parse_expr()?;
2539        // Parse `CEIL/FLOOR(expr)`
2540        let field = if self.parse_keyword(Keyword::TO) {
2541            // Parse `CEIL/FLOOR(expr TO DateTimeField)`
2542            CeilFloorKind::DateTimeField(self.parse_date_time_field()?)
2543        } else if self.consume_token(&Token::Comma) {
2544            // Parse `CEIL/FLOOR(expr, scale)`
2545            match self.parse_value()?.value {
2546                Value::Number(n, s) => CeilFloorKind::Scale(Value::Number(n, s)),
2547                _ => {
2548                    return Err(ParserError::ParserError(
2549                        "Scale field can only be of number type".to_string(),
2550                    ))
2551                }
2552            }
2553        } else {
2554            CeilFloorKind::DateTimeField(DateTimeField::NoDateTime)
2555        };
2556        self.expect_token(&Token::RParen)?;
2557        if is_ceil {
2558            Ok(Expr::Ceil {
2559                expr: Box::new(expr),
2560                field,
2561            })
2562        } else {
2563            Ok(Expr::Floor {
2564                expr: Box::new(expr),
2565                field,
2566            })
2567        }
2568    }
2569
2570    pub fn parse_position_expr(&mut self, ident: Ident) -> Result<Expr, ParserError> {
2571        let between_prec = self.dialect.prec_value(Precedence::Between);
2572        let position_expr = self.maybe_parse(|p| {
2573            // Parse e.g. `POSITION('@' IN field)`
2574            p.expect_token(&Token::LParen)?;
2575
2576            // Parse the subexpr till the IN keyword
2577            let expr = p.parse_subexpr(between_prec)?;
2578            p.expect_keyword_is(Keyword::IN)?;
2579            let from = p.parse_expr()?;
2580            p.expect_token(&Token::RParen)?;
2581            Ok(Expr::Position {
2582                expr: Box::new(expr),
2583                r#in: Box::new(from),
2584            })
2585        })?;
2586        match position_expr {
2587            Some(expr) => Ok(expr),
2588            // Snowflake supports `position` as an ordinary function call
2589            // without the special `IN` syntax.
2590            None => self.parse_function(ObjectName::from(vec![ident])),
2591        }
2592    }
2593
2594    // { SUBSTRING | SUBSTR } (<EXPR> [FROM 1] [FOR 3])
2595    pub fn parse_substring(&mut self) -> Result<Expr, ParserError> {
2596        let shorthand = match self.expect_one_of_keywords(&[Keyword::SUBSTR, Keyword::SUBSTRING])? {
2597            Keyword::SUBSTR => true,
2598            Keyword::SUBSTRING => false,
2599            _ => {
2600                self.prev_token();
2601                return self.expected("SUBSTR or SUBSTRING", self.peek_token());
2602            }
2603        };
2604        self.expect_token(&Token::LParen)?;
2605        let expr = self.parse_expr()?;
2606        let mut from_expr = None;
2607        let special = self.consume_token(&Token::Comma);
2608        if special || self.parse_keyword(Keyword::FROM) {
2609            from_expr = Some(self.parse_expr()?);
2610        }
2611
2612        let mut to_expr = None;
2613        if self.parse_keyword(Keyword::FOR) || self.consume_token(&Token::Comma) {
2614            to_expr = Some(self.parse_expr()?);
2615        }
2616        self.expect_token(&Token::RParen)?;
2617
2618        Ok(Expr::Substring {
2619            expr: Box::new(expr),
2620            substring_from: from_expr.map(Box::new),
2621            substring_for: to_expr.map(Box::new),
2622            special,
2623            shorthand,
2624        })
2625    }
2626
2627    pub fn parse_overlay_expr(&mut self) -> Result<Expr, ParserError> {
2628        // PARSE OVERLAY (EXPR PLACING EXPR FROM 1 [FOR 3])
2629        self.expect_token(&Token::LParen)?;
2630        let expr = self.parse_expr()?;
2631        self.expect_keyword_is(Keyword::PLACING)?;
2632        let what_expr = self.parse_expr()?;
2633        self.expect_keyword_is(Keyword::FROM)?;
2634        let from_expr = self.parse_expr()?;
2635        let mut for_expr = None;
2636        if self.parse_keyword(Keyword::FOR) {
2637            for_expr = Some(self.parse_expr()?);
2638        }
2639        self.expect_token(&Token::RParen)?;
2640
2641        Ok(Expr::Overlay {
2642            expr: Box::new(expr),
2643            overlay_what: Box::new(what_expr),
2644            overlay_from: Box::new(from_expr),
2645            overlay_for: for_expr.map(Box::new),
2646        })
2647    }
2648
2649    /// ```sql
2650    /// TRIM ([BOTH | LEADING | TRAILING] ['text' FROM] 'text')
2651    /// TRIM ('text')
2652    /// TRIM(<expr> [, characters]) -- DuckDB, Snowflake, or BigQuery
2653    /// ```
2654    pub fn parse_trim_expr(&mut self) -> Result<Expr, ParserError> {
2655        self.expect_token(&Token::LParen)?;
2656        let mut trim_where = None;
2657        if let Token::Word(word) = self.peek_token().token {
2658            if [Keyword::BOTH, Keyword::LEADING, Keyword::TRAILING].contains(&word.keyword) {
2659                trim_where = Some(self.parse_trim_where()?);
2660            }
2661        }
2662        let expr = self.parse_expr()?;
2663        if self.parse_keyword(Keyword::FROM) {
2664            let trim_what = Box::new(expr);
2665            let expr = self.parse_expr()?;
2666            self.expect_token(&Token::RParen)?;
2667            Ok(Expr::Trim {
2668                expr: Box::new(expr),
2669                trim_where,
2670                trim_what: Some(trim_what),
2671                trim_characters: None,
2672            })
2673        } else if self.consume_token(&Token::Comma)
2674            && dialect_of!(self is DuckDbDialect | SnowflakeDialect | BigQueryDialect | GenericDialect)
2675        {
2676            let characters = self.parse_comma_separated(Parser::parse_expr)?;
2677            self.expect_token(&Token::RParen)?;
2678            Ok(Expr::Trim {
2679                expr: Box::new(expr),
2680                trim_where: None,
2681                trim_what: None,
2682                trim_characters: Some(characters),
2683            })
2684        } else {
2685            self.expect_token(&Token::RParen)?;
2686            Ok(Expr::Trim {
2687                expr: Box::new(expr),
2688                trim_where,
2689                trim_what: None,
2690                trim_characters: None,
2691            })
2692        }
2693    }
2694
2695    pub fn parse_trim_where(&mut self) -> Result<TrimWhereField, ParserError> {
2696        let next_token = self.next_token();
2697        match &next_token.token {
2698            Token::Word(w) => match w.keyword {
2699                Keyword::BOTH => Ok(TrimWhereField::Both),
2700                Keyword::LEADING => Ok(TrimWhereField::Leading),
2701                Keyword::TRAILING => Ok(TrimWhereField::Trailing),
2702                _ => self.expected("trim_where field", next_token)?,
2703            },
2704            _ => self.expected("trim_where field", next_token),
2705        }
2706    }
2707
2708    /// Parses an array expression `[ex1, ex2, ..]`
    /// If `named` is `true`, the array came from an expression like `ARRAY[ex1, ex2]`.
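    ///
    /// As a rough usage sketch (the `PostgreSqlDialect` and the literal values are
    /// only illustrative assumptions), an `ARRAY[..]` literal parsed via
    /// `Parser::parse_expr` surfaces as an `Expr::Array`:
    ///
    /// ```rust
    /// # use sqlparser::ast::Expr;
    /// # use sqlparser::dialect::PostgreSqlDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = PostgreSqlDialect {};
    /// let expr = Parser::new(&dialect)
    ///     .try_with_sql("ARRAY[1, 2, 3]")
    ///     .unwrap()
    ///     .parse_expr()
    ///     .unwrap();
    /// assert!(matches!(expr, Expr::Array(_)));
    /// ```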
2710    pub fn parse_array_expr(&mut self, named: bool) -> Result<Expr, ParserError> {
2711        let exprs = self.parse_comma_separated0(Parser::parse_expr, Token::RBracket)?;
2712        self.expect_token(&Token::RBracket)?;
2713        Ok(Expr::Array(Array { elem: exprs, named }))
2714    }
2715
2716    pub fn parse_listagg_on_overflow(&mut self) -> Result<Option<ListAggOnOverflow>, ParserError> {
2717        if self.parse_keywords(&[Keyword::ON, Keyword::OVERFLOW]) {
2718            if self.parse_keyword(Keyword::ERROR) {
2719                Ok(Some(ListAggOnOverflow::Error))
2720            } else {
2721                self.expect_keyword_is(Keyword::TRUNCATE)?;
2722                let filler = match self.peek_token().token {
2723                    Token::Word(w)
2724                        if w.keyword == Keyword::WITH || w.keyword == Keyword::WITHOUT =>
2725                    {
2726                        None
2727                    }
2728                    Token::SingleQuotedString(_)
2729                    | Token::EscapedStringLiteral(_)
2730                    | Token::UnicodeStringLiteral(_)
2731                    | Token::NationalStringLiteral(_)
2732                    | Token::HexStringLiteral(_) => Some(Box::new(self.parse_expr()?)),
2733                    _ => self.expected(
2734                        "either filler, WITH, or WITHOUT in LISTAGG",
2735                        self.peek_token(),
2736                    )?,
2737                };
2738                let with_count = self.parse_keyword(Keyword::WITH);
2739                if !with_count && !self.parse_keyword(Keyword::WITHOUT) {
2740                    self.expected("either WITH or WITHOUT in LISTAGG", self.peek_token())?;
2741                }
2742                self.expect_keyword_is(Keyword::COUNT)?;
2743                Ok(Some(ListAggOnOverflow::Truncate { filler, with_count }))
2744            }
2745        } else {
2746            Ok(None)
2747        }
2748    }
2749
2750    // This function parses date/time fields for the EXTRACT function-like
2751    // operator, interval qualifiers, and the ceil/floor operations.
2752    // EXTRACT supports a wider set of date/time fields than interval qualifiers,
2753    // so this function may need to be split in two.
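    /// As a rough usage sketch (the column `order_date` and the `GenericDialect` are
    /// only illustrative assumptions), the date/time field parsed here is the unit
    /// named in an `EXTRACT` call, which surfaces as an `Expr::Extract`:
    ///
    /// ```rust
    /// # use sqlparser::ast::Expr;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let expr = Parser::new(&dialect)
    ///     .try_with_sql("EXTRACT(YEAR FROM order_date)")
    ///     .unwrap()
    ///     .parse_expr()
    ///     .unwrap();
    /// assert!(matches!(expr, Expr::Extract { .. }));
    /// ```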
2754    pub fn parse_date_time_field(&mut self) -> Result<DateTimeField, ParserError> {
2755        let next_token = self.next_token();
2756        match &next_token.token {
2757            Token::Word(w) => match w.keyword {
2758                Keyword::YEAR => Ok(DateTimeField::Year),
2759                Keyword::YEARS => Ok(DateTimeField::Years),
2760                Keyword::MONTH => Ok(DateTimeField::Month),
2761                Keyword::MONTHS => Ok(DateTimeField::Months),
2762                Keyword::WEEK => {
2763                    let week_day = if dialect_of!(self is BigQueryDialect | GenericDialect)
2764                        && self.consume_token(&Token::LParen)
2765                    {
2766                        let week_day = self.parse_identifier()?;
2767                        self.expect_token(&Token::RParen)?;
2768                        Some(week_day)
2769                    } else {
2770                        None
2771                    };
2772                    Ok(DateTimeField::Week(week_day))
2773                }
2774                Keyword::WEEKS => Ok(DateTimeField::Weeks),
2775                Keyword::DAY => Ok(DateTimeField::Day),
2776                Keyword::DAYOFWEEK => Ok(DateTimeField::DayOfWeek),
2777                Keyword::DAYOFYEAR => Ok(DateTimeField::DayOfYear),
2778                Keyword::DAYS => Ok(DateTimeField::Days),
2779                Keyword::DATE => Ok(DateTimeField::Date),
2780                Keyword::DATETIME => Ok(DateTimeField::Datetime),
2781                Keyword::HOUR => Ok(DateTimeField::Hour),
2782                Keyword::HOURS => Ok(DateTimeField::Hours),
2783                Keyword::MINUTE => Ok(DateTimeField::Minute),
2784                Keyword::MINUTES => Ok(DateTimeField::Minutes),
2785                Keyword::SECOND => Ok(DateTimeField::Second),
2786                Keyword::SECONDS => Ok(DateTimeField::Seconds),
2787                Keyword::CENTURY => Ok(DateTimeField::Century),
2788                Keyword::DECADE => Ok(DateTimeField::Decade),
2789                Keyword::DOY => Ok(DateTimeField::Doy),
2790                Keyword::DOW => Ok(DateTimeField::Dow),
2791                Keyword::EPOCH => Ok(DateTimeField::Epoch),
2792                Keyword::ISODOW => Ok(DateTimeField::Isodow),
2793                Keyword::ISOYEAR => Ok(DateTimeField::Isoyear),
2794                Keyword::ISOWEEK => Ok(DateTimeField::IsoWeek),
2795                Keyword::JULIAN => Ok(DateTimeField::Julian),
2796                Keyword::MICROSECOND => Ok(DateTimeField::Microsecond),
2797                Keyword::MICROSECONDS => Ok(DateTimeField::Microseconds),
2798                Keyword::MILLENIUM => Ok(DateTimeField::Millenium),
2799                Keyword::MILLENNIUM => Ok(DateTimeField::Millennium),
2800                Keyword::MILLISECOND => Ok(DateTimeField::Millisecond),
2801                Keyword::MILLISECONDS => Ok(DateTimeField::Milliseconds),
2802                Keyword::NANOSECOND => Ok(DateTimeField::Nanosecond),
2803                Keyword::NANOSECONDS => Ok(DateTimeField::Nanoseconds),
2804                Keyword::QUARTER => Ok(DateTimeField::Quarter),
2805                Keyword::TIME => Ok(DateTimeField::Time),
2806                Keyword::TIMEZONE => Ok(DateTimeField::Timezone),
2807                Keyword::TIMEZONE_ABBR => Ok(DateTimeField::TimezoneAbbr),
2808                Keyword::TIMEZONE_HOUR => Ok(DateTimeField::TimezoneHour),
2809                Keyword::TIMEZONE_MINUTE => Ok(DateTimeField::TimezoneMinute),
2810                Keyword::TIMEZONE_REGION => Ok(DateTimeField::TimezoneRegion),
2811                _ if self.dialect.allow_extract_custom() => {
2812                    self.prev_token();
2813                    let custom = self.parse_identifier()?;
2814                    Ok(DateTimeField::Custom(custom))
2815                }
2816                _ => self.expected("date/time field", next_token),
2817            },
2818            Token::SingleQuotedString(_) if self.dialect.allow_extract_single_quotes() => {
2819                self.prev_token();
2820                let custom = self.parse_identifier()?;
2821                Ok(DateTimeField::Custom(custom))
2822            }
2823            _ => self.expected("date/time field", next_token),
2824        }
2825    }
2826
2827    pub fn parse_not(&mut self) -> Result<Expr, ParserError> {
2828        match self.peek_token().token {
2829            Token::Word(w) => match w.keyword {
2830                Keyword::EXISTS => {
2831                    let negated = true;
2832                    let _ = self.parse_keyword(Keyword::EXISTS);
2833                    self.parse_exists_expr(negated)
2834                }
2835                _ => Ok(Expr::UnaryOp {
2836                    op: UnaryOperator::Not,
2837                    expr: Box::new(
2838                        self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?,
2839                    ),
2840                }),
2841            },
2842            _ => Ok(Expr::UnaryOp {
2843                op: UnaryOperator::Not,
2844                expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
2845            }),
2846        }
2847    }
2848
2849    /// Parse expression types that start with a left brace '{'.
2850    /// Examples:
2851    /// ```sql
2852    /// -- Dictionary expr.
2853    /// {'key1': 'value1', 'key2': 'value2'}
2854    ///
2855    /// -- Function call using the ODBC syntax.
2856    /// { fn CONCAT('foo', 'bar') }
2857    /// ```
2858    fn parse_lbrace_expr(&mut self) -> Result<Expr, ParserError> {
2859        let token = self.expect_token(&Token::LBrace)?;
2860
2861        if let Some(fn_expr) = self.maybe_parse_odbc_body()? {
2862            self.expect_token(&Token::RBrace)?;
2863            return Ok(fn_expr);
2864        }
2865
2866        if self.dialect.supports_dictionary_syntax() {
2867            self.prev_token(); // Put back the '{'
2868            return self.parse_dictionary();
2869        }
2870
2871        self.expected("an expression", token)
2872    }
2873
2874    /// Parses fulltext expressions [`sqlparser::ast::Expr::MatchAgainst`]
2875    ///
2876    /// # Errors
    /// This method returns an error if the column list is empty or contains invalid identifiers,
    /// if the match expression is not a literal string, or if the search modifier is not valid.
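    ///
    /// As a rough usage sketch (the column names and the `MySqlDialect` are only
    /// illustrative assumptions), the full predicate parsed via `Parser::parse_expr`
    /// surfaces as an `Expr::MatchAgainst`:
    ///
    /// ```rust
    /// # use sqlparser::ast::Expr;
    /// # use sqlparser::dialect::MySqlDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = MySqlDialect {};
    /// let expr = Parser::new(&dialect)
    ///     .try_with_sql("MATCH (title, body) AGAINST ('database' IN BOOLEAN MODE)")
    ///     .unwrap()
    ///     .parse_expr()
    ///     .unwrap();
    /// assert!(matches!(expr, Expr::MatchAgainst { .. }));
    /// ```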
2879    pub fn parse_match_against(&mut self) -> Result<Expr, ParserError> {
2880        let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
2881
2882        self.expect_keyword_is(Keyword::AGAINST)?;
2883
2884        self.expect_token(&Token::LParen)?;
2885
        // MySQL is quite permissive about the value, so we cannot validate it fully at the syntax level.
2887        let match_value = self.parse_value()?.value;
2888
2889        let in_natural_language_mode_keywords = &[
2890            Keyword::IN,
2891            Keyword::NATURAL,
2892            Keyword::LANGUAGE,
2893            Keyword::MODE,
2894        ];
2895
2896        let with_query_expansion_keywords = &[Keyword::WITH, Keyword::QUERY, Keyword::EXPANSION];
2897
2898        let in_boolean_mode_keywords = &[Keyword::IN, Keyword::BOOLEAN, Keyword::MODE];
2899
2900        let opt_search_modifier = if self.parse_keywords(in_natural_language_mode_keywords) {
2901            if self.parse_keywords(with_query_expansion_keywords) {
2902                Some(SearchModifier::InNaturalLanguageModeWithQueryExpansion)
2903            } else {
2904                Some(SearchModifier::InNaturalLanguageMode)
2905            }
2906        } else if self.parse_keywords(in_boolean_mode_keywords) {
2907            Some(SearchModifier::InBooleanMode)
2908        } else if self.parse_keywords(with_query_expansion_keywords) {
2909            Some(SearchModifier::WithQueryExpansion)
2910        } else {
2911            None
2912        };
2913
2914        self.expect_token(&Token::RParen)?;
2915
2916        Ok(Expr::MatchAgainst {
2917            columns,
2918            match_value,
2919            opt_search_modifier,
2920        })
2921    }
2922
2923    /// Parse an `INTERVAL` expression.
2924    ///
2925    /// Some syntactically valid intervals:
2926    ///
2927    /// ```sql
2928    ///   1. INTERVAL '1' DAY
2929    ///   2. INTERVAL '1-1' YEAR TO MONTH
2930    ///   3. INTERVAL '1' SECOND
2931    ///   4. INTERVAL '1:1:1.1' HOUR (5) TO SECOND (5)
2932    ///   5. INTERVAL '1.1' SECOND (2, 2)
2933    ///   6. INTERVAL '1:1' HOUR (5) TO MINUTE (5)
    ///   7. (MySQL & BigQuery only): INTERVAL 1 DAY
2935    /// ```
2936    ///
2937    /// Note that we do not currently attempt to parse the quoted value.
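    ///
    /// As a rough usage sketch (the `GenericDialect` is only an illustrative
    /// assumption), an interval such as example 1 above surfaces as an
    /// `Expr::Interval` when parsed via `Parser::parse_expr`:
    ///
    /// ```rust
    /// # use sqlparser::ast::Expr;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let expr = Parser::new(&dialect)
    ///     .try_with_sql("INTERVAL '1' DAY")
    ///     .unwrap()
    ///     .parse_expr()
    ///     .unwrap();
    /// assert!(matches!(expr, Expr::Interval(_)));
    /// ```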
2938    pub fn parse_interval(&mut self) -> Result<Expr, ParserError> {
2939        // The SQL standard allows an optional sign before the value string, but
2940        // it is not clear if any implementations support that syntax, so we
2941        // don't currently try to parse it. (The sign can instead be included
2942        // inside the value string.)
2943
2944        // to match the different flavours of INTERVAL syntax, we only allow expressions
2945        // if the dialect requires an interval qualifier,
2946        // see https://github.com/sqlparser-rs/sqlparser-rs/pull/1398 for more details
2947        let value = if self.dialect.require_interval_qualifier() {
2948            // parse a whole expression so `INTERVAL 1 + 1 DAY` is valid
2949            self.parse_expr()?
2950        } else {
2951            // parse a prefix expression so `INTERVAL 1 DAY` is valid, but `INTERVAL 1 + 1 DAY` is not
            // this also means that `INTERVAL '5 days' > INTERVAL '1 day'` is treated properly
2953            self.parse_prefix()?
2954        };
2955
2956        // Following the string literal is a qualifier which indicates the units
2957        // of the duration specified in the string literal.
2958        //
2959        // Note that PostgreSQL allows omitting the qualifier, so we provide
2960        // this more general implementation.
2961        let leading_field = if self.next_token_is_temporal_unit() {
2962            Some(self.parse_date_time_field()?)
2963        } else if self.dialect.require_interval_qualifier() {
2964            return parser_err!(
2965                "INTERVAL requires a unit after the literal value",
2966                self.peek_token().span.start
2967            );
2968        } else {
2969            None
2970        };
2971
2972        let (leading_precision, last_field, fsec_precision) =
2973            if leading_field == Some(DateTimeField::Second) {
2974                // SQL mandates special syntax for `SECOND TO SECOND` literals.
2975                // Instead of
2976                //     `SECOND [(<leading precision>)] TO SECOND[(<fractional seconds precision>)]`
2977                // one must use the special format:
2978                //     `SECOND [( <leading precision> [ , <fractional seconds precision>] )]`
2979                let last_field = None;
2980                let (leading_precision, fsec_precision) = self.parse_optional_precision_scale()?;
2981                (leading_precision, last_field, fsec_precision)
2982            } else {
2983                let leading_precision = self.parse_optional_precision()?;
2984                if self.parse_keyword(Keyword::TO) {
2985                    let last_field = Some(self.parse_date_time_field()?);
2986                    let fsec_precision = if last_field == Some(DateTimeField::Second) {
2987                        self.parse_optional_precision()?
2988                    } else {
2989                        None
2990                    };
2991                    (leading_precision, last_field, fsec_precision)
2992                } else {
2993                    (leading_precision, None, None)
2994                }
2995            };
2996
2997        Ok(Expr::Interval(Interval {
2998            value: Box::new(value),
2999            leading_field,
3000            leading_precision,
3001            last_field,
3002            fractional_seconds_precision: fsec_precision,
3003        }))
3004    }
3005
3006    /// Peek at the next token and determine if it is a temporal unit
3007    /// like `second`.
3008    pub fn next_token_is_temporal_unit(&mut self) -> bool {
3009        if let Token::Word(word) = self.peek_token().token {
3010            matches!(
3011                word.keyword,
3012                Keyword::YEAR
3013                    | Keyword::YEARS
3014                    | Keyword::MONTH
3015                    | Keyword::MONTHS
3016                    | Keyword::WEEK
3017                    | Keyword::WEEKS
3018                    | Keyword::DAY
3019                    | Keyword::DAYS
3020                    | Keyword::HOUR
3021                    | Keyword::HOURS
3022                    | Keyword::MINUTE
3023                    | Keyword::MINUTES
3024                    | Keyword::SECOND
3025                    | Keyword::SECONDS
3026                    | Keyword::CENTURY
3027                    | Keyword::DECADE
3028                    | Keyword::DOW
3029                    | Keyword::DOY
3030                    | Keyword::EPOCH
3031                    | Keyword::ISODOW
3032                    | Keyword::ISOYEAR
3033                    | Keyword::JULIAN
3034                    | Keyword::MICROSECOND
3035                    | Keyword::MICROSECONDS
3036                    | Keyword::MILLENIUM
3037                    | Keyword::MILLENNIUM
3038                    | Keyword::MILLISECOND
3039                    | Keyword::MILLISECONDS
3040                    | Keyword::NANOSECOND
3041                    | Keyword::NANOSECONDS
3042                    | Keyword::QUARTER
3043                    | Keyword::TIMEZONE
3044                    | Keyword::TIMEZONE_HOUR
3045                    | Keyword::TIMEZONE_MINUTE
3046            )
3047        } else {
3048            false
3049        }
3050    }
3051
3052    /// Syntax
3053    /// ```sql
3054    /// -- typed
3055    /// STRUCT<[field_name] field_type, ...>( expr1 [, ... ])
3056    /// -- typeless
3057    /// STRUCT( expr1 [AS field_name] [, ... ])
3058    /// ```
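    ///
    /// As a rough usage sketch (the field names and the `BigQueryDialect` are only
    /// illustrative assumptions), a typeless struct literal parsed via
    /// `Parser::parse_expr` surfaces as an `Expr::Struct`:
    ///
    /// ```rust
    /// # use sqlparser::ast::Expr;
    /// # use sqlparser::dialect::BigQueryDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = BigQueryDialect {};
    /// let expr = Parser::new(&dialect)
    ///     .try_with_sql("STRUCT(1 AS a, 'abc' AS b)")
    ///     .unwrap()
    ///     .parse_expr()
    ///     .unwrap();
    /// assert!(matches!(expr, Expr::Struct { .. }));
    /// ```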
3059    fn parse_struct_literal(&mut self) -> Result<Expr, ParserError> {
        // Parse the field definitions if they exist: `<[field_name] field_type, ...>`
3061        self.prev_token();
3062        let (fields, trailing_bracket) =
3063            self.parse_struct_type_def(Self::parse_struct_field_def)?;
3064        if trailing_bracket.0 {
3065            return parser_err!(
3066                "unmatched > in STRUCT literal",
3067                self.peek_token().span.start
3068            );
3069        }
3070
3071        // Parse the struct values `(expr1 [, ... ])`
3072        self.expect_token(&Token::LParen)?;
3073        let values = self
3074            .parse_comma_separated(|parser| parser.parse_struct_field_expr(!fields.is_empty()))?;
3075        self.expect_token(&Token::RParen)?;
3076
3077        Ok(Expr::Struct { values, fields })
3078    }
3079
3080    /// Parse an expression value for a struct literal
3081    /// Syntax
3082    /// ```sql
3083    /// expr [AS name]
3084    /// ```
3085    ///
    /// For BigQuery [1], the `typed_syntax` parameter is set to `true` if the expression
    /// is to be parsed as a field expression declared using the typed
    /// struct syntax [2], and `false` if using the typeless struct syntax [3].
3089    ///
3090    /// [1]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#constructing_a_struct
3091    /// [2]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#typed_struct_syntax
3092    /// [3]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#typeless_struct_syntax
3093    fn parse_struct_field_expr(&mut self, typed_syntax: bool) -> Result<Expr, ParserError> {
3094        let expr = self.parse_expr()?;
3095        if self.parse_keyword(Keyword::AS) {
3096            if typed_syntax {
3097                return parser_err!("Typed syntax does not allow AS", {
3098                    self.prev_token();
3099                    self.peek_token().span.start
3100                });
3101            }
3102            let field_name = self.parse_identifier()?;
3103            Ok(Expr::Named {
3104                expr: expr.into(),
3105                name: field_name,
3106            })
3107        } else {
3108            Ok(expr)
3109        }
3110    }
3111
3112    /// Parse a Struct type definition as a sequence of field-value pairs.
    /// The syntax of the struct elements differs by dialect, so it is customised
3114    /// by the `elem_parser` argument.
3115    ///
3116    /// Syntax
3117    /// ```sql
3118    /// Hive:
3119    /// STRUCT<field_name: field_type>
3120    ///
3121    /// BigQuery:
3122    /// STRUCT<[field_name] field_type>
3123    /// ```
3124    fn parse_struct_type_def<F>(
3125        &mut self,
3126        mut elem_parser: F,
3127    ) -> Result<(Vec<StructField>, MatchedTrailingBracket), ParserError>
3128    where
3129        F: FnMut(&mut Parser<'a>) -> Result<(StructField, MatchedTrailingBracket), ParserError>,
3130    {
3131        self.expect_keyword_is(Keyword::STRUCT)?;
3132
3133        // Nothing to do if we have no type information.
3134        if Token::Lt != self.peek_token() {
3135            return Ok((Default::default(), false.into()));
3136        }
3137        self.next_token();
3138
3139        let mut field_defs = vec![];
3140        let trailing_bracket = loop {
3141            let (def, trailing_bracket) = elem_parser(self)?;
3142            field_defs.push(def);
            // The list of field definitions ends when a closing `>>` is matched or no comma follows.
3144            if trailing_bracket.0 || !self.consume_token(&Token::Comma) {
3145                break trailing_bracket;
3146            }
3147        };
3148
3149        Ok((
3150            field_defs,
3151            self.expect_closing_angle_bracket(trailing_bracket)?,
3152        ))
3153    }
3154
3155    /// Duckdb Struct Data Type <https://duckdb.org/docs/sql/data_types/struct.html#retrieving-from-structs>
3156    fn parse_duckdb_struct_type_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3157        self.expect_keyword_is(Keyword::STRUCT)?;
3158        self.expect_token(&Token::LParen)?;
3159        let struct_body = self.parse_comma_separated(|parser| {
3160            let field_name = parser.parse_identifier()?;
3161            let field_type = parser.parse_data_type()?;
3162
3163            Ok(StructField {
3164                field_name: Some(field_name),
3165                field_type,
3166                options: None,
3167            })
3168        });
3169        self.expect_token(&Token::RParen)?;
3170        struct_body
3171    }
3172
3173    /// Parse a field definition in a [struct] or [tuple].
3174    /// Syntax:
3175    ///
3176    /// ```sql
3177    /// [field_name] field_type
3178    /// ```
3179    ///
3180    /// [struct]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#declaring_a_struct_type
3181    /// [tuple]: https://clickhouse.com/docs/en/sql-reference/data-types/tuple
3182    fn parse_struct_field_def(
3183        &mut self,
3184    ) -> Result<(StructField, MatchedTrailingBracket), ParserError> {
3185        // Look beyond the next item to infer whether both field name
3186        // and type are specified.
3187        let is_anonymous_field = !matches!(
3188            (self.peek_nth_token(0).token, self.peek_nth_token(1).token),
3189            (Token::Word(_), Token::Word(_))
3190        );
3191
3192        let field_name = if is_anonymous_field {
3193            None
3194        } else {
3195            Some(self.parse_identifier()?)
3196        };
3197
3198        let (field_type, trailing_bracket) = self.parse_data_type_helper()?;
3199
3200        let options = self.maybe_parse_options(Keyword::OPTIONS)?;
3201        Ok((
3202            StructField {
3203                field_name,
3204                field_type,
3205                options,
3206            },
3207            trailing_bracket,
3208        ))
3209    }
3210
    /// DuckDB specific: Parse a [Union type definition][1] as a sequence of field-value pairs.
3212    ///
3213    /// Syntax:
3214    ///
3215    /// ```sql
3216    /// UNION(field_name field_type[,...])
3217    /// ```
3218    ///
3219    /// [1]: https://duckdb.org/docs/sql/data_types/union.html
3220    fn parse_union_type_def(&mut self) -> Result<Vec<UnionField>, ParserError> {
3221        self.expect_keyword_is(Keyword::UNION)?;
3222
3223        self.expect_token(&Token::LParen)?;
3224
3225        let fields = self.parse_comma_separated(|p| {
3226            Ok(UnionField {
3227                field_name: p.parse_identifier()?,
3228                field_type: p.parse_data_type()?,
3229            })
3230        })?;
3231
3232        self.expect_token(&Token::RParen)?;
3233
3234        Ok(fields)
3235    }
3236
3237    /// DuckDB and ClickHouse specific: Parse a duckdb [dictionary] or a clickhouse [map] setting
3238    ///
3239    /// Syntax:
3240    ///
3241    /// ```sql
3242    /// {'field_name': expr1[, ... ]}
3243    /// ```
3244    ///
3245    /// [dictionary]: https://duckdb.org/docs/sql/data_types/struct#creating-structs
3246    /// [map]: https://clickhouse.com/docs/operations/settings/settings#additional_table_filters
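    ///
    /// As a rough usage sketch (the keys and the `DuckDbDialect` are only
    /// illustrative assumptions), such a literal parsed via `Parser::parse_expr`
    /// surfaces as an `Expr::Dictionary`:
    ///
    /// ```rust
    /// # use sqlparser::ast::Expr;
    /// # use sqlparser::dialect::DuckDbDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = DuckDbDialect {};
    /// let expr = Parser::new(&dialect)
    ///     .try_with_sql("{'a': 1, 'b': 2}")
    ///     .unwrap()
    ///     .parse_expr()
    ///     .unwrap();
    /// assert!(matches!(expr, Expr::Dictionary(_)));
    /// ```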
3247    fn parse_dictionary(&mut self) -> Result<Expr, ParserError> {
3248        self.expect_token(&Token::LBrace)?;
3249
3250        let fields = self.parse_comma_separated0(Self::parse_dictionary_field, Token::RBrace)?;
3251
3252        self.expect_token(&Token::RBrace)?;
3253
3254        Ok(Expr::Dictionary(fields))
3255    }
3256
3257    /// Parse a field for a duckdb [dictionary] or a clickhouse [map] setting
3258    ///
3259    /// Syntax
3260    ///
3261    /// ```sql
3262    /// 'name': expr
3263    /// ```
3264    ///
3265    /// [dictionary]: https://duckdb.org/docs/sql/data_types/struct#creating-structs
3266    /// [map]: https://clickhouse.com/docs/operations/settings/settings#additional_table_filters
3267    fn parse_dictionary_field(&mut self) -> Result<DictionaryField, ParserError> {
3268        let key = self.parse_identifier()?;
3269
3270        self.expect_token(&Token::Colon)?;
3271
3272        let expr = self.parse_expr()?;
3273
3274        Ok(DictionaryField {
3275            key,
3276            value: Box::new(expr),
3277        })
3278    }
3279
3280    /// DuckDB specific: Parse a duckdb [map]
3281    ///
3282    /// Syntax:
3283    ///
3284    /// ```sql
3285    /// Map {key1: value1[, ... ]}
3286    /// ```
3287    ///
3288    /// [map]: https://duckdb.org/docs/sql/data_types/map.html#creating-maps
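    ///
    /// As a rough usage sketch (the keys and the `DuckDbDialect` are only
    /// illustrative assumptions), such a literal parsed via `Parser::parse_expr`
    /// surfaces as an `Expr::Map`:
    ///
    /// ```rust
    /// # use sqlparser::ast::Expr;
    /// # use sqlparser::dialect::DuckDbDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = DuckDbDialect {};
    /// let expr = Parser::new(&dialect)
    ///     .try_with_sql("MAP {'key1': 10, 'key2': 20}")
    ///     .unwrap()
    ///     .parse_expr()
    ///     .unwrap();
    /// assert!(matches!(expr, Expr::Map(_)));
    /// ```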
3289    fn parse_duckdb_map_literal(&mut self) -> Result<Expr, ParserError> {
3290        self.expect_token(&Token::LBrace)?;
3291        let fields = self.parse_comma_separated0(Self::parse_duckdb_map_field, Token::RBrace)?;
3292        self.expect_token(&Token::RBrace)?;
3293        Ok(Expr::Map(Map { entries: fields }))
3294    }
3295
3296    /// Parse a field for a duckdb [map]
3297    ///
3298    /// Syntax
3299    ///
3300    /// ```sql
3301    /// key: value
3302    /// ```
3303    ///
3304    /// [map]: https://duckdb.org/docs/sql/data_types/map.html#creating-maps
3305    fn parse_duckdb_map_field(&mut self) -> Result<MapEntry, ParserError> {
3306        let key = self.parse_expr()?;
3307
3308        self.expect_token(&Token::Colon)?;
3309
3310        let value = self.parse_expr()?;
3311
3312        Ok(MapEntry {
3313            key: Box::new(key),
3314            value: Box::new(value),
3315        })
3316    }
3317
3318    /// Parse clickhouse [map]
3319    ///
3320    /// Syntax
3321    ///
3322    /// ```sql
3323    /// Map(key_data_type, value_data_type)
3324    /// ```
3325    ///
3326    /// [map]: https://clickhouse.com/docs/en/sql-reference/data-types/map
3327    fn parse_click_house_map_def(&mut self) -> Result<(DataType, DataType), ParserError> {
3328        self.expect_keyword_is(Keyword::MAP)?;
3329        self.expect_token(&Token::LParen)?;
3330        let key_data_type = self.parse_data_type()?;
3331        self.expect_token(&Token::Comma)?;
3332        let value_data_type = self.parse_data_type()?;
3333        self.expect_token(&Token::RParen)?;
3334
3335        Ok((key_data_type, value_data_type))
3336    }
3337
3338    /// Parse clickhouse [tuple]
3339    ///
3340    /// Syntax
3341    ///
3342    /// ```sql
3343    /// Tuple([field_name] field_type, ...)
3344    /// ```
3345    ///
3346    /// [tuple]: https://clickhouse.com/docs/en/sql-reference/data-types/tuple
3347    fn parse_click_house_tuple_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3348        self.expect_keyword_is(Keyword::TUPLE)?;
3349        self.expect_token(&Token::LParen)?;
3350        let mut field_defs = vec![];
3351        loop {
3352            let (def, _) = self.parse_struct_field_def()?;
3353            field_defs.push(def);
3354            if !self.consume_token(&Token::Comma) {
3355                break;
3356            }
3357        }
3358        self.expect_token(&Token::RParen)?;
3359
3360        Ok(field_defs)
3361    }
3362
3363    /// For nested types that use the angle bracket syntax, this matches either
3364    /// `>`, `>>` or nothing depending on which variant is expected (specified by the previously
    /// matched `trailing_bracket` argument). It returns whether there is a trailing bracket
    /// left to be matched, i.e. whether `>>` was matched.
3367    fn expect_closing_angle_bracket(
3368        &mut self,
3369        trailing_bracket: MatchedTrailingBracket,
3370    ) -> Result<MatchedTrailingBracket, ParserError> {
3371        let trailing_bracket = if !trailing_bracket.0 {
3372            match self.peek_token().token {
3373                Token::Gt => {
3374                    self.next_token();
3375                    false.into()
3376                }
3377                Token::ShiftRight => {
3378                    self.next_token();
3379                    true.into()
3380                }
3381                _ => return self.expected(">", self.peek_token()),
3382            }
3383        } else {
3384            false.into()
3385        };
3386
3387        Ok(trailing_bracket)
3388    }
3389
3390    /// Parse an operator following an expression
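    ///
    /// As a rough usage sketch (the operand names and the `GenericDialect` are only
    /// illustrative assumptions), infix operators nest according to precedence, so
    /// the top-level operator of `a + b * c` is the lower-precedence `+`:
    ///
    /// ```rust
    /// # use sqlparser::ast::{BinaryOperator, Expr};
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let expr = Parser::new(&dialect)
    ///     .try_with_sql("a + b * c")
    ///     .unwrap()
    ///     .parse_expr()
    ///     .unwrap();
    /// assert!(matches!(expr, Expr::BinaryOp { op: BinaryOperator::Plus, .. }));
    /// ```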
3391    pub fn parse_infix(&mut self, expr: Expr, precedence: u8) -> Result<Expr, ParserError> {
3392        // allow the dialect to override infix parsing
3393        if let Some(infix) = self.dialect.parse_infix(self, &expr, precedence) {
3394            return infix;
3395        }
3396
3397        let dialect = self.dialect;
3398
3399        self.advance_token();
3400        let tok = self.get_current_token();
3401        debug!("infix: {tok:?}");
3402        let tok_index = self.get_current_index();
3403        let span = tok.span;
3404        let regular_binary_operator = match &tok.token {
3405            Token::Spaceship => Some(BinaryOperator::Spaceship),
3406            Token::DoubleEq => Some(BinaryOperator::Eq),
3407            Token::Assignment => Some(BinaryOperator::Assignment),
3408            Token::Eq => Some(BinaryOperator::Eq),
3409            Token::Neq => Some(BinaryOperator::NotEq),
3410            Token::Gt => Some(BinaryOperator::Gt),
3411            Token::GtEq => Some(BinaryOperator::GtEq),
3412            Token::Lt => Some(BinaryOperator::Lt),
3413            Token::LtEq => Some(BinaryOperator::LtEq),
3414            Token::Plus => Some(BinaryOperator::Plus),
3415            Token::Minus => Some(BinaryOperator::Minus),
3416            Token::Mul => Some(BinaryOperator::Multiply),
3417            Token::Mod => Some(BinaryOperator::Modulo),
3418            Token::StringConcat => Some(BinaryOperator::StringConcat),
3419            Token::Pipe => Some(BinaryOperator::BitwiseOr),
3420            Token::Caret => {
3421                // In PostgreSQL, ^ stands for the exponentiation operation,
3422                // and # stands for XOR. See https://www.postgresql.org/docs/current/functions-math.html
3423                if dialect_is!(dialect is PostgreSqlDialect) {
3424                    Some(BinaryOperator::PGExp)
3425                } else {
3426                    Some(BinaryOperator::BitwiseXor)
3427                }
3428            }
3429            Token::Ampersand => Some(BinaryOperator::BitwiseAnd),
3430            Token::Div => Some(BinaryOperator::Divide),
3431            Token::DuckIntDiv if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
3432                Some(BinaryOperator::DuckIntegerDivide)
3433            }
3434            Token::ShiftLeft if dialect_is!(dialect is PostgreSqlDialect | DuckDbDialect | GenericDialect | RedshiftSqlDialect) => {
3435                Some(BinaryOperator::PGBitwiseShiftLeft)
3436            }
3437            Token::ShiftRight if dialect_is!(dialect is PostgreSqlDialect | DuckDbDialect | GenericDialect | RedshiftSqlDialect) => {
3438                Some(BinaryOperator::PGBitwiseShiftRight)
3439            }
3440            Token::Sharp if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3441                Some(BinaryOperator::PGBitwiseXor)
3442            }
3443            Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3444                Some(BinaryOperator::PGOverlap)
3445            }
3446            Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3447                Some(BinaryOperator::PGOverlap)
3448            }
3449            Token::CaretAt if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3450                Some(BinaryOperator::PGStartsWith)
3451            }
3452            Token::Tilde => Some(BinaryOperator::PGRegexMatch),
3453            Token::TildeAsterisk => Some(BinaryOperator::PGRegexIMatch),
3454            Token::ExclamationMarkTilde => Some(BinaryOperator::PGRegexNotMatch),
3455            Token::ExclamationMarkTildeAsterisk => Some(BinaryOperator::PGRegexNotIMatch),
3456            Token::DoubleTilde => Some(BinaryOperator::PGLikeMatch),
3457            Token::DoubleTildeAsterisk => Some(BinaryOperator::PGILikeMatch),
3458            Token::ExclamationMarkDoubleTilde => Some(BinaryOperator::PGNotLikeMatch),
3459            Token::ExclamationMarkDoubleTildeAsterisk => Some(BinaryOperator::PGNotILikeMatch),
3460            Token::Arrow => Some(BinaryOperator::Arrow),
3461            Token::LongArrow => Some(BinaryOperator::LongArrow),
3462            Token::HashArrow => Some(BinaryOperator::HashArrow),
3463            Token::HashLongArrow => Some(BinaryOperator::HashLongArrow),
3464            Token::AtArrow => Some(BinaryOperator::AtArrow),
3465            Token::ArrowAt => Some(BinaryOperator::ArrowAt),
3466            Token::HashMinus => Some(BinaryOperator::HashMinus),
3467            Token::AtQuestion => Some(BinaryOperator::AtQuestion),
3468            Token::AtAt => Some(BinaryOperator::AtAt),
3469            Token::Question => Some(BinaryOperator::Question),
3470            Token::QuestionAnd => Some(BinaryOperator::QuestionAnd),
3471            Token::QuestionPipe => Some(BinaryOperator::QuestionPipe),
3472            Token::CustomBinaryOperator(s) => Some(BinaryOperator::Custom(s.clone())),
3473            Token::DoubleSharp if self.dialect.supports_geometric_types() => {
3474                Some(BinaryOperator::DoubleHash)
3475            }
3476
3477            Token::AmpersandLeftAngleBracket if self.dialect.supports_geometric_types() => {
3478                Some(BinaryOperator::AndLt)
3479            }
3480            Token::AmpersandRightAngleBracket if self.dialect.supports_geometric_types() => {
3481                Some(BinaryOperator::AndGt)
3482            }
3483            Token::QuestionMarkDash if self.dialect.supports_geometric_types() => {
3484                Some(BinaryOperator::QuestionDash)
3485            }
3486            Token::AmpersandLeftAngleBracketVerticalBar
3487                if self.dialect.supports_geometric_types() =>
3488            {
3489                Some(BinaryOperator::AndLtPipe)
3490            }
3491            Token::VerticalBarAmpersandRightAngleBracket
3492                if self.dialect.supports_geometric_types() =>
3493            {
3494                Some(BinaryOperator::PipeAndGt)
3495            }
3496            Token::TwoWayArrow if self.dialect.supports_geometric_types() => {
3497                Some(BinaryOperator::LtDashGt)
3498            }
3499            Token::LeftAngleBracketCaret if self.dialect.supports_geometric_types() => {
3500                Some(BinaryOperator::LtCaret)
3501            }
3502            Token::RightAngleBracketCaret if self.dialect.supports_geometric_types() => {
3503                Some(BinaryOperator::GtCaret)
3504            }
3505            Token::QuestionMarkSharp if self.dialect.supports_geometric_types() => {
3506                Some(BinaryOperator::QuestionHash)
3507            }
3508            Token::QuestionMarkDoubleVerticalBar if self.dialect.supports_geometric_types() => {
3509                Some(BinaryOperator::QuestionDoublePipe)
3510            }
3511            Token::QuestionMarkDashVerticalBar if self.dialect.supports_geometric_types() => {
3512                Some(BinaryOperator::QuestionDashPipe)
3513            }
3514            Token::TildeEqual if self.dialect.supports_geometric_types() => {
3515                Some(BinaryOperator::TildeEq)
3516            }
3517            Token::ShiftLeftVerticalBar if self.dialect.supports_geometric_types() => {
3518                Some(BinaryOperator::LtLtPipe)
3519            }
3520            Token::VerticalBarShiftRight if self.dialect.supports_geometric_types() => {
3521                Some(BinaryOperator::PipeGtGt)
3522            }
3523            Token::AtSign if self.dialect.supports_geometric_types() => Some(BinaryOperator::At),
3524
3525            Token::Word(w) => match w.keyword {
3526                Keyword::AND => Some(BinaryOperator::And),
3527                Keyword::OR => Some(BinaryOperator::Or),
3528                Keyword::XOR => Some(BinaryOperator::Xor),
3529                Keyword::OVERLAPS => Some(BinaryOperator::Overlaps),
3530                Keyword::OPERATOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3531                    self.expect_token(&Token::LParen)?;
3532                    // there are special rules for operator names in
3533                    // postgres so we can not use 'parse_object'
3534                    // or similar.
3535                    // See https://www.postgresql.org/docs/current/sql-createoperator.html
3536                    let mut idents = vec![];
3537                    loop {
3538                        self.advance_token();
3539                        idents.push(self.get_current_token().to_string());
3540                        if !self.consume_token(&Token::Period) {
3541                            break;
3542                        }
3543                    }
3544                    self.expect_token(&Token::RParen)?;
3545                    Some(BinaryOperator::PGCustomBinaryOperator(idents))
3546                }
3547                _ => None,
3548            },
3549            _ => None,
3550        };
3551
3552        let tok = self.token_at(tok_index);
3553        if let Some(op) = regular_binary_operator {
3554            if let Some(keyword) =
3555                self.parse_one_of_keywords(&[Keyword::ANY, Keyword::ALL, Keyword::SOME])
3556            {
3557                self.expect_token(&Token::LParen)?;
3558                let right = if self.peek_sub_query() {
                    // We have a subquery ahead (SELECT/WITH ...), so we need to rewind and
3560                    // use the parenthesis for parsing the subquery as an expression.
3561                    self.prev_token(); // LParen
3562                    self.parse_subexpr(precedence)?
3563                } else {
3564                    // Non-subquery expression
3565                    let right = self.parse_subexpr(precedence)?;
3566                    self.expect_token(&Token::RParen)?;
3567                    right
3568                };
3569
3570                if !matches!(
3571                    op,
3572                    BinaryOperator::Gt
3573                        | BinaryOperator::Lt
3574                        | BinaryOperator::GtEq
3575                        | BinaryOperator::LtEq
3576                        | BinaryOperator::Eq
3577                        | BinaryOperator::NotEq
3578                        | BinaryOperator::PGRegexMatch
3579                        | BinaryOperator::PGRegexIMatch
3580                        | BinaryOperator::PGRegexNotMatch
3581                        | BinaryOperator::PGRegexNotIMatch
3582                        | BinaryOperator::PGLikeMatch
3583                        | BinaryOperator::PGILikeMatch
3584                        | BinaryOperator::PGNotLikeMatch
3585                        | BinaryOperator::PGNotILikeMatch
3586                ) {
3587                    return parser_err!(
3588                        format!(
3589                        "Expected one of [=, >, <, =>, =<, !=, ~, ~*, !~, !~*, ~~, ~~*, !~~, !~~*] as comparison operator, found: {op}"
3590                    ),
3591                        span.start
3592                    );
3593                };
3594
3595                Ok(match keyword {
3596                    Keyword::ALL => Expr::AllOp {
3597                        left: Box::new(expr),
3598                        compare_op: op,
3599                        right: Box::new(right),
3600                    },
3601                    Keyword::ANY | Keyword::SOME => Expr::AnyOp {
3602                        left: Box::new(expr),
3603                        compare_op: op,
3604                        right: Box::new(right),
3605                        is_some: keyword == Keyword::SOME,
3606                    },
3607                    unexpected_keyword => return Err(ParserError::ParserError(
3608                        format!("Internal parser error: expected any of {{ALL, ANY, SOME}}, got {unexpected_keyword:?}"),
3609                    )),
3610                })
3611            } else {
3612                Ok(Expr::BinaryOp {
3613                    left: Box::new(expr),
3614                    op,
3615                    right: Box::new(self.parse_subexpr(precedence)?),
3616                })
3617            }
3618        } else if let Token::Word(w) = &tok.token {
3619            match w.keyword {
3620                Keyword::IS => {
3621                    if self.parse_keyword(Keyword::NULL) {
3622                        Ok(Expr::IsNull(Box::new(expr)))
3623                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
3624                        Ok(Expr::IsNotNull(Box::new(expr)))
3625                    } else if self.parse_keywords(&[Keyword::TRUE]) {
3626                        Ok(Expr::IsTrue(Box::new(expr)))
3627                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::TRUE]) {
3628                        Ok(Expr::IsNotTrue(Box::new(expr)))
3629                    } else if self.parse_keywords(&[Keyword::FALSE]) {
3630                        Ok(Expr::IsFalse(Box::new(expr)))
3631                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::FALSE]) {
3632                        Ok(Expr::IsNotFalse(Box::new(expr)))
3633                    } else if self.parse_keywords(&[Keyword::UNKNOWN]) {
3634                        Ok(Expr::IsUnknown(Box::new(expr)))
3635                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::UNKNOWN]) {
3636                        Ok(Expr::IsNotUnknown(Box::new(expr)))
3637                    } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::FROM]) {
3638                        let expr2 = self.parse_expr()?;
3639                        Ok(Expr::IsDistinctFrom(Box::new(expr), Box::new(expr2)))
3640                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::DISTINCT, Keyword::FROM])
3641                    {
3642                        let expr2 = self.parse_expr()?;
3643                        Ok(Expr::IsNotDistinctFrom(Box::new(expr), Box::new(expr2)))
3644                    } else if let Ok(is_normalized) = self.parse_unicode_is_normalized(expr) {
3645                        Ok(is_normalized)
3646                    } else {
3647                        self.expected(
3648                            "[NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS",
3649                            self.peek_token(),
3650                        )
3651                    }
3652                }
3653                Keyword::AT => {
3654                    self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
3655                    Ok(Expr::AtTimeZone {
3656                        timestamp: Box::new(expr),
3657                        time_zone: Box::new(self.parse_subexpr(precedence)?),
3658                    })
3659                }
3660                Keyword::NOT
3661                | Keyword::IN
3662                | Keyword::BETWEEN
3663                | Keyword::LIKE
3664                | Keyword::ILIKE
3665                | Keyword::SIMILAR
3666                | Keyword::REGEXP
3667                | Keyword::RLIKE => {
3668                    self.prev_token();
3669                    let negated = self.parse_keyword(Keyword::NOT);
3670                    let regexp = self.parse_keyword(Keyword::REGEXP);
3671                    let rlike = self.parse_keyword(Keyword::RLIKE);
3672                    let null = if !self.in_column_definition_state() {
3673                        self.parse_keyword(Keyword::NULL)
3674                    } else {
3675                        false
3676                    };
3677                    if regexp || rlike {
3678                        Ok(Expr::RLike {
3679                            negated,
3680                            expr: Box::new(expr),
3681                            pattern: Box::new(
3682                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3683                            ),
3684                            regexp,
3685                        })
3686                    } else if negated && null {
3687                        Ok(Expr::IsNotNull(Box::new(expr)))
3688                    } else if self.parse_keyword(Keyword::IN) {
3689                        self.parse_in(expr, negated)
3690                    } else if self.parse_keyword(Keyword::BETWEEN) {
3691                        self.parse_between(expr, negated)
3692                    } else if self.parse_keyword(Keyword::LIKE) {
3693                        Ok(Expr::Like {
3694                            negated,
3695                            any: self.parse_keyword(Keyword::ANY),
3696                            expr: Box::new(expr),
3697                            pattern: Box::new(
3698                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3699                            ),
3700                            escape_char: self.parse_escape_char()?,
3701                        })
3702                    } else if self.parse_keyword(Keyword::ILIKE) {
3703                        Ok(Expr::ILike {
3704                            negated,
3705                            any: self.parse_keyword(Keyword::ANY),
3706                            expr: Box::new(expr),
3707                            pattern: Box::new(
3708                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3709                            ),
3710                            escape_char: self.parse_escape_char()?,
3711                        })
3712                    } else if self.parse_keywords(&[Keyword::SIMILAR, Keyword::TO]) {
3713                        Ok(Expr::SimilarTo {
3714                            negated,
3715                            expr: Box::new(expr),
3716                            pattern: Box::new(
3717                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3718                            ),
3719                            escape_char: self.parse_escape_char()?,
3720                        })
3721                    } else {
3722                        self.expected("IN or BETWEEN after NOT", self.peek_token())
3723                    }
3724                }
3725                Keyword::NOTNULL if dialect.supports_notnull_operator() => {
3726                    Ok(Expr::IsNotNull(Box::new(expr)))
3727                }
3728                Keyword::MEMBER => {
3729                    if self.parse_keyword(Keyword::OF) {
3730                        self.expect_token(&Token::LParen)?;
3731                        let array = self.parse_expr()?;
3732                        self.expect_token(&Token::RParen)?;
3733                        Ok(Expr::MemberOf(MemberOf {
3734                            value: Box::new(expr),
3735                            array: Box::new(array),
3736                        }))
3737                    } else {
3738                        self.expected("OF after MEMBER", self.peek_token())
3739                    }
3740                }
3741                // Can only happen if `get_next_precedence` got out of sync with this function
3742                _ => parser_err!(
3743                    format!("No infix parser for token {:?}", tok.token),
3744                    tok.span.start
3745                ),
3746            }
3747        } else if Token::DoubleColon == *tok {
3748            Ok(Expr::Cast {
3749                kind: CastKind::DoubleColon,
3750                expr: Box::new(expr),
3751                data_type: self.parse_data_type()?,
3752                format: None,
3753            })
3754        } else if Token::ExclamationMark == *tok && self.dialect.supports_factorial_operator() {
3755            Ok(Expr::UnaryOp {
3756                op: UnaryOperator::PGPostfixFactorial,
3757                expr: Box::new(expr),
3758            })
3759        } else if Token::LBracket == *tok && self.dialect.supports_partiql()
3760            || (dialect_of!(self is SnowflakeDialect | GenericDialect) && Token::Colon == *tok)
3761        {
3762            self.prev_token();
3763            self.parse_json_access(expr)
3764        } else {
3765            // Can only happen if `get_next_precedence` got out of sync with this function
3766            parser_err!(
3767                format!("No infix parser for token {:?}", tok.token),
3768                tok.span.start
3769            )
3770        }
3771    }
3772
3773    /// Parse the `ESCAPE CHAR` portion of `LIKE`, `ILIKE`, and `SIMILAR TO`
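    ///
    /// As a rough usage sketch (the column `name` and the `GenericDialect` are only
    /// illustrative assumptions), an `ESCAPE` clause shows up on the surrounding
    /// `Expr::Like`:
    ///
    /// ```rust
    /// # use sqlparser::ast::Expr;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let expr = Parser::new(&dialect)
    ///     .try_with_sql("name LIKE 'a%' ESCAPE '!'")
    ///     .unwrap()
    ///     .parse_expr()
    ///     .unwrap();
    /// assert!(matches!(expr, Expr::Like { escape_char: Some(_), .. }));
    /// ```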
3774    pub fn parse_escape_char(&mut self) -> Result<Option<Value>, ParserError> {
3775        if self.parse_keyword(Keyword::ESCAPE) {
3776            Ok(Some(self.parse_value()?.into()))
3777        } else {
3778            Ok(None)
3779        }
3780    }
3781
3782    /// Parses an array subscript like
3783    /// * `[:]`
3784    /// * `[l]`
3785    /// * `[l:]`
3786    /// * `[:u]`
3787    /// * `[l:u]`
3788    /// * `[l:u:s]`
3789    ///
3790    /// Parser is right after `[`
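    ///
    /// As a rough usage sketch (the column `data` and the `DuckDbDialect` are only
    /// illustrative assumptions, and this presumes the dialect accepts bracket
    /// subscripts), a slice subscript is accepted as part of a larger expression:
    ///
    /// ```rust
    /// # use sqlparser::dialect::DuckDbDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = DuckDbDialect {};
    /// assert!(Parser::new(&dialect)
    ///     .try_with_sql("data[1:3]")
    ///     .unwrap()
    ///     .parse_expr()
    ///     .is_ok());
    /// ```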
3791    fn parse_subscript_inner(&mut self) -> Result<Subscript, ParserError> {
3792        // at either `<lower>:(rest)` or `:(rest)]`
3793        let lower_bound = if self.consume_token(&Token::Colon) {
3794            None
3795        } else {
3796            Some(self.parse_expr()?)
3797        };
3798
3799        // check for end
3800        if self.consume_token(&Token::RBracket) {
3801            if let Some(lower_bound) = lower_bound {
3802                return Ok(Subscript::Index { index: lower_bound });
3803            };
3804            return Ok(Subscript::Slice {
3805                lower_bound,
3806                upper_bound: None,
3807                stride: None,
3808            });
3809        }
3810
3811        // consume the `:`
3812        if lower_bound.is_some() {
3813            self.expect_token(&Token::Colon)?;
3814        }
3815
        // we are now at either `]` or `<upper>(rest)]`
3817        let upper_bound = if self.consume_token(&Token::RBracket) {
3818            return Ok(Subscript::Slice {
3819                lower_bound,
3820                upper_bound: None,
3821                stride: None,
3822            });
3823        } else {
3824            Some(self.parse_expr()?)
3825        };
3826
3827        // check for end
3828        if self.consume_token(&Token::RBracket) {
3829            return Ok(Subscript::Slice {
3830                lower_bound,
3831                upper_bound,
3832                stride: None,
3833            });
3834        }
3835
3836        // we are now at `:]` or `:stride]`
3837        self.expect_token(&Token::Colon)?;
3838        let stride = if self.consume_token(&Token::RBracket) {
3839            None
3840        } else {
3841            Some(self.parse_expr()?)
3842        };
3843
3844        if stride.is_some() {
3845            self.expect_token(&Token::RBracket)?;
3846        }
3847
3848        Ok(Subscript::Slice {
3849            lower_bound,
3850            upper_bound,
3851            stride,
3852        })
3853    }
3854
    /// Parse multi-dimensional array access like `[1:3][1][1]`
3856    pub fn parse_multi_dim_subscript(
3857        &mut self,
3858        chain: &mut Vec<AccessExpr>,
3859    ) -> Result<(), ParserError> {
3860        while self.consume_token(&Token::LBracket) {
3861            self.parse_subscript(chain)?;
3862        }
3863        Ok(())
3864    }
3865
3866    /// Parses an array subscript like `[1:3]`
3867    ///
3868    /// Parser is right after `[`
3869    fn parse_subscript(&mut self, chain: &mut Vec<AccessExpr>) -> Result<(), ParserError> {
3870        let subscript = self.parse_subscript_inner()?;
3871        chain.push(AccessExpr::Subscript(subscript));
3872        Ok(())
3873    }
3874
3875    fn parse_json_path_object_key(&mut self) -> Result<JsonPathElem, ParserError> {
3876        let token = self.next_token();
3877        match token.token {
3878            Token::Word(Word {
3879                value,
                // path segments in Snowflake dot notation can be unquoted or double-quoted
3881                quote_style: quote_style @ (Some('"') | None),
3882                // some experimentation suggests that snowflake permits
3883                // any keyword here unquoted.
3884                keyword: _,
3885            }) => Ok(JsonPathElem::Dot {
3886                key: value,
3887                quoted: quote_style.is_some(),
3888            }),
3889
3890            // This token should never be generated on snowflake or generic
3891            // dialects, but we handle it just in case this is used on future
3892            // dialects.
3893            Token::DoubleQuotedString(key) => Ok(JsonPathElem::Dot { key, quoted: true }),
3894
3895            _ => self.expected("variant object key name", token),
3896        }
3897    }
3898
3899    fn parse_json_access(&mut self, expr: Expr) -> Result<Expr, ParserError> {
3900        let path = self.parse_json_path()?;
3901        Ok(Expr::JsonAccess {
3902            value: Box::new(expr),
3903            path,
3904        })
3905    }
3906
3907    fn parse_json_path(&mut self) -> Result<JsonPath, ParserError> {
3908        let mut path = Vec::new();
3909        loop {
3910            match self.next_token().token {
3911                Token::Colon if path.is_empty() => {
3912                    path.push(self.parse_json_path_object_key()?);
3913                }
3914                Token::Period if !path.is_empty() => {
3915                    path.push(self.parse_json_path_object_key()?);
3916                }
3917                Token::LBracket => {
3918                    let key = self.parse_expr()?;
3919                    self.expect_token(&Token::RBracket)?;
3920
3921                    path.push(JsonPathElem::Bracket { key });
3922                }
3923                _ => {
3924                    self.prev_token();
3925                    break;
3926                }
3927            };
3928        }
3929
3930        debug_assert!(!path.is_empty());
3931        Ok(JsonPath { path })
3932    }
3933
3934    /// Parses the parens following the `[ NOT ] IN` operator.
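    ///
    /// As a rough usage sketch (the column `x` and the `GenericDialect` are only
    /// illustrative assumptions), a parenthesized list produces an `Expr::InList`,
    /// while a parenthesized query would produce an `Expr::InSubquery`:
    ///
    /// ```rust
    /// # use sqlparser::ast::Expr;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let expr = Parser::new(&dialect)
    ///     .try_with_sql("x IN (1, 2, 3)")
    ///     .unwrap()
    ///     .parse_expr()
    ///     .unwrap();
    /// assert!(matches!(expr, Expr::InList { negated: false, .. }));
    /// ```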
3935    pub fn parse_in(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
3936        // BigQuery allows `IN UNNEST(array_expression)`
3937        // https://cloud.google.com/bigquery/docs/reference/standard-sql/operators#in_operators
3938        if self.parse_keyword(Keyword::UNNEST) {
3939            self.expect_token(&Token::LParen)?;
3940            let array_expr = self.parse_expr()?;
3941            self.expect_token(&Token::RParen)?;
3942            return Ok(Expr::InUnnest {
3943                expr: Box::new(expr),
3944                array_expr: Box::new(array_expr),
3945                negated,
3946            });
3947        }
3948        self.expect_token(&Token::LParen)?;
3949        let in_op = match self.maybe_parse(|p| p.parse_query())? {
3950            Some(subquery) => Expr::InSubquery {
3951                expr: Box::new(expr),
3952                subquery,
3953                negated,
3954            },
3955            None => Expr::InList {
3956                expr: Box::new(expr),
3957                list: if self.dialect.supports_in_empty_list() {
3958                    self.parse_comma_separated0(Parser::parse_expr, Token::RParen)?
3959                } else {
3960                    self.parse_comma_separated(Parser::parse_expr)?
3961                },
3962                negated,
3963            },
3964        };
3965        self.expect_token(&Token::RParen)?;
3966        Ok(in_op)
3967    }
3968
3969    /// Parses `BETWEEN <low> AND <high>`, assuming the `BETWEEN` keyword was already consumed.
3970    pub fn parse_between(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
3971        // Stop parsing subexpressions for <low> and <high> on tokens with
3972        // precedence lower than that of `BETWEEN`, such as `AND`, `IS`, etc.
3973        let low = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
3974        self.expect_keyword_is(Keyword::AND)?;
3975        let high = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
3976        Ok(Expr::Between {
3977            expr: Box::new(expr),
3978            negated,
3979            low: Box::new(low),
3980            high: Box::new(high),
3981        })
3982    }
3983
3984    /// Parse a PostgreSQL casting style which is in the form of `expr::datatype`.
3985    pub fn parse_pg_cast(&mut self, expr: Expr) -> Result<Expr, ParserError> {
3986        Ok(Expr::Cast {
3987            kind: CastKind::DoubleColon,
3988            expr: Box::new(expr),
3989            data_type: self.parse_data_type()?,
3990            format: None,
3991        })
3992    }
3993
3994    /// Get the precedence of the next token
3995    pub fn get_next_precedence(&self) -> Result<u8, ParserError> {
3996        self.dialect.get_next_precedence_default(self)
3997    }
3998
3999    /// Return the token at the given index, or EOF if the index is beyond
4000    /// the length of the current set of tokens.
4001    pub fn token_at(&self, index: usize) -> &TokenWithSpan {
4002        self.tokens.get(index).unwrap_or(&EOF_TOKEN)
4003    }
4004
4005    /// Return the first non-whitespace token that has not yet been processed
4006    /// or Token::EOF
4007    ///
4008    /// See [`Self::peek_token_ref`] to avoid the copy.
4009    pub fn peek_token(&self) -> TokenWithSpan {
4010        self.peek_nth_token(0)
4011    }
4012
4013    /// Return a reference to the first non-whitespace token that has not yet
4014    /// been processed or Token::EOF
4015    pub fn peek_token_ref(&self) -> &TokenWithSpan {
4016        self.peek_nth_token_ref(0)
4017    }
4018
4019    /// Returns the `N` next non-whitespace tokens that have not yet been
4020    /// processed.
4021    ///
4022    /// Example:
4023    /// ```rust
4024    /// # use sqlparser::dialect::GenericDialect;
4025    /// # use sqlparser::parser::Parser;
4026    /// # use sqlparser::keywords::Keyword;
4027    /// # use sqlparser::tokenizer::{Token, Word};
4028    /// let dialect = GenericDialect {};
4029    /// let mut parser = Parser::new(&dialect).try_with_sql("ORDER BY foo, bar").unwrap();
4030    ///
4031    /// // Note that Rust infers the number of tokens to peek based on the
4032    /// // length of the slice pattern!
4033    /// assert!(matches!(
4034    ///     parser.peek_tokens(),
4035    ///     [
4036    ///         Token::Word(Word { keyword: Keyword::ORDER, .. }),
4037    ///         Token::Word(Word { keyword: Keyword::BY, .. }),
4038    ///     ]
4039    /// ));
4040    /// ```
4041    pub fn peek_tokens<const N: usize>(&self) -> [Token; N] {
4042        self.peek_tokens_with_location()
4043            .map(|with_loc| with_loc.token)
4044    }
4045
4046    /// Returns the `N` next non-whitespace tokens with locations that have not
4047    /// yet been processed.
4048    ///
4049    /// See [`Self::peek_token`] for an example.
4050    pub fn peek_tokens_with_location<const N: usize>(&self) -> [TokenWithSpan; N] {
4051        let mut index = self.index;
4052        core::array::from_fn(|_| loop {
4053            let token = self.tokens.get(index);
4054            index += 1;
4055            if let Some(TokenWithSpan {
4056                token: Token::Whitespace(_),
4057                span: _,
4058            }) = token
4059            {
4060                continue;
4061            }
4062            break token.cloned().unwrap_or(TokenWithSpan {
4063                token: Token::EOF,
4064                span: Span::empty(),
4065            });
4066        })
4067    }
4068
4069    /// Returns references to the `N` next non-whitespace tokens
4070    /// that have not yet been processed.
4071    ///
4072    /// See [`Self::peek_tokens`] for an example.
4073    pub fn peek_tokens_ref<const N: usize>(&self) -> [&TokenWithSpan; N] {
4074        let mut index = self.index;
4075        core::array::from_fn(|_| loop {
4076            let token = self.tokens.get(index);
4077            index += 1;
4078            if let Some(TokenWithSpan {
4079                token: Token::Whitespace(_),
4080                span: _,
4081            }) = token
4082            {
4083                continue;
4084            }
4085            break token.unwrap_or(&EOF_TOKEN);
4086        })
4087    }
4088
4089    /// Return nth non-whitespace token that has not yet been processed
4090    pub fn peek_nth_token(&self, n: usize) -> TokenWithSpan {
4091        self.peek_nth_token_ref(n).clone()
4092    }
4093
4094    /// Return nth non-whitespace token that has not yet been processed
4095    pub fn peek_nth_token_ref(&self, mut n: usize) -> &TokenWithSpan {
4096        let mut index = self.index;
4097        loop {
4098            index += 1;
4099            match self.tokens.get(index - 1) {
4100                Some(TokenWithSpan {
4101                    token: Token::Whitespace(_),
4102                    span: _,
4103                }) => continue,
4104                non_whitespace => {
4105                    if n == 0 {
4106                        return non_whitespace.unwrap_or(&EOF_TOKEN);
4107                    }
4108                    n -= 1;
4109                }
4110            }
4111        }
4112    }
4113
4114    /// Return the first token, possibly whitespace, that has not yet been processed
4115    /// (or [`Token::EOF`] if the end of input has been reached).
4116    pub fn peek_token_no_skip(&self) -> TokenWithSpan {
4117        self.peek_nth_token_no_skip(0)
4118    }
4119
4120    /// Return nth token, possibly whitespace, that has not yet been processed.
4121    pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithSpan {
4122        self.tokens
4123            .get(self.index + n)
4124            .cloned()
4125            .unwrap_or(TokenWithSpan {
4126                token: Token::EOF,
4127                span: Span::empty(),
4128            })
4129    }
4130
4131    /// Return true if the next tokens exactly match `expected`
4132    ///
4133    /// Does not advance the current token.
4134    fn peek_keywords(&mut self, expected: &[Keyword]) -> bool {
4135        let index = self.index;
4136        let matched = self.parse_keywords(expected);
4137        self.index = index;
4138        matched
4139    }
4140
4141    /// Advances to the next non-whitespace token and returns a copy.
4142    ///
4143    /// Please use [`Self::advance_token`] and [`Self::get_current_token`] to
4144    /// avoid the copy.
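    ///
    /// A minimal usage sketch (assuming a `GenericDialect` and default options):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::tokenizer::Token;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("SELECT  1").unwrap();
    /// assert_eq!(parser.next_token().token, Token::make_keyword("SELECT"));
    /// // whitespace between `SELECT` and `1` is skipped
    /// assert_eq!(parser.next_token().token, Token::Number("1".to_string(), false));
    /// ```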
4145    pub fn next_token(&mut self) -> TokenWithSpan {
4146        self.advance_token();
4147        self.get_current_token().clone()
4148    }
4149
4150    /// Returns the index of the current token
4151    ///
4152    /// This can be used with APIs that expect an index, such as
4153    /// [`Self::token_at`]
4154    pub fn get_current_index(&self) -> usize {
4155        self.index.saturating_sub(1)
4156    }
4157
4158    /// Return the next unprocessed token, possibly whitespace.
4159    pub fn next_token_no_skip(&mut self) -> Option<&TokenWithSpan> {
4160        self.index += 1;
4161        self.tokens.get(self.index - 1)
4162    }
4163
4164    /// Advances the current token to the next non-whitespace token
4165    ///
4166    /// See [`Self::get_current_token`] to get the current token after advancing
4167    pub fn advance_token(&mut self) {
4168        loop {
4169            self.index += 1;
4170            match self.tokens.get(self.index - 1) {
4171                Some(TokenWithSpan {
4172                    token: Token::Whitespace(_),
4173                    span: _,
4174                }) => continue,
4175                _ => break,
4176            }
4177        }
4178    }
4179
4180    /// Returns a reference to the current token
4181    ///
4182    /// Does not advance the current token.
4183    pub fn get_current_token(&self) -> &TokenWithSpan {
4184        self.token_at(self.index.saturating_sub(1))
4185    }
4186
4187    /// Returns a reference to the previous token
4188    ///
4189    /// Does not advance the current token.
4190    pub fn get_previous_token(&self) -> &TokenWithSpan {
4191        self.token_at(self.index.saturating_sub(2))
4192    }
4193
4194    /// Returns a reference to the next token
4195    ///
4196    /// Does not advance the current token.
4197    pub fn get_next_token(&self) -> &TokenWithSpan {
4198        self.token_at(self.index)
4199    }
4200
4201    /// Seek back to the last non-whitespace token.
4202    ///
4203    /// Must be called after `next_token()`, otherwise it might panic. It is OK
4204    /// to call it after `next_token()` has indicated an EOF.
4205    ///
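    /// A minimal sketch of backing up by one token (assuming a `GenericDialect`):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("SELECT 1").unwrap();
    /// let first = parser.next_token();
    /// parser.prev_token();
    /// // after backing up, the same token is returned again
    /// assert_eq!(parser.next_token().token, first.token);
    /// ```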
4206    // TODO rename to backup_token and deprecate prev_token?
4207    pub fn prev_token(&mut self) {
4208        loop {
4209            assert!(self.index > 0);
4210            self.index -= 1;
4211            if let Some(TokenWithSpan {
4212                token: Token::Whitespace(_),
4213                span: _,
4214            }) = self.tokens.get(self.index)
4215            {
4216                continue;
4217            }
4218            return;
4219        }
4220    }
4221
4222    /// Report `found` was encountered instead of `expected`
4223    pub fn expected<T>(&self, expected: &str, found: TokenWithSpan) -> Result<T, ParserError> {
4224        parser_err!(
4225            format!("Expected: {expected}, found: {found}"),
4226            found.span.start
4227        )
4228    }
4229
4230    /// Report `found` was encountered instead of `expected`
4231    pub fn expected_ref<T>(&self, expected: &str, found: &TokenWithSpan) -> Result<T, ParserError> {
4232        parser_err!(
4233            format!("Expected: {expected}, found: {found}"),
4234            found.span.start
4235        )
4236    }
4237
4238    /// Report that the token at `index` was found instead of `expected`.
4239    pub fn expected_at<T>(&self, expected: &str, index: usize) -> Result<T, ParserError> {
4240        let found = self.tokens.get(index).unwrap_or(&EOF_TOKEN);
4241        parser_err!(
4242            format!("Expected: {expected}, found: {found}"),
4243            found.span.start
4244        )
4245    }
4246
4247    /// If the current token is the `expected` keyword, consume it and return
4248    /// true. Otherwise, no tokens are consumed and false is returned.
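    ///
    /// A minimal usage sketch (assuming a `GenericDialect`):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::keywords::Keyword;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("ORDER BY foo").unwrap();
    /// assert!(parser.parse_keyword(Keyword::ORDER));
    /// // `BY` is next, so a non-matching keyword is not consumed
    /// assert!(!parser.parse_keyword(Keyword::FROM));
    /// assert!(parser.parse_keyword(Keyword::BY));
    /// ```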
4249    #[must_use]
4250    pub fn parse_keyword(&mut self, expected: Keyword) -> bool {
4251        if self.peek_keyword(expected) {
4252            self.advance_token();
4253            true
4254        } else {
4255            false
4256        }
4257    }
4258
4259    #[must_use]
4260    pub fn peek_keyword(&self, expected: Keyword) -> bool {
4261        matches!(&self.peek_token_ref().token, Token::Word(w) if expected == w.keyword)
4262    }
4263
4264    /// If the current token is the `expected` keyword followed by the
4265    /// specified tokens, consume them and return true.
4266    /// Otherwise, no tokens are consumed and false is returned.
4267    ///
4268    /// Note that if `tokens` is long, this function is not efficient, since it
4269    /// re-scans the lookahead with `peek_nth_token` for each expected token on
4270    /// every call.
4271    pub fn parse_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
4272        self.keyword_with_tokens(expected, tokens, true)
4273    }
4274
4275    /// Peeks to see if the current token is the `expected` keyword followed by specified tokens
4276    /// without consuming them.
4277    ///
4278    /// See [Self::parse_keyword_with_tokens] for details.
4279    pub(crate) fn peek_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
4280        self.keyword_with_tokens(expected, tokens, false)
4281    }
4282
4283    fn keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token], consume: bool) -> bool {
4284        match &self.peek_token_ref().token {
4285            Token::Word(w) if expected == w.keyword => {
4286                for (idx, token) in tokens.iter().enumerate() {
4287                    if self.peek_nth_token_ref(idx + 1).token != *token {
4288                        return false;
4289                    }
4290                }
4291
4292                if consume {
4293                    for _ in 0..(tokens.len() + 1) {
4294                        self.advance_token();
4295                    }
4296                }
4297
4298                true
4299            }
4300            _ => false,
4301        }
4302    }
4303
4304    /// If the current and subsequent tokens exactly match the `keywords`
4305    /// sequence, consume them and return true. Otherwise, no tokens are
4306    /// consumed and false is returned.
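    ///
    /// A minimal usage sketch (assuming a `GenericDialect`):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::keywords::Keyword;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("ORDER BY foo").unwrap();
    /// // the whole sequence must match; otherwise nothing is consumed
    /// assert!(!parser.parse_keywords(&[Keyword::ORDER, Keyword::LIMIT]));
    /// assert!(parser.parse_keywords(&[Keyword::ORDER, Keyword::BY]));
    /// ```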
4307    #[must_use]
4308    pub fn parse_keywords(&mut self, keywords: &[Keyword]) -> bool {
4309        let index = self.index;
4310        for &keyword in keywords {
4311            if !self.parse_keyword(keyword) {
4312                // println!("parse_keywords aborting .. did not find {:?}", keyword);
4313                // reset index and return immediately
4314                self.index = index;
4315                return false;
4316            }
4317        }
4318        true
4319    }
4320
4321    /// If the current token is one of the given `keywords`, returns the keyword
4322    /// that matches, without consuming the token. Otherwise, returns [`None`].
4323    #[must_use]
4324    pub fn peek_one_of_keywords(&self, keywords: &[Keyword]) -> Option<Keyword> {
4325        for keyword in keywords {
4326            if self.peek_keyword(*keyword) {
4327                return Some(*keyword);
4328            }
4329        }
4330        None
4331    }
4332
4333    /// If the current token is one of the given `keywords`, consume the token
4334    /// and return the keyword that matches. Otherwise, no tokens are consumed
4335    /// and [`None`] is returned.
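    ///
    /// A minimal usage sketch (assuming a `GenericDialect`):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::keywords::Keyword;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("ASC NULLS FIRST").unwrap();
    /// assert_eq!(
    ///     parser.parse_one_of_keywords(&[Keyword::ASC, Keyword::DESC]),
    ///     Some(Keyword::ASC)
    /// );
    /// // `NULLS` is next, so there is no match and nothing is consumed
    /// assert_eq!(parser.parse_one_of_keywords(&[Keyword::LIMIT]), None);
    /// ```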
4336    #[must_use]
4337    pub fn parse_one_of_keywords(&mut self, keywords: &[Keyword]) -> Option<Keyword> {
4338        match &self.peek_token_ref().token {
4339            Token::Word(w) => {
4340                keywords
4341                    .iter()
4342                    .find(|keyword| **keyword == w.keyword)
4343                    .map(|keyword| {
4344                        self.advance_token();
4345                        *keyword
4346                    })
4347            }
4348            _ => None,
4349        }
4350    }
4351
4352    /// If the current token is one of the expected keywords, consume the token
4353    /// and return the keyword that matches. Otherwise, return an error.
4354    pub fn expect_one_of_keywords(&mut self, keywords: &[Keyword]) -> Result<Keyword, ParserError> {
4355        if let Some(keyword) = self.parse_one_of_keywords(keywords) {
4356            Ok(keyword)
4357        } else {
4358            let keywords: Vec<String> = keywords.iter().map(|x| format!("{x:?}")).collect();
4359            self.expected_ref(
4360                &format!("one of {}", keywords.join(" or ")),
4361                self.peek_token_ref(),
4362            )
4363        }
4364    }
4365
4366    /// If the current token is the `expected` keyword, consume the token.
4367    /// Otherwise, return an error.
4368    ///
4369    // todo deprecate in favor of expect_keyword_is
4370    pub fn expect_keyword(&mut self, expected: Keyword) -> Result<TokenWithSpan, ParserError> {
4371        if self.parse_keyword(expected) {
4372            Ok(self.get_current_token().clone())
4373        } else {
4374            self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4375        }
4376    }
4377
4378    /// If the current token is the `expected` keyword, consume the token.
4379    /// Otherwise, return an error.
4380    ///
4381    /// This differs from expect_keyword only in that the matched keyword
4382    /// token is not returned.
4383    pub fn expect_keyword_is(&mut self, expected: Keyword) -> Result<(), ParserError> {
4384        if self.parse_keyword(expected) {
4385            Ok(())
4386        } else {
4387            self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4388        }
4389    }
4390
4391    /// If the current and subsequent tokens exactly match the `keywords`
4392    /// sequence, consume them and return `Ok(())`. Otherwise, return an error.
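    ///
    /// A minimal usage sketch (assuming a `GenericDialect`):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::keywords::Keyword;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("GROUP BY a").unwrap();
    /// assert!(parser.expect_keywords(&[Keyword::GROUP, Keyword::BY]).is_ok());
    /// // `a` is not the keyword `HAVING`, so this returns an error
    /// assert!(parser.expect_keywords(&[Keyword::HAVING]).is_err());
    /// ```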
4393    pub fn expect_keywords(&mut self, expected: &[Keyword]) -> Result<(), ParserError> {
4394        for &kw in expected {
4395            self.expect_keyword_is(kw)?;
4396        }
4397        Ok(())
4398    }
4399
4400    /// Consume the next token if it matches the expected token, otherwise return false
4401    ///
4402    /// See [Self::advance_token] to consume the token unconditionally
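    ///
    /// A minimal usage sketch (assuming a `GenericDialect`):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::tokenizer::Token;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql(", foo").unwrap();
    /// assert!(parser.consume_token(&Token::Comma));
    /// // the comma has been consumed, so a second attempt returns false
    /// assert!(!parser.consume_token(&Token::Comma));
    /// ```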
4403    #[must_use]
4404    pub fn consume_token(&mut self, expected: &Token) -> bool {
4405        if self.peek_token_ref() == expected {
4406            self.advance_token();
4407            true
4408        } else {
4409            false
4410        }
4411    }
4412
4413    /// If the current and subsequent tokens exactly match the `tokens`
4414    /// sequence, consume them and return true. Otherwise, no tokens are
4415    /// consumed and false is returned.
4416    #[must_use]
4417    pub fn consume_tokens(&mut self, tokens: &[Token]) -> bool {
4418        let index = self.index;
4419        for token in tokens {
4420            if !self.consume_token(token) {
4421                self.index = index;
4422                return false;
4423            }
4424        }
4425        true
4426    }
4427
4428    /// Bail out if the current token is not the expected token, or consume and return it if it is
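    ///
    /// A minimal usage sketch (assuming a `GenericDialect`):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::tokenizer::Token;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("(1)").unwrap();
    /// assert!(parser.expect_token(&Token::LParen).is_ok());
    /// // the next token is `1`, not `)`, so this returns an error
    /// assert!(parser.expect_token(&Token::RParen).is_err());
    /// ```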
4429    pub fn expect_token(&mut self, expected: &Token) -> Result<TokenWithSpan, ParserError> {
4430        if self.peek_token_ref() == expected {
4431            Ok(self.next_token())
4432        } else {
4433            self.expected_ref(&expected.to_string(), self.peek_token_ref())
4434        }
4435    }
4436
4437    fn parse<T: FromStr>(s: String, loc: Location) -> Result<T, ParserError>
4438    where
4439        <T as FromStr>::Err: Display,
4440    {
4441        s.parse::<T>().map_err(|e| {
4442            ParserError::ParserError(format!(
4443                "Could not parse '{s}' as {}: {e}{loc}",
4444                core::any::type_name::<T>()
4445            ))
4446        })
4447    }
4448
4449    /// Parse a comma-separated list of 1+ SelectItem
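    ///
    /// A minimal usage sketch (assuming a `GenericDialect`), parsing the
    /// projection of `SELECT a, b AS c FROM t` after `SELECT` has been consumed:
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("a, b AS c FROM t").unwrap();
    /// let projection = parser.parse_projection().unwrap();
    /// // parsing stops before the reserved keyword `FROM`
    /// assert_eq!(projection.len(), 2);
    /// ```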
4450    pub fn parse_projection(&mut self) -> Result<Vec<SelectItem>, ParserError> {
4451        // BigQuery and Snowflake allow trailing commas, but only in projection lists
4452        // e.g. `SELECT 1, 2, FROM t`
4453        // https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#trailing_commas
4454        // https://docs.snowflake.com/en/release-notes/2024/8_11#select-supports-trailing-commas
4455
4456        let trailing_commas =
4457            self.options.trailing_commas | self.dialect.supports_projection_trailing_commas();
4458
4459        self.parse_comma_separated_with_trailing_commas(
4460            |p| p.parse_select_item(),
4461            trailing_commas,
4462            Self::is_reserved_for_column_alias,
4463        )
4464    }
4465
4466    pub fn parse_actions_list(&mut self) -> Result<Vec<Action>, ParserError> {
4467        let mut values = vec![];
4468        loop {
4469            values.push(self.parse_grant_permission()?);
4470            if !self.consume_token(&Token::Comma) {
4471                break;
4472            } else if self.options.trailing_commas {
4473                match self.peek_token().token {
4474                    Token::Word(kw) if kw.keyword == Keyword::ON => {
4475                        break;
4476                    }
4477                    Token::RParen
4478                    | Token::SemiColon
4479                    | Token::EOF
4480                    | Token::RBracket
4481                    | Token::RBrace => break,
4482                    _ => continue,
4483                }
4484            }
4485        }
4486        Ok(values)
4487    }
4488
4489    /// Parse a list of [TableWithJoins]
4490    fn parse_table_with_joins(&mut self) -> Result<Vec<TableWithJoins>, ParserError> {
4491        let trailing_commas = self.dialect.supports_from_trailing_commas();
4492
4493        self.parse_comma_separated_with_trailing_commas(
4494            Parser::parse_table_and_joins,
4495            trailing_commas,
4496            |kw, parser| !self.dialect.is_table_factor(kw, parser),
4497        )
4498    }
4499
4500    /// Parse the comma of a comma-separated syntax element.
4501    /// `R` is a predicate that should return true if the next
4502    /// keyword is a reserved keyword.
4503    /// Allows for control over trailing commas.
4504    ///
4505    /// Returns true if the end of the list has been reached, i.e. there is no next element.
4506    fn is_parse_comma_separated_end_with_trailing_commas<R>(
4507        &mut self,
4508        trailing_commas: bool,
4509        is_reserved_keyword: &R,
4510    ) -> bool
4511    where
4512        R: Fn(&Keyword, &mut Parser) -> bool,
4513    {
4514        if !self.consume_token(&Token::Comma) {
4515            true
4516        } else if trailing_commas {
4517            let token = self.next_token().token;
4518            let is_end = match token {
4519                Token::Word(ref kw) if is_reserved_keyword(&kw.keyword, self) => true,
4520                Token::RParen | Token::SemiColon | Token::EOF | Token::RBracket | Token::RBrace => {
4521                    true
4522                }
4523                _ => false,
4524            };
4525            self.prev_token();
4526
4527            is_end
4528        } else {
4529            false
4530        }
4531    }
4532
4533    /// Parse the comma of a comma-separated syntax element.
4534    /// Returns true if the end of the list has been reached.
4535    fn is_parse_comma_separated_end(&mut self) -> bool {
4536        self.is_parse_comma_separated_end_with_trailing_commas(
4537            self.options.trailing_commas,
4538            &Self::is_reserved_for_column_alias,
4539        )
4540    }
4541
4542    /// Parse a comma-separated list of 1+ items accepted by `F`
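    ///
    /// A minimal sketch that parses a comma-separated list of identifiers
    /// (assuming a `GenericDialect`):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("a, b, c").unwrap();
    /// let idents = parser.parse_comma_separated(|p| p.parse_identifier()).unwrap();
    /// assert_eq!(idents.len(), 3);
    /// ```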
4543    pub fn parse_comma_separated<T, F>(&mut self, f: F) -> Result<Vec<T>, ParserError>
4544    where
4545        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4546    {
4547        self.parse_comma_separated_with_trailing_commas(
4548            f,
4549            self.options.trailing_commas,
4550            Self::is_reserved_for_column_alias,
4551        )
4552    }
4553
4554    /// Parse a comma-separated list of 1+ items accepted by `F`.
4555    /// `R` is a predicate that should return true if the next
4556    /// keyword is a reserved keyword.
4557    /// Allows for control over trailing commas.
4558    fn parse_comma_separated_with_trailing_commas<T, F, R>(
4559        &mut self,
4560        mut f: F,
4561        trailing_commas: bool,
4562        is_reserved_keyword: R,
4563    ) -> Result<Vec<T>, ParserError>
4564    where
4565        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4566        R: Fn(&Keyword, &mut Parser) -> bool,
4567    {
4568        let mut values = vec![];
4569        loop {
4570            values.push(f(self)?);
4571            if self.is_parse_comma_separated_end_with_trailing_commas(
4572                trailing_commas,
4573                &is_reserved_keyword,
4574            ) {
4575                break;
4576            }
4577        }
4578        Ok(values)
4579    }
4580
4581    /// Parse a period-separated list of 1+ items accepted by `F`
4582    fn parse_period_separated<T, F>(&mut self, mut f: F) -> Result<Vec<T>, ParserError>
4583    where
4584        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4585    {
4586        let mut values = vec![];
4587        loop {
4588            values.push(f(self)?);
4589            if !self.consume_token(&Token::Period) {
4590                break;
4591            }
4592        }
4593        Ok(values)
4594    }
4595
4596    /// Parse a keyword-separated list of 1+ items accepted by `F`
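    ///
    /// A minimal sketch that parses identifiers separated by `AND`
    /// (assuming a `GenericDialect`):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::keywords::Keyword;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("a AND b AND c").unwrap();
    /// let idents = parser
    ///     .parse_keyword_separated(Keyword::AND, |p| p.parse_identifier())
    ///     .unwrap();
    /// assert_eq!(idents.len(), 3);
    /// ```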
4597    pub fn parse_keyword_separated<T, F>(
4598        &mut self,
4599        keyword: Keyword,
4600        mut f: F,
4601    ) -> Result<Vec<T>, ParserError>
4602    where
4603        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4604    {
4605        let mut values = vec![];
4606        loop {
4607            values.push(f(self)?);
4608            if !self.parse_keyword(keyword) {
4609                break;
4610            }
4611        }
4612        Ok(values)
4613    }
4614
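    /// Parse a single item accepted by `F`, wrapped in `( ... )` parentheses.
    ///
    /// A minimal usage sketch (assuming a `GenericDialect`):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("(foo)").unwrap();
    /// let ident = parser.parse_parenthesized(|p| p.parse_identifier()).unwrap();
    /// assert_eq!(ident.to_string(), "foo");
    /// ```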
4615    pub fn parse_parenthesized<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
4616    where
4617        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4618    {
4619        self.expect_token(&Token::LParen)?;
4620        let res = f(self)?;
4621        self.expect_token(&Token::RParen)?;
4622        Ok(res)
4623    }
4624
4625    /// Parse a comma-separated list of 0+ items accepted by `F`
4626    /// * `end_token` - expected end token for the closure (e.g. [Token::RParen], [Token::RBrace] ...)
4627    pub fn parse_comma_separated0<T, F>(
4628        &mut self,
4629        f: F,
4630        end_token: Token,
4631    ) -> Result<Vec<T>, ParserError>
4632    where
4633        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4634    {
4635        if self.peek_token().token == end_token {
4636            return Ok(vec![]);
4637        }
4638
4639        if self.options.trailing_commas && self.peek_tokens() == [Token::Comma, end_token] {
4640            let _ = self.consume_token(&Token::Comma);
4641            return Ok(vec![]);
4642        }
4643
4644        self.parse_comma_separated(f)
4645    }
4646
4647    /// Parses 0 or more statements, each followed by a semicolon.
4648    /// If the next token is any of `terminal_keywords` then no more
4649    /// statements will be parsed.
4650    pub(crate) fn parse_statement_list(
4651        &mut self,
4652        terminal_keywords: &[Keyword],
4653    ) -> Result<Vec<Statement>, ParserError> {
4654        let mut values = vec![];
4655        loop {
4656            match &self.peek_nth_token_ref(0).token {
4657                Token::EOF => break,
4658                Token::Word(w) => {
4659                    if w.quote_style.is_none() && terminal_keywords.contains(&w.keyword) {
4660                        break;
4661                    }
4662                }
4663                _ => {}
4664            }
4665
4666            values.push(self.parse_statement()?);
4667            self.expect_token(&Token::SemiColon)?;
4668        }
4669        Ok(values)
4670    }
4671
4672    /// Default implementation of a predicate that returns true if
4673    /// the specified keyword is reserved and cannot be used as a column alias.
4674    /// See [Dialect::is_column_alias]
4675    fn is_reserved_for_column_alias(kw: &Keyword, parser: &mut Parser) -> bool {
4676        !parser.dialect.is_column_alias(kw, parser)
4677    }
4678
4679    /// Run a parser method `f`, reverting to the starting position if unsuccessful.
4680    /// Returns `ParserError::RecursionLimitExceeded` if `f` returns a `RecursionLimitExceeded`.
4681    /// Returns `Ok(None)` if `f` returns any other error.
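    ///
    /// A minimal sketch of a speculative parse that fails and leaves the
    /// parser position unchanged (assuming a `GenericDialect`):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("foo").unwrap();
    /// // `foo` does not start a query, so `Ok(None)` is returned and nothing is consumed
    /// assert!(parser.maybe_parse(|p| p.parse_query()).unwrap().is_none());
    /// assert_eq!(parser.parse_identifier().unwrap().to_string(), "foo");
    /// ```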
4682    pub fn maybe_parse<T, F>(&mut self, f: F) -> Result<Option<T>, ParserError>
4683    where
4684        F: FnMut(&mut Parser) -> Result<T, ParserError>,
4685    {
4686        match self.try_parse(f) {
4687            Ok(t) => Ok(Some(t)),
4688            Err(ParserError::RecursionLimitExceeded) => Err(ParserError::RecursionLimitExceeded),
4689            _ => Ok(None),
4690        }
4691    }
4692
4693    /// Run a parser method `f`, reverting to the starting position if unsuccessful.
4694    pub fn try_parse<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
4695    where
4696        F: FnMut(&mut Parser) -> Result<T, ParserError>,
4697    {
4698        let index = self.index;
4699        match f(self) {
4700            Ok(t) => Ok(t),
4701            Err(e) => {
4702                // Reset the position on failure so the caller can retry from the same point
4703                self.index = index;
4704                Err(e)
4705            }
4706        }
4707    }
4708
4709    /// Parse either `ALL`, `DISTINCT` or `DISTINCT ON (...)`. Returns [`None`] if `ALL` is parsed
4710    /// and results in a [`ParserError`] if both `ALL` and `DISTINCT` are found.
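    ///
    /// A minimal usage sketch (assuming a `GenericDialect`):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::ast::Distinct;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("DISTINCT col").unwrap();
    /// assert_eq!(parser.parse_all_or_distinct().unwrap(), Some(Distinct::Distinct));
    ///
    /// let mut parser = Parser::new(&dialect).try_with_sql("ALL col").unwrap();
    /// assert_eq!(parser.parse_all_or_distinct().unwrap(), None);
    /// ```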
4711    pub fn parse_all_or_distinct(&mut self) -> Result<Option<Distinct>, ParserError> {
4712        let loc = self.peek_token().span.start;
4713        let all = self.parse_keyword(Keyword::ALL);
4714        let distinct = self.parse_keyword(Keyword::DISTINCT);
4715        if !distinct {
4716            return Ok(None);
4717        }
4718        if all {
4719            return parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc);
4720        }
4721        let on = self.parse_keyword(Keyword::ON);
4722        if !on {
4723            return Ok(Some(Distinct::Distinct));
4724        }
4725
4726        self.expect_token(&Token::LParen)?;
4727        let col_names = if self.consume_token(&Token::RParen) {
4728            self.prev_token();
4729            Vec::new()
4730        } else {
4731            self.parse_comma_separated(Parser::parse_expr)?
4732        };
4733        self.expect_token(&Token::RParen)?;
4734        Ok(Some(Distinct::On(col_names)))
4735    }
4736
4737    /// Parse a SQL CREATE statement
4738    pub fn parse_create(&mut self) -> Result<Statement, ParserError> {
4739        let or_replace = self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]);
4740        let or_alter = self.parse_keywords(&[Keyword::OR, Keyword::ALTER]);
4741        let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
4742        let global = self.parse_one_of_keywords(&[Keyword::GLOBAL]).is_some();
4743        let transient = self.parse_one_of_keywords(&[Keyword::TRANSIENT]).is_some();
4744        let global: Option<bool> = if global {
4745            Some(true)
4746        } else if local {
4747            Some(false)
4748        } else {
4749            None
4750        };
4751        let temporary = self
4752            .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
4753            .is_some();
4754        let persistent = dialect_of!(self is DuckDbDialect)
4755            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
4756        let create_view_params = self.parse_create_view_params()?;
4757        if self.parse_keyword(Keyword::TABLE) {
4758            self.parse_create_table(or_replace, temporary, global, transient)
4759        } else if self.peek_keyword(Keyword::MATERIALIZED)
4760            || self.peek_keyword(Keyword::VIEW)
4761            || self.peek_keywords(&[Keyword::SECURE, Keyword::MATERIALIZED, Keyword::VIEW])
4762            || self.peek_keywords(&[Keyword::SECURE, Keyword::VIEW])
4763        {
4764            self.parse_create_view(or_alter, or_replace, temporary, create_view_params)
4765        } else if self.parse_keyword(Keyword::POLICY) {
4766            self.parse_create_policy()
4767        } else if self.parse_keyword(Keyword::EXTERNAL) {
4768            self.parse_create_external_table(or_replace)
4769        } else if self.parse_keyword(Keyword::FUNCTION) {
4770            self.parse_create_function(or_alter, or_replace, temporary)
4771        } else if self.parse_keyword(Keyword::DOMAIN) {
4772            self.parse_create_domain()
4773        } else if self.parse_keyword(Keyword::TRIGGER) {
4774            self.parse_create_trigger(temporary, or_alter, or_replace, false)
4775        } else if self.parse_keywords(&[Keyword::CONSTRAINT, Keyword::TRIGGER]) {
4776            self.parse_create_trigger(temporary, or_alter, or_replace, true)
4777        } else if self.parse_keyword(Keyword::MACRO) {
4778            self.parse_create_macro(or_replace, temporary)
4779        } else if self.parse_keyword(Keyword::SECRET) {
4780            self.parse_create_secret(or_replace, temporary, persistent)
4781        } else if self.parse_keyword(Keyword::USER) {
4782            self.parse_create_user(or_replace)
4783        } else if or_replace {
4784            self.expected(
4785                "[EXTERNAL] TABLE or [MATERIALIZED] VIEW or FUNCTION after CREATE OR REPLACE",
4786                self.peek_token(),
4787            )
4788        } else if self.parse_keyword(Keyword::EXTENSION) {
4789            self.parse_create_extension()
4790        } else if self.parse_keyword(Keyword::INDEX) {
4791            self.parse_create_index(false)
4792        } else if self.parse_keywords(&[Keyword::UNIQUE, Keyword::INDEX]) {
4793            self.parse_create_index(true)
4794        } else if self.parse_keyword(Keyword::VIRTUAL) {
4795            self.parse_create_virtual_table()
4796        } else if self.parse_keyword(Keyword::SCHEMA) {
4797            self.parse_create_schema()
4798        } else if self.parse_keyword(Keyword::DATABASE) {
4799            self.parse_create_database()
4800        } else if self.parse_keyword(Keyword::ROLE) {
4801            self.parse_create_role()
4802        } else if self.parse_keyword(Keyword::SEQUENCE) {
4803            self.parse_create_sequence(temporary)
4804        } else if self.parse_keyword(Keyword::TYPE) {
4805            self.parse_create_type()
4806        } else if self.parse_keyword(Keyword::PROCEDURE) {
4807            self.parse_create_procedure(or_alter)
4808        } else if self.parse_keyword(Keyword::CONNECTOR) {
4809            self.parse_create_connector()
4810        } else if self.parse_keyword(Keyword::OPERATOR) {
4811            // Check if this is CREATE OPERATOR FAMILY or CREATE OPERATOR CLASS
4812            if self.parse_keyword(Keyword::FAMILY) {
4813                self.parse_create_operator_family()
4814            } else if self.parse_keyword(Keyword::CLASS) {
4815                self.parse_create_operator_class()
4816            } else {
4817                self.parse_create_operator()
4818            }
4819        } else if self.parse_keyword(Keyword::SERVER) {
4820            self.parse_pg_create_server()
4821        } else {
4822            self.expected("an object type after CREATE", self.peek_token())
4823        }
4824    }
4825
4826    fn parse_create_user(&mut self, or_replace: bool) -> Result<Statement, ParserError> {
4827        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
4828        let name = self.parse_identifier()?;
4829        let options = self
4830            .parse_key_value_options(false, &[Keyword::WITH, Keyword::TAG])?
4831            .options;
4832        let with_tags = self.parse_keyword(Keyword::WITH);
4833        let tags = if self.parse_keyword(Keyword::TAG) {
4834            self.parse_key_value_options(true, &[])?.options
4835        } else {
4836            vec![]
4837        };
4838        Ok(Statement::CreateUser(CreateUser {
4839            or_replace,
4840            if_not_exists,
4841            name,
4842            options: KeyValueOptions {
4843                options,
4844                delimiter: KeyValueOptionsDelimiter::Space,
4845            },
4846            with_tags,
4847            tags: KeyValueOptions {
4848                options: tags,
4849                delimiter: KeyValueOptionsDelimiter::Comma,
4850            },
4851        }))
4852    }
4853
4854    /// See [DuckDB Docs](https://duckdb.org/docs/sql/statements/create_secret.html) for more details.
4855    pub fn parse_create_secret(
4856        &mut self,
4857        or_replace: bool,
4858        temporary: bool,
4859        persistent: bool,
4860    ) -> Result<Statement, ParserError> {
4861        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
4862
4863        let mut storage_specifier = None;
4864        let mut name = None;
4865        if self.peek_token() != Token::LParen {
4866            if self.parse_keyword(Keyword::IN) {
4867                storage_specifier = self.parse_identifier().ok()
4868            } else {
4869                name = self.parse_identifier().ok();
4870            }
4871
4872            // Storage specifier may follow the name
4873            if storage_specifier.is_none()
4874                && self.peek_token() != Token::LParen
4875                && self.parse_keyword(Keyword::IN)
4876            {
4877                storage_specifier = self.parse_identifier().ok();
4878            }
4879        }
4880
4881        self.expect_token(&Token::LParen)?;
4882        self.expect_keyword_is(Keyword::TYPE)?;
4883        let secret_type = self.parse_identifier()?;
4884
4885        let mut options = Vec::new();
4886        if self.consume_token(&Token::Comma) {
4887            options.append(&mut self.parse_comma_separated(|p| {
4888                let key = p.parse_identifier()?;
4889                let value = p.parse_identifier()?;
4890                Ok(SecretOption { key, value })
4891            })?);
4892        }
4893        self.expect_token(&Token::RParen)?;
4894
4895        let temp = match (temporary, persistent) {
4896            (true, false) => Some(true),
4897            (false, true) => Some(false),
4898            (false, false) => None,
4899            _ => self.expected("TEMPORARY or PERSISTENT", self.peek_token())?,
4900        };
4901
4902        Ok(Statement::CreateSecret {
4903            or_replace,
4904            temporary: temp,
4905            if_not_exists,
4906            name,
4907            storage_specifier,
4908            secret_type,
4909            options,
4910        })
4911    }
4912
4913    /// Parse a CACHE TABLE statement
4914    pub fn parse_cache_table(&mut self) -> Result<Statement, ParserError> {
4915        let (mut table_flag, mut options, mut has_as, mut query) = (None, vec![], false, None);
4916        if self.parse_keyword(Keyword::TABLE) {
4917            let table_name = self.parse_object_name(false)?;
4918            if self.peek_token().token != Token::EOF {
4919                if let Token::Word(word) = self.peek_token().token {
4920                    if word.keyword == Keyword::OPTIONS {
4921                        options = self.parse_options(Keyword::OPTIONS)?
4922                    }
4923                };
4924
4925                if self.peek_token().token != Token::EOF {
4926                    let (a, q) = self.parse_as_query()?;
4927                    has_as = a;
4928                    query = Some(q);
4929                }
4930
4931                Ok(Statement::Cache {
4932                    table_flag,
4933                    table_name,
4934                    has_as,
4935                    options,
4936                    query,
4937                })
4938            } else {
4939                Ok(Statement::Cache {
4940                    table_flag,
4941                    table_name,
4942                    has_as,
4943                    options,
4944                    query,
4945                })
4946            }
4947        } else {
4948            table_flag = Some(self.parse_object_name(false)?);
4949            if self.parse_keyword(Keyword::TABLE) {
4950                let table_name = self.parse_object_name(false)?;
4951                if self.peek_token() != Token::EOF {
4952                    if let Token::Word(word) = self.peek_token().token {
4953                        if word.keyword == Keyword::OPTIONS {
4954                            options = self.parse_options(Keyword::OPTIONS)?
4955                        }
4956                    };
4957
4958                    if self.peek_token() != Token::EOF {
4959                        let (a, q) = self.parse_as_query()?;
4960                        has_as = a;
4961                        query = Some(q);
4962                    }
4963
4964                    Ok(Statement::Cache {
4965                        table_flag,
4966                        table_name,
4967                        has_as,
4968                        options,
4969                        query,
4970                    })
4971                } else {
4972                    Ok(Statement::Cache {
4973                        table_flag,
4974                        table_name,
4975                        has_as,
4976                        options,
4977                        query,
4978                    })
4979                }
4980            } else {
4981                if self.peek_token() == Token::EOF {
4982                    self.prev_token();
4983                }
4984                self.expected("a `TABLE` keyword", self.peek_token())
4985            }
4986        }
4987    }
4988
4989    /// Parse an optional 'AS' before a query, such as `WITH XXX AS SELECT XXX` or `CACHE TABLE AS SELECT XXX`
4990    pub fn parse_as_query(&mut self) -> Result<(bool, Box<Query>), ParserError> {
4991        match self.peek_token().token {
4992            Token::Word(word) => match word.keyword {
4993                Keyword::AS => {
4994                    self.next_token();
4995                    Ok((true, self.parse_query()?))
4996                }
4997                _ => Ok((false, self.parse_query()?)),
4998            },
4999            _ => self.expected("a QUERY statement", self.peek_token()),
5000        }
5001    }
5002
5003    /// Parse an UNCACHE TABLE statement
5004    pub fn parse_uncache_table(&mut self) -> Result<Statement, ParserError> {
5005        self.expect_keyword_is(Keyword::TABLE)?;
5006        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5007        let table_name = self.parse_object_name(false)?;
5008        Ok(Statement::UNCache {
5009            table_name,
5010            if_exists,
5011        })
5012    }
5013
5014    /// SQLite-specific `CREATE VIRTUAL TABLE`
5015    pub fn parse_create_virtual_table(&mut self) -> Result<Statement, ParserError> {
5016        self.expect_keyword_is(Keyword::TABLE)?;
5017        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5018        let table_name = self.parse_object_name(false)?;
5019        self.expect_keyword_is(Keyword::USING)?;
5020        let module_name = self.parse_identifier()?;
5021        // SQLite docs note that module "arguments syntax is sufficiently
5022        // general that the arguments can be made to appear as column
5023        // definitions in a traditional CREATE TABLE statement", but
5024        // we don't implement that.
5025        let module_args = self.parse_parenthesized_column_list(Optional, false)?;
5026        Ok(Statement::CreateVirtualTable {
5027            name: table_name,
5028            if_not_exists,
5029            module_name,
5030            module_args,
5031        })
5032    }
5033
5034    pub fn parse_create_schema(&mut self) -> Result<Statement, ParserError> {
5035        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5036
5037        let schema_name = self.parse_schema_name()?;
5038
5039        let default_collate_spec = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
5040            Some(self.parse_expr()?)
5041        } else {
5042            None
5043        };
5044
5045        let with = if self.peek_keyword(Keyword::WITH) {
5046            Some(self.parse_options(Keyword::WITH)?)
5047        } else {
5048            None
5049        };
5050
5051        let options = if self.peek_keyword(Keyword::OPTIONS) {
5052            Some(self.parse_options(Keyword::OPTIONS)?)
5053        } else {
5054            None
5055        };
5056
5057        let clone = if self.parse_keyword(Keyword::CLONE) {
5058            Some(self.parse_object_name(false)?)
5059        } else {
5060            None
5061        };
5062
5063        Ok(Statement::CreateSchema {
5064            schema_name,
5065            if_not_exists,
5066            with,
5067            options,
5068            default_collate_spec,
5069            clone,
5070        })
5071    }
5072
5073    fn parse_schema_name(&mut self) -> Result<SchemaName, ParserError> {
5074        if self.parse_keyword(Keyword::AUTHORIZATION) {
5075            Ok(SchemaName::UnnamedAuthorization(self.parse_identifier()?))
5076        } else {
5077            let name = self.parse_object_name(false)?;
5078
5079            if self.parse_keyword(Keyword::AUTHORIZATION) {
5080                Ok(SchemaName::NamedAuthorization(
5081                    name,
5082                    self.parse_identifier()?,
5083                ))
5084            } else {
5085                Ok(SchemaName::Simple(name))
5086            }
5087        }
5088    }
5089
5090    pub fn parse_create_database(&mut self) -> Result<Statement, ParserError> {
5091        let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5092        let db_name = self.parse_object_name(false)?;
5093        let mut location = None;
5094        let mut managed_location = None;
5095        loop {
5096            match self.parse_one_of_keywords(&[Keyword::LOCATION, Keyword::MANAGEDLOCATION]) {
5097                Some(Keyword::LOCATION) => location = Some(self.parse_literal_string()?),
5098                Some(Keyword::MANAGEDLOCATION) => {
5099                    managed_location = Some(self.parse_literal_string()?)
5100                }
5101                _ => break,
5102            }
5103        }
5104        let clone = if self.parse_keyword(Keyword::CLONE) {
5105            Some(self.parse_object_name(false)?)
5106        } else {
5107            None
5108        };
5109
5110        Ok(Statement::CreateDatabase {
5111            db_name,
5112            if_not_exists: ine,
5113            location,
5114            managed_location,
5115            or_replace: false,
5116            transient: false,
5117            clone,
5118            data_retention_time_in_days: None,
5119            max_data_extension_time_in_days: None,
5120            external_volume: None,
5121            catalog: None,
5122            replace_invalid_characters: None,
5123            default_ddl_collation: None,
5124            storage_serialization_policy: None,
5125            comment: None,
5126            catalog_sync: None,
5127            catalog_sync_namespace_mode: None,
5128            catalog_sync_namespace_flatten_delimiter: None,
5129            with_tags: None,
5130            with_contacts: None,
5131        })
5132    }
5133
5134    pub fn parse_optional_create_function_using(
5135        &mut self,
5136    ) -> Result<Option<CreateFunctionUsing>, ParserError> {
5137        if !self.parse_keyword(Keyword::USING) {
5138            return Ok(None);
5139        };
5140        let keyword =
5141            self.expect_one_of_keywords(&[Keyword::JAR, Keyword::FILE, Keyword::ARCHIVE])?;
5142
5143        let uri = self.parse_literal_string()?;
5144
5145        match keyword {
5146            Keyword::JAR => Ok(Some(CreateFunctionUsing::Jar(uri))),
5147            Keyword::FILE => Ok(Some(CreateFunctionUsing::File(uri))),
5148            Keyword::ARCHIVE => Ok(Some(CreateFunctionUsing::Archive(uri))),
5149            _ => self.expected(
5150                "JAR, FILE or ARCHIVE",
5151                TokenWithSpan::wrap(Token::make_keyword(format!("{keyword:?}").as_str())),
5152            ),
5153        }
5154    }
5155
5156    pub fn parse_create_function(
5157        &mut self,
5158        or_alter: bool,
5159        or_replace: bool,
5160        temporary: bool,
5161    ) -> Result<Statement, ParserError> {
5162        if dialect_of!(self is HiveDialect) {
5163            self.parse_hive_create_function(or_replace, temporary)
5164        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
5165            self.parse_postgres_create_function(or_replace, temporary)
5166        } else if dialect_of!(self is DuckDbDialect) {
5167            self.parse_create_macro(or_replace, temporary)
5168        } else if dialect_of!(self is BigQueryDialect) {
5169            self.parse_bigquery_create_function(or_replace, temporary)
5170        } else if dialect_of!(self is MsSqlDialect) {
5171            self.parse_mssql_create_function(or_alter, or_replace, temporary)
5172        } else {
5173            self.prev_token();
5174            self.expected("an object type after CREATE", self.peek_token())
5175        }
5176    }
5177
5178    /// Parse `CREATE FUNCTION` for [PostgreSQL]
5179    ///
5180    /// [PostgreSQL]: https://www.postgresql.org/docs/15/sql-createfunction.html
5181    fn parse_postgres_create_function(
5182        &mut self,
5183        or_replace: bool,
5184        temporary: bool,
5185    ) -> Result<Statement, ParserError> {
5186        let name = self.parse_object_name(false)?;
5187
5188        self.expect_token(&Token::LParen)?;
5189        let args = if Token::RParen != self.peek_token_ref().token {
5190            self.parse_comma_separated(Parser::parse_function_arg)?
5191        } else {
5192            vec![]
5193        };
5194        self.expect_token(&Token::RParen)?;
5195
5196        let return_type = if self.parse_keyword(Keyword::RETURNS) {
5197            Some(self.parse_data_type()?)
5198        } else {
5199            None
5200        };
5201
5202        #[derive(Default)]
5203        struct Body {
5204            language: Option<Ident>,
5205            behavior: Option<FunctionBehavior>,
5206            function_body: Option<CreateFunctionBody>,
5207            called_on_null: Option<FunctionCalledOnNull>,
5208            parallel: Option<FunctionParallel>,
5209        }
5210        let mut body = Body::default();
5211        loop {
5212            fn ensure_not_set<T>(field: &Option<T>, name: &str) -> Result<(), ParserError> {
5213                if field.is_some() {
5214                    return Err(ParserError::ParserError(format!(
5215                        "{name} specified more than once",
5216                    )));
5217                }
5218                Ok(())
5219            }
5220            if self.parse_keyword(Keyword::AS) {
5221                ensure_not_set(&body.function_body, "AS")?;
5222                body.function_body = Some(self.parse_create_function_body_string()?);
5223            } else if self.parse_keyword(Keyword::LANGUAGE) {
5224                ensure_not_set(&body.language, "LANGUAGE")?;
5225                body.language = Some(self.parse_identifier()?);
5226            } else if self.parse_keyword(Keyword::IMMUTABLE) {
5227                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
5228                body.behavior = Some(FunctionBehavior::Immutable);
5229            } else if self.parse_keyword(Keyword::STABLE) {
5230                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
5231                body.behavior = Some(FunctionBehavior::Stable);
5232            } else if self.parse_keyword(Keyword::VOLATILE) {
5233                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
5234                body.behavior = Some(FunctionBehavior::Volatile);
5235            } else if self.parse_keywords(&[
5236                Keyword::CALLED,
5237                Keyword::ON,
5238                Keyword::NULL,
5239                Keyword::INPUT,
5240            ]) {
5241                ensure_not_set(
5242                    &body.called_on_null,
5243                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
5244                )?;
5245                body.called_on_null = Some(FunctionCalledOnNull::CalledOnNullInput);
5246            } else if self.parse_keywords(&[
5247                Keyword::RETURNS,
5248                Keyword::NULL,
5249                Keyword::ON,
5250                Keyword::NULL,
5251                Keyword::INPUT,
5252            ]) {
5253                ensure_not_set(
5254                    &body.called_on_null,
5255                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
5256                )?;
5257                body.called_on_null = Some(FunctionCalledOnNull::ReturnsNullOnNullInput);
5258            } else if self.parse_keyword(Keyword::STRICT) {
5259                ensure_not_set(
5260                    &body.called_on_null,
5261                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
5262                )?;
5263                body.called_on_null = Some(FunctionCalledOnNull::Strict);
5264            } else if self.parse_keyword(Keyword::PARALLEL) {
5265                ensure_not_set(&body.parallel, "PARALLEL { UNSAFE | RESTRICTED | SAFE }")?;
5266                if self.parse_keyword(Keyword::UNSAFE) {
5267                    body.parallel = Some(FunctionParallel::Unsafe);
5268                } else if self.parse_keyword(Keyword::RESTRICTED) {
5269                    body.parallel = Some(FunctionParallel::Restricted);
5270                } else if self.parse_keyword(Keyword::SAFE) {
5271                    body.parallel = Some(FunctionParallel::Safe);
5272                } else {
5273                    return self.expected("one of UNSAFE | RESTRICTED | SAFE", self.peek_token());
5274                }
5275            } else if self.parse_keyword(Keyword::RETURN) {
5276                ensure_not_set(&body.function_body, "RETURN")?;
5277                body.function_body = Some(CreateFunctionBody::Return(self.parse_expr()?));
5278            } else {
5279                break;
5280            }
5281        }
5282
5283        Ok(Statement::CreateFunction(CreateFunction {
5284            or_alter: false,
5285            or_replace,
5286            temporary,
5287            name,
5288            args: Some(args),
5289            return_type,
5290            behavior: body.behavior,
5291            called_on_null: body.called_on_null,
5292            parallel: body.parallel,
5293            language: body.language,
5294            function_body: body.function_body,
5295            if_not_exists: false,
5296            using: None,
5297            determinism_specifier: None,
5298            options: None,
5299            remote_connection: None,
5300        }))
5301    }
5302
5303    /// Parse `CREATE FUNCTION` for [Hive]
5304    ///
5305    /// [Hive]: https://cwiki.apache.org/confluence/display/hive/languagemanual+ddl#LanguageManualDDL-Create/Drop/ReloadFunction
5306    fn parse_hive_create_function(
5307        &mut self,
5308        or_replace: bool,
5309        temporary: bool,
5310    ) -> Result<Statement, ParserError> {
5311        let name = self.parse_object_name(false)?;
5312        self.expect_keyword_is(Keyword::AS)?;
5313
5314        let body = self.parse_create_function_body_string()?;
5315        let using = self.parse_optional_create_function_using()?;
5316
5317        Ok(Statement::CreateFunction(CreateFunction {
5318            or_alter: false,
5319            or_replace,
5320            temporary,
5321            name,
5322            function_body: Some(body),
5323            using,
5324            if_not_exists: false,
5325            args: None,
5326            return_type: None,
5327            behavior: None,
5328            called_on_null: None,
5329            parallel: None,
5330            language: None,
5331            determinism_specifier: None,
5332            options: None,
5333            remote_connection: None,
5334        }))
5335    }
5336
5337    /// Parse `CREATE FUNCTION` for [BigQuery]
5338    ///
5339    /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_function_statement
5340    fn parse_bigquery_create_function(
5341        &mut self,
5342        or_replace: bool,
5343        temporary: bool,
5344    ) -> Result<Statement, ParserError> {
5345        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5346        let (name, args) = self.parse_create_function_name_and_params()?;
5347
5348        let return_type = if self.parse_keyword(Keyword::RETURNS) {
5349            Some(self.parse_data_type()?)
5350        } else {
5351            None
5352        };
5353
5354        let determinism_specifier = if self.parse_keyword(Keyword::DETERMINISTIC) {
5355            Some(FunctionDeterminismSpecifier::Deterministic)
5356        } else if self.parse_keywords(&[Keyword::NOT, Keyword::DETERMINISTIC]) {
5357            Some(FunctionDeterminismSpecifier::NotDeterministic)
5358        } else {
5359            None
5360        };
5361
5362        let language = if self.parse_keyword(Keyword::LANGUAGE) {
5363            Some(self.parse_identifier()?)
5364        } else {
5365            None
5366        };
5367
5368        let remote_connection =
5369            if self.parse_keywords(&[Keyword::REMOTE, Keyword::WITH, Keyword::CONNECTION]) {
5370                Some(self.parse_object_name(false)?)
5371            } else {
5372                None
5373            };
5374
5375        // `OPTIONS` may come before or after the function body, but
5376        // may be specified at most once.
5377        let mut options = self.maybe_parse_options(Keyword::OPTIONS)?;
5378
5379        let function_body = if remote_connection.is_none() {
5380            self.expect_keyword_is(Keyword::AS)?;
5381            let expr = self.parse_expr()?;
5382            if options.is_none() {
5383                options = self.maybe_parse_options(Keyword::OPTIONS)?;
5384                Some(CreateFunctionBody::AsBeforeOptions {
5385                    body: expr,
5386                    link_symbol: None,
5387                })
5388            } else {
5389                Some(CreateFunctionBody::AsAfterOptions(expr))
5390            }
5391        } else {
5392            None
5393        };
5394
5395        Ok(Statement::CreateFunction(CreateFunction {
5396            or_alter: false,
5397            or_replace,
5398            temporary,
5399            if_not_exists,
5400            name,
5401            args: Some(args),
5402            return_type,
5403            function_body,
5404            language,
5405            determinism_specifier,
5406            options,
5407            remote_connection,
5408            using: None,
5409            behavior: None,
5410            called_on_null: None,
5411            parallel: None,
5412        }))
5413    }
5414
5415    /// Parse `CREATE FUNCTION` for [MsSql]
5416    ///
5417    /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/statements/create-function-transact-sql
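    ///
    /// A representative statement this path is intended to accept
    /// (illustrative only; object names are made up):
    ///
    /// ```sql
    /// CREATE FUNCTION dbo.order_count(@customer_id INT)
    /// RETURNS INT
    /// AS
    /// RETURN (SELECT COUNT(*) FROM orders WHERE customer_id = @customer_id);
    /// ```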
5418    fn parse_mssql_create_function(
5419        &mut self,
5420        or_alter: bool,
5421        or_replace: bool,
5422        temporary: bool,
5423    ) -> Result<Statement, ParserError> {
5424        let (name, args) = self.parse_create_function_name_and_params()?;
5425
5426        self.expect_keyword(Keyword::RETURNS)?;
5427
5428        let return_table = self.maybe_parse(|p| {
5429            let return_table_name = p.parse_identifier()?;
5430
5431            p.expect_keyword_is(Keyword::TABLE)?;
5432            p.prev_token();
5433
5434            let table_column_defs = match p.parse_data_type()? {
5435                DataType::Table(Some(table_column_defs)) if !table_column_defs.is_empty() => {
5436                    table_column_defs
5437                }
5438                _ => parser_err!(
5439                    "Expected table column definitions after TABLE keyword",
5440                    p.peek_token().span.start
5441                )?,
5442            };
5443
5444            Ok(DataType::NamedTable {
5445                name: ObjectName(vec![ObjectNamePart::Identifier(return_table_name)]),
5446                columns: table_column_defs,
5447            })
5448        })?;
5449
5450        let return_type = if return_table.is_some() {
5451            return_table
5452        } else {
5453            Some(self.parse_data_type()?)
5454        };
5455
5456        let _ = self.parse_keyword(Keyword::AS);
5457
5458        let function_body = if self.peek_keyword(Keyword::BEGIN) {
5459            let begin_token = self.expect_keyword(Keyword::BEGIN)?;
5460            let statements = self.parse_statement_list(&[Keyword::END])?;
5461            let end_token = self.expect_keyword(Keyword::END)?;
5462
5463            Some(CreateFunctionBody::AsBeginEnd(BeginEndStatements {
5464                begin_token: AttachedToken(begin_token),
5465                statements,
5466                end_token: AttachedToken(end_token),
5467            }))
5468        } else if self.parse_keyword(Keyword::RETURN) {
5469            if self.peek_token() == Token::LParen {
5470                Some(CreateFunctionBody::AsReturnExpr(self.parse_expr()?))
5471            } else if self.peek_keyword(Keyword::SELECT) {
5472                let select = self.parse_select()?;
5473                Some(CreateFunctionBody::AsReturnSelect(select))
5474            } else {
5475                parser_err!(
5476                    "Expected a subquery (or bare SELECT statement) after RETURN",
5477                    self.peek_token().span.start
5478                )?
5479            }
5480        } else {
5481            parser_err!("Unparsable function body", self.peek_token().span.start)?
5482        };
5483
5484        Ok(Statement::CreateFunction(CreateFunction {
5485            or_alter,
5486            or_replace,
5487            temporary,
5488            if_not_exists: false,
5489            name,
5490            args: Some(args),
5491            return_type,
5492            function_body,
5493            language: None,
5494            determinism_specifier: None,
5495            options: None,
5496            remote_connection: None,
5497            using: None,
5498            behavior: None,
5499            called_on_null: None,
5500            parallel: None,
5501        }))
5502    }
5503
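    /// Parse the function name and parenthesized parameter list shared by the
    /// BigQuery and MsSql `CREATE FUNCTION` variants. Each parameter has the
    /// form `name type [= default]`, e.g. the `(x INT64, y INT64 = 0)` portion
    /// of a statement (illustrative parameter names).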
5504    fn parse_create_function_name_and_params(
5505        &mut self,
5506    ) -> Result<(ObjectName, Vec<OperateFunctionArg>), ParserError> {
5507        let name = self.parse_object_name(false)?;
5508        let parse_function_param =
5509            |parser: &mut Parser| -> Result<OperateFunctionArg, ParserError> {
5510                let name = parser.parse_identifier()?;
5511                let data_type = parser.parse_data_type()?;
5512                let default_expr = if parser.consume_token(&Token::Eq) {
5513                    Some(parser.parse_expr()?)
5514                } else {
5515                    None
5516                };
5517
5518                Ok(OperateFunctionArg {
5519                    mode: None,
5520                    name: Some(name),
5521                    data_type,
5522                    default_expr,
5523                })
5524            };
5525        self.expect_token(&Token::LParen)?;
5526        let args = self.parse_comma_separated0(parse_function_param, Token::RParen)?;
5527        self.expect_token(&Token::RParen)?;
5528        Ok((name, args))
5529    }
5530
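    /// Parse a single `CREATE FUNCTION` argument of the form
    /// `[ IN | OUT | INOUT ] [ name ] type [ { DEFAULT | = } expr ]`,
    /// e.g. `IN amount NUMERIC DEFAULT 0` (illustrative name and type).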
5531    fn parse_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
5532        let mode = if self.parse_keyword(Keyword::IN) {
5533            Some(ArgMode::In)
5534        } else if self.parse_keyword(Keyword::OUT) {
5535            Some(ArgMode::Out)
5536        } else if self.parse_keyword(Keyword::INOUT) {
5537            Some(ArgMode::InOut)
5538        } else {
5539            None
5540        };
5541
5542        // parse: [ argname ] argtype
5543        let mut name = None;
5544        let mut data_type = self.parse_data_type()?;
5545
5546        // To check whether the first token is a name or a type, peek at the
5547        // next token: if it also parses as a data type, then the first token
5548        // was the argument name rather than a type.
5549        let data_type_idx = self.get_current_index();
5550
5551        // DEFAULT will be parsed as `DataType::Custom`, which is undesirable in this context
5552        fn parse_data_type_no_default(parser: &mut Parser) -> Result<DataType, ParserError> {
5553            if parser.peek_keyword(Keyword::DEFAULT) {
5554                // This dummy error is ignored in `maybe_parse`
5555                parser_err!(
5556                    "The DEFAULT keyword is not a type",
5557                    parser.peek_token().span.start
5558                )
5559            } else {
5560                parser.parse_data_type()
5561            }
5562        }
5563
5564        if let Some(next_data_type) = self.maybe_parse(parse_data_type_no_default)? {
5565            let token = self.token_at(data_type_idx);
5566
5567            // Ensure that the token is a `Word` token, not some other special token.
5568            if !matches!(token.token, Token::Word(_)) {
5569                return self.expected("a name or type", token.clone());
5570            }
5571
5572            name = Some(Ident::new(token.to_string()));
5573            data_type = next_data_type;
5574        }
5575
5576        let default_expr = if self.parse_keyword(Keyword::DEFAULT) || self.consume_token(&Token::Eq)
5577        {
5578            Some(self.parse_expr()?)
5579        } else {
5580            None
5581        };
5582        Ok(OperateFunctionArg {
5583            mode,
5584            name,
5585            data_type,
5586            default_expr,
5587        })
5588    }
5589
5590    /// Parse a `DROP TRIGGER` statement such as:
5591    ///
5592    /// ```sql
5593    /// DROP TRIGGER [ IF EXISTS ] name ON table_name [ CASCADE | RESTRICT ]
5594    /// ```
5595    pub fn parse_drop_trigger(&mut self) -> Result<Statement, ParserError> {
5596        if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
5597        {
5598            self.prev_token();
5599            return self.expected("an object type after DROP", self.peek_token());
5600        }
5601        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5602        let trigger_name = self.parse_object_name(false)?;
5603        let table_name = if self.parse_keyword(Keyword::ON) {
5604            Some(self.parse_object_name(false)?)
5605        } else {
5606            None
5607        };
5608        let option = match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
5609            Some(Keyword::CASCADE) => Some(ReferentialAction::Cascade),
5610            Some(Keyword::RESTRICT) => Some(ReferentialAction::Restrict),
5611            Some(unexpected_keyword) => return Err(ParserError::ParserError(
5612                format!("Internal parser error: expected any of {{CASCADE, RESTRICT}}, got {unexpected_keyword:?}"),
5613            )),
5614            None => None,
5615        };
5616        Ok(Statement::DropTrigger(DropTrigger {
5617            if_exists,
5618            trigger_name,
5619            table_name,
5620            option,
5621        }))
5622    }
5623
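    /// Parse a `CREATE TRIGGER` statement.
    ///
    /// A representative PostgreSQL-style statement this is intended to accept
    /// (illustrative only; trigger, table and function names are made up, and
    /// the supported clauses vary by dialect):
    ///
    /// ```sql
    /// CREATE TRIGGER check_update
    /// BEFORE UPDATE ON accounts
    /// FOR EACH ROW
    /// EXECUTE FUNCTION check_account_update();
    /// ```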
5624    pub fn parse_create_trigger(
5625        &mut self,
5626        temporary: bool,
5627        or_alter: bool,
5628        or_replace: bool,
5629        is_constraint: bool,
5630    ) -> Result<Statement, ParserError> {
5631        if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
5632        {
5633            self.prev_token();
5634            return self.expected("an object type after CREATE", self.peek_token());
5635        }
5636
5637        let name = self.parse_object_name(false)?;
5638        let period = self.maybe_parse(|parser| parser.parse_trigger_period())?;
5639
5640        let events = self.parse_keyword_separated(Keyword::OR, Parser::parse_trigger_event)?;
5641        self.expect_keyword_is(Keyword::ON)?;
5642        let table_name = self.parse_object_name(false)?;
5643
5644        let referenced_table_name = if self.parse_keyword(Keyword::FROM) {
5645            self.parse_object_name(true).ok()
5646        } else {
5647            None
5648        };
5649
5650        let characteristics = self.parse_constraint_characteristics()?;
5651
5652        let mut referencing = vec![];
5653        if self.parse_keyword(Keyword::REFERENCING) {
5654            while let Some(refer) = self.parse_trigger_referencing()? {
5655                referencing.push(refer);
5656            }
5657        }
5658
5659        let trigger_object = if self.parse_keyword(Keyword::FOR) {
5660            let include_each = self.parse_keyword(Keyword::EACH);
5661            let trigger_object =
5662                match self.expect_one_of_keywords(&[Keyword::ROW, Keyword::STATEMENT])? {
5663                    Keyword::ROW => TriggerObject::Row,
5664                    Keyword::STATEMENT => TriggerObject::Statement,
5665                    unexpected_keyword => return Err(ParserError::ParserError(
5666                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in ROW/STATEMENT"),
5667                    )),
5668                };
5669
5670            Some(if include_each {
5671                TriggerObjectKind::ForEach(trigger_object)
5672            } else {
5673                TriggerObjectKind::For(trigger_object)
5674            })
5675        } else {
5676            let _ = self.parse_keyword(Keyword::FOR);
5677
5678            None
5679        };
5680
5681        let condition = self
5682            .parse_keyword(Keyword::WHEN)
5683            .then(|| self.parse_expr())
5684            .transpose()?;
5685
5686        let mut exec_body = None;
5687        let mut statements = None;
5688        if self.parse_keyword(Keyword::EXECUTE) {
5689            exec_body = Some(self.parse_trigger_exec_body()?);
5690        } else {
5691            statements = Some(self.parse_conditional_statements(&[Keyword::END])?);
5692        }
5693
5694        Ok(CreateTrigger {
5695            or_alter,
5696            temporary,
5697            or_replace,
5698            is_constraint,
5699            name,
5700            period,
5701            period_before_table: true,
5702            events,
5703            table_name,
5704            referenced_table_name,
5705            referencing,
5706            trigger_object,
5707            condition,
5708            exec_body,
5709            statements_as: false,
5710            statements,
5711            characteristics,
5712        }
5713        .into())
5714    }
5715
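    /// Parse the trigger period: `FOR`, `BEFORE`, `AFTER`, or `INSTEAD OF`.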
5716    pub fn parse_trigger_period(&mut self) -> Result<TriggerPeriod, ParserError> {
5717        Ok(
5718            match self.expect_one_of_keywords(&[
5719                Keyword::FOR,
5720                Keyword::BEFORE,
5721                Keyword::AFTER,
5722                Keyword::INSTEAD,
5723            ])? {
5724                Keyword::FOR => TriggerPeriod::For,
5725                Keyword::BEFORE => TriggerPeriod::Before,
5726                Keyword::AFTER => TriggerPeriod::After,
5727                Keyword::INSTEAD => self
5728                    .expect_keyword_is(Keyword::OF)
5729                    .map(|_| TriggerPeriod::InsteadOf)?,
5730                unexpected_keyword => return Err(ParserError::ParserError(
5731                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger period"),
5732                )),
5733            },
5734        )
5735    }
5736
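    /// Parse a single trigger event: `INSERT`, `UPDATE [ OF column [, ...] ]`,
    /// `DELETE`, or `TRUNCATE`.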
5737    pub fn parse_trigger_event(&mut self) -> Result<TriggerEvent, ParserError> {
5738        Ok(
5739            match self.expect_one_of_keywords(&[
5740                Keyword::INSERT,
5741                Keyword::UPDATE,
5742                Keyword::DELETE,
5743                Keyword::TRUNCATE,
5744            ])? {
5745                Keyword::INSERT => TriggerEvent::Insert,
5746                Keyword::UPDATE => {
5747                    if self.parse_keyword(Keyword::OF) {
5748                        let cols = self.parse_comma_separated(Parser::parse_identifier)?;
5749                        TriggerEvent::Update(cols)
5750                    } else {
5751                        TriggerEvent::Update(vec![])
5752                    }
5753                }
5754                Keyword::DELETE => TriggerEvent::Delete,
5755                Keyword::TRUNCATE => TriggerEvent::Truncate,
5756                unexpected_keyword => return Err(ParserError::ParserError(
5757                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger event"),
5758                )),
5759            },
5760        )
5761    }
5762
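    /// Parse one `{ OLD | NEW } TABLE [ AS ] transition_relation_name` item of
    /// a trigger `REFERENCING` clause, returning `Ok(None)` when no further
    /// item follows.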
5763    pub fn parse_trigger_referencing(&mut self) -> Result<Option<TriggerReferencing>, ParserError> {
5764        let refer_type = match self.parse_one_of_keywords(&[Keyword::OLD, Keyword::NEW]) {
5765            Some(Keyword::OLD) if self.parse_keyword(Keyword::TABLE) => {
5766                TriggerReferencingType::OldTable
5767            }
5768            Some(Keyword::NEW) if self.parse_keyword(Keyword::TABLE) => {
5769                TriggerReferencingType::NewTable
5770            }
5771            _ => {
5772                return Ok(None);
5773            }
5774        };
5775
5776        let is_as = self.parse_keyword(Keyword::AS);
5777        let transition_relation_name = self.parse_object_name(false)?;
5778        Ok(Some(TriggerReferencing {
5779            refer_type,
5780            is_as,
5781            transition_relation_name,
5782        }))
5783    }
5784
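    /// Parse the `EXECUTE { FUNCTION | PROCEDURE } function_name(...)` body of
    /// a `CREATE TRIGGER` statement.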
5785    pub fn parse_trigger_exec_body(&mut self) -> Result<TriggerExecBody, ParserError> {
5786        Ok(TriggerExecBody {
5787            exec_type: match self
5788                .expect_one_of_keywords(&[Keyword::FUNCTION, Keyword::PROCEDURE])?
5789            {
5790                Keyword::FUNCTION => TriggerExecBodyType::Function,
5791                Keyword::PROCEDURE => TriggerExecBodyType::Procedure,
5792                unexpected_keyword => return Err(ParserError::ParserError(
5793                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger exec body"),
5794                )),
5795            },
5796            func_desc: self.parse_function_desc()?,
5797        })
5798    }
5799
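    /// Parse a `CREATE MACRO` statement as supported by DuckDB, e.g.
    /// (illustrative only; macro and parameter names are made up):
    ///
    /// ```sql
    /// CREATE MACRO add_default(a, b := 5) AS a + b;
    /// ```
    ///
    /// A table macro body of the form `AS TABLE SELECT ...` is also accepted.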
5800    pub fn parse_create_macro(
5801        &mut self,
5802        or_replace: bool,
5803        temporary: bool,
5804    ) -> Result<Statement, ParserError> {
5805        if dialect_of!(self is DuckDbDialect | GenericDialect) {
5806            let name = self.parse_object_name(false)?;
5807            self.expect_token(&Token::LParen)?;
5808            let args = if self.consume_token(&Token::RParen) {
5809                self.prev_token();
5810                None
5811            } else {
5812                Some(self.parse_comma_separated(Parser::parse_macro_arg)?)
5813            };
5814
5815            self.expect_token(&Token::RParen)?;
5816            self.expect_keyword_is(Keyword::AS)?;
5817
5818            Ok(Statement::CreateMacro {
5819                or_replace,
5820                temporary,
5821                name,
5822                args,
5823                definition: if self.parse_keyword(Keyword::TABLE) {
5824                    MacroDefinition::Table(self.parse_query()?)
5825                } else {
5826                    MacroDefinition::Expr(self.parse_expr()?)
5827                },
5828            })
5829        } else {
5830            self.prev_token();
5831            self.expected("an object type after CREATE", self.peek_token())
5832        }
5833    }
5834
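    /// Parse a single macro parameter: an identifier with an optional default
    /// introduced by `:=` or `->`, e.g. `b := 5` (illustrative).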
5835    fn parse_macro_arg(&mut self) -> Result<MacroArg, ParserError> {
5836        let name = self.parse_identifier()?;
5837
5838        let default_expr =
5839            if self.consume_token(&Token::Assignment) || self.consume_token(&Token::RArrow) {
5840                Some(self.parse_expr()?)
5841            } else {
5842                None
5843            };
5844        Ok(MacroArg { name, default_expr })
5845    }
5846
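    /// Parse the remainder of a `CREATE EXTERNAL TABLE` statement; the caller
    /// has already consumed `CREATE [OR REPLACE] EXTERNAL`. A representative
    /// Hive-style statement (illustrative only; names and location are made up):
    ///
    /// ```sql
    /// CREATE EXTERNAL TABLE events (id INT, name STRING)
    /// STORED AS PARQUET
    /// LOCATION 's3://my-bucket/events';
    /// ```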
5847    pub fn parse_create_external_table(
5848        &mut self,
5849        or_replace: bool,
5850    ) -> Result<Statement, ParserError> {
5851        self.expect_keyword_is(Keyword::TABLE)?;
5852        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5853        let table_name = self.parse_object_name(false)?;
5854        let (columns, constraints) = self.parse_columns()?;
5855
5856        let hive_distribution = self.parse_hive_distribution()?;
5857        let hive_formats = self.parse_hive_formats()?;
5858
5859        let file_format = if let Some(ref hf) = hive_formats {
5860            if let Some(ref ff) = hf.storage {
5861                match ff {
5862                    HiveIOFormat::FileFormat { format } => Some(*format),
5863                    _ => None,
5864                }
5865            } else {
5866                None
5867            }
5868        } else {
5869            None
5870        };
5871        let location = hive_formats.as_ref().and_then(|hf| hf.location.clone());
5872        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
5873        let table_options = if !table_properties.is_empty() {
5874            CreateTableOptions::TableProperties(table_properties)
5875        } else {
5876            CreateTableOptions::None
5877        };
5878        Ok(CreateTableBuilder::new(table_name)
5879            .columns(columns)
5880            .constraints(constraints)
5881            .hive_distribution(hive_distribution)
5882            .hive_formats(hive_formats)
5883            .table_options(table_options)
5884            .or_replace(or_replace)
5885            .if_not_exists(if_not_exists)
5886            .external(true)
5887            .file_format(file_format)
5888            .location(location)
5889            .build())
5890    }
5891
5892    pub fn parse_file_format(&mut self) -> Result<FileFormat, ParserError> {
5893        let next_token = self.next_token();
5894        match &next_token.token {
5895            Token::Word(w) => match w.keyword {
5896                Keyword::AVRO => Ok(FileFormat::AVRO),
5897                Keyword::JSONFILE => Ok(FileFormat::JSONFILE),
5898                Keyword::ORC => Ok(FileFormat::ORC),
5899                Keyword::PARQUET => Ok(FileFormat::PARQUET),
5900                Keyword::RCFILE => Ok(FileFormat::RCFILE),
5901                Keyword::SEQUENCEFILE => Ok(FileFormat::SEQUENCEFILE),
5902                Keyword::TEXTFILE => Ok(FileFormat::TEXTFILE),
5903                _ => self.expected("fileformat", next_token),
5904            },
5905            _ => self.expected("fileformat", next_token),
5906        }
5907    }
5908
5909    fn parse_analyze_format_kind(&mut self) -> Result<AnalyzeFormatKind, ParserError> {
5910        if self.consume_token(&Token::Eq) {
5911            Ok(AnalyzeFormatKind::Assignment(self.parse_analyze_format()?))
5912        } else {
5913            Ok(AnalyzeFormatKind::Keyword(self.parse_analyze_format()?))
5914        }
5915    }
5916
5917    pub fn parse_analyze_format(&mut self) -> Result<AnalyzeFormat, ParserError> {
5918        let next_token = self.next_token();
5919        match &next_token.token {
5920            Token::Word(w) => match w.keyword {
5921                Keyword::TEXT => Ok(AnalyzeFormat::TEXT),
5922                Keyword::GRAPHVIZ => Ok(AnalyzeFormat::GRAPHVIZ),
5923                Keyword::JSON => Ok(AnalyzeFormat::JSON),
5924                _ => self.expected("TEXT, GRAPHVIZ or JSON", next_token),
5925            },
5926            _ => self.expected("TEXT, GRAPHVIZ or JSON", next_token),
5927        }
5928    }
5929
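    /// Parse a `CREATE VIEW` statement; the leading `CREATE [OR REPLACE]
    /// [TEMPORARY]` keywords have already been consumed by the caller.
    /// A representative statement (illustrative only; names are made up):
    ///
    /// ```sql
    /// CREATE OR REPLACE VIEW active_users AS
    /// SELECT id, name FROM users WHERE active = true;
    /// ```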
5930    pub fn parse_create_view(
5931        &mut self,
5932        or_alter: bool,
5933        or_replace: bool,
5934        temporary: bool,
5935        create_view_params: Option<CreateViewParams>,
5936    ) -> Result<Statement, ParserError> {
5937        let secure = self.parse_keyword(Keyword::SECURE);
5938        let materialized = self.parse_keyword(Keyword::MATERIALIZED);
5939        self.expect_keyword_is(Keyword::VIEW)?;
5940        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
5941        // Tries to parse IF NOT EXISTS either before or after the view name.
5942        // Name before IF NOT EXISTS is supported by Snowflake but undocumented.
5943        let if_not_exists_first =
5944            self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5945        let name = self.parse_object_name(allow_unquoted_hyphen)?;
5946        let name_before_not_exists = !if_not_exists_first
5947            && self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5948        let if_not_exists = if_not_exists_first || name_before_not_exists;
5949        // Many dialects support `OR ALTER` right after `CREATE`, but we don't (yet).
5950        // ANSI SQL and Postgres support RECURSIVE here, but we don't support it either.
5951        let columns = self.parse_view_columns()?;
5952        let mut options = CreateTableOptions::None;
5953        let with_options = self.parse_options(Keyword::WITH)?;
5954        if !with_options.is_empty() {
5955            options = CreateTableOptions::With(with_options);
5956        }
5957
5958        let cluster_by = if self.parse_keyword(Keyword::CLUSTER) {
5959            self.expect_keyword_is(Keyword::BY)?;
5960            self.parse_parenthesized_column_list(Optional, false)?
5961        } else {
5962            vec![]
5963        };
5964
5965        if dialect_of!(self is BigQueryDialect | GenericDialect) {
5966            if let Some(opts) = self.maybe_parse_options(Keyword::OPTIONS)? {
5967                if !opts.is_empty() {
5968                    options = CreateTableOptions::Options(opts);
5969                }
5970            };
5971        }
5972
5973        let to = if dialect_of!(self is ClickHouseDialect | GenericDialect)
5974            && self.parse_keyword(Keyword::TO)
5975        {
5976            Some(self.parse_object_name(false)?)
5977        } else {
5978            None
5979        };
5980
5981        let comment = if dialect_of!(self is SnowflakeDialect | GenericDialect)
5982            && self.parse_keyword(Keyword::COMMENT)
5983        {
5984            self.expect_token(&Token::Eq)?;
5985            Some(self.parse_comment_value()?)
5986        } else {
5987            None
5988        };
5989
5990        self.expect_keyword_is(Keyword::AS)?;
5991        let query = self.parse_query()?;
5992        // Optional `WITH [ CASCADED | LOCAL ] CHECK OPTION` is widely supported here.
5993
5994        let with_no_schema_binding = dialect_of!(self is RedshiftSqlDialect | GenericDialect)
5995            && self.parse_keywords(&[
5996                Keyword::WITH,
5997                Keyword::NO,
5998                Keyword::SCHEMA,
5999                Keyword::BINDING,
6000            ]);
6001
6002        Ok(CreateView {
6003            or_alter,
6004            name,
6005            columns,
6006            query,
6007            materialized,
6008            secure,
6009            or_replace,
6010            options,
6011            cluster_by,
6012            comment,
6013            with_no_schema_binding,
6014            if_not_exists,
6015            temporary,
6016            to,
6017            params: create_view_params,
6018            name_before_not_exists,
6019        }
6020        .into())
6021    }
6022
6023    /// Parse optional parameters for the `CREATE VIEW` statement supported by [MySQL].
6024    ///
6025    /// [MySQL]: https://dev.mysql.com/doc/refman/9.1/en/create-view.html
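    ///
    /// For example (illustrative), these parameters cover the portion between
    /// `CREATE` and `VIEW` in:
    ///
    /// ```sql
    /// CREATE ALGORITHM = MERGE SQL SECURITY INVOKER VIEW v1 AS SELECT * FROM t1;
    /// ```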
6026    fn parse_create_view_params(&mut self) -> Result<Option<CreateViewParams>, ParserError> {
6027        let algorithm = if self.parse_keyword(Keyword::ALGORITHM) {
6028            self.expect_token(&Token::Eq)?;
6029            Some(
6030                match self.expect_one_of_keywords(&[
6031                    Keyword::UNDEFINED,
6032                    Keyword::MERGE,
6033                    Keyword::TEMPTABLE,
6034                ])? {
6035                    Keyword::UNDEFINED => CreateViewAlgorithm::Undefined,
6036                    Keyword::MERGE => CreateViewAlgorithm::Merge,
6037                    Keyword::TEMPTABLE => CreateViewAlgorithm::TempTable,
6038                    _ => {
6039                        self.prev_token();
6040                        let found = self.next_token();
6041                        return self
6042                            .expected("UNDEFINED or MERGE or TEMPTABLE after ALGORITHM =", found);
6043                    }
6044                },
6045            )
6046        } else {
6047            None
6048        };
6049        let definer = if self.parse_keyword(Keyword::DEFINER) {
6050            self.expect_token(&Token::Eq)?;
6051            Some(self.parse_grantee_name()?)
6052        } else {
6053            None
6054        };
6055        let security = if self.parse_keywords(&[Keyword::SQL, Keyword::SECURITY]) {
6056            Some(
6057                match self.expect_one_of_keywords(&[Keyword::DEFINER, Keyword::INVOKER])? {
6058                    Keyword::DEFINER => CreateViewSecurity::Definer,
6059                    Keyword::INVOKER => CreateViewSecurity::Invoker,
6060                    _ => {
6061                        self.prev_token();
6062                        let found = self.next_token();
6063                        return self.expected("DEFINER or INVOKER after SQL SECURITY", found);
6064                    }
6065                },
6066            )
6067        } else {
6068            None
6069        };
6070        if algorithm.is_some() || definer.is_some() || security.is_some() {
6071            Ok(Some(CreateViewParams {
6072                algorithm,
6073                definer,
6074                security,
6075            }))
6076        } else {
6077            Ok(None)
6078        }
6079    }
6080
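    /// Parse a `CREATE ROLE` statement, e.g. a PostgreSQL-style statement such
    /// as (illustrative only; the role name and options are made up):
    ///
    /// ```sql
    /// CREATE ROLE analyst WITH LOGIN PASSWORD 'secret' CONNECTION LIMIT 10;
    /// ```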
6081    pub fn parse_create_role(&mut self) -> Result<Statement, ParserError> {
6082        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6083        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
6084
6085        let _ = self.parse_keyword(Keyword::WITH); // [ WITH ]
6086
6087        let optional_keywords = if dialect_of!(self is MsSqlDialect) {
6088            vec![Keyword::AUTHORIZATION]
6089        } else if dialect_of!(self is PostgreSqlDialect) {
6090            vec![
6091                Keyword::LOGIN,
6092                Keyword::NOLOGIN,
6093                Keyword::INHERIT,
6094                Keyword::NOINHERIT,
6095                Keyword::BYPASSRLS,
6096                Keyword::NOBYPASSRLS,
6097                Keyword::PASSWORD,
6098                Keyword::CREATEDB,
6099                Keyword::NOCREATEDB,
6100                Keyword::CREATEROLE,
6101                Keyword::NOCREATEROLE,
6102                Keyword::SUPERUSER,
6103                Keyword::NOSUPERUSER,
6104                Keyword::REPLICATION,
6105                Keyword::NOREPLICATION,
6106                Keyword::CONNECTION,
6107                Keyword::VALID,
6108                Keyword::IN,
6109                Keyword::ROLE,
6110                Keyword::ADMIN,
6111                Keyword::USER,
6112            ]
6113        } else {
6114            vec![]
6115        };
6116
6117        // MSSQL
6118        let mut authorization_owner = None;
6119        // Postgres
6120        let mut login = None;
6121        let mut inherit = None;
6122        let mut bypassrls = None;
6123        let mut password = None;
6124        let mut create_db = None;
6125        let mut create_role = None;
6126        let mut superuser = None;
6127        let mut replication = None;
6128        let mut connection_limit = None;
6129        let mut valid_until = None;
6130        let mut in_role = vec![];
6131        let mut in_group = vec![];
6132        let mut role = vec![];
6133        let mut user = vec![];
6134        let mut admin = vec![];
6135
6136        while let Some(keyword) = self.parse_one_of_keywords(&optional_keywords) {
6137            let loc = self
6138                .tokens
6139                .get(self.index - 1)
6140                .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
6141            match keyword {
6142                Keyword::AUTHORIZATION => {
6143                    if authorization_owner.is_some() {
6144                        parser_err!("Found multiple AUTHORIZATION", loc)
6145                    } else {
6146                        authorization_owner = Some(self.parse_object_name(false)?);
6147                        Ok(())
6148                    }
6149                }
6150                Keyword::LOGIN | Keyword::NOLOGIN => {
6151                    if login.is_some() {
6152                        parser_err!("Found multiple LOGIN or NOLOGIN", loc)
6153                    } else {
6154                        login = Some(keyword == Keyword::LOGIN);
6155                        Ok(())
6156                    }
6157                }
6158                Keyword::INHERIT | Keyword::NOINHERIT => {
6159                    if inherit.is_some() {
6160                        parser_err!("Found multiple INHERIT or NOINHERIT", loc)
6161                    } else {
6162                        inherit = Some(keyword == Keyword::INHERIT);
6163                        Ok(())
6164                    }
6165                }
6166                Keyword::BYPASSRLS | Keyword::NOBYPASSRLS => {
6167                    if bypassrls.is_some() {
6168                        parser_err!("Found multiple BYPASSRLS or NOBYPASSRLS", loc)
6169                    } else {
6170                        bypassrls = Some(keyword == Keyword::BYPASSRLS);
6171                        Ok(())
6172                    }
6173                }
6174                Keyword::CREATEDB | Keyword::NOCREATEDB => {
6175                    if create_db.is_some() {
6176                        parser_err!("Found multiple CREATEDB or NOCREATEDB", loc)
6177                    } else {
6178                        create_db = Some(keyword == Keyword::CREATEDB);
6179                        Ok(())
6180                    }
6181                }
6182                Keyword::CREATEROLE | Keyword::NOCREATEROLE => {
6183                    if create_role.is_some() {
6184                        parser_err!("Found multiple CREATEROLE or NOCREATEROLE", loc)
6185                    } else {
6186                        create_role = Some(keyword == Keyword::CREATEROLE);
6187                        Ok(())
6188                    }
6189                }
6190                Keyword::SUPERUSER | Keyword::NOSUPERUSER => {
6191                    if superuser.is_some() {
6192                        parser_err!("Found multiple SUPERUSER or NOSUPERUSER", loc)
6193                    } else {
6194                        superuser = Some(keyword == Keyword::SUPERUSER);
6195                        Ok(())
6196                    }
6197                }
6198                Keyword::REPLICATION | Keyword::NOREPLICATION => {
6199                    if replication.is_some() {
6200                        parser_err!("Found multiple REPLICATION or NOREPLICATION", loc)
6201                    } else {
6202                        replication = Some(keyword == Keyword::REPLICATION);
6203                        Ok(())
6204                    }
6205                }
6206                Keyword::PASSWORD => {
6207                    if password.is_some() {
6208                        parser_err!("Found multiple PASSWORD", loc)
6209                    } else {
6210                        password = if self.parse_keyword(Keyword::NULL) {
6211                            Some(Password::NullPassword)
6212                        } else {
6213                            Some(Password::Password(Expr::Value(self.parse_value()?)))
6214                        };
6215                        Ok(())
6216                    }
6217                }
6218                Keyword::CONNECTION => {
6219                    self.expect_keyword_is(Keyword::LIMIT)?;
6220                    if connection_limit.is_some() {
6221                        parser_err!("Found multiple CONNECTION LIMIT", loc)
6222                    } else {
6223                        connection_limit = Some(Expr::Value(self.parse_number_value()?));
6224                        Ok(())
6225                    }
6226                }
6227                Keyword::VALID => {
6228                    self.expect_keyword_is(Keyword::UNTIL)?;
6229                    if valid_until.is_some() {
6230                        parser_err!("Found multiple VALID UNTIL", loc)
6231                    } else {
6232                        valid_until = Some(Expr::Value(self.parse_value()?));
6233                        Ok(())
6234                    }
6235                }
6236                Keyword::IN => {
6237                    if self.parse_keyword(Keyword::ROLE) {
6238                        if !in_role.is_empty() {
6239                            parser_err!("Found multiple IN ROLE", loc)
6240                        } else {
6241                            in_role = self.parse_comma_separated(|p| p.parse_identifier())?;
6242                            Ok(())
6243                        }
6244                    } else if self.parse_keyword(Keyword::GROUP) {
6245                        if !in_group.is_empty() {
6246                            parser_err!("Found multiple IN GROUP", loc)
6247                        } else {
6248                            in_group = self.parse_comma_separated(|p| p.parse_identifier())?;
6249                            Ok(())
6250                        }
6251                    } else {
6252                        self.expected("ROLE or GROUP after IN", self.peek_token())
6253                    }
6254                }
6255                Keyword::ROLE => {
6256                    if !role.is_empty() {
6257                        parser_err!("Found multiple ROLE", loc)
6258                    } else {
6259                        role = self.parse_comma_separated(|p| p.parse_identifier())?;
6260                        Ok(())
6261                    }
6262                }
6263                Keyword::USER => {
6264                    if !user.is_empty() {
6265                        parser_err!("Found multiple USER", loc)
6266                    } else {
6267                        user = self.parse_comma_separated(|p| p.parse_identifier())?;
6268                        Ok(())
6269                    }
6270                }
6271                Keyword::ADMIN => {
6272                    if !admin.is_empty() {
6273                        parser_err!("Found multiple ADMIN", loc)
6274                    } else {
6275                        admin = self.parse_comma_separated(|p| p.parse_identifier())?;
6276                        Ok(())
6277                    }
6278                }
6279                _ => break,
6280            }?
6281        }
6282
6283        Ok(CreateRole {
6284            names,
6285            if_not_exists,
6286            login,
6287            inherit,
6288            bypassrls,
6289            password,
6290            create_db,
6291            create_role,
6292            replication,
6293            superuser,
6294            connection_limit,
6295            valid_until,
6296            in_role,
6297            in_group,
6298            role,
6299            user,
6300            admin,
6301            authorization_owner,
6302        }
6303        .into())
6304    }
6305
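    /// Parse the target of an `OWNER TO` clause (also used for the `TO` list of
    /// `CREATE POLICY`): `CURRENT_USER`, `CURRENT_ROLE`, `SESSION_USER`, or a
    /// plain identifier.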
6306    pub fn parse_owner(&mut self) -> Result<Owner, ParserError> {
6307        let owner = match self.parse_one_of_keywords(&[Keyword::CURRENT_USER, Keyword::CURRENT_ROLE, Keyword::SESSION_USER]) {
6308            Some(Keyword::CURRENT_USER) => Owner::CurrentUser,
6309            Some(Keyword::CURRENT_ROLE) => Owner::CurrentRole,
6310            Some(Keyword::SESSION_USER) => Owner::SessionUser,
6311            Some(unexpected_keyword) => return Err(ParserError::ParserError(
6312                format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in owner"),
6313            )),
6314            None => {
6315                match self.parse_identifier() {
6316                    Ok(ident) => Owner::Ident(ident),
6317                    Err(e) => {
6318                        return Err(ParserError::ParserError(format!("Expected: CURRENT_USER, CURRENT_ROLE, SESSION_USER or identifier after OWNER TO. {e}")))
6319                    }
6320                }
6321            }
6322        };
6323        Ok(owner)
6324    }
6325
6326    /// Parses a [Statement::CreateDomain] statement.
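    ///
    /// For example (illustrative name and constraint):
    ///
    /// ```sql
    /// CREATE DOMAIN positive_int AS INTEGER DEFAULT 1 CHECK (VALUE > 0);
    /// ```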
6327    fn parse_create_domain(&mut self) -> Result<Statement, ParserError> {
6328        let name = self.parse_object_name(false)?;
6329        self.expect_keyword_is(Keyword::AS)?;
6330        let data_type = self.parse_data_type()?;
6331        let collation = if self.parse_keyword(Keyword::COLLATE) {
6332            Some(self.parse_identifier()?)
6333        } else {
6334            None
6335        };
6336        let default = if self.parse_keyword(Keyword::DEFAULT) {
6337            Some(self.parse_expr()?)
6338        } else {
6339            None
6340        };
6341        let mut constraints = Vec::new();
6342        while let Some(constraint) = self.parse_optional_table_constraint()? {
6343            constraints.push(constraint);
6344        }
6345
6346        Ok(Statement::CreateDomain(CreateDomain {
6347            name,
6348            data_type,
6349            collation,
6350            default,
6351            constraints,
6352        }))
6353    }
6354
6355    /// ```sql
6356    ///     CREATE POLICY name ON table_name [ AS { PERMISSIVE | RESTRICTIVE } ]
6357    ///     [ FOR { ALL | SELECT | INSERT | UPDATE | DELETE } ]
6358    ///     [ TO { role_name | PUBLIC | CURRENT_USER | CURRENT_ROLE | SESSION_USER } [, ...] ]
6359    ///     [ USING ( using_expression ) ]
6360    ///     [ WITH CHECK ( with_check_expression ) ]
6361    /// ```
6362    ///
6363    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createpolicy.html)
6364    pub fn parse_create_policy(&mut self) -> Result<Statement, ParserError> {
6365        let name = self.parse_identifier()?;
6366        self.expect_keyword_is(Keyword::ON)?;
6367        let table_name = self.parse_object_name(false)?;
6368
6369        let policy_type = if self.parse_keyword(Keyword::AS) {
6370            let keyword =
6371                self.expect_one_of_keywords(&[Keyword::PERMISSIVE, Keyword::RESTRICTIVE])?;
6372            Some(match keyword {
6373                Keyword::PERMISSIVE => CreatePolicyType::Permissive,
6374                Keyword::RESTRICTIVE => CreatePolicyType::Restrictive,
6375                unexpected_keyword => return Err(ParserError::ParserError(
6376                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in policy type"),
6377                )),
6378            })
6379        } else {
6380            None
6381        };
6382
6383        let command = if self.parse_keyword(Keyword::FOR) {
6384            let keyword = self.expect_one_of_keywords(&[
6385                Keyword::ALL,
6386                Keyword::SELECT,
6387                Keyword::INSERT,
6388                Keyword::UPDATE,
6389                Keyword::DELETE,
6390            ])?;
6391            Some(match keyword {
6392                Keyword::ALL => CreatePolicyCommand::All,
6393                Keyword::SELECT => CreatePolicyCommand::Select,
6394                Keyword::INSERT => CreatePolicyCommand::Insert,
6395                Keyword::UPDATE => CreatePolicyCommand::Update,
6396                Keyword::DELETE => CreatePolicyCommand::Delete,
6397                unexpected_keyword => return Err(ParserError::ParserError(
6398                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in policy command"),
6399                )),
6400            })
6401        } else {
6402            None
6403        };
6404
6405        let to = if self.parse_keyword(Keyword::TO) {
6406            Some(self.parse_comma_separated(|p| p.parse_owner())?)
6407        } else {
6408            None
6409        };
6410
6411        let using = if self.parse_keyword(Keyword::USING) {
6412            self.expect_token(&Token::LParen)?;
6413            let expr = self.parse_expr()?;
6414            self.expect_token(&Token::RParen)?;
6415            Some(expr)
6416        } else {
6417            None
6418        };
6419
6420        let with_check = if self.parse_keywords(&[Keyword::WITH, Keyword::CHECK]) {
6421            self.expect_token(&Token::LParen)?;
6422            let expr = self.parse_expr()?;
6423            self.expect_token(&Token::RParen)?;
6424            Some(expr)
6425        } else {
6426            None
6427        };
6428
6429        Ok(CreatePolicy {
6430            name,
6431            table_name,
6432            policy_type,
6433            command,
6434            to,
6435            using,
6436            with_check,
6437        })
6438    }
6439
6440    /// ```sql
6441    /// CREATE CONNECTOR [IF NOT EXISTS] connector_name
6442    /// [TYPE datasource_type]
6443    /// [URL datasource_url]
6444    /// [COMMENT connector_comment]
6445    /// [WITH DCPROPERTIES(property_name=property_value, ...)]
6446    /// ```
6447    ///
6448    /// [Hive Documentation](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362034#LanguageManualDDL-CreateDataConnectorCreateConnector)
6449    pub fn parse_create_connector(&mut self) -> Result<Statement, ParserError> {
6450        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6451        let name = self.parse_identifier()?;
6452
6453        let connector_type = if self.parse_keyword(Keyword::TYPE) {
6454            Some(self.parse_literal_string()?)
6455        } else {
6456            None
6457        };
6458
6459        let url = if self.parse_keyword(Keyword::URL) {
6460            Some(self.parse_literal_string()?)
6461        } else {
6462            None
6463        };
6464
6465        let comment = self.parse_optional_inline_comment()?;
6466
6467        let with_dcproperties =
6468            match self.parse_options_with_keywords(&[Keyword::WITH, Keyword::DCPROPERTIES])? {
6469                properties if !properties.is_empty() => Some(properties),
6470                _ => None,
6471            };
6472
6473        Ok(Statement::CreateConnector(CreateConnector {
6474            name,
6475            if_not_exists,
6476            connector_type,
6477            url,
6478            comment,
6479            with_dcproperties,
6480        }))
6481    }
6482
6483    /// Parse an operator name, which can contain special characters like +, -, <, >, =
6484    /// that are tokenized as operator tokens rather than identifiers.
6485    /// This is used for PostgreSQL CREATE OPERATOR statements.
6486    ///
6487    /// Examples: `+`, `myschema.+`, `pg_catalog.<=`
6488    fn parse_operator_name(&mut self) -> Result<ObjectName, ParserError> {
6489        let mut parts = vec![];
6490        loop {
6491            parts.push(ObjectNamePart::Identifier(Ident::new(
6492                self.next_token().to_string(),
6493            )));
6494            if !self.consume_token(&Token::Period) {
6495                break;
6496            }
6497        }
6498        Ok(ObjectName(parts))
6499    }
6500
6501    /// Parse a [Statement::CreateOperator]
6502    ///
6503    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createoperator.html)
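    ///
    /// For example (illustrative only; schema, type and function names are made up):
    ///
    /// ```sql
    /// CREATE OPERATOR myschema.+ (
    ///     LEFTARG = integer,
    ///     RIGHTARG = integer,
    ///     FUNCTION = myschema.int_add
    /// );
    /// ```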
6504    pub fn parse_create_operator(&mut self) -> Result<Statement, ParserError> {
6505        let name = self.parse_operator_name()?;
6506        self.expect_token(&Token::LParen)?;
6507
6508        let mut function: Option<ObjectName> = None;
6509        let mut is_procedure = false;
6510        let mut left_arg: Option<DataType> = None;
6511        let mut right_arg: Option<DataType> = None;
6512        let mut options: Vec<OperatorOption> = Vec::new();
6513
6514        loop {
6515            let keyword = self.expect_one_of_keywords(&[
6516                Keyword::FUNCTION,
6517                Keyword::PROCEDURE,
6518                Keyword::LEFTARG,
6519                Keyword::RIGHTARG,
6520                Keyword::COMMUTATOR,
6521                Keyword::NEGATOR,
6522                Keyword::RESTRICT,
6523                Keyword::JOIN,
6524                Keyword::HASHES,
6525                Keyword::MERGES,
6526            ])?;
6527
6528            match keyword {
6529                Keyword::HASHES if !options.iter().any(|o| matches!(o, OperatorOption::Hashes)) => {
6530                    options.push(OperatorOption::Hashes);
6531                }
6532                Keyword::MERGES if !options.iter().any(|o| matches!(o, OperatorOption::Merges)) => {
6533                    options.push(OperatorOption::Merges);
6534                }
6535                Keyword::FUNCTION | Keyword::PROCEDURE if function.is_none() => {
6536                    self.expect_token(&Token::Eq)?;
6537                    function = Some(self.parse_object_name(false)?);
6538                    is_procedure = keyword == Keyword::PROCEDURE;
6539                }
6540                Keyword::LEFTARG if left_arg.is_none() => {
6541                    self.expect_token(&Token::Eq)?;
6542                    left_arg = Some(self.parse_data_type()?);
6543                }
6544                Keyword::RIGHTARG if right_arg.is_none() => {
6545                    self.expect_token(&Token::Eq)?;
6546                    right_arg = Some(self.parse_data_type()?);
6547                }
6548                Keyword::COMMUTATOR
6549                    if !options
6550                        .iter()
6551                        .any(|o| matches!(o, OperatorOption::Commutator(_))) =>
6552                {
6553                    self.expect_token(&Token::Eq)?;
6554                    if self.parse_keyword(Keyword::OPERATOR) {
6555                        self.expect_token(&Token::LParen)?;
6556                        let op = self.parse_operator_name()?;
6557                        self.expect_token(&Token::RParen)?;
6558                        options.push(OperatorOption::Commutator(op));
6559                    } else {
6560                        options.push(OperatorOption::Commutator(self.parse_operator_name()?));
6561                    }
6562                }
6563                Keyword::NEGATOR
6564                    if !options
6565                        .iter()
6566                        .any(|o| matches!(o, OperatorOption::Negator(_))) =>
6567                {
6568                    self.expect_token(&Token::Eq)?;
6569                    if self.parse_keyword(Keyword::OPERATOR) {
6570                        self.expect_token(&Token::LParen)?;
6571                        let op = self.parse_operator_name()?;
6572                        self.expect_token(&Token::RParen)?;
6573                        options.push(OperatorOption::Negator(op));
6574                    } else {
6575                        options.push(OperatorOption::Negator(self.parse_operator_name()?));
6576                    }
6577                }
6578                Keyword::RESTRICT
6579                    if !options
6580                        .iter()
6581                        .any(|o| matches!(o, OperatorOption::Restrict(_))) =>
6582                {
6583                    self.expect_token(&Token::Eq)?;
6584                    options.push(OperatorOption::Restrict(Some(
6585                        self.parse_object_name(false)?,
6586                    )));
6587                }
6588                Keyword::JOIN if !options.iter().any(|o| matches!(o, OperatorOption::Join(_))) => {
6589                    self.expect_token(&Token::Eq)?;
6590                    options.push(OperatorOption::Join(Some(self.parse_object_name(false)?)));
6591                }
6592                _ => {
6593                    return Err(ParserError::ParserError(format!(
6594                        "Duplicate or unexpected keyword {:?} in CREATE OPERATOR",
6595                        keyword
6596                    )))
6597                }
6598            }
6599
6600            if !self.consume_token(&Token::Comma) {
6601                break;
6602            }
6603        }
6604
6605        // Expect closing parenthesis
6606        self.expect_token(&Token::RParen)?;
6607
6608        // FUNCTION is required
6609        let function = function.ok_or_else(|| {
6610            ParserError::ParserError("CREATE OPERATOR requires FUNCTION parameter".to_string())
6611        })?;
6612
6613        Ok(Statement::CreateOperator(CreateOperator {
6614            name,
6615            function,
6616            is_procedure,
6617            left_arg,
6618            right_arg,
6619            options,
6620        }))
6621    }
6622
6623    /// Parse a [Statement::CreateOperatorFamily]
6624    ///
6625    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createopfamily.html)
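    ///
    /// For example (illustrative): `CREATE OPERATOR FAMILY integer_ops USING btree;`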
6626    pub fn parse_create_operator_family(&mut self) -> Result<Statement, ParserError> {
6627        let name = self.parse_object_name(false)?;
6628        self.expect_keyword(Keyword::USING)?;
6629        let using = self.parse_identifier()?;
6630
6631        Ok(Statement::CreateOperatorFamily(CreateOperatorFamily {
6632            name,
6633            using,
6634        }))
6635    }
6636
6637    /// Parse a [Statement::CreateOperatorClass]
6638    ///
6639    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createopclass.html)
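    ///
    /// For example (illustrative only; adapted from the PostgreSQL docs and
    /// reduced to a subset of the items PostgreSQL allows):
    ///
    /// ```sql
    /// CREATE OPERATOR CLASS int4_ops DEFAULT FOR TYPE int4 USING btree AS
    ///     OPERATOR 1 <,
    ///     OPERATOR 3 =,
    ///     FUNCTION 1 btint4cmp(int4, int4);
    /// ```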
6640    pub fn parse_create_operator_class(&mut self) -> Result<Statement, ParserError> {
6641        let name = self.parse_object_name(false)?;
6642        let default = self.parse_keyword(Keyword::DEFAULT);
6643        self.expect_keywords(&[Keyword::FOR, Keyword::TYPE])?;
6644        let for_type = self.parse_data_type()?;
6645        self.expect_keyword(Keyword::USING)?;
6646        let using = self.parse_identifier()?;
6647
6648        let family = if self.parse_keyword(Keyword::FAMILY) {
6649            Some(self.parse_object_name(false)?)
6650        } else {
6651            None
6652        };
6653
6654        self.expect_keyword(Keyword::AS)?;
6655
6656        let mut items = vec![];
6657        loop {
6658            if self.parse_keyword(Keyword::OPERATOR) {
6659                let strategy_number = self.parse_literal_uint()? as u32;
6660                let operator_name = self.parse_operator_name()?;
6661
6662                // Optional operator argument types
6663                let op_types = if self.consume_token(&Token::LParen) {
6664                    let left = self.parse_data_type()?;
6665                    self.expect_token(&Token::Comma)?;
6666                    let right = self.parse_data_type()?;
6667                    self.expect_token(&Token::RParen)?;
6668                    Some(OperatorArgTypes { left, right })
6669                } else {
6670                    None
6671                };
6672
6673                // Optional purpose
6674                let purpose = if self.parse_keyword(Keyword::FOR) {
6675                    if self.parse_keyword(Keyword::SEARCH) {
6676                        Some(OperatorPurpose::ForSearch)
6677                    } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
6678                        let sort_family = self.parse_object_name(false)?;
6679                        Some(OperatorPurpose::ForOrderBy { sort_family })
6680                    } else {
6681                        return self.expected("SEARCH or ORDER BY after FOR", self.peek_token());
6682                    }
6683                } else {
6684                    None
6685                };
6686
6687                items.push(OperatorClassItem::Operator {
6688                    strategy_number,
6689                    operator_name,
6690                    op_types,
6691                    purpose,
6692                });
6693            } else if self.parse_keyword(Keyword::FUNCTION) {
6694                let support_number = self.parse_literal_uint()? as u32;
6695
6696                // Optional operator types
6697                let op_types = if self.consume_token(&Token::LParen) {
6698                    if self.consume_token(&Token::RParen) {
6699                        Some(vec![]) // explicit empty type list: `()`
6700                    } else {
6701                        let mut types = vec![];
6702                        loop {
6703                            types.push(self.parse_data_type()?);
6704                            if !self.consume_token(&Token::Comma) {
6705                                break;
6706                            }
6707                        }
6708                        self.expect_token(&Token::RParen)?;
6709                        Some(types)
6710                    }
6711                } else {
6712                    None
6713                };
6714
6715                let function_name = self.parse_object_name(false)?;
6716
6717                // Function argument types
6718                let argument_types = if self.consume_token(&Token::LParen) {
6719                    let mut types = vec![];
6720                    loop {
6721                        if self.peek_token() == Token::RParen {
6722                            break;
6723                        }
6724                        types.push(self.parse_data_type()?);
6725                        if !self.consume_token(&Token::Comma) {
6726                            break;
6727                        }
6728                    }
6729                    self.expect_token(&Token::RParen)?;
6730                    types
6731                } else {
6732                    vec![]
6733                };
6734
6735                items.push(OperatorClassItem::Function {
6736                    support_number,
6737                    op_types,
6738                    function_name,
6739                    argument_types,
6740                });
6741            } else if self.parse_keyword(Keyword::STORAGE) {
6742                let storage_type = self.parse_data_type()?;
6743                items.push(OperatorClassItem::Storage { storage_type });
6744            } else {
6745                break;
6746            }
6747
6748            // Check for comma separator
6749            if !self.consume_token(&Token::Comma) {
6750                break;
6751            }
6752        }
6753
6754        Ok(Statement::CreateOperatorClass(CreateOperatorClass {
6755            name,
6756            default,
6757            for_type,
6758            using,
6759            family,
6760            items,
6761        }))
6762    }
6763
6764    pub fn parse_drop(&mut self) -> Result<Statement, ParserError> {
6765        // MySQL dialect supports `TEMPORARY`
6766        let temporary = dialect_of!(self is MySqlDialect | GenericDialect | DuckDbDialect)
6767            && self.parse_keyword(Keyword::TEMPORARY);
6768        let persistent = dialect_of!(self is DuckDbDialect)
6769            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
6770
6771        let object_type = if self.parse_keyword(Keyword::TABLE) {
6772            ObjectType::Table
6773        } else if self.parse_keyword(Keyword::VIEW) {
6774            ObjectType::View
6775        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEW]) {
6776            ObjectType::MaterializedView
6777        } else if self.parse_keyword(Keyword::INDEX) {
6778            ObjectType::Index
6779        } else if self.parse_keyword(Keyword::ROLE) {
6780            ObjectType::Role
6781        } else if self.parse_keyword(Keyword::SCHEMA) {
6782            ObjectType::Schema
6783        } else if self.parse_keyword(Keyword::DATABASE) {
6784            ObjectType::Database
6785        } else if self.parse_keyword(Keyword::SEQUENCE) {
6786            ObjectType::Sequence
6787        } else if self.parse_keyword(Keyword::STAGE) {
6788            ObjectType::Stage
6789        } else if self.parse_keyword(Keyword::TYPE) {
6790            ObjectType::Type
6791        } else if self.parse_keyword(Keyword::USER) {
6792            ObjectType::User
6793        } else if self.parse_keyword(Keyword::STREAM) {
6794            ObjectType::Stream
6795        } else if self.parse_keyword(Keyword::FUNCTION) {
6796            return self.parse_drop_function();
6797        } else if self.parse_keyword(Keyword::POLICY) {
6798            return self.parse_drop_policy();
6799        } else if self.parse_keyword(Keyword::CONNECTOR) {
6800            return self.parse_drop_connector();
6801        } else if self.parse_keyword(Keyword::DOMAIN) {
6802            return self.parse_drop_domain();
6803        } else if self.parse_keyword(Keyword::PROCEDURE) {
6804            return self.parse_drop_procedure();
6805        } else if self.parse_keyword(Keyword::SECRET) {
6806            return self.parse_drop_secret(temporary, persistent);
6807        } else if self.parse_keyword(Keyword::TRIGGER) {
6808            return self.parse_drop_trigger();
6809        } else if self.parse_keyword(Keyword::EXTENSION) {
6810            return self.parse_drop_extension();
6811        } else if self.parse_keyword(Keyword::OPERATOR) {
6812            // Check if this is DROP OPERATOR FAMILY or DROP OPERATOR CLASS
6813            return if self.parse_keyword(Keyword::FAMILY) {
6814                self.parse_drop_operator_family()
6815            } else if self.parse_keyword(Keyword::CLASS) {
6816                self.parse_drop_operator_class()
6817            } else {
6818                self.parse_drop_operator()
6819            };
6820        } else {
6821            return self.expected(
6822                "CONNECTOR, DATABASE, DOMAIN, EXTENSION, FUNCTION, INDEX, OPERATOR, POLICY, PROCEDURE, ROLE, SCHEMA, SECRET, SEQUENCE, STAGE, STREAM, TABLE, TRIGGER, TYPE, VIEW, MATERIALIZED VIEW or USER after DROP",
6823                self.peek_token(),
6824            );
6825        };
6826        // Many dialects support the non-standard `IF EXISTS` clause and allow
6827        // specifying multiple objects to delete in a single statement
6828        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6829        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
6830
6831        let loc = self.peek_token().span.start;
6832        let cascade = self.parse_keyword(Keyword::CASCADE);
6833        let restrict = self.parse_keyword(Keyword::RESTRICT);
6834        let purge = self.parse_keyword(Keyword::PURGE);
6835        if cascade && restrict {
6836            return parser_err!("Cannot specify both CASCADE and RESTRICT in DROP", loc);
6837        }
6838        if object_type == ObjectType::Role && (cascade || restrict || purge) {
6839            return parser_err!(
6840                "Cannot specify CASCADE, RESTRICT, or PURGE in DROP ROLE",
6841                loc
6842            );
6843        }
6844        let table = if self.parse_keyword(Keyword::ON) {
6845            Some(self.parse_object_name(false)?)
6846        } else {
6847            None
6848        };
6849        Ok(Statement::Drop {
6850            object_type,
6851            if_exists,
6852            names,
6853            cascade,
6854            restrict,
6855            purge,
6856            temporary,
6857            table,
6858        })
6859    }
6860
6861    fn parse_optional_drop_behavior(&mut self) -> Option<DropBehavior> {
6862        match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
6863            Some(Keyword::CASCADE) => Some(DropBehavior::Cascade),
6864            Some(Keyword::RESTRICT) => Some(DropBehavior::Restrict),
6865            _ => None,
6866        }
6867    }
6868
6869    /// ```sql
6870    /// DROP FUNCTION [ IF EXISTS ] name [ ( [ [ argmode ] [ argname ] argtype [, ...] ] ) ] [, ...]
6871    /// [ CASCADE | RESTRICT ]
6872    /// ```
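    ///
    /// A statement of this shape should parse (names and types are illustrative):
    /// ```sql
    /// DROP FUNCTION IF EXISTS my_func(INT, TEXT), my_other_func CASCADE
    /// ```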
6873    fn parse_drop_function(&mut self) -> Result<Statement, ParserError> {
6874        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6875        let func_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
6876        let drop_behavior = self.parse_optional_drop_behavior();
6877        Ok(Statement::DropFunction(DropFunction {
6878            if_exists,
6879            func_desc,
6880            drop_behavior,
6881        }))
6882    }
6883
6884    /// ```sql
6885    /// DROP POLICY [ IF EXISTS ] name ON table_name [ CASCADE | RESTRICT ]
6886    /// ```
6887    ///
6888    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-droppolicy.html)
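    ///
    /// A statement of this shape should parse (names are illustrative):
    /// ```sql
    /// DROP POLICY IF EXISTS my_policy ON my_table RESTRICT
    /// ```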
6889    fn parse_drop_policy(&mut self) -> Result<Statement, ParserError> {
6890        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6891        let name = self.parse_identifier()?;
6892        self.expect_keyword_is(Keyword::ON)?;
6893        let table_name = self.parse_object_name(false)?;
6894        let drop_behavior = self.parse_optional_drop_behavior();
6895        Ok(Statement::DropPolicy {
6896            if_exists,
6897            name,
6898            table_name,
6899            drop_behavior,
6900        })
6901    }
6902    /// ```sql
6903    /// DROP CONNECTOR [IF EXISTS] name
6904    /// ```
6905    ///
6906    /// See [Hive](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362034#LanguageManualDDL-DropConnector)
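    ///
    /// A statement of this shape should parse (the connector name is illustrative):
    /// ```sql
    /// DROP CONNECTOR IF EXISTS my_connector
    /// ```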
6907    fn parse_drop_connector(&mut self) -> Result<Statement, ParserError> {
6908        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6909        let name = self.parse_identifier()?;
6910        Ok(Statement::DropConnector { if_exists, name })
6911    }
6912
6913    /// ```sql
6914    /// DROP DOMAIN [ IF EXISTS ] name [ CASCADE | RESTRICT ]
6915    /// ```
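    ///
    /// A statement of this shape should parse (the domain name is illustrative):
    /// ```sql
    /// DROP DOMAIN IF EXISTS my_domain CASCADE
    /// ```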
6916    fn parse_drop_domain(&mut self) -> Result<Statement, ParserError> {
6917        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6918        let name = self.parse_object_name(false)?;
6919        let drop_behavior = self.parse_optional_drop_behavior();
6920        Ok(Statement::DropDomain(DropDomain {
6921            if_exists,
6922            name,
6923            drop_behavior,
6924        }))
6925    }
6926
6927    /// ```sql
6928    /// DROP PROCEDURE [ IF EXISTS ] name [ ( [ [ argmode ] [ argname ] argtype [, ...] ] ) ] [, ...]
6929    /// [ CASCADE | RESTRICT ]
6930    /// ```
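    ///
    /// A statement of this shape should parse (names and types are illustrative):
    /// ```sql
    /// DROP PROCEDURE IF EXISTS my_proc(INT) RESTRICT
    /// ```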
6931    fn parse_drop_procedure(&mut self) -> Result<Statement, ParserError> {
6932        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6933        let proc_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
6934        let drop_behavior = self.parse_optional_drop_behavior();
6935        Ok(Statement::DropProcedure {
6936            if_exists,
6937            proc_desc,
6938            drop_behavior,
6939        })
6940    }
6941
6942    fn parse_function_desc(&mut self) -> Result<FunctionDesc, ParserError> {
6943        let name = self.parse_object_name(false)?;
6944
6945        let args = if self.consume_token(&Token::LParen) {
6946            if self.consume_token(&Token::RParen) {
6947                Some(vec![])
6948            } else {
6949                let args = self.parse_comma_separated(Parser::parse_function_arg)?;
6950                self.expect_token(&Token::RParen)?;
6951                Some(args)
6952            }
6953        } else {
6954            None
6955        };
6956
6957        Ok(FunctionDesc { name, args })
6958    }
6959
6960    /// See [DuckDB Docs](https://duckdb.org/docs/sql/statements/create_secret.html) for more details.
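    ///
    /// A statement of this shape should parse (the secret and storage names are
    /// illustrative):
    /// ```sql
    /// DROP PERSISTENT SECRET IF EXISTS my_secret FROM my_storage
    /// ```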
6961    fn parse_drop_secret(
6962        &mut self,
6963        temporary: bool,
6964        persistent: bool,
6965    ) -> Result<Statement, ParserError> {
6966        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6967        let name = self.parse_identifier()?;
6968        let storage_specifier = if self.parse_keyword(Keyword::FROM) {
6969            self.parse_identifier().ok()
6970        } else {
6971            None
6972        };
6973        let temp = match (temporary, persistent) {
6974            (true, false) => Some(true),
6975            (false, true) => Some(false),
6976            (false, false) => None,
6977            _ => self.expected("TEMPORARY or PERSISTENT", self.peek_token())?,
6978        };
6979
6980        Ok(Statement::DropSecret {
6981            if_exists,
6982            temporary: temp,
6983            name,
6984            storage_specifier,
6985        })
6986    }
6987
6988    /// Parse a `DECLARE` statement.
6989    ///
6990    /// ```sql
6991    /// DECLARE name [ BINARY ] [ ASENSITIVE | INSENSITIVE ] [ [ NO ] SCROLL ]
6992    ///     CURSOR [ { WITH | WITHOUT } HOLD ] FOR query
6993    /// ```
6994    ///
6995    /// The syntax can vary significantly between warehouses. See the grammar
6996    /// of the warehouse-specific parsing function in such cases.
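    ///
    /// Per the generic grammar above, a statement of this shape should parse
    /// (names are illustrative):
    /// ```sql
    /// DECLARE my_cursor BINARY INSENSITIVE SCROLL CURSOR WITH HOLD FOR SELECT * FROM my_table
    /// ```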
6997    pub fn parse_declare(&mut self) -> Result<Statement, ParserError> {
6998        if dialect_of!(self is BigQueryDialect) {
6999            return self.parse_big_query_declare();
7000        }
7001        if dialect_of!(self is SnowflakeDialect) {
7002            return self.parse_snowflake_declare();
7003        }
7004        if dialect_of!(self is MsSqlDialect) {
7005            return self.parse_mssql_declare();
7006        }
7007
7008        let name = self.parse_identifier()?;
7009
7010        let binary = Some(self.parse_keyword(Keyword::BINARY));
7011        let sensitive = if self.parse_keyword(Keyword::INSENSITIVE) {
7012            Some(true)
7013        } else if self.parse_keyword(Keyword::ASENSITIVE) {
7014            Some(false)
7015        } else {
7016            None
7017        };
7018        let scroll = if self.parse_keyword(Keyword::SCROLL) {
7019            Some(true)
7020        } else if self.parse_keywords(&[Keyword::NO, Keyword::SCROLL]) {
7021            Some(false)
7022        } else {
7023            None
7024        };
7025
7026        self.expect_keyword_is(Keyword::CURSOR)?;
7027        let declare_type = Some(DeclareType::Cursor);
7028
7029        let hold = match self.parse_one_of_keywords(&[Keyword::WITH, Keyword::WITHOUT]) {
7030            Some(keyword) => {
7031                self.expect_keyword_is(Keyword::HOLD)?;
7032
7033                match keyword {
7034                    Keyword::WITH => Some(true),
7035                    Keyword::WITHOUT => Some(false),
7036                    unexpected_keyword => return Err(ParserError::ParserError(
7037                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in cursor hold"),
7038                    )),
7039                }
7040            }
7041            None => None,
7042        };
7043
7044        self.expect_keyword_is(Keyword::FOR)?;
7045
7046        let query = Some(self.parse_query()?);
7047
7048        Ok(Statement::Declare {
7049            stmts: vec![Declare {
7050                names: vec![name],
7051                data_type: None,
7052                assignment: None,
7053                declare_type,
7054                binary,
7055                sensitive,
7056                scroll,
7057                hold,
7058                for_query: query,
7059            }],
7060        })
7061    }
7062
7063    /// Parse a [BigQuery] `DECLARE` statement.
7064    ///
7065    /// Syntax:
7066    /// ```text
7067    /// DECLARE variable_name[, ...] [{ <variable_type> | <DEFAULT expression> }];
7068    /// ```
7069    /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#declare
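    ///
    /// A statement of this shape should parse (variable names are illustrative):
    /// ```sql
    /// DECLARE x, y INT64 DEFAULT 42
    /// ```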
7070    pub fn parse_big_query_declare(&mut self) -> Result<Statement, ParserError> {
7071        let names = self.parse_comma_separated(Parser::parse_identifier)?;
7072
7073        let data_type = match self.peek_token().token {
7074            Token::Word(w) if w.keyword == Keyword::DEFAULT => None,
7075            _ => Some(self.parse_data_type()?),
7076        };
7077
7078        let expr = if data_type.is_some() {
7079            if self.parse_keyword(Keyword::DEFAULT) {
7080                Some(self.parse_expr()?)
7081            } else {
7082                None
7083            }
7084        } else {
7085            // If no variable type is given, a default expression must be specified, per BQ docs.
7086            // i.e. `DECLARE foo;` is invalid.
7087            self.expect_keyword_is(Keyword::DEFAULT)?;
7088            Some(self.parse_expr()?)
7089        };
7090
7091        Ok(Statement::Declare {
7092            stmts: vec![Declare {
7093                names,
7094                data_type,
7095                assignment: expr.map(|expr| DeclareAssignment::Default(Box::new(expr))),
7096                declare_type: None,
7097                binary: None,
7098                sensitive: None,
7099                scroll: None,
7100                hold: None,
7101                for_query: None,
7102            }],
7103        })
7104    }
7105
7106    /// Parse a [Snowflake] `DECLARE` statement.
7107    ///
7108    /// Syntax:
7109    /// ```text
7110    /// DECLARE
7111    ///   [{ <variable_declaration>
7112    ///      | <cursor_declaration>
7113    ///      | <resultset_declaration>
7114    ///      | <exception_declaration> }; ... ]
7115    ///
7116    /// <variable_declaration>
7117    /// <variable_name> [<type>] [ { DEFAULT | := } <expression>]
7118    ///
7119    /// <cursor_declaration>
7120    /// <cursor_name> CURSOR FOR <query>
7121    ///
7122    /// <resultset_declaration>
7123    /// <resultset_name> RESULTSET [ { DEFAULT | := } ( <query> ) ] ;
7124    ///
7125    /// <exception_declaration>
7126    /// <exception_name> EXCEPTION [ ( <exception_number> , '<exception_message>' ) ] ;
7127    /// ```
7128    ///
7129    /// [Snowflake]: https://docs.snowflake.com/en/sql-reference/snowflake-scripting/declare
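    ///
    /// A declaration block of this shape should parse (names and types are
    /// illustrative):
    /// ```sql
    /// DECLARE
    ///   profit NUMBER(38, 2) DEFAULT 0.0;
    ///   c1 CURSOR FOR SELECT price FROM invoices;
    /// ```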
7130    pub fn parse_snowflake_declare(&mut self) -> Result<Statement, ParserError> {
7131        let mut stmts = vec![];
7132        loop {
7133            let name = self.parse_identifier()?;
7134            let (declare_type, for_query, assigned_expr, data_type) =
7135                if self.parse_keyword(Keyword::CURSOR) {
7136                    self.expect_keyword_is(Keyword::FOR)?;
7137                    match self.peek_token().token {
7138                        Token::Word(w) if w.keyword == Keyword::SELECT => (
7139                            Some(DeclareType::Cursor),
7140                            Some(self.parse_query()?),
7141                            None,
7142                            None,
7143                        ),
7144                        _ => (
7145                            Some(DeclareType::Cursor),
7146                            None,
7147                            Some(DeclareAssignment::For(Box::new(self.parse_expr()?))),
7148                            None,
7149                        ),
7150                    }
7151                } else if self.parse_keyword(Keyword::RESULTSET) {
7152                    let assigned_expr = if self.peek_token().token != Token::SemiColon {
7153                        self.parse_snowflake_variable_declaration_expression()?
7154                    } else {
7155                        // Nothing more to do. The statement has no further parameters.
7156                        None
7157                    };
7158
7159                    (Some(DeclareType::ResultSet), None, assigned_expr, None)
7160                } else if self.parse_keyword(Keyword::EXCEPTION) {
7161                    let assigned_expr = if self.peek_token().token == Token::LParen {
7162                        Some(DeclareAssignment::Expr(Box::new(self.parse_expr()?)))
7163                    } else {
7164                        // Nothing more to do. The statement has no further parameters.
7165                        None
7166                    };
7167
7168                    (Some(DeclareType::Exception), None, assigned_expr, None)
7169                } else {
7170                    // Without an explicit keyword, the only valid option is variable declaration.
7171                    let (assigned_expr, data_type) = if let Some(assigned_expr) =
7172                        self.parse_snowflake_variable_declaration_expression()?
7173                    {
7174                        (Some(assigned_expr), None)
7175                    } else if let Token::Word(_) = self.peek_token().token {
7176                        let data_type = self.parse_data_type()?;
7177                        (
7178                            self.parse_snowflake_variable_declaration_expression()?,
7179                            Some(data_type),
7180                        )
7181                    } else {
7182                        (None, None)
7183                    };
7184                    (None, None, assigned_expr, data_type)
7185                };
7186            let stmt = Declare {
7187                names: vec![name],
7188                data_type,
7189                assignment: assigned_expr,
7190                declare_type,
7191                binary: None,
7192                sensitive: None,
7193                scroll: None,
7194                hold: None,
7195                for_query,
7196            };
7197
7198            stmts.push(stmt);
7199            if self.consume_token(&Token::SemiColon) {
7200                match self.peek_token().token {
7201                    Token::Word(w)
7202                        if ALL_KEYWORDS
7203                            .binary_search(&w.value.to_uppercase().as_str())
7204                            .is_err() =>
7205                    {
7206                        // Not a keyword - start of a new declaration.
7207                        continue;
7208                    }
7209                    _ => {
7210                        // Put back the semicolon, this is the end of the DECLARE statement.
7211                        self.prev_token();
7212                    }
7213                }
7214            }
7215
7216            break;
7217        }
7218
7219        Ok(Statement::Declare { stmts })
7220    }
7221
7222    /// Parse a [MsSql] `DECLARE` statement.
7223    ///
7224    /// Syntax:
7225    /// ```text
7226    /// DECLARE
7227    /// {
7228    ///   { @local_variable [AS] data_type [ = value ] }
7229    ///   | { @cursor_variable_name CURSOR [ FOR ] }
7230    /// } [ ,...n ]
7231    /// ```
7232    /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/language-elements/declare-local-variable-transact-sql?view=sql-server-ver16
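    ///
    /// A statement of this shape should parse (variable names are illustrative):
    /// ```sql
    /// DECLARE @foo INT = 7, @bar CURSOR
    /// ```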
7233    pub fn parse_mssql_declare(&mut self) -> Result<Statement, ParserError> {
7234        let stmts = self.parse_comma_separated(Parser::parse_mssql_declare_stmt)?;
7235
7236        Ok(Statement::Declare { stmts })
7237    }
7238
7239    /// Parse the body of a [MsSql] `DECLARE` statement.
7240    ///
7241    /// Syntax:
7242    /// ```text
7243    /// {
7244    ///   { @local_variable [AS] data_type [ = value ] }
7245    ///   | { @cursor_variable_name CURSOR [ FOR ] }
7246    /// } [ ,...n ]
7247    /// ```
7248    /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/language-elements/declare-local-variable-transact-sql?view=sql-server-ver16
7249    pub fn parse_mssql_declare_stmt(&mut self) -> Result<Declare, ParserError> {
7250        let name = {
7251            let ident = self.parse_identifier()?;
7252            if !ident.value.starts_with('@')
7253                && !matches!(
7254                    self.peek_token().token,
7255                    Token::Word(w) if w.keyword == Keyword::CURSOR
7256                )
7257            {
7258                Err(ParserError::TokenizerError(
7259                    "Invalid MsSql variable declaration.".to_string(),
7260                ))
7261            } else {
7262                Ok(ident)
7263            }
7264        }?;
7265
7266        let (declare_type, data_type) = match self.peek_token().token {
7267            Token::Word(w) => match w.keyword {
7268                Keyword::CURSOR => {
7269                    self.next_token();
7270                    (Some(DeclareType::Cursor), None)
7271                }
7272                Keyword::AS => {
7273                    self.next_token();
7274                    (None, Some(self.parse_data_type()?))
7275                }
7276                _ => (None, Some(self.parse_data_type()?)),
7277            },
7278            _ => (None, Some(self.parse_data_type()?)),
7279        };
7280
7281        let (for_query, assignment) = if self.peek_keyword(Keyword::FOR) {
7282            self.next_token();
7283            let query = Some(self.parse_query()?);
7284            (query, None)
7285        } else {
7286            let assignment = self.parse_mssql_variable_declaration_expression()?;
7287            (None, assignment)
7288        };
7289
7290        Ok(Declare {
7291            names: vec![name],
7292            data_type,
7293            assignment,
7294            declare_type,
7295            binary: None,
7296            sensitive: None,
7297            scroll: None,
7298            hold: None,
7299            for_query,
7300        })
7301    }
7302
7303    /// Parses the assigned expression in a variable declaration.
7304    ///
7305    /// Syntax:
7306    /// ```text
7307    /// [ { DEFAULT | := } <expression>]
7308    /// ```
7309    /// <https://docs.snowflake.com/en/sql-reference/snowflake-scripting/declare#variable-declaration-syntax>
7310    pub fn parse_snowflake_variable_declaration_expression(
7311        &mut self,
7312    ) -> Result<Option<DeclareAssignment>, ParserError> {
7313        Ok(match self.peek_token().token {
7314            Token::Word(w) if w.keyword == Keyword::DEFAULT => {
7315                self.next_token(); // Skip `DEFAULT`
7316                Some(DeclareAssignment::Default(Box::new(self.parse_expr()?)))
7317            }
7318            Token::Assignment => {
7319                self.next_token(); // Skip `:=`
7320                Some(DeclareAssignment::DuckAssignment(Box::new(
7321                    self.parse_expr()?,
7322                )))
7323            }
7324            _ => None,
7325        })
7326    }
7327
7328    /// Parses the assigned expression in a variable declaration.
7329    ///
7330    /// Syntax:
7331    /// ```text
7332    /// [ = <expression>]
7333    /// ```
7334    pub fn parse_mssql_variable_declaration_expression(
7335        &mut self,
7336    ) -> Result<Option<DeclareAssignment>, ParserError> {
7337        Ok(match self.peek_token().token {
7338            Token::Eq => {
7339                self.next_token(); // Skip `=`
7340                Some(DeclareAssignment::MsSqlAssignment(Box::new(
7341                    self.parse_expr()?,
7342                )))
7343            }
7344            _ => None,
7345        })
7346    }
7347
7348    // FETCH [ direction { FROM | IN } ] cursor [ INTO target ]
7349    pub fn parse_fetch_statement(&mut self) -> Result<Statement, ParserError> {
7350        let direction = if self.parse_keyword(Keyword::NEXT) {
7351            FetchDirection::Next
7352        } else if self.parse_keyword(Keyword::PRIOR) {
7353            FetchDirection::Prior
7354        } else if self.parse_keyword(Keyword::FIRST) {
7355            FetchDirection::First
7356        } else if self.parse_keyword(Keyword::LAST) {
7357            FetchDirection::Last
7358        } else if self.parse_keyword(Keyword::ABSOLUTE) {
7359            FetchDirection::Absolute {
7360                limit: self.parse_number_value()?.value,
7361            }
7362        } else if self.parse_keyword(Keyword::RELATIVE) {
7363            FetchDirection::Relative {
7364                limit: self.parse_number_value()?.value,
7365            }
7366        } else if self.parse_keyword(Keyword::FORWARD) {
7367            if self.parse_keyword(Keyword::ALL) {
7368                FetchDirection::ForwardAll
7369            } else {
7370                FetchDirection::Forward {
7371                    // TODO: Support optional
7372                    limit: Some(self.parse_number_value()?.value),
7373                }
7374            }
7375        } else if self.parse_keyword(Keyword::BACKWARD) {
7376            if self.parse_keyword(Keyword::ALL) {
7377                FetchDirection::BackwardAll
7378            } else {
7379                FetchDirection::Backward {
7380                    // TODO: Support optional
7381                    limit: Some(self.parse_number_value()?.value),
7382                }
7383            }
7384        } else if self.parse_keyword(Keyword::ALL) {
7385            FetchDirection::All
7386        } else {
7387            FetchDirection::Count {
7388                limit: self.parse_number_value()?.value,
7389            }
7390        };
7391
7392        let position = if self.peek_keyword(Keyword::FROM) {
7393            self.expect_keyword(Keyword::FROM)?;
7394            FetchPosition::From
7395        } else if self.peek_keyword(Keyword::IN) {
7396            self.expect_keyword(Keyword::IN)?;
7397            FetchPosition::In
7398        } else {
7399            return parser_err!("Expected FROM or IN", self.peek_token().span.start);
7400        };
7401
7402        let name = self.parse_identifier()?;
7403
7404        let into = if self.parse_keyword(Keyword::INTO) {
7405            Some(self.parse_object_name(false)?)
7406        } else {
7407            None
7408        };
7409
7410        Ok(Statement::Fetch {
7411            name,
7412            direction,
7413            position,
7414            into,
7415        })
7416    }
7417
7418    pub fn parse_discard(&mut self) -> Result<Statement, ParserError> {
7419        let object_type = if self.parse_keyword(Keyword::ALL) {
7420            DiscardObject::ALL
7421        } else if self.parse_keyword(Keyword::PLANS) {
7422            DiscardObject::PLANS
7423        } else if self.parse_keyword(Keyword::SEQUENCES) {
7424            DiscardObject::SEQUENCES
7425        } else if self.parse_keyword(Keyword::TEMP) || self.parse_keyword(Keyword::TEMPORARY) {
7426            DiscardObject::TEMP
7427        } else {
7428            return self.expected(
7429                "ALL, PLANS, SEQUENCES, TEMP or TEMPORARY after DISCARD",
7430                self.peek_token(),
7431            );
7432        };
7433        Ok(Statement::Discard { object_type })
7434    }
7435
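    /// Parse the remainder of a `CREATE [UNIQUE] INDEX` statement; the leading
    /// `CREATE [UNIQUE] INDEX` keywords are assumed to have been consumed by the
    /// caller, with `unique` passed in as a flag.
    ///
    /// A statement of this shape should parse (names are illustrative; several
    /// clauses are dialect-specific):
    /// ```sql
    /// CREATE UNIQUE INDEX IF NOT EXISTS my_idx ON my_table USING BTREE (a, b DESC) WHERE a > 0
    /// ```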
7436    pub fn parse_create_index(&mut self, unique: bool) -> Result<Statement, ParserError> {
7437        let concurrently = self.parse_keyword(Keyword::CONCURRENTLY);
7438        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7439
7440        let mut using = None;
7441
7442        let index_name = if if_not_exists || !self.parse_keyword(Keyword::ON) {
7443            let index_name = self.parse_object_name(false)?;
7444            // MySQL allows `USING index_type` either before or after `ON table_name`
7445            using = self.parse_optional_using_then_index_type()?;
7446            self.expect_keyword_is(Keyword::ON)?;
7447            Some(index_name)
7448        } else {
7449            None
7450        };
7451
7452        let table_name = self.parse_object_name(false)?;
7453
7454        // MySQL allows having two `USING` clauses.
7455        // In that case, the second clause overwrites the first.
7456        using = self.parse_optional_using_then_index_type()?.or(using);
7457
7458        let columns = self.parse_parenthesized_index_column_list()?;
7459
7460        let include = if self.parse_keyword(Keyword::INCLUDE) {
7461            self.expect_token(&Token::LParen)?;
7462            let columns = self.parse_comma_separated(|p| p.parse_identifier())?;
7463            self.expect_token(&Token::RParen)?;
7464            columns
7465        } else {
7466            vec![]
7467        };
7468
7469        let nulls_distinct = if self.parse_keyword(Keyword::NULLS) {
7470            let not = self.parse_keyword(Keyword::NOT);
7471            self.expect_keyword_is(Keyword::DISTINCT)?;
7472            Some(!not)
7473        } else {
7474            None
7475        };
7476
7477        let with = if self.dialect.supports_create_index_with_clause()
7478            && self.parse_keyword(Keyword::WITH)
7479        {
7480            self.expect_token(&Token::LParen)?;
7481            let with_params = self.parse_comma_separated(Parser::parse_expr)?;
7482            self.expect_token(&Token::RParen)?;
7483            with_params
7484        } else {
7485            Vec::new()
7486        };
7487
7488        let predicate = if self.parse_keyword(Keyword::WHERE) {
7489            Some(self.parse_expr()?)
7490        } else {
7491            None
7492        };
7493
7494        // MySQL options (including the modern style of `USING` after the column list instead of
7495        // before, which is deprecated) shouldn't conflict with other preceding options (e.g. `WITH
7496        // PARSER` won't be caught by the above `WITH` clause parsing because MySQL doesn't set that
7497        // support flag). This is probably invalid syntax for other dialects, but it is simpler to
7498        // parse it anyway (as we do inside `ALTER TABLE` and `CREATE TABLE` parsing).
7499        let index_options = self.parse_index_options()?;
7500
7501        // MySQL allows `ALGORITHM` and `LOCK` options. Unlike in `ALTER TABLE`, they need not be comma separated.
7502        let mut alter_options = Vec::new();
7503        while self
7504            .peek_one_of_keywords(&[Keyword::ALGORITHM, Keyword::LOCK])
7505            .is_some()
7506        {
7507            alter_options.push(self.parse_alter_table_operation()?)
7508        }
7509
7510        Ok(Statement::CreateIndex(CreateIndex {
7511            name: index_name,
7512            table_name,
7513            using,
7514            columns,
7515            unique,
7516            concurrently,
7517            if_not_exists,
7518            include,
7519            nulls_distinct,
7520            with,
7521            predicate,
7522            index_options,
7523            alter_options,
7524        }))
7525    }
7526
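    /// Parse the remainder of a `CREATE EXTENSION` statement; the leading
    /// `CREATE EXTENSION` keywords are assumed to have been consumed by the caller.
    ///
    /// A statement of this shape should parse (names are illustrative):
    /// ```sql
    /// CREATE EXTENSION IF NOT EXISTS hstore WITH SCHEMA public VERSION v2 CASCADE
    /// ```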
7527    pub fn parse_create_extension(&mut self) -> Result<Statement, ParserError> {
7528        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7529        let name = self.parse_identifier()?;
7530
7531        let (schema, version, cascade) = if self.parse_keyword(Keyword::WITH) {
7532            let schema = if self.parse_keyword(Keyword::SCHEMA) {
7533                Some(self.parse_identifier()?)
7534            } else {
7535                None
7536            };
7537
7538            let version = if self.parse_keyword(Keyword::VERSION) {
7539                Some(self.parse_identifier()?)
7540            } else {
7541                None
7542            };
7543
7544            let cascade = self.parse_keyword(Keyword::CASCADE);
7545
7546            (schema, version, cascade)
7547        } else {
7548            (None, None, false)
7549        };
7550
7551        Ok(CreateExtension {
7552            name,
7553            if_not_exists,
7554            schema,
7555            version,
7556            cascade,
7557        }
7558        .into())
7559    }
7560
7561    /// Parse a PostgreSQL-specific [Statement::DropExtension] statement.
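    ///
    /// A statement of this shape should parse (extension names are illustrative):
    /// ```sql
    /// DROP EXTENSION IF EXISTS hstore, ltree CASCADE
    /// ```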
7562    pub fn parse_drop_extension(&mut self) -> Result<Statement, ParserError> {
7563        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7564        let names = self.parse_comma_separated(|p| p.parse_identifier())?;
7565        let cascade_or_restrict =
7566            self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]);
7567        Ok(Statement::DropExtension(DropExtension {
7568            names,
7569            if_exists,
7570            cascade_or_restrict: cascade_or_restrict
7571                .map(|k| match k {
7572                    Keyword::CASCADE => Ok(ReferentialAction::Cascade),
7573                    Keyword::RESTRICT => Ok(ReferentialAction::Restrict),
7574                    _ => self.expected("CASCADE or RESTRICT", self.peek_token()),
7575                })
7576                .transpose()?,
7577        }))
7578    }
7579
7580    /// Parse a [Statement::DropOperator] statement.
7581    ///
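    /// A statement of this shape should parse, per the signature format parsed
    /// below (operator and type names are illustrative):
    /// ```sql
    /// DROP OPERATOR IF EXISTS ^ (INT, INT), ~ (NONE, BIT) CASCADE
    /// ```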
7582    pub fn parse_drop_operator(&mut self) -> Result<Statement, ParserError> {
7583        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7584        let operators = self.parse_comma_separated(|p| p.parse_drop_operator_signature())?;
7585        let drop_behavior = self.parse_optional_drop_behavior();
7586        Ok(Statement::DropOperator(DropOperator {
7587            if_exists,
7588            operators,
7589            drop_behavior,
7590        }))
7591    }
7592
7593    /// Parse an operator signature for a [Statement::DropOperator]
7594    /// Format: `name ( { left_type | NONE } , right_type )`
7595    fn parse_drop_operator_signature(&mut self) -> Result<DropOperatorSignature, ParserError> {
7596        let name = self.parse_operator_name()?;
7597        self.expect_token(&Token::LParen)?;
7598
7599        // Parse left operand type (or NONE for prefix operators)
7600        let left_type = if self.parse_keyword(Keyword::NONE) {
7601            None
7602        } else {
7603            Some(self.parse_data_type()?)
7604        };
7605
7606        self.expect_token(&Token::Comma)?;
7607
7608        // Parse right operand type (always required)
7609        let right_type = self.parse_data_type()?;
7610
7611        self.expect_token(&Token::RParen)?;
7612
7613        Ok(DropOperatorSignature {
7614            name,
7615            left_type,
7616            right_type,
7617        })
7618    }
7619
7620    /// Parse a [Statement::DropOperatorFamily]
7621    ///
7622    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-dropopfamily.html)
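    ///
    /// A statement of this shape should parse (names are illustrative):
    /// ```sql
    /// DROP OPERATOR FAMILY IF EXISTS float_ops USING btree CASCADE
    /// ```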
7623    pub fn parse_drop_operator_family(&mut self) -> Result<Statement, ParserError> {
7624        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7625        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
7626        self.expect_keyword(Keyword::USING)?;
7627        let using = self.parse_identifier()?;
7628        let drop_behavior = self.parse_optional_drop_behavior();
7629        Ok(Statement::DropOperatorFamily(DropOperatorFamily {
7630            if_exists,
7631            names,
7632            using,
7633            drop_behavior,
7634        }))
7635    }
7636
7637    /// Parse a [Statement::DropOperatorClass]
7638    ///
7639    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-dropopclass.html)
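    ///
    /// A statement of this shape should parse (names are illustrative):
    /// ```sql
    /// DROP OPERATOR CLASS IF EXISTS widget_ops USING btree RESTRICT
    /// ```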
7640    pub fn parse_drop_operator_class(&mut self) -> Result<Statement, ParserError> {
7641        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7642        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
7643        self.expect_keyword(Keyword::USING)?;
7644        let using = self.parse_identifier()?;
7645        let drop_behavior = self.parse_optional_drop_behavior();
7646        Ok(Statement::DropOperatorClass(DropOperatorClass {
7647            if_exists,
7648            names,
7649            using,
7650            drop_behavior,
7651        }))
7652    }
7653
7654    // TODO: Implement parsing for Skewed
7655    pub fn parse_hive_distribution(&mut self) -> Result<HiveDistributionStyle, ParserError> {
7656        if self.parse_keywords(&[Keyword::PARTITIONED, Keyword::BY]) {
7657            self.expect_token(&Token::LParen)?;
7658            let columns = self.parse_comma_separated(Parser::parse_column_def)?;
7659            self.expect_token(&Token::RParen)?;
7660            Ok(HiveDistributionStyle::PARTITIONED { columns })
7661        } else {
7662            Ok(HiveDistributionStyle::NONE)
7663        }
7664    }
7665
7666    pub fn parse_hive_formats(&mut self) -> Result<Option<HiveFormat>, ParserError> {
7667        let mut hive_format: Option<HiveFormat> = None;
7668        loop {
7669            match self.parse_one_of_keywords(&[
7670                Keyword::ROW,
7671                Keyword::STORED,
7672                Keyword::LOCATION,
7673                Keyword::WITH,
7674            ]) {
7675                Some(Keyword::ROW) => {
7676                    hive_format
7677                        .get_or_insert_with(HiveFormat::default)
7678                        .row_format = Some(self.parse_row_format()?);
7679                }
7680                Some(Keyword::STORED) => {
7681                    self.expect_keyword_is(Keyword::AS)?;
7682                    if self.parse_keyword(Keyword::INPUTFORMAT) {
7683                        let input_format = self.parse_expr()?;
7684                        self.expect_keyword_is(Keyword::OUTPUTFORMAT)?;
7685                        let output_format = self.parse_expr()?;
7686                        hive_format.get_or_insert_with(HiveFormat::default).storage =
7687                            Some(HiveIOFormat::IOF {
7688                                input_format,
7689                                output_format,
7690                            });
7691                    } else {
7692                        let format = self.parse_file_format()?;
7693                        hive_format.get_or_insert_with(HiveFormat::default).storage =
7694                            Some(HiveIOFormat::FileFormat { format });
7695                    }
7696                }
7697                Some(Keyword::LOCATION) => {
7698                    hive_format.get_or_insert_with(HiveFormat::default).location =
7699                        Some(self.parse_literal_string()?);
7700                }
7701                Some(Keyword::WITH) => {
7702                    self.prev_token();
7703                    let properties = self
7704                        .parse_options_with_keywords(&[Keyword::WITH, Keyword::SERDEPROPERTIES])?;
7705                    if !properties.is_empty() {
7706                        hive_format
7707                            .get_or_insert_with(HiveFormat::default)
7708                            .serde_properties = Some(properties);
7709                    } else {
7710                        break;
7711                    }
7712                }
7713                None => break,
7714                _ => break,
7715            }
7716        }
7717
7718        Ok(hive_format)
7719    }
7720
7721    pub fn parse_row_format(&mut self) -> Result<HiveRowFormat, ParserError> {
7722        self.expect_keyword_is(Keyword::FORMAT)?;
7723        match self.parse_one_of_keywords(&[Keyword::SERDE, Keyword::DELIMITED]) {
7724            Some(Keyword::SERDE) => {
7725                let class = self.parse_literal_string()?;
7726                Ok(HiveRowFormat::SERDE { class })
7727            }
7728            _ => {
7729                let mut row_delimiters = vec![];
7730
7731                loop {
7732                    match self.parse_one_of_keywords(&[
7733                        Keyword::FIELDS,
7734                        Keyword::COLLECTION,
7735                        Keyword::MAP,
7736                        Keyword::LINES,
7737                        Keyword::NULL,
7738                    ]) {
7739                        Some(Keyword::FIELDS) => {
7740                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
7741                                row_delimiters.push(HiveRowDelimiter {
7742                                    delimiter: HiveDelimiter::FieldsTerminatedBy,
7743                                    char: self.parse_identifier()?,
7744                                });
7745
7746                                if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) {
7747                                    row_delimiters.push(HiveRowDelimiter {
7748                                        delimiter: HiveDelimiter::FieldsEscapedBy,
7749                                        char: self.parse_identifier()?,
7750                                    });
7751                                }
7752                            } else {
7753                                break;
7754                            }
7755                        }
7756                        Some(Keyword::COLLECTION) => {
7757                            if self.parse_keywords(&[
7758                                Keyword::ITEMS,
7759                                Keyword::TERMINATED,
7760                                Keyword::BY,
7761                            ]) {
7762                                row_delimiters.push(HiveRowDelimiter {
7763                                    delimiter: HiveDelimiter::CollectionItemsTerminatedBy,
7764                                    char: self.parse_identifier()?,
7765                                });
7766                            } else {
7767                                break;
7768                            }
7769                        }
7770                        Some(Keyword::MAP) => {
7771                            if self.parse_keywords(&[
7772                                Keyword::KEYS,
7773                                Keyword::TERMINATED,
7774                                Keyword::BY,
7775                            ]) {
7776                                row_delimiters.push(HiveRowDelimiter {
7777                                    delimiter: HiveDelimiter::MapKeysTerminatedBy,
7778                                    char: self.parse_identifier()?,
7779                                });
7780                            } else {
7781                                break;
7782                            }
7783                        }
7784                        Some(Keyword::LINES) => {
7785                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
7786                                row_delimiters.push(HiveRowDelimiter {
7787                                    delimiter: HiveDelimiter::LinesTerminatedBy,
7788                                    char: self.parse_identifier()?,
7789                                });
7790                            } else {
7791                                break;
7792                            }
7793                        }
7794                        Some(Keyword::NULL) => {
7795                            if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) {
7796                                row_delimiters.push(HiveRowDelimiter {
7797                                    delimiter: HiveDelimiter::NullDefinedAs,
7798                                    char: self.parse_identifier()?,
7799                                });
7800                            } else {
7801                                break;
7802                            }
7803                        }
7804                        _ => {
7805                            break;
7806                        }
7807                    }
7808                }
7809
7810                Ok(HiveRowFormat::DELIMITED {
7811                    delimiters: row_delimiters,
7812                })
7813            }
7814        }
7815    }
7816
7817    fn parse_optional_on_cluster(&mut self) -> Result<Option<Ident>, ParserError> {
7818        if self.parse_keywords(&[Keyword::ON, Keyword::CLUSTER]) {
7819            Ok(Some(self.parse_identifier()?))
7820        } else {
7821            Ok(None)
7822        }
7823    }
7824
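    /// Parse the remainder of a `CREATE TABLE` statement; the leading
    /// `CREATE [OR REPLACE] [TEMPORARY] [GLOBAL | LOCAL] [TRANSIENT] TABLE`
    /// keywords are assumed to have been consumed by the caller and are passed
    /// in as flags.
    ///
    /// A statement of this shape should parse (names are illustrative; many of
    /// the optional clauses handled below are dialect-specific):
    /// ```sql
    /// CREATE TABLE IF NOT EXISTS my_table (id INT, name TEXT) AS SELECT * FROM other_table
    /// ```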
7825    pub fn parse_create_table(
7826        &mut self,
7827        or_replace: bool,
7828        temporary: bool,
7829        global: Option<bool>,
7830        transient: bool,
7831    ) -> Result<Statement, ParserError> {
7832        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
7833        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7834        let table_name = self.parse_object_name(allow_unquoted_hyphen)?;
7835
7836        // PostgreSQL PARTITION OF for child partition tables
7837        let partition_of = if dialect_of!(self is PostgreSqlDialect | GenericDialect)
7838            && self.parse_keywords(&[Keyword::PARTITION, Keyword::OF])
7839        {
7840            Some(self.parse_object_name(allow_unquoted_hyphen)?)
7841        } else {
7842            None
7843        };
7844
7845        // ClickHouse has `ON CLUSTER 'cluster'` syntax for DDLs
7846        let on_cluster = self.parse_optional_on_cluster()?;
7847
7848        let like = self.maybe_parse_create_table_like(allow_unquoted_hyphen)?;
7849
7850        let clone = if self.parse_keyword(Keyword::CLONE) {
7851            self.parse_object_name(allow_unquoted_hyphen).ok()
7852        } else {
7853            None
7854        };
7855
7856        // parse optional column list (schema)
7857        let (columns, constraints) = self.parse_columns()?;
7858        let comment_after_column_def =
7859            if dialect_of!(self is HiveDialect) && self.parse_keyword(Keyword::COMMENT) {
7860                let next_token = self.next_token();
7861                match next_token.token {
7862                    Token::SingleQuotedString(str) => Some(CommentDef::WithoutEq(str)),
7863                    _ => self.expected("comment", next_token)?,
7864                }
7865            } else {
7866                None
7867            };
7868
7869        // PostgreSQL PARTITION OF: partition bound specification
7870        let for_values = if partition_of.is_some() {
7871            Some(self.parse_partition_for_values()?)
7872        } else {
7873            None
7874        };
7875
7876        // SQLite supports `WITHOUT ROWID` at the end of `CREATE TABLE`
7877        let without_rowid = self.parse_keywords(&[Keyword::WITHOUT, Keyword::ROWID]);
7878
7879        let hive_distribution = self.parse_hive_distribution()?;
7880        let clustered_by = self.parse_optional_clustered_by()?;
7881        let hive_formats = self.parse_hive_formats()?;
7882
7883        let create_table_config = self.parse_optional_create_table_config()?;
7884
7885        // ClickHouse supports `PRIMARY KEY`, before `ORDER BY`
7886        // https://clickhouse.com/docs/en/sql-reference/statements/create/table#primary-key
7887        let primary_key = if dialect_of!(self is ClickHouseDialect | GenericDialect)
7888            && self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY])
7889        {
7890            Some(Box::new(self.parse_expr()?))
7891        } else {
7892            None
7893        };
7894
7895        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
7896            if self.consume_token(&Token::LParen) {
7897                let columns = if self.peek_token() != Token::RParen {
7898                    self.parse_comma_separated(|p| p.parse_expr())?
7899                } else {
7900                    vec![]
7901                };
7902                self.expect_token(&Token::RParen)?;
7903                Some(OneOrManyWithParens::Many(columns))
7904            } else {
7905                Some(OneOrManyWithParens::One(self.parse_expr()?))
7906            }
7907        } else {
7908            None
7909        };
7910
7911        let on_commit = if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT]) {
7912            Some(self.parse_create_table_on_commit()?)
7913        } else {
7914            None
7915        };
7916
7917        let strict = self.parse_keyword(Keyword::STRICT);
7918
7919        // Parse optional `AS ( query )`
7920        let query = if self.parse_keyword(Keyword::AS) {
7921            Some(self.parse_query()?)
7922        } else if self.dialect.supports_create_table_select() && self.parse_keyword(Keyword::SELECT)
7923        {
7924            // rewind the SELECT keyword
7925            self.prev_token();
7926            Some(self.parse_query()?)
7927        } else {
7928            None
7929        };
7930
7931        Ok(CreateTableBuilder::new(table_name)
7932            .temporary(temporary)
7933            .columns(columns)
7934            .constraints(constraints)
7935            .or_replace(or_replace)
7936            .if_not_exists(if_not_exists)
7937            .transient(transient)
7938            .hive_distribution(hive_distribution)
7939            .hive_formats(hive_formats)
7940            .global(global)
7941            .query(query)
7942            .without_rowid(without_rowid)
7943            .like(like)
7944            .clone_clause(clone)
7945            .comment_after_column_def(comment_after_column_def)
7946            .order_by(order_by)
7947            .on_commit(on_commit)
7948            .on_cluster(on_cluster)
7949            .clustered_by(clustered_by)
7950            .partition_by(create_table_config.partition_by)
7951            .cluster_by(create_table_config.cluster_by)
7952            .inherits(create_table_config.inherits)
7953            .partition_of(partition_of)
7954            .for_values(for_values)
7955            .table_options(create_table_config.table_options)
7956            .primary_key(primary_key)
7957            .strict(strict)
7958            .build())
7959    }
7960
7961    fn maybe_parse_create_table_like(
7962        &mut self,
7963        allow_unquoted_hyphen: bool,
7964    ) -> Result<Option<CreateTableLikeKind>, ParserError> {
7965        let like = if self.dialect.supports_create_table_like_parenthesized()
7966            && self.consume_token(&Token::LParen)
7967        {
7968            if self.parse_keyword(Keyword::LIKE) {
7969                let name = self.parse_object_name(allow_unquoted_hyphen)?;
7970                let defaults = if self.parse_keywords(&[Keyword::INCLUDING, Keyword::DEFAULTS]) {
7971                    Some(CreateTableLikeDefaults::Including)
7972                } else if self.parse_keywords(&[Keyword::EXCLUDING, Keyword::DEFAULTS]) {
7973                    Some(CreateTableLikeDefaults::Excluding)
7974                } else {
7975                    None
7976                };
7977                self.expect_token(&Token::RParen)?;
7978                Some(CreateTableLikeKind::Parenthesized(CreateTableLike {
7979                    name,
7980                    defaults,
7981                }))
7982            } else {
7983                // Roll back the '('; it's probably the start of the columns list
7984                self.prev_token();
7985                None
7986            }
7987        } else if self.parse_keyword(Keyword::LIKE) || self.parse_keyword(Keyword::ILIKE) {
7988            let name = self.parse_object_name(allow_unquoted_hyphen)?;
7989            Some(CreateTableLikeKind::Plain(CreateTableLike {
7990                name,
7991                defaults: None,
7992            }))
7993        } else {
7994            None
7995        };
7996        Ok(like)
7997    }
7998
7999    pub(crate) fn parse_create_table_on_commit(&mut self) -> Result<OnCommit, ParserError> {
8000        if self.parse_keywords(&[Keyword::DELETE, Keyword::ROWS]) {
8001            Ok(OnCommit::DeleteRows)
8002        } else if self.parse_keywords(&[Keyword::PRESERVE, Keyword::ROWS]) {
8003            Ok(OnCommit::PreserveRows)
8004        } else if self.parse_keywords(&[Keyword::DROP]) {
8005            Ok(OnCommit::Drop)
8006        } else {
8007            parser_err!(
8008                "Expecting DELETE ROWS, PRESERVE ROWS or DROP",
8009                self.peek_token()
8010            )
8011        }
8012    }
8013
8014    /// Parse PostgreSQL partition bound specification for PARTITION OF.
8015    ///
8016    /// Parses: `FOR VALUES partition_bound_spec | DEFAULT`
8017    ///
8018    /// [PostgreSQL](https://www.postgresql.org/docs/current/sql-createtable.html)
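    ///
    /// For example, bound specifications of these shapes should be accepted
    /// (values are illustrative):
    /// ```sql
    /// FOR VALUES IN (1, 2, 3)
    /// FOR VALUES FROM (MINVALUE) TO (100)
    /// FOR VALUES WITH (MODULUS 4, REMAINDER 0)
    /// DEFAULT
    /// ```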
8019    fn parse_partition_for_values(&mut self) -> Result<ForValues, ParserError> {
8020        if self.parse_keyword(Keyword::DEFAULT) {
8021            return Ok(ForValues::Default);
8022        }
8023
8024        self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;
8025
8026        if self.parse_keyword(Keyword::IN) {
8027            // FOR VALUES IN (expr, ...)
8028            self.expect_token(&Token::LParen)?;
8029            let values = self.parse_comma_separated(Parser::parse_expr)?;
8030            self.expect_token(&Token::RParen)?;
8031            Ok(ForValues::In(values))
8032        } else if self.parse_keyword(Keyword::FROM) {
8033            // FOR VALUES FROM (...) TO (...)
8034            self.expect_token(&Token::LParen)?;
8035            let from = self.parse_comma_separated(Parser::parse_partition_bound_value)?;
8036            self.expect_token(&Token::RParen)?;
8037            self.expect_keyword(Keyword::TO)?;
8038            self.expect_token(&Token::LParen)?;
8039            let to = self.parse_comma_separated(Parser::parse_partition_bound_value)?;
8040            self.expect_token(&Token::RParen)?;
8041            Ok(ForValues::From { from, to })
8042        } else if self.parse_keyword(Keyword::WITH) {
8043            // FOR VALUES WITH (MODULUS n, REMAINDER r)
8044            self.expect_token(&Token::LParen)?;
8045            self.expect_keyword(Keyword::MODULUS)?;
8046            let modulus = self.parse_literal_uint()?;
8047            self.expect_token(&Token::Comma)?;
8048            self.expect_keyword(Keyword::REMAINDER)?;
8049            let remainder = self.parse_literal_uint()?;
8050            self.expect_token(&Token::RParen)?;
8051            Ok(ForValues::With { modulus, remainder })
8052        } else {
8053            self.expected("IN, FROM, or WITH after FOR VALUES", self.peek_token())
8054        }
8055    }
8056
8057    /// Parse a single partition bound value (MINVALUE, MAXVALUE, or expression).
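    /// For example, the `MINVALUE` and `100` in `FROM (MINVALUE) TO (100)`.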
8058    fn parse_partition_bound_value(&mut self) -> Result<PartitionBoundValue, ParserError> {
8059        if self.parse_keyword(Keyword::MINVALUE) {
8060            Ok(PartitionBoundValue::MinValue)
8061        } else if self.parse_keyword(Keyword::MAXVALUE) {
8062            Ok(PartitionBoundValue::MaxValue)
8063        } else {
8064            Ok(PartitionBoundValue::Expr(self.parse_expr()?))
8065        }
8066    }
8067
8068    /// Parse table configuration such as inheritance, partitioning, and clustering information during table creation.
8069    ///
8070    /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#syntax_2)
8071    /// [PostgreSQL](https://www.postgresql.org/docs/current/ddl-partitioning.html)
8072    /// [MySql](https://dev.mysql.com/doc/refman/8.4/en/create-table.html)
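    ///
    /// A rough sketch of clauses handled here, with illustrative names
    /// (dialect-dependent; not all are valid together):
    /// ```sql
    /// INHERITS (parent) WITH (fillfactor = 70)
    /// PARTITION BY DATE(ts) CLUSTER BY a OPTIONS (description = 'demo')
    /// ```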
8073    fn parse_optional_create_table_config(
8074        &mut self,
8075    ) -> Result<CreateTableConfiguration, ParserError> {
8076        let mut table_options = CreateTableOptions::None;
8077
8078        let inherits = if self.parse_keyword(Keyword::INHERITS) {
8079            Some(self.parse_parenthesized_qualified_column_list(IsOptional::Mandatory, false)?)
8080        } else {
8081            None
8082        };
8083
8084        // PostgreSQL supports `WITH ( options )` before `AS`
8085        let with_options = self.parse_options(Keyword::WITH)?;
8086        if !with_options.is_empty() {
8087            table_options = CreateTableOptions::With(with_options)
8088        }
8089
8090        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
8091        if !table_properties.is_empty() {
8092            table_options = CreateTableOptions::TableProperties(table_properties);
8093        }
8094        let partition_by = if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
8095            && self.parse_keywords(&[Keyword::PARTITION, Keyword::BY])
8096        {
8097            Some(Box::new(self.parse_expr()?))
8098        } else {
8099            None
8100        };
8101
8102        let mut cluster_by = None;
8103        if dialect_of!(self is BigQueryDialect | GenericDialect) {
8104            if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
8105                cluster_by = Some(WrappedCollection::NoWrapping(
8106                    self.parse_comma_separated(|p| p.parse_expr())?,
8107                ));
8108            };
8109
8110            if let Token::Word(word) = self.peek_token().token {
8111                if word.keyword == Keyword::OPTIONS {
8112                    table_options =
8113                        CreateTableOptions::Options(self.parse_options(Keyword::OPTIONS)?)
8114                }
8115            };
8116        }
8117
8118        if !dialect_of!(self is HiveDialect) && table_options == CreateTableOptions::None {
8119            let plain_options = self.parse_plain_options()?;
8120            if !plain_options.is_empty() {
8121                table_options = CreateTableOptions::Plain(plain_options)
8122            }
8123        };
8124
8125        Ok(CreateTableConfiguration {
8126            partition_by,
8127            cluster_by,
8128            inherits,
8129            table_options,
8130        })
8131    }
8132
8133    fn parse_plain_option(&mut self) -> Result<Option<SqlOption>, ParserError> {
8134        // Single parameter option
8135        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
8136        if self.parse_keywords(&[Keyword::START, Keyword::TRANSACTION]) {
8137            return Ok(Some(SqlOption::Ident(Ident::new("START TRANSACTION"))));
8138        }
8139
8140        // COMMENT option, with or without '='
8141        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
8142        if self.parse_keywords(&[Keyword::COMMENT]) {
8143            let has_eq = self.consume_token(&Token::Eq);
8144            let value = self.next_token();
8145
8146            let comment = match (has_eq, value.token) {
8147                (true, Token::SingleQuotedString(s)) => {
8148                    Ok(Some(SqlOption::Comment(CommentDef::WithEq(s))))
8149                }
8150                (false, Token::SingleQuotedString(s)) => {
8151                    Ok(Some(SqlOption::Comment(CommentDef::WithoutEq(s))))
8152                }
8153                (_, token) => {
8154                    self.expected("Token::SingleQuotedString", TokenWithSpan::wrap(token))
8155                }
8156            };
8157            return comment;
8158        }
8159
8160        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
8161        // <https://clickhouse.com/docs/sql-reference/statements/create/table>
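        // e.g. `ENGINE = InnoDB` (MySQL) or `ENGINE = MergeTree` (ClickHouse)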
8162        if self.parse_keywords(&[Keyword::ENGINE]) {
8163            let _ = self.consume_token(&Token::Eq);
8164            let value = self.next_token();
8165
8166            let engine = match value.token {
8167                Token::Word(w) => {
8168                    let parameters = if self.peek_token() == Token::LParen {
8169                        self.parse_parenthesized_identifiers()?
8170                    } else {
8171                        vec![]
8172                    };
8173
8174                    Ok(Some(SqlOption::NamedParenthesizedList(
8175                        NamedParenthesizedList {
8176                            key: Ident::new("ENGINE"),
8177                            name: Some(Ident::new(w.value)),
8178                            values: parameters,
8179                        },
8180                    )))
8181                }
8182                _ => {
8183                    return self.expected("Token::Word", value)?;
8184                }
8185            };
8186
8187            return engine;
8188        }
8189
8190        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
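        // e.g. `TABLESPACE ts1 STORAGE DISK` (MySQL)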
8191        if self.parse_keywords(&[Keyword::TABLESPACE]) {
8192            let _ = self.consume_token(&Token::Eq);
8193            let value = self.next_token();
8194
8195            let tablespace = match value.token {
8196                Token::Word(Word { value: name, .. }) | Token::SingleQuotedString(name) => {
8197                    let storage = match self.parse_keyword(Keyword::STORAGE) {
8198                        true => {
8199                            let _ = self.consume_token(&Token::Eq);
8200                            let storage_token = self.next_token();
8201                            match &storage_token.token {
8202                                Token::Word(w) => match w.value.to_uppercase().as_str() {
8203                                    "DISK" => Some(StorageType::Disk),
8204                                    "MEMORY" => Some(StorageType::Memory),
8205                                    _ => self
8206                                        .expected("Storage type (DISK or MEMORY)", storage_token)?,
8207                                },
8208                                _ => self.expected("Token::Word", storage_token)?,
8209                            }
8210                        }
8211                        false => None,
8212                    };
8213
8214                    Ok(Some(SqlOption::TableSpace(TablespaceOption {
8215                        name,
8216                        storage,
8217                    })))
8218                }
8219                _ => {
8220                    return self.expected("Token::Word", value)?;
8221                }
8222            };
8223
8224            return tablespace;
8225        }
8226
8227        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
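        // e.g. `UNION = (t1, t2)` for MySQL MERGE tables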
8228        if self.parse_keyword(Keyword::UNION) {
8229            let _ = self.consume_token(&Token::Eq);
8230            let value = self.next_token();
8231
8232            match value.token {
8233                Token::LParen => {
8234                    let tables: Vec<Ident> =
8235                        self.parse_comma_separated0(Parser::parse_identifier, Token::RParen)?;
8236                    self.expect_token(&Token::RParen)?;
8237
8238                    return Ok(Some(SqlOption::NamedParenthesizedList(
8239                        NamedParenthesizedList {
8240                            key: Ident::new("UNION"),
8241                            name: None,
8242                            values: tables,
8243                        },
8244                    )));
8245                }
8246                _ => {
8247                    return self.expected("Token::LParen", value)?;
8248                }
8249            }
8250        }
8251
8252        // Key/Value parameter option
8253        let key = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARSET]) {
8254            Ident::new("DEFAULT CHARSET")
8255        } else if self.parse_keyword(Keyword::CHARSET) {
8256            Ident::new("CHARSET")
8257        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARACTER, Keyword::SET]) {
8258            Ident::new("DEFAULT CHARACTER SET")
8259        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
8260            Ident::new("CHARACTER SET")
8261        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
8262            Ident::new("DEFAULT COLLATE")
8263        } else if self.parse_keyword(Keyword::COLLATE) {
8264            Ident::new("COLLATE")
8265        } else if self.parse_keywords(&[Keyword::DATA, Keyword::DIRECTORY]) {
8266            Ident::new("DATA DIRECTORY")
8267        } else if self.parse_keywords(&[Keyword::INDEX, Keyword::DIRECTORY]) {
8268            Ident::new("INDEX DIRECTORY")
8269        } else if self.parse_keyword(Keyword::KEY_BLOCK_SIZE) {
8270            Ident::new("KEY_BLOCK_SIZE")
8271        } else if self.parse_keyword(Keyword::ROW_FORMAT) {
8272            Ident::new("ROW_FORMAT")
8273        } else if self.parse_keyword(Keyword::PACK_KEYS) {
8274            Ident::new("PACK_KEYS")
8275        } else if self.parse_keyword(Keyword::STATS_AUTO_RECALC) {
8276            Ident::new("STATS_AUTO_RECALC")
8277        } else if self.parse_keyword(Keyword::STATS_PERSISTENT) {
8278            Ident::new("STATS_PERSISTENT")
8279        } else if self.parse_keyword(Keyword::STATS_SAMPLE_PAGES) {
8280            Ident::new("STATS_SAMPLE_PAGES")
8281        } else if self.parse_keyword(Keyword::DELAY_KEY_WRITE) {
8282            Ident::new("DELAY_KEY_WRITE")
8283        } else if self.parse_keyword(Keyword::COMPRESSION) {
8284            Ident::new("COMPRESSION")
8285        } else if self.parse_keyword(Keyword::ENCRYPTION) {
8286            Ident::new("ENCRYPTION")
8287        } else if self.parse_keyword(Keyword::MAX_ROWS) {
8288            Ident::new("MAX_ROWS")
8289        } else if self.parse_keyword(Keyword::MIN_ROWS) {
8290            Ident::new("MIN_ROWS")
8291        } else if self.parse_keyword(Keyword::AUTOEXTEND_SIZE) {
8292            Ident::new("AUTOEXTEND_SIZE")
8293        } else if self.parse_keyword(Keyword::AVG_ROW_LENGTH) {
8294            Ident::new("AVG_ROW_LENGTH")
8295        } else if self.parse_keyword(Keyword::CHECKSUM) {
8296            Ident::new("CHECKSUM")
8297        } else if self.parse_keyword(Keyword::CONNECTION) {
8298            Ident::new("CONNECTION")
8299        } else if self.parse_keyword(Keyword::ENGINE_ATTRIBUTE) {
8300            Ident::new("ENGINE_ATTRIBUTE")
8301        } else if self.parse_keyword(Keyword::PASSWORD) {
8302            Ident::new("PASSWORD")
8303        } else if self.parse_keyword(Keyword::SECONDARY_ENGINE_ATTRIBUTE) {
8304            Ident::new("SECONDARY_ENGINE_ATTRIBUTE")
8305        } else if self.parse_keyword(Keyword::INSERT_METHOD) {
8306            Ident::new("INSERT_METHOD")
8307        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
8308            Ident::new("AUTO_INCREMENT")
8309        } else {
8310            return Ok(None);
8311        };
8312
8313        let _ = self.consume_token(&Token::Eq);
8314
8315        let value = match self
8316            .maybe_parse(|parser| parser.parse_value())?
8317            .map(Expr::Value)
8318        {
8319            Some(expr) => expr,
8320            None => Expr::Identifier(self.parse_identifier()?),
8321        };
8322
8323        Ok(Some(SqlOption::KeyValue { key, value }))
8324    }
8325
8326    pub fn parse_plain_options(&mut self) -> Result<Vec<SqlOption>, ParserError> {
8327        let mut options = Vec::new();
8328
8329        while let Some(option) = self.parse_plain_option()? {
8330            options.push(option);
8331            // Some dialects support comma-separated options; consuming the comma for all
8332            // dialects shouldn't introduce any ambiguity.
8333            let _ = self.consume_token(&Token::Comma);
8334        }
8335
8336        Ok(options)
8337    }
8338
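    /// Parses an optional `COMMENT [=] 'text'` clause, e.g. `COMMENT = 'user accounts'`.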
8339    pub fn parse_optional_inline_comment(&mut self) -> Result<Option<CommentDef>, ParserError> {
8340        let comment = if self.parse_keyword(Keyword::COMMENT) {
8341            let has_eq = self.consume_token(&Token::Eq);
8342            let comment = self.parse_comment_value()?;
8343            Some(if has_eq {
8344                CommentDef::WithEq(comment)
8345            } else {
8346                CommentDef::WithoutEq(comment)
8347            })
8348        } else {
8349            None
8350        };
8351        Ok(comment)
8352    }
8353
8354    pub fn parse_comment_value(&mut self) -> Result<String, ParserError> {
8355        let next_token = self.next_token();
8356        let value = match next_token.token {
8357            Token::SingleQuotedString(str) => str,
8358            Token::DollarQuotedString(str) => str.value,
8359            _ => self.expected("string literal", next_token)?,
8360        };
8361        Ok(value)
8362    }
8363
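    /// Parses an optional parenthesized procedure parameter list, e.g. the
    /// `(IN x INT, OUT y VARCHAR(10))` portion of a `CREATE PROCEDURE` statement.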
8364    pub fn parse_optional_procedure_parameters(
8365        &mut self,
8366    ) -> Result<Option<Vec<ProcedureParam>>, ParserError> {
8367        let mut params = vec![];
8368        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
8369            return Ok(Some(params));
8370        }
8371        loop {
8372            if let Token::Word(_) = self.peek_token().token {
8373                params.push(self.parse_procedure_param()?)
8374            }
8375            let comma = self.consume_token(&Token::Comma);
8376            if self.consume_token(&Token::RParen) {
8377                // allow a trailing comma, even though it's not standard SQL
8378                break;
8379            } else if !comma {
8380                return self.expected("',' or ')' after parameter definition", self.peek_token());
8381            }
8382        }
8383        Ok(Some(params))
8384    }
8385
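    /// Parses a parenthesized list of column definitions and table constraints, e.g.
    /// `(id INT PRIMARY KEY, name TEXT NOT NULL, UNIQUE (name))`.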
8386    pub fn parse_columns(&mut self) -> Result<(Vec<ColumnDef>, Vec<TableConstraint>), ParserError> {
8387        let mut columns = vec![];
8388        let mut constraints = vec![];
8389        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
8390            return Ok((columns, constraints));
8391        }
8392
8393        loop {
8394            if let Some(constraint) = self.parse_optional_table_constraint()? {
8395                constraints.push(constraint);
8396            } else if let Token::Word(_) = self.peek_token().token {
8397                columns.push(self.parse_column_def()?);
8398            } else {
8399                return self.expected("column name or constraint definition", self.peek_token());
8400            }
8401
8402            let comma = self.consume_token(&Token::Comma);
8403            let rparen = self.peek_token().token == Token::RParen;
8404
8405            if !comma && !rparen {
8406                return self.expected("',' or ')' after column definition", self.peek_token());
8407            };
8408
8409            if rparen
8410                && (!comma
8411                    || self.dialect.supports_column_definition_trailing_commas()
8412                    || self.options.trailing_commas)
8413            {
8414                let _ = self.consume_token(&Token::RParen);
8415                break;
8416            }
8417        }
8418
8419        Ok((columns, constraints))
8420    }
8421
8422    pub fn parse_procedure_param(&mut self) -> Result<ProcedureParam, ParserError> {
8423        let mode = if self.parse_keyword(Keyword::IN) {
8424            Some(ArgMode::In)
8425        } else if self.parse_keyword(Keyword::OUT) {
8426            Some(ArgMode::Out)
8427        } else if self.parse_keyword(Keyword::INOUT) {
8428            Some(ArgMode::InOut)
8429        } else {
8430            None
8431        };
8432        let name = self.parse_identifier()?;
8433        let data_type = self.parse_data_type()?;
8434        let default = if self.consume_token(&Token::Eq) {
8435            Some(self.parse_expr()?)
8436        } else {
8437            None
8438        };
8439
8440        Ok(ProcedureParam {
8441            name,
8442            data_type,
8443            mode,
8444            default,
8445        })
8446    }
8447
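    /// Parses a single column definition, e.g. `id INT NOT NULL DEFAULT 0`.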
8448    pub fn parse_column_def(&mut self) -> Result<ColumnDef, ParserError> {
8449        let col_name = self.parse_identifier()?;
8450        let data_type = if self.is_column_type_sqlite_unspecified() {
8451            DataType::Unspecified
8452        } else {
8453            self.parse_data_type()?
8454        };
8455        let mut options = vec![];
8456        loop {
8457            if self.parse_keyword(Keyword::CONSTRAINT) {
8458                let name = Some(self.parse_identifier()?);
8459                if let Some(option) = self.parse_optional_column_option()? {
8460                    options.push(ColumnOptionDef { name, option });
8461                } else {
8462                    return self.expected(
8463                        "constraint details after CONSTRAINT <name>",
8464                        self.peek_token(),
8465                    );
8466                }
8467            } else if let Some(option) = self.parse_optional_column_option()? {
8468                options.push(ColumnOptionDef { name: None, option });
8469            } else {
8470                break;
8471            };
8472        }
8473        Ok(ColumnDef {
8474            name: col_name,
8475            data_type,
8476            options,
8477        })
8478    }
8479
8480    fn is_column_type_sqlite_unspecified(&mut self) -> bool {
8481        if dialect_of!(self is SQLiteDialect) {
8482            match self.peek_token().token {
8483                Token::Word(word) => matches!(
8484                    word.keyword,
8485                    Keyword::CONSTRAINT
8486                        | Keyword::PRIMARY
8487                        | Keyword::NOT
8488                        | Keyword::UNIQUE
8489                        | Keyword::CHECK
8490                        | Keyword::DEFAULT
8491                        | Keyword::COLLATE
8492                        | Keyword::REFERENCES
8493                        | Keyword::GENERATED
8494                        | Keyword::AS
8495                ),
8496                _ => true, // e.g. comma immediately after column name
8497            }
8498        } else {
8499            false
8500        }
8501    }
8502
8503    pub fn parse_optional_column_option(&mut self) -> Result<Option<ColumnOption>, ParserError> {
8504        if let Some(option) = self.dialect.parse_column_option(self)? {
8505            return option;
8506        }
8507
8508        self.with_state(
8509            ColumnDefinition,
8510            |parser| -> Result<Option<ColumnOption>, ParserError> {
8511                parser.parse_optional_column_option_inner()
8512            },
8513        )
8514    }
8515
8516    fn parse_optional_column_option_inner(&mut self) -> Result<Option<ColumnOption>, ParserError> {
8517        if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
8518            Ok(Some(ColumnOption::CharacterSet(
8519                self.parse_object_name(false)?,
8520            )))
8521        } else if self.parse_keywords(&[Keyword::COLLATE]) {
8522            Ok(Some(ColumnOption::Collation(
8523                self.parse_object_name(false)?,
8524            )))
8525        } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
8526            Ok(Some(ColumnOption::NotNull))
8527        } else if self.parse_keywords(&[Keyword::COMMENT]) {
8528            Ok(Some(ColumnOption::Comment(self.parse_comment_value()?)))
8529        } else if self.parse_keyword(Keyword::NULL) {
8530            Ok(Some(ColumnOption::Null))
8531        } else if self.parse_keyword(Keyword::DEFAULT) {
8532            Ok(Some(ColumnOption::Default(
8533                self.parse_column_option_expr()?,
8534            )))
8535        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
8536            && self.parse_keyword(Keyword::MATERIALIZED)
8537        {
8538            Ok(Some(ColumnOption::Materialized(
8539                self.parse_column_option_expr()?,
8540            )))
8541        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
8542            && self.parse_keyword(Keyword::ALIAS)
8543        {
8544            Ok(Some(ColumnOption::Alias(self.parse_column_option_expr()?)))
8545        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
8546            && self.parse_keyword(Keyword::EPHEMERAL)
8547        {
8548            // The expression is optional for the EPHEMERAL syntax, so we need to check
8549            // if the column definition has remaining tokens before parsing the expression.
8550            if matches!(self.peek_token().token, Token::Comma | Token::RParen) {
8551                Ok(Some(ColumnOption::Ephemeral(None)))
8552            } else {
8553                Ok(Some(ColumnOption::Ephemeral(Some(
8554                    self.parse_column_option_expr()?,
8555                ))))
8556            }
8557        } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
8558            let characteristics = self.parse_constraint_characteristics()?;
8559            Ok(Some(
8560                PrimaryKeyConstraint {
8561                    name: None,
8562                    index_name: None,
8563                    index_type: None,
8564                    columns: vec![],
8565                    index_options: vec![],
8566                    characteristics,
8567                }
8568                .into(),
8569            ))
8570        } else if self.parse_keyword(Keyword::UNIQUE) {
8571            let characteristics = self.parse_constraint_characteristics()?;
8572            Ok(Some(
8573                UniqueConstraint {
8574                    name: None,
8575                    index_name: None,
8576                    index_type_display: KeyOrIndexDisplay::None,
8577                    index_type: None,
8578                    columns: vec![],
8579                    index_options: vec![],
8580                    characteristics,
8581                    nulls_distinct: NullsDistinctOption::None,
8582                }
8583                .into(),
8584            ))
8585        } else if self.parse_keyword(Keyword::REFERENCES) {
8586            let foreign_table = self.parse_object_name(false)?;
8587            // PostgreSQL allows omitting the column list and
8588            // uses the primary key column of the foreign table by default
8589            let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
8590            let mut match_kind = None;
8591            let mut on_delete = None;
8592            let mut on_update = None;
8593            loop {
8594                if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
8595                    match_kind = Some(self.parse_match_kind()?);
8596                } else if on_delete.is_none()
8597                    && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
8598                {
8599                    on_delete = Some(self.parse_referential_action()?);
8600                } else if on_update.is_none()
8601                    && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
8602                {
8603                    on_update = Some(self.parse_referential_action()?);
8604                } else {
8605                    break;
8606                }
8607            }
8608            let characteristics = self.parse_constraint_characteristics()?;
8609
8610            Ok(Some(
8611                ForeignKeyConstraint {
8612                    name: None,       // Column-level constraints don't have names
8613                    index_name: None, // Not applicable for column-level constraints
8614                    columns: vec![],  // Not applicable for column-level constraints
8615                    foreign_table,
8616                    referred_columns,
8617                    on_delete,
8618                    on_update,
8619                    match_kind,
8620                    characteristics,
8621                }
8622                .into(),
8623            ))
8624        } else if self.parse_keyword(Keyword::CHECK) {
8625            self.expect_token(&Token::LParen)?;
8626            // since `CHECK` requires parentheses, we can parse the inner expression in ParserState::Normal
8627            let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
8628            self.expect_token(&Token::RParen)?;
8629            Ok(Some(
8630                CheckConstraint {
8631                    name: None, // Column-level check constraints don't have names
8632                    expr: Box::new(expr),
8633                    enforced: None, // Could be extended later to support MySQL ENFORCED/NOT ENFORCED
8634                }
8635                .into(),
8636            ))
8637        } else if self.parse_keyword(Keyword::AUTO_INCREMENT)
8638            && dialect_of!(self is MySqlDialect | GenericDialect)
8639        {
8640            // Support AUTO_INCREMENT for MySQL
8641            Ok(Some(ColumnOption::DialectSpecific(vec![
8642                Token::make_keyword("AUTO_INCREMENT"),
8643            ])))
8644        } else if self.parse_keyword(Keyword::AUTOINCREMENT)
8645            && dialect_of!(self is SQLiteDialect |  GenericDialect)
8646        {
8647            // Support AUTOINCREMENT for SQLite
8648            Ok(Some(ColumnOption::DialectSpecific(vec![
8649                Token::make_keyword("AUTOINCREMENT"),
8650            ])))
8651        } else if self.parse_keyword(Keyword::ASC)
8652            && self.dialect.supports_asc_desc_in_column_definition()
8653        {
8654            // Support ASC for SQLite
8655            Ok(Some(ColumnOption::DialectSpecific(vec![
8656                Token::make_keyword("ASC"),
8657            ])))
8658        } else if self.parse_keyword(Keyword::DESC)
8659            && self.dialect.supports_asc_desc_in_column_definition()
8660        {
8661            // Support DESC for SQLite
8662            Ok(Some(ColumnOption::DialectSpecific(vec![
8663                Token::make_keyword("DESC"),
8664            ])))
8665        } else if self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
8666            && dialect_of!(self is MySqlDialect | GenericDialect)
8667        {
8668            let expr = self.parse_column_option_expr()?;
8669            Ok(Some(ColumnOption::OnUpdate(expr)))
8670        } else if self.parse_keyword(Keyword::GENERATED) {
8671            self.parse_optional_column_option_generated()
8672        } else if dialect_of!(self is BigQueryDialect | GenericDialect)
8673            && self.parse_keyword(Keyword::OPTIONS)
8674        {
8675            self.prev_token();
8676            Ok(Some(ColumnOption::Options(
8677                self.parse_options(Keyword::OPTIONS)?,
8678            )))
8679        } else if self.parse_keyword(Keyword::AS)
8680            && dialect_of!(self is MySqlDialect | SQLiteDialect | DuckDbDialect | GenericDialect)
8681        {
8682            self.parse_optional_column_option_as()
8683        } else if self.parse_keyword(Keyword::SRID)
8684            && dialect_of!(self is MySqlDialect | GenericDialect)
8685        {
8686            Ok(Some(ColumnOption::Srid(Box::new(
8687                self.parse_column_option_expr()?,
8688            ))))
8689        } else if self.parse_keyword(Keyword::IDENTITY)
8690            && dialect_of!(self is MsSqlDialect | GenericDialect)
8691        {
8692            let parameters = if self.consume_token(&Token::LParen) {
8693                let seed = self.parse_number()?;
8694                self.expect_token(&Token::Comma)?;
8695                let increment = self.parse_number()?;
8696                self.expect_token(&Token::RParen)?;
8697
8698                Some(IdentityPropertyFormatKind::FunctionCall(
8699                    IdentityParameters { seed, increment },
8700                ))
8701            } else {
8702                None
8703            };
8704            Ok(Some(ColumnOption::Identity(
8705                IdentityPropertyKind::Identity(IdentityProperty {
8706                    parameters,
8707                    order: None,
8708                }),
8709            )))
8710        } else if dialect_of!(self is SQLiteDialect | GenericDialect)
8711            && self.parse_keywords(&[Keyword::ON, Keyword::CONFLICT])
8712        {
8713            // Support ON CONFLICT for SQLite
8714            Ok(Some(ColumnOption::OnConflict(
8715                self.expect_one_of_keywords(&[
8716                    Keyword::ROLLBACK,
8717                    Keyword::ABORT,
8718                    Keyword::FAIL,
8719                    Keyword::IGNORE,
8720                    Keyword::REPLACE,
8721                ])?,
8722            )))
8723        } else if self.parse_keyword(Keyword::INVISIBLE) {
8724            Ok(Some(ColumnOption::Invisible))
8725        } else {
8726            Ok(None)
8727        }
8728    }
8729
8730    /// When parsing some column option expressions we need to revert to [ParserState::Normal] since
8731    /// `NOT NULL` is allowed as an alias for `IS NOT NULL`.
8732    /// In those cases we use this helper instead of calling [Parser::parse_expr] directly.
8733    ///
8734    /// For example, consider these `CREATE TABLE` statements:
8735    /// ```sql
8736    /// CREATE TABLE foo (abc BOOL DEFAULT (42 NOT NULL) NOT NULL);
8737    /// ```
8738    /// vs
8739    /// ```sql
8740    /// CREATE TABLE foo (abc BOOL NOT NULL);
8741    /// ```
8742    ///
8743    /// In the first we should parse the inner portion of `(42 NOT NULL)` as [Expr::IsNotNull],
8744    /// whereas in both statements the trailing `NOT NULL` should only be parsed as a
8745    /// [ColumnOption::NotNull].
8746    fn parse_column_option_expr(&mut self) -> Result<Expr, ParserError> {
8747        if self.peek_token_ref().token == Token::LParen {
8748            let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_prefix())?;
8749            Ok(expr)
8750        } else {
8751            Ok(self.parse_expr()?)
8752        }
8753    }
8754
8755    pub(crate) fn parse_tag(&mut self) -> Result<Tag, ParserError> {
8756        let name = self.parse_object_name(false)?;
8757        self.expect_token(&Token::Eq)?;
8758        let value = self.parse_literal_string()?;
8759
8760        Ok(Tag::new(name, value))
8761    }
8762
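    /// Parses the remainder of a `GENERATED ...` column option, the `GENERATED` keyword
    /// having already been consumed by the caller. A rough sketch of the accepted forms,
    /// with illustrative values:
    /// ```sql
    /// GENERATED ALWAYS AS IDENTITY (START WITH 1 INCREMENT BY 1)
    /// GENERATED BY DEFAULT AS IDENTITY
    /// GENERATED ALWAYS AS (price * quantity) STORED
    /// ```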
8763    fn parse_optional_column_option_generated(
8764        &mut self,
8765    ) -> Result<Option<ColumnOption>, ParserError> {
8766        if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS, Keyword::IDENTITY]) {
8767            let mut sequence_options = vec![];
8768            if self.expect_token(&Token::LParen).is_ok() {
8769                sequence_options = self.parse_create_sequence_options()?;
8770                self.expect_token(&Token::RParen)?;
8771            }
8772            Ok(Some(ColumnOption::Generated {
8773                generated_as: GeneratedAs::Always,
8774                sequence_options: Some(sequence_options),
8775                generation_expr: None,
8776                generation_expr_mode: None,
8777                generated_keyword: true,
8778            }))
8779        } else if self.parse_keywords(&[
8780            Keyword::BY,
8781            Keyword::DEFAULT,
8782            Keyword::AS,
8783            Keyword::IDENTITY,
8784        ]) {
8785            let mut sequence_options = vec![];
8786            if self.expect_token(&Token::LParen).is_ok() {
8787                sequence_options = self.parse_create_sequence_options()?;
8788                self.expect_token(&Token::RParen)?;
8789            }
8790            Ok(Some(ColumnOption::Generated {
8791                generated_as: GeneratedAs::ByDefault,
8792                sequence_options: Some(sequence_options),
8793                generation_expr: None,
8794                generation_expr_mode: None,
8795                generated_keyword: true,
8796            }))
8797        } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS]) {
8798            if self.expect_token(&Token::LParen).is_ok() {
8799                let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
8800                self.expect_token(&Token::RParen)?;
8801                let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
8802                    Ok((
8803                        GeneratedAs::ExpStored,
8804                        Some(GeneratedExpressionMode::Stored),
8805                    ))
8806                } else if dialect_of!(self is PostgreSqlDialect) {
8807                    // Postgres' `AS IDENTITY` branches are handled above; this form requires STORED
8808                    self.expected("STORED", self.peek_token())
8809                } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
8810                    Ok((GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual)))
8811                } else {
8812                    Ok((GeneratedAs::Always, None))
8813                }?;
8814
8815                Ok(Some(ColumnOption::Generated {
8816                    generated_as: gen_as,
8817                    sequence_options: None,
8818                    generation_expr: Some(expr),
8819                    generation_expr_mode: expr_mode,
8820                    generated_keyword: true,
8821                }))
8822            } else {
8823                Ok(None)
8824            }
8825        } else {
8826            Ok(None)
8827        }
8828    }
8829
8830    fn parse_optional_column_option_as(&mut self) -> Result<Option<ColumnOption>, ParserError> {
8831        // Some DBs allow 'AS (expr)', shorthand for GENERATED ALWAYS AS
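        // e.g. `b INT AS (a * 2) STORED` (MySQL / SQLite generated column shorthand)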
8832        self.expect_token(&Token::LParen)?;
8833        let expr = self.parse_expr()?;
8834        self.expect_token(&Token::RParen)?;
8835
8836        let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
8837            (
8838                GeneratedAs::ExpStored,
8839                Some(GeneratedExpressionMode::Stored),
8840            )
8841        } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
8842            (GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual))
8843        } else {
8844            (GeneratedAs::Always, None)
8845        };
8846
8847        Ok(Some(ColumnOption::Generated {
8848            generated_as: gen_as,
8849            sequence_options: None,
8850            generation_expr: Some(expr),
8851            generation_expr_mode: expr_mode,
8852            generated_keyword: false,
8853        }))
8854    }
8855
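    /// Parses Hive-style bucketing, e.g.
    /// `CLUSTERED BY (user_id) SORTED BY (ts ASC) INTO 32 BUCKETS`.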
8856    pub fn parse_optional_clustered_by(&mut self) -> Result<Option<ClusteredBy>, ParserError> {
8857        let clustered_by = if dialect_of!(self is HiveDialect|GenericDialect)
8858            && self.parse_keywords(&[Keyword::CLUSTERED, Keyword::BY])
8859        {
8860            let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
8861
8862            let sorted_by = if self.parse_keywords(&[Keyword::SORTED, Keyword::BY]) {
8863                self.expect_token(&Token::LParen)?;
8864                let sorted_by_columns = self.parse_comma_separated(|p| p.parse_order_by_expr())?;
8865                self.expect_token(&Token::RParen)?;
8866                Some(sorted_by_columns)
8867            } else {
8868                None
8869            };
8870
8871            self.expect_keyword_is(Keyword::INTO)?;
8872            let num_buckets = self.parse_number_value()?.value;
8873            self.expect_keyword_is(Keyword::BUCKETS)?;
8874            Some(ClusteredBy {
8875                columns,
8876                sorted_by,
8877                num_buckets,
8878            })
8879        } else {
8880            None
8881        };
8882        Ok(clustered_by)
8883    }
8884
8885    pub fn parse_referential_action(&mut self) -> Result<ReferentialAction, ParserError> {
8886        if self.parse_keyword(Keyword::RESTRICT) {
8887            Ok(ReferentialAction::Restrict)
8888        } else if self.parse_keyword(Keyword::CASCADE) {
8889            Ok(ReferentialAction::Cascade)
8890        } else if self.parse_keywords(&[Keyword::SET, Keyword::NULL]) {
8891            Ok(ReferentialAction::SetNull)
8892        } else if self.parse_keywords(&[Keyword::NO, Keyword::ACTION]) {
8893            Ok(ReferentialAction::NoAction)
8894        } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
8895            Ok(ReferentialAction::SetDefault)
8896        } else {
8897            self.expected(
8898                "one of RESTRICT, CASCADE, SET NULL, NO ACTION or SET DEFAULT",
8899                self.peek_token(),
8900            )
8901        }
8902    }
8903
8904    pub fn parse_match_kind(&mut self) -> Result<ConstraintReferenceMatchKind, ParserError> {
8905        if self.parse_keyword(Keyword::FULL) {
8906            Ok(ConstraintReferenceMatchKind::Full)
8907        } else if self.parse_keyword(Keyword::PARTIAL) {
8908            Ok(ConstraintReferenceMatchKind::Partial)
8909        } else if self.parse_keyword(Keyword::SIMPLE) {
8910            Ok(ConstraintReferenceMatchKind::Simple)
8911        } else {
8912            self.expected("one of FULL, PARTIAL or SIMPLE", self.peek_token())
8913        }
8914    }
8915
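    /// Parses optional constraint characteristics, e.g.
    /// `DEFERRABLE INITIALLY DEFERRED` or `NOT ENFORCED`.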
8916    pub fn parse_constraint_characteristics(
8917        &mut self,
8918    ) -> Result<Option<ConstraintCharacteristics>, ParserError> {
8919        let mut cc = ConstraintCharacteristics::default();
8920
8921        loop {
8922            if cc.deferrable.is_none() && self.parse_keywords(&[Keyword::NOT, Keyword::DEFERRABLE])
8923            {
8924                cc.deferrable = Some(false);
8925            } else if cc.deferrable.is_none() && self.parse_keyword(Keyword::DEFERRABLE) {
8926                cc.deferrable = Some(true);
8927            } else if cc.initially.is_none() && self.parse_keyword(Keyword::INITIALLY) {
8928                if self.parse_keyword(Keyword::DEFERRED) {
8929                    cc.initially = Some(DeferrableInitial::Deferred);
8930                } else if self.parse_keyword(Keyword::IMMEDIATE) {
8931                    cc.initially = Some(DeferrableInitial::Immediate);
8932                } else {
8933                    self.expected("one of DEFERRED or IMMEDIATE", self.peek_token())?;
8934                }
8935            } else if cc.enforced.is_none() && self.parse_keyword(Keyword::ENFORCED) {
8936                cc.enforced = Some(true);
8937            } else if cc.enforced.is_none()
8938                && self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED])
8939            {
8940                cc.enforced = Some(false);
8941            } else {
8942                break;
8943            }
8944        }
8945
8946        if cc.deferrable.is_some() || cc.initially.is_some() || cc.enforced.is_some() {
8947            Ok(Some(cc))
8948        } else {
8949            Ok(None)
8950        }
8951    }
8952
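    /// Parses a table-level constraint if one follows, e.g.
    /// `CONSTRAINT uq_ab UNIQUE (a, b)` or `FOREIGN KEY (a) REFERENCES other (b)`,
    /// returning `Ok(None)` when the next tokens do not start a constraint.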
8953    pub fn parse_optional_table_constraint(
8954        &mut self,
8955    ) -> Result<Option<TableConstraint>, ParserError> {
8956        let name = if self.parse_keyword(Keyword::CONSTRAINT) {
8957            Some(self.parse_identifier()?)
8958        } else {
8959            None
8960        };
8961
8962        let next_token = self.next_token();
8963        match next_token.token {
8964            Token::Word(w) if w.keyword == Keyword::UNIQUE => {
8965                let index_type_display = self.parse_index_type_display();
8966                if !dialect_of!(self is GenericDialect | MySqlDialect)
8967                    && !index_type_display.is_none()
8968                {
8969                    return self
8970                        .expected("`index_name` or `(column_name [, ...])`", self.peek_token());
8971                }
8972
8973                let nulls_distinct = self.parse_optional_nulls_distinct()?;
8974
8975                // optional index name
8976                let index_name = self.parse_optional_ident()?;
8977                let index_type = self.parse_optional_using_then_index_type()?;
8978
8979                let columns = self.parse_parenthesized_index_column_list()?;
8980                let index_options = self.parse_index_options()?;
8981                let characteristics = self.parse_constraint_characteristics()?;
8982                Ok(Some(
8983                    UniqueConstraint {
8984                        name,
8985                        index_name,
8986                        index_type_display,
8987                        index_type,
8988                        columns,
8989                        index_options,
8990                        characteristics,
8991                        nulls_distinct,
8992                    }
8993                    .into(),
8994                ))
8995            }
8996            Token::Word(w) if w.keyword == Keyword::PRIMARY => {
8997                // `PRIMARY` is always followed by `KEY`
8998                self.expect_keyword_is(Keyword::KEY)?;
8999
9000                // optional index name
9001                let index_name = self.parse_optional_ident()?;
9002                let index_type = self.parse_optional_using_then_index_type()?;
9003
9004                let columns = self.parse_parenthesized_index_column_list()?;
9005                let index_options = self.parse_index_options()?;
9006                let characteristics = self.parse_constraint_characteristics()?;
9007                Ok(Some(
9008                    PrimaryKeyConstraint {
9009                        name,
9010                        index_name,
9011                        index_type,
9012                        columns,
9013                        index_options,
9014                        characteristics,
9015                    }
9016                    .into(),
9017                ))
9018            }
9019            Token::Word(w) if w.keyword == Keyword::FOREIGN => {
9020                self.expect_keyword_is(Keyword::KEY)?;
9021                let index_name = self.parse_optional_ident()?;
9022                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
9023                self.expect_keyword_is(Keyword::REFERENCES)?;
9024                let foreign_table = self.parse_object_name(false)?;
9025                let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
9026                let mut match_kind = None;
9027                let mut on_delete = None;
9028                let mut on_update = None;
9029                loop {
9030                    if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
9031                        match_kind = Some(self.parse_match_kind()?);
9032                    } else if on_delete.is_none()
9033                        && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
9034                    {
9035                        on_delete = Some(self.parse_referential_action()?);
9036                    } else if on_update.is_none()
9037                        && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
9038                    {
9039                        on_update = Some(self.parse_referential_action()?);
9040                    } else {
9041                        break;
9042                    }
9043                }
9044
9045                let characteristics = self.parse_constraint_characteristics()?;
9046
9047                Ok(Some(
9048                    ForeignKeyConstraint {
9049                        name,
9050                        index_name,
9051                        columns,
9052                        foreign_table,
9053                        referred_columns,
9054                        on_delete,
9055                        on_update,
9056                        match_kind,
9057                        characteristics,
9058                    }
9059                    .into(),
9060                ))
9061            }
9062            Token::Word(w) if w.keyword == Keyword::CHECK => {
9063                self.expect_token(&Token::LParen)?;
9064                let expr = Box::new(self.parse_expr()?);
9065                self.expect_token(&Token::RParen)?;
9066
9067                let enforced = if self.parse_keyword(Keyword::ENFORCED) {
9068                    Some(true)
9069                } else if self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED]) {
9070                    Some(false)
9071                } else {
9072                    None
9073                };
9074
9075                Ok(Some(
9076                    CheckConstraint {
9077                        name,
9078                        expr,
9079                        enforced,
9080                    }
9081                    .into(),
9082                ))
9083            }
9084            Token::Word(w)
9085                if (w.keyword == Keyword::INDEX || w.keyword == Keyword::KEY)
9086                    && dialect_of!(self is GenericDialect | MySqlDialect)
9087                    && name.is_none() =>
9088            {
9089                let display_as_key = w.keyword == Keyword::KEY;
9090
9091                let name = match self.peek_token().token {
9092                    Token::Word(word) if word.keyword == Keyword::USING => None,
9093                    _ => self.parse_optional_ident()?,
9094                };
9095
9096                let index_type = self.parse_optional_using_then_index_type()?;
9097                let columns = self.parse_parenthesized_index_column_list()?;
9098                let index_options = self.parse_index_options()?;
9099
9100                Ok(Some(
9101                    IndexConstraint {
9102                        display_as_key,
9103                        name,
9104                        index_type,
9105                        columns,
9106                        index_options,
9107                    }
9108                    .into(),
9109                ))
9110            }
9111            Token::Word(w)
9112                if (w.keyword == Keyword::FULLTEXT || w.keyword == Keyword::SPATIAL)
9113                    && dialect_of!(self is GenericDialect | MySqlDialect) =>
9114            {
9115                if let Some(name) = name {
9116                    return self.expected(
9117                        "FULLTEXT or SPATIAL option without constraint name",
9118                        TokenWithSpan {
9119                            token: Token::make_keyword(&name.to_string()),
9120                            span: next_token.span,
9121                        },
9122                    );
9123                }
9124
9125                let fulltext = w.keyword == Keyword::FULLTEXT;
9126
9127                let index_type_display = self.parse_index_type_display();
9128
9129                let opt_index_name = self.parse_optional_ident()?;
9130
9131                let columns = self.parse_parenthesized_index_column_list()?;
9132
9133                Ok(Some(
9134                    FullTextOrSpatialConstraint {
9135                        fulltext,
9136                        index_type_display,
9137                        opt_index_name,
9138                        columns,
9139                    }
9140                    .into(),
9141                ))
9142            }
9143            _ => {
9144                if name.is_some() {
9145                    self.expected("PRIMARY, UNIQUE, FOREIGN, or CHECK", next_token)
9146                } else {
9147                    self.prev_token();
9148                    Ok(None)
9149                }
9150            }
9151        }
9152    }
9153
9154    fn parse_optional_nulls_distinct(&mut self) -> Result<NullsDistinctOption, ParserError> {
9155        Ok(if self.parse_keyword(Keyword::NULLS) {
9156            let not = self.parse_keyword(Keyword::NOT);
9157            self.expect_keyword_is(Keyword::DISTINCT)?;
9158            if not {
9159                NullsDistinctOption::NotDistinct
9160            } else {
9161                NullsDistinctOption::Distinct
9162            }
9163        } else {
9164            NullsDistinctOption::None
9165        })
9166    }
9167
9168    pub fn maybe_parse_options(
9169        &mut self,
9170        keyword: Keyword,
9171    ) -> Result<Option<Vec<SqlOption>>, ParserError> {
9172        if let Token::Word(word) = self.peek_token().token {
9173            if word.keyword == keyword {
9174                return Ok(Some(self.parse_options(keyword)?));
9175            }
9176        };
9177        Ok(None)
9178    }
9179
9180    pub fn parse_options(&mut self, keyword: Keyword) -> Result<Vec<SqlOption>, ParserError> {
9181        if self.parse_keyword(keyword) {
9182            self.expect_token(&Token::LParen)?;
9183            let options = self.parse_comma_separated0(Parser::parse_sql_option, Token::RParen)?;
9184            self.expect_token(&Token::RParen)?;
9185            Ok(options)
9186        } else {
9187            Ok(vec![])
9188        }
9189    }
9190
9191    pub fn parse_options_with_keywords(
9192        &mut self,
9193        keywords: &[Keyword],
9194    ) -> Result<Vec<SqlOption>, ParserError> {
9195        if self.parse_keywords(keywords) {
9196            self.expect_token(&Token::LParen)?;
9197            let options = self.parse_comma_separated(Parser::parse_sql_option)?;
9198            self.expect_token(&Token::RParen)?;
9199            Ok(options)
9200        } else {
9201            Ok(vec![])
9202        }
9203    }
9204
9205    pub fn parse_index_type(&mut self) -> Result<IndexType, ParserError> {
9206        Ok(if self.parse_keyword(Keyword::BTREE) {
9207            IndexType::BTree
9208        } else if self.parse_keyword(Keyword::HASH) {
9209            IndexType::Hash
9210        } else if self.parse_keyword(Keyword::GIN) {
9211            IndexType::GIN
9212        } else if self.parse_keyword(Keyword::GIST) {
9213            IndexType::GiST
9214        } else if self.parse_keyword(Keyword::SPGIST) {
9215            IndexType::SPGiST
9216        } else if self.parse_keyword(Keyword::BRIN) {
9217            IndexType::BRIN
9218        } else if self.parse_keyword(Keyword::BLOOM) {
9219            IndexType::Bloom
9220        } else {
9221            IndexType::Custom(self.parse_identifier()?)
9222        })
9223    }
9224
9225    /// Optionally parse the `USING` keyword, followed by an [IndexType]
9226    /// Example:
9227    /// ```sql
9228    /// USING BTREE (name, age DESC)
9229    /// ```
9230    pub fn parse_optional_using_then_index_type(
9231        &mut self,
9232    ) -> Result<Option<IndexType>, ParserError> {
9233        if self.parse_keyword(Keyword::USING) {
9234            Ok(Some(self.parse_index_type()?))
9235        } else {
9236            Ok(None)
9237        }
9238    }
9239
9240    /// Parse an optional `ident`, typically a name such as
9241    /// `window_name` or `index_name`.
9242    pub fn parse_optional_ident(&mut self) -> Result<Option<Ident>, ParserError> {
9243        self.maybe_parse(|parser| parser.parse_identifier())
9244    }
9245
9246    #[must_use]
9247    pub fn parse_index_type_display(&mut self) -> KeyOrIndexDisplay {
9248        if self.parse_keyword(Keyword::KEY) {
9249            KeyOrIndexDisplay::Key
9250        } else if self.parse_keyword(Keyword::INDEX) {
9251            KeyOrIndexDisplay::Index
9252        } else {
9253            KeyOrIndexDisplay::None
9254        }
9255    }
9256
9257    pub fn parse_optional_index_option(&mut self) -> Result<Option<IndexOption>, ParserError> {
9258        if let Some(index_type) = self.parse_optional_using_then_index_type()? {
9259            Ok(Some(IndexOption::Using(index_type)))
9260        } else if self.parse_keyword(Keyword::COMMENT) {
9261            let s = self.parse_literal_string()?;
9262            Ok(Some(IndexOption::Comment(s)))
9263        } else {
9264            Ok(None)
9265        }
9266    }
9267
9268    pub fn parse_index_options(&mut self) -> Result<Vec<IndexOption>, ParserError> {
9269        let mut options = Vec::new();
9270
9271        loop {
9272            match self.parse_optional_index_option()? {
9273                Some(index_option) => options.push(index_option),
9274                None => return Ok(options),
9275            }
9276        }
9277    }
9278
9279    pub fn parse_sql_option(&mut self) -> Result<SqlOption, ParserError> {
9280        let is_mssql = dialect_of!(self is MsSqlDialect|GenericDialect);
9281
9282        match self.peek_token().token {
9283            Token::Word(w) if w.keyword == Keyword::HEAP && is_mssql => {
9284                Ok(SqlOption::Ident(self.parse_identifier()?))
9285            }
9286            Token::Word(w) if w.keyword == Keyword::PARTITION && is_mssql => {
9287                self.parse_option_partition()
9288            }
9289            Token::Word(w) if w.keyword == Keyword::CLUSTERED && is_mssql => {
9290                self.parse_option_clustered()
9291            }
9292            _ => {
9293                let name = self.parse_identifier()?;
9294                self.expect_token(&Token::Eq)?;
9295                let value = self.parse_expr()?;
9296
9297                Ok(SqlOption::KeyValue { key: name, value })
9298            }
9299        }
9300    }
9301
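    /// Parses an MSSQL-style clustered table option, e.g.
    /// `CLUSTERED COLUMNSTORE INDEX`, `CLUSTERED COLUMNSTORE INDEX ORDER (a, b)`,
    /// or `CLUSTERED INDEX (a ASC, b DESC)`.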
9302    pub fn parse_option_clustered(&mut self) -> Result<SqlOption, ParserError> {
9303        if self.parse_keywords(&[
9304            Keyword::CLUSTERED,
9305            Keyword::COLUMNSTORE,
9306            Keyword::INDEX,
9307            Keyword::ORDER,
9308        ]) {
9309            Ok(SqlOption::Clustered(
9310                TableOptionsClustered::ColumnstoreIndexOrder(
9311                    self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
9312                ),
9313            ))
9314        } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::COLUMNSTORE, Keyword::INDEX]) {
9315            Ok(SqlOption::Clustered(
9316                TableOptionsClustered::ColumnstoreIndex,
9317            ))
9318        } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::INDEX]) {
9319            self.expect_token(&Token::LParen)?;
9320
9321            let columns = self.parse_comma_separated(|p| {
9322                let name = p.parse_identifier()?;
9323                let asc = p.parse_asc_desc();
9324
9325                Ok(ClusteredIndex { name, asc })
9326            })?;
9327
9328            self.expect_token(&Token::RParen)?;
9329
9330            Ok(SqlOption::Clustered(TableOptionsClustered::Index(columns)))
9331        } else {
9332            Err(ParserError::ParserError(
9333                "invalid CLUSTERED sequence".to_string(),
9334            ))
9335        }
9336    }
9337
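    /// Parses an MSSQL-style partition option, e.g.
    /// `PARTITION (order_date RANGE RIGHT FOR VALUES ('2020-01-01', '2021-01-01'))`.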
9338    pub fn parse_option_partition(&mut self) -> Result<SqlOption, ParserError> {
9339        self.expect_keyword_is(Keyword::PARTITION)?;
9340        self.expect_token(&Token::LParen)?;
9341        let column_name = self.parse_identifier()?;
9342
9343        self.expect_keyword_is(Keyword::RANGE)?;
9344        let range_direction = if self.parse_keyword(Keyword::LEFT) {
9345            Some(PartitionRangeDirection::Left)
9346        } else if self.parse_keyword(Keyword::RIGHT) {
9347            Some(PartitionRangeDirection::Right)
9348        } else {
9349            None
9350        };
9351
9352        self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;
9353        self.expect_token(&Token::LParen)?;
9354
9355        let for_values = self.parse_comma_separated(Parser::parse_expr)?;
9356
9357        self.expect_token(&Token::RParen)?;
9358        self.expect_token(&Token::RParen)?;
9359
9360        Ok(SqlOption::Partition {
9361            column_name,
9362            range_direction,
9363            for_values,
9364        })
9365    }
9366
9367    pub fn parse_partition(&mut self) -> Result<Partition, ParserError> {
9368        self.expect_token(&Token::LParen)?;
9369        let partitions = self.parse_comma_separated(Parser::parse_expr)?;
9370        self.expect_token(&Token::RParen)?;
9371        Ok(Partition::Partitions(partitions))
9372    }
9373
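    /// Parse the parenthesized `(SELECT ... [GROUP BY ...] [ORDER BY ...])`
    /// body of a ClickHouse-style projection, e.g. (illustrative):
    ///
    /// ```sql
    /// (SELECT a, b GROUP BY a ORDER BY b)
    /// ```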
9374    pub fn parse_projection_select(&mut self) -> Result<ProjectionSelect, ParserError> {
9375        self.expect_token(&Token::LParen)?;
9376        self.expect_keyword_is(Keyword::SELECT)?;
9377        let projection = self.parse_projection()?;
9378        let group_by = self.parse_optional_group_by()?;
9379        let order_by = self.parse_optional_order_by()?;
9380        self.expect_token(&Token::RParen)?;
9381        Ok(ProjectionSelect {
9382            projection,
9383            group_by,
9384            order_by,
9385        })
9386    }
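
    /// Parse the remainder of a ClickHouse-style `ALTER TABLE ... ADD PROJECTION`
    /// operation (the `ADD PROJECTION` keywords have already been consumed),
    /// e.g. (illustrative):
    ///
    /// ```sql
    /// ALTER TABLE t ADD PROJECTION IF NOT EXISTS p (SELECT a ORDER BY b)
    /// ```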
9387    pub fn parse_alter_table_add_projection(&mut self) -> Result<AlterTableOperation, ParserError> {
9388        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
9389        let name = self.parse_identifier()?;
9390        let query = self.parse_projection_select()?;
9391        Ok(AlterTableOperation::AddProjection {
9392            if_not_exists,
9393            name,
9394            select: query,
9395        })
9396    }
9397
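    /// Parse a single operation of an `ALTER TABLE` statement; operations may be
    /// comma-separated (see `parse_alter_table`). Illustrative examples of
    /// operations handled below:
    ///
    /// ```sql
    /// ADD COLUMN c INT
    /// DROP COLUMN IF EXISTS c
    /// RENAME TO t2
    /// ALTER COLUMN c SET NOT NULL
    /// ```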
9398    pub fn parse_alter_table_operation(&mut self) -> Result<AlterTableOperation, ParserError> {
9399        let operation = if self.parse_keyword(Keyword::ADD) {
9400            if let Some(constraint) = self.parse_optional_table_constraint()? {
9401                let not_valid = self.parse_keywords(&[Keyword::NOT, Keyword::VALID]);
9402                AlterTableOperation::AddConstraint {
9403                    constraint,
9404                    not_valid,
9405                }
9406            } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
9407                && self.parse_keyword(Keyword::PROJECTION)
9408            {
9409                return self.parse_alter_table_add_projection();
9410            } else {
9411                let if_not_exists =
9412                    self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
9413                let mut new_partitions = vec![];
9414                loop {
9415                    if self.parse_keyword(Keyword::PARTITION) {
9416                        new_partitions.push(self.parse_partition()?);
9417                    } else {
9418                        break;
9419                    }
9420                }
9421                if !new_partitions.is_empty() {
9422                    AlterTableOperation::AddPartitions {
9423                        if_not_exists,
9424                        new_partitions,
9425                    }
9426                } else {
9427                    let column_keyword = self.parse_keyword(Keyword::COLUMN);
9428
9429                    let if_not_exists = if dialect_of!(self is PostgreSqlDialect | BigQueryDialect | DuckDbDialect | GenericDialect)
9430                    {
9431                        self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS])
9432                            || if_not_exists
9433                    } else {
9434                        false
9435                    };
9436
9437                    let column_def = self.parse_column_def()?;
9438
9439                    let column_position = self.parse_column_position()?;
9440
9441                    AlterTableOperation::AddColumn {
9442                        column_keyword,
9443                        if_not_exists,
9444                        column_def,
9445                        column_position,
9446                    }
9447                }
9448            }
9449        } else if self.parse_keyword(Keyword::RENAME) {
9450            if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::CONSTRAINT) {
9451                let old_name = self.parse_identifier()?;
9452                self.expect_keyword_is(Keyword::TO)?;
9453                let new_name = self.parse_identifier()?;
9454                AlterTableOperation::RenameConstraint { old_name, new_name }
9455            } else if self.parse_keyword(Keyword::TO) {
9456                let table_name = self.parse_object_name(false)?;
9457                AlterTableOperation::RenameTable {
9458                    table_name: RenameTableNameKind::To(table_name),
9459                }
9460            } else if self.parse_keyword(Keyword::AS) {
9461                let table_name = self.parse_object_name(false)?;
9462                AlterTableOperation::RenameTable {
9463                    table_name: RenameTableNameKind::As(table_name),
9464                }
9465            } else {
9466                let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
9467                let old_column_name = self.parse_identifier()?;
9468                self.expect_keyword_is(Keyword::TO)?;
9469                let new_column_name = self.parse_identifier()?;
9470                AlterTableOperation::RenameColumn {
9471                    old_column_name,
9472                    new_column_name,
9473                }
9474            }
9475        } else if self.parse_keyword(Keyword::DISABLE) {
9476            if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
9477                AlterTableOperation::DisableRowLevelSecurity {}
9478            } else if self.parse_keyword(Keyword::RULE) {
9479                let name = self.parse_identifier()?;
9480                AlterTableOperation::DisableRule { name }
9481            } else if self.parse_keyword(Keyword::TRIGGER) {
9482                let name = self.parse_identifier()?;
9483                AlterTableOperation::DisableTrigger { name }
9484            } else {
9485                return self.expected(
9486                    "ROW LEVEL SECURITY, RULE, or TRIGGER after DISABLE",
9487                    self.peek_token(),
9488                );
9489            }
9490        } else if self.parse_keyword(Keyword::ENABLE) {
9491            if self.parse_keywords(&[Keyword::ALWAYS, Keyword::RULE]) {
9492                let name = self.parse_identifier()?;
9493                AlterTableOperation::EnableAlwaysRule { name }
9494            } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::TRIGGER]) {
9495                let name = self.parse_identifier()?;
9496                AlterTableOperation::EnableAlwaysTrigger { name }
9497            } else if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
9498                AlterTableOperation::EnableRowLevelSecurity {}
9499            } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::RULE]) {
9500                let name = self.parse_identifier()?;
9501                AlterTableOperation::EnableReplicaRule { name }
9502            } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::TRIGGER]) {
9503                let name = self.parse_identifier()?;
9504                AlterTableOperation::EnableReplicaTrigger { name }
9505            } else if self.parse_keyword(Keyword::RULE) {
9506                let name = self.parse_identifier()?;
9507                AlterTableOperation::EnableRule { name }
9508            } else if self.parse_keyword(Keyword::TRIGGER) {
9509                let name = self.parse_identifier()?;
9510                AlterTableOperation::EnableTrigger { name }
9511            } else {
9512                return self.expected(
9513                    "ALWAYS, REPLICA, ROW LEVEL SECURITY, RULE, or TRIGGER after ENABLE",
9514                    self.peek_token(),
9515                );
9516            }
9517        } else if self.parse_keywords(&[Keyword::CLEAR, Keyword::PROJECTION])
9518            && dialect_of!(self is ClickHouseDialect|GenericDialect)
9519        {
9520            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9521            let name = self.parse_identifier()?;
9522            let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
9523                Some(self.parse_identifier()?)
9524            } else {
9525                None
9526            };
9527            AlterTableOperation::ClearProjection {
9528                if_exists,
9529                name,
9530                partition,
9531            }
9532        } else if self.parse_keywords(&[Keyword::MATERIALIZE, Keyword::PROJECTION])
9533            && dialect_of!(self is ClickHouseDialect|GenericDialect)
9534        {
9535            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9536            let name = self.parse_identifier()?;
9537            let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
9538                Some(self.parse_identifier()?)
9539            } else {
9540                None
9541            };
9542            AlterTableOperation::MaterializeProjection {
9543                if_exists,
9544                name,
9545                partition,
9546            }
9547        } else if self.parse_keyword(Keyword::DROP) {
9548            if self.parse_keywords(&[Keyword::IF, Keyword::EXISTS, Keyword::PARTITION]) {
9549                self.expect_token(&Token::LParen)?;
9550                let partitions = self.parse_comma_separated(Parser::parse_expr)?;
9551                self.expect_token(&Token::RParen)?;
9552                AlterTableOperation::DropPartitions {
9553                    partitions,
9554                    if_exists: true,
9555                }
9556            } else if self.parse_keyword(Keyword::PARTITION) {
9557                self.expect_token(&Token::LParen)?;
9558                let partitions = self.parse_comma_separated(Parser::parse_expr)?;
9559                self.expect_token(&Token::RParen)?;
9560                AlterTableOperation::DropPartitions {
9561                    partitions,
9562                    if_exists: false,
9563                }
9564            } else if self.parse_keyword(Keyword::CONSTRAINT) {
9565                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9566                let name = self.parse_identifier()?;
9567                let drop_behavior = self.parse_optional_drop_behavior();
9568                AlterTableOperation::DropConstraint {
9569                    if_exists,
9570                    name,
9571                    drop_behavior,
9572                }
9573            } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
9574                let drop_behavior = self.parse_optional_drop_behavior();
9575                AlterTableOperation::DropPrimaryKey { drop_behavior }
9576            } else if self.parse_keywords(&[Keyword::FOREIGN, Keyword::KEY]) {
9577                let name = self.parse_identifier()?;
9578                let drop_behavior = self.parse_optional_drop_behavior();
9579                AlterTableOperation::DropForeignKey {
9580                    name,
9581                    drop_behavior,
9582                }
9583            } else if self.parse_keyword(Keyword::INDEX) {
9584                let name = self.parse_identifier()?;
9585                AlterTableOperation::DropIndex { name }
9586            } else if self.parse_keyword(Keyword::PROJECTION)
9587                && dialect_of!(self is ClickHouseDialect|GenericDialect)
9588            {
9589                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9590                let name = self.parse_identifier()?;
9591                AlterTableOperation::DropProjection { if_exists, name }
9592            } else if self.parse_keywords(&[Keyword::CLUSTERING, Keyword::KEY]) {
9593                AlterTableOperation::DropClusteringKey
9594            } else {
9595                let has_column_keyword = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
9596                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9597                let column_names = if self.dialect.supports_comma_separated_drop_column_list() {
9598                    self.parse_comma_separated(Parser::parse_identifier)?
9599                } else {
9600                    vec![self.parse_identifier()?]
9601                };
9602                let drop_behavior = self.parse_optional_drop_behavior();
9603                AlterTableOperation::DropColumn {
9604                    has_column_keyword,
9605                    column_names,
9606                    if_exists,
9607                    drop_behavior,
9608                }
9609            }
9610        } else if self.parse_keyword(Keyword::PARTITION) {
9611            self.expect_token(&Token::LParen)?;
9612            let before = self.parse_comma_separated(Parser::parse_expr)?;
9613            self.expect_token(&Token::RParen)?;
9614            self.expect_keyword_is(Keyword::RENAME)?;
9615            self.expect_keywords(&[Keyword::TO, Keyword::PARTITION])?;
9616            self.expect_token(&Token::LParen)?;
9617            let renames = self.parse_comma_separated(Parser::parse_expr)?;
9618            self.expect_token(&Token::RParen)?;
9619            AlterTableOperation::RenamePartitions {
9620                old_partitions: before,
9621                new_partitions: renames,
9622            }
9623        } else if self.parse_keyword(Keyword::CHANGE) {
9624            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
9625            let old_name = self.parse_identifier()?;
9626            let new_name = self.parse_identifier()?;
9627            let data_type = self.parse_data_type()?;
9628            let mut options = vec![];
9629            while let Some(option) = self.parse_optional_column_option()? {
9630                options.push(option);
9631            }
9632
9633            let column_position = self.parse_column_position()?;
9634
9635            AlterTableOperation::ChangeColumn {
9636                old_name,
9637                new_name,
9638                data_type,
9639                options,
9640                column_position,
9641            }
9642        } else if self.parse_keyword(Keyword::MODIFY) {
9643            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
9644            let col_name = self.parse_identifier()?;
9645            let data_type = self.parse_data_type()?;
9646            let mut options = vec![];
9647            while let Some(option) = self.parse_optional_column_option()? {
9648                options.push(option);
9649            }
9650
9651            let column_position = self.parse_column_position()?;
9652
9653            AlterTableOperation::ModifyColumn {
9654                col_name,
9655                data_type,
9656                options,
9657                column_position,
9658            }
9659        } else if self.parse_keyword(Keyword::ALTER) {
9660            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
9661            let column_name = self.parse_identifier()?;
9662            let is_postgresql = dialect_of!(self is PostgreSqlDialect);
9663
9664            let op: AlterColumnOperation = if self.parse_keywords(&[
9665                Keyword::SET,
9666                Keyword::NOT,
9667                Keyword::NULL,
9668            ]) {
9669                AlterColumnOperation::SetNotNull {}
9670            } else if self.parse_keywords(&[Keyword::DROP, Keyword::NOT, Keyword::NULL]) {
9671                AlterColumnOperation::DropNotNull {}
9672            } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
9673                AlterColumnOperation::SetDefault {
9674                    value: self.parse_expr()?,
9675                }
9676            } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
9677                AlterColumnOperation::DropDefault {}
9678            } else if self.parse_keywords(&[Keyword::SET, Keyword::DATA, Keyword::TYPE]) {
9679                self.parse_set_data_type(true)?
9680            } else if self.parse_keyword(Keyword::TYPE) {
9681                self.parse_set_data_type(false)?
9682            } else if self.parse_keywords(&[Keyword::ADD, Keyword::GENERATED]) {
9683                let generated_as = if self.parse_keyword(Keyword::ALWAYS) {
9684                    Some(GeneratedAs::Always)
9685                } else if self.parse_keywords(&[Keyword::BY, Keyword::DEFAULT]) {
9686                    Some(GeneratedAs::ByDefault)
9687                } else {
9688                    None
9689                };
9690
9691                self.expect_keywords(&[Keyword::AS, Keyword::IDENTITY])?;
9692
9693                let mut sequence_options: Option<Vec<SequenceOptions>> = None;
9694
9695                if self.peek_token().token == Token::LParen {
9696                    self.expect_token(&Token::LParen)?;
9697                    sequence_options = Some(self.parse_create_sequence_options()?);
9698                    self.expect_token(&Token::RParen)?;
9699                }
9700
9701                AlterColumnOperation::AddGenerated {
9702                    generated_as,
9703                    sequence_options,
9704                }
9705            } else {
9706                let message = if is_postgresql {
9707                    "SET/DROP NOT NULL, SET DEFAULT, SET DATA TYPE, or ADD GENERATED after ALTER COLUMN"
9708                } else {
9709                    "SET/DROP NOT NULL, SET DEFAULT, or SET DATA TYPE after ALTER COLUMN"
9710                };
9711
9712                return self.expected(message, self.peek_token());
9713            };
9714            AlterTableOperation::AlterColumn { column_name, op }
9715        } else if self.parse_keyword(Keyword::SWAP) {
9716            self.expect_keyword_is(Keyword::WITH)?;
9717            let table_name = self.parse_object_name(false)?;
9718            AlterTableOperation::SwapWith { table_name }
9719        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect)
9720            && self.parse_keywords(&[Keyword::OWNER, Keyword::TO])
9721        {
9722            let new_owner = self.parse_owner()?;
9723            AlterTableOperation::OwnerTo { new_owner }
9724        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
9725            && self.parse_keyword(Keyword::ATTACH)
9726        {
9727            AlterTableOperation::AttachPartition {
9728                partition: self.parse_part_or_partition()?,
9729            }
9730        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
9731            && self.parse_keyword(Keyword::DETACH)
9732        {
9733            AlterTableOperation::DetachPartition {
9734                partition: self.parse_part_or_partition()?,
9735            }
9736        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
9737            && self.parse_keyword(Keyword::FREEZE)
9738        {
9739            let partition = self.parse_part_or_partition()?;
9740            let with_name = if self.parse_keyword(Keyword::WITH) {
9741                self.expect_keyword_is(Keyword::NAME)?;
9742                Some(self.parse_identifier()?)
9743            } else {
9744                None
9745            };
9746            AlterTableOperation::FreezePartition {
9747                partition,
9748                with_name,
9749            }
9750        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
9751            && self.parse_keyword(Keyword::UNFREEZE)
9752        {
9753            let partition = self.parse_part_or_partition()?;
9754            let with_name = if self.parse_keyword(Keyword::WITH) {
9755                self.expect_keyword_is(Keyword::NAME)?;
9756                Some(self.parse_identifier()?)
9757            } else {
9758                None
9759            };
9760            AlterTableOperation::UnfreezePartition {
9761                partition,
9762                with_name,
9763            }
9764        } else if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
9765            self.expect_token(&Token::LParen)?;
9766            let exprs = self.parse_comma_separated(|parser| parser.parse_expr())?;
9767            self.expect_token(&Token::RParen)?;
9768            AlterTableOperation::ClusterBy { exprs }
9769        } else if self.parse_keywords(&[Keyword::SUSPEND, Keyword::RECLUSTER]) {
9770            AlterTableOperation::SuspendRecluster
9771        } else if self.parse_keywords(&[Keyword::RESUME, Keyword::RECLUSTER]) {
9772            AlterTableOperation::ResumeRecluster
9773        } else if self.parse_keyword(Keyword::LOCK) {
9774            let equals = self.consume_token(&Token::Eq);
9775            let lock = match self.parse_one_of_keywords(&[
9776                Keyword::DEFAULT,
9777                Keyword::EXCLUSIVE,
9778                Keyword::NONE,
9779                Keyword::SHARED,
9780            ]) {
9781                Some(Keyword::DEFAULT) => AlterTableLock::Default,
9782                Some(Keyword::EXCLUSIVE) => AlterTableLock::Exclusive,
9783                Some(Keyword::NONE) => AlterTableLock::None,
9784                Some(Keyword::SHARED) => AlterTableLock::Shared,
9785                _ => self.expected(
9786                    "DEFAULT, EXCLUSIVE, NONE or SHARED after LOCK [=]",
9787                    self.peek_token(),
9788                )?,
9789            };
9790            AlterTableOperation::Lock { equals, lock }
9791        } else if self.parse_keyword(Keyword::ALGORITHM) {
9792            let equals = self.consume_token(&Token::Eq);
9793            let algorithm = match self.parse_one_of_keywords(&[
9794                Keyword::DEFAULT,
9795                Keyword::INSTANT,
9796                Keyword::INPLACE,
9797                Keyword::COPY,
9798            ]) {
9799                Some(Keyword::DEFAULT) => AlterTableAlgorithm::Default,
9800                Some(Keyword::INSTANT) => AlterTableAlgorithm::Instant,
9801                Some(Keyword::INPLACE) => AlterTableAlgorithm::Inplace,
9802                Some(Keyword::COPY) => AlterTableAlgorithm::Copy,
9803                _ => self.expected(
9804                    "DEFAULT, INSTANT, INPLACE, or COPY after ALGORITHM [=]",
9805                    self.peek_token(),
9806                )?,
9807            };
9808            AlterTableOperation::Algorithm { equals, algorithm }
9809        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
9810            let equals = self.consume_token(&Token::Eq);
9811            let value = self.parse_number_value()?;
9812            AlterTableOperation::AutoIncrement { equals, value }
9813        } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::IDENTITY]) {
9814            let identity = if self.parse_keyword(Keyword::NONE) {
9815                ReplicaIdentity::None
9816            } else if self.parse_keyword(Keyword::FULL) {
9817                ReplicaIdentity::Full
9818            } else if self.parse_keyword(Keyword::DEFAULT) {
9819                ReplicaIdentity::Default
9820            } else if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
9821                ReplicaIdentity::Index(self.parse_identifier()?)
9822            } else {
9823                return self.expected(
9824                    "NONE, FULL, DEFAULT, or USING INDEX index_name after REPLICA IDENTITY",
9825                    self.peek_token(),
9826                );
9827            };
9828
9829            AlterTableOperation::ReplicaIdentity { identity }
9830        } else if self.parse_keywords(&[Keyword::VALIDATE, Keyword::CONSTRAINT]) {
9831            let name = self.parse_identifier()?;
9832            AlterTableOperation::ValidateConstraint { name }
9833        } else {
9834            let mut options =
9835                self.parse_options_with_keywords(&[Keyword::SET, Keyword::TBLPROPERTIES])?;
9836            if !options.is_empty() {
9837                AlterTableOperation::SetTblProperties {
9838                    table_properties: options,
9839                }
9840            } else {
9841                options = self.parse_options(Keyword::SET)?;
9842                if !options.is_empty() {
9843                    AlterTableOperation::SetOptionsParens { options }
9844                } else {
9845                    return self.expected(
9846                        "ADD, RENAME, PARTITION, SWAP, DROP, REPLICA IDENTITY, SET, or SET TBLPROPERTIES after ALTER TABLE",
9847                        self.peek_token(),
9848                    );
9849                }
9850            }
9851        };
9852        Ok(operation)
9853    }
9854
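    /// Parse the new data type (and, where the dialect supports it, an optional
    /// `USING expr` clause) that follows `ALTER COLUMN ... SET DATA TYPE` or
    /// `ALTER COLUMN ... TYPE`; `had_set` records whether the `SET DATA` prefix
    /// was present, e.g. (illustrative) `SET DATA TYPE BIGINT USING c::BIGINT`.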
9855    fn parse_set_data_type(&mut self, had_set: bool) -> Result<AlterColumnOperation, ParserError> {
9856        let data_type = self.parse_data_type()?;
9857        let using = if self.dialect.supports_alter_column_type_using()
9858            && self.parse_keyword(Keyword::USING)
9859        {
9860            Some(self.parse_expr()?)
9861        } else {
9862            None
9863        };
9864        Ok(AlterColumnOperation::SetDataType {
9865            data_type,
9866            using,
9867            had_set,
9868        })
9869    }
9870
9871    fn parse_part_or_partition(&mut self) -> Result<Partition, ParserError> {
9872        let keyword = self.expect_one_of_keywords(&[Keyword::PART, Keyword::PARTITION])?;
9873        match keyword {
9874            Keyword::PART => Ok(Partition::Part(self.parse_expr()?)),
9875            Keyword::PARTITION => Ok(Partition::Expr(self.parse_expr()?)),
9876            // unreachable because `expect_one_of_keywords` is used above
9877            unexpected_keyword => Err(ParserError::ParserError(
9878                format!("Internal parser error: expected any of {{PART, PARTITION}}, got {unexpected_keyword:?}"),
9879            )),
9880        }
9881    }
9882
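    /// Parse the remainder of an `ALTER ...` statement (the `ALTER` keyword has
    /// already been consumed), dispatching on the object type that follows, e.g.
    /// `ALTER TABLE ...`, `ALTER VIEW ...`, or `ALTER SCHEMA ...`.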
9883    pub fn parse_alter(&mut self) -> Result<Statement, ParserError> {
9884        let object_type = self.expect_one_of_keywords(&[
9885            Keyword::VIEW,
9886            Keyword::TYPE,
9887            Keyword::TABLE,
9888            Keyword::INDEX,
9889            Keyword::ROLE,
9890            Keyword::POLICY,
9891            Keyword::CONNECTOR,
9892            Keyword::ICEBERG,
9893            Keyword::SCHEMA,
9894            Keyword::USER,
9895            Keyword::OPERATOR,
9896        ])?;
9897        match object_type {
9898            Keyword::SCHEMA => {
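                // `parse_alter_schema` expects to see `ALTER SCHEMA` itself, so
                // rewind past both keywords before delegating to it.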
9899                self.prev_token();
9900                self.prev_token();
9901                self.parse_alter_schema()
9902            }
9903            Keyword::VIEW => self.parse_alter_view(),
9904            Keyword::TYPE => self.parse_alter_type(),
9905            Keyword::TABLE => self.parse_alter_table(false),
9906            Keyword::ICEBERG => {
9907                self.expect_keyword(Keyword::TABLE)?;
9908                self.parse_alter_table(true)
9909            }
9910            Keyword::INDEX => {
9911                let index_name = self.parse_object_name(false)?;
9912                let operation = if self.parse_keyword(Keyword::RENAME) {
9913                    if self.parse_keyword(Keyword::TO) {
9914                        let index_name = self.parse_object_name(false)?;
9915                        AlterIndexOperation::RenameIndex { index_name }
9916                    } else {
9917                        return self.expected("TO after RENAME", self.peek_token());
9918                    }
9919                } else {
9920                    return self.expected("RENAME after ALTER INDEX", self.peek_token());
9921                };
9922
9923                Ok(Statement::AlterIndex {
9924                    name: index_name,
9925                    operation,
9926                })
9927            }
9928            Keyword::OPERATOR => self.parse_alter_operator(),
9929            Keyword::ROLE => self.parse_alter_role(),
9930            Keyword::POLICY => self.parse_alter_policy(),
9931            Keyword::CONNECTOR => self.parse_alter_connector(),
9932            Keyword::USER => self.parse_alter_user(),
9933            // unreachable because `expect_one_of_keywords` is used above
9934            unexpected_keyword => Err(ParserError::ParserError(
9935                format!("Internal parser error: expected any of {{VIEW, TYPE, TABLE, INDEX, ROLE, POLICY, CONNECTOR, ICEBERG, SCHEMA, USER, OPERATOR}}, got {unexpected_keyword:?}"),
9936            )),
9937        }
9938    }
9939
9940    /// Parse a [Statement::AlterTable]
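    ///
    /// For example (illustrative):
    ///
    /// ```sql
    /// ALTER TABLE IF EXISTS t ADD COLUMN c INT, DROP COLUMN d
    /// ```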
9941    pub fn parse_alter_table(&mut self, iceberg: bool) -> Result<Statement, ParserError> {
9942        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9943        let only = self.parse_keyword(Keyword::ONLY); // [ ONLY ]
9944        let table_name = self.parse_object_name(false)?;
9945        let on_cluster = self.parse_optional_on_cluster()?;
9946        let operations = self.parse_comma_separated(Parser::parse_alter_table_operation)?;
9947
9948        let mut location = None;
9949        if self.parse_keyword(Keyword::LOCATION) {
9950            location = Some(HiveSetLocation {
9951                has_set: false,
9952                location: self.parse_identifier()?,
9953            });
9954        } else if self.parse_keywords(&[Keyword::SET, Keyword::LOCATION]) {
9955            location = Some(HiveSetLocation {
9956                has_set: true,
9957                location: self.parse_identifier()?,
9958            });
9959        }
9960
9961        let end_token = if self.peek_token_ref().token == Token::SemiColon {
9962            self.peek_token_ref().clone()
9963        } else {
9964            self.get_current_token().clone()
9965        };
9966
9967        Ok(AlterTable {
9968            name: table_name,
9969            if_exists,
9970            only,
9971            operations,
9972            location,
9973            on_cluster,
9974            table_type: if iceberg {
9975                Some(AlterTableType::Iceberg)
9976            } else {
9977                None
9978            },
9979            end_token: AttachedToken(end_token),
9980        }
9981        .into())
9982    }
9983
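    /// Parse a [Statement::AlterView], e.g. (illustrative):
    ///
    /// ```sql
    /// ALTER VIEW v (a, b) AS SELECT a, b FROM t
    /// ```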
9984    pub fn parse_alter_view(&mut self) -> Result<Statement, ParserError> {
9985        let name = self.parse_object_name(false)?;
9986        let columns = self.parse_parenthesized_column_list(Optional, false)?;
9987
9988        let with_options = self.parse_options(Keyword::WITH)?;
9989
9990        self.expect_keyword_is(Keyword::AS)?;
9991        let query = self.parse_query()?;
9992
9993        Ok(Statement::AlterView {
9994            name,
9995            columns,
9996            query,
9997            with_options,
9998        })
9999    }
10000
10001    /// Parse a [Statement::AlterType]
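    ///
    /// For example (illustrative):
    ///
    /// ```sql
    /// ALTER TYPE foo RENAME TO bar
    /// ALTER TYPE mood ADD VALUE IF NOT EXISTS happy AFTER sad
    /// ```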
10002    pub fn parse_alter_type(&mut self) -> Result<Statement, ParserError> {
10003        let name = self.parse_object_name(false)?;
10004
10005        if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
10006            let new_name = self.parse_identifier()?;
10007            Ok(Statement::AlterType(AlterType {
10008                name,
10009                operation: AlterTypeOperation::Rename(AlterTypeRename { new_name }),
10010            }))
10011        } else if self.parse_keywords(&[Keyword::ADD, Keyword::VALUE]) {
10012            let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
10013            let new_enum_value = self.parse_identifier()?;
10014            let position = if self.parse_keyword(Keyword::BEFORE) {
10015                Some(AlterTypeAddValuePosition::Before(self.parse_identifier()?))
10016            } else if self.parse_keyword(Keyword::AFTER) {
10017                Some(AlterTypeAddValuePosition::After(self.parse_identifier()?))
10018            } else {
10019                None
10020            };
10021
10022            Ok(Statement::AlterType(AlterType {
10023                name,
10024                operation: AlterTypeOperation::AddValue(AlterTypeAddValue {
10025                    if_not_exists,
10026                    value: new_enum_value,
10027                    position,
10028                }),
10029            }))
10030        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::VALUE]) {
10031            let existing_enum_value = self.parse_identifier()?;
10032            self.expect_keyword(Keyword::TO)?;
10033            let new_enum_value = self.parse_identifier()?;
10034
10035            Ok(Statement::AlterType(AlterType {
10036                name,
10037                operation: AlterTypeOperation::RenameValue(AlterTypeRenameValue {
10038                    from: existing_enum_value,
10039                    to: new_enum_value,
10040                }),
10041            }))
10042        } else {
10043            self.expected_ref(
10044                "{RENAME TO | { RENAME | ADD } VALUE}",
10045                self.peek_token_ref(),
10046            )
10047        }
10048    }
10049
10050    /// Parse a [Statement::AlterOperator]
10051    ///
10052    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-alteroperator.html)
10053    pub fn parse_alter_operator(&mut self) -> Result<Statement, ParserError> {
10054        let name = self.parse_operator_name()?;
10055
10056        // Parse (left_type, right_type)
10057        self.expect_token(&Token::LParen)?;
10058
10059        let left_type = if self.parse_keyword(Keyword::NONE) {
10060            None
10061        } else {
10062            Some(self.parse_data_type()?)
10063        };
10064
10065        self.expect_token(&Token::Comma)?;
10066        let right_type = self.parse_data_type()?;
10067        self.expect_token(&Token::RParen)?;
10068
10069        // Parse the operation
10070        let operation = if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
10071            let owner = if self.parse_keyword(Keyword::CURRENT_ROLE) {
10072                Owner::CurrentRole
10073            } else if self.parse_keyword(Keyword::CURRENT_USER) {
10074                Owner::CurrentUser
10075            } else if self.parse_keyword(Keyword::SESSION_USER) {
10076                Owner::SessionUser
10077            } else {
10078                Owner::Ident(self.parse_identifier()?)
10079            };
10080            AlterOperatorOperation::OwnerTo(owner)
10081        } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
10082            let schema_name = self.parse_object_name(false)?;
10083            AlterOperatorOperation::SetSchema { schema_name }
10084        } else if self.parse_keyword(Keyword::SET) {
10085            self.expect_token(&Token::LParen)?;
10086
10087            let mut options = Vec::new();
10088            loop {
10089                let keyword = self.expect_one_of_keywords(&[
10090                    Keyword::RESTRICT,
10091                    Keyword::JOIN,
10092                    Keyword::COMMUTATOR,
10093                    Keyword::NEGATOR,
10094                    Keyword::HASHES,
10095                    Keyword::MERGES,
10096                ])?;
10097
10098                match keyword {
10099                    Keyword::RESTRICT => {
10100                        self.expect_token(&Token::Eq)?;
10101                        let proc_name = if self.parse_keyword(Keyword::NONE) {
10102                            None
10103                        } else {
10104                            Some(self.parse_object_name(false)?)
10105                        };
10106                        options.push(OperatorOption::Restrict(proc_name));
10107                    }
10108                    Keyword::JOIN => {
10109                        self.expect_token(&Token::Eq)?;
10110                        let proc_name = if self.parse_keyword(Keyword::NONE) {
10111                            None
10112                        } else {
10113                            Some(self.parse_object_name(false)?)
10114                        };
10115                        options.push(OperatorOption::Join(proc_name));
10116                    }
10117                    Keyword::COMMUTATOR => {
10118                        self.expect_token(&Token::Eq)?;
10119                        let op_name = self.parse_operator_name()?;
10120                        options.push(OperatorOption::Commutator(op_name));
10121                    }
10122                    Keyword::NEGATOR => {
10123                        self.expect_token(&Token::Eq)?;
10124                        let op_name = self.parse_operator_name()?;
10125                        options.push(OperatorOption::Negator(op_name));
10126                    }
10127                    Keyword::HASHES => {
10128                        options.push(OperatorOption::Hashes);
10129                    }
10130                    Keyword::MERGES => {
10131                        options.push(OperatorOption::Merges);
10132                    }
10133                    unexpected_keyword => return Err(ParserError::ParserError(
10134                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in operator option"),
10135                    )),
10136                }
10137
10138                if !self.consume_token(&Token::Comma) {
10139                    break;
10140                }
10141            }
10142
10143            self.expect_token(&Token::RParen)?;
10144            AlterOperatorOperation::Set { options }
10145        } else {
10146            return self.expected_ref(
10147                "OWNER TO, SET SCHEMA, or SET after ALTER OPERATOR",
10148                self.peek_token_ref(),
10149            );
10150        };
10151
10152        Ok(Statement::AlterOperator(AlterOperator {
10153            name,
10154            left_type,
10155            right_type,
10156            operation,
10157        }))
10158    }
10159
10160    /// Parse a [Statement::AlterSchema]
10161    /// `ALTER SCHEMA [ IF EXISTS ] schema_name`
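    ///
    /// For example (illustrative):
    ///
    /// ```sql
    /// ALTER SCHEMA IF EXISTS s RENAME TO s2
    /// ALTER SCHEMA s OWNER TO new_owner
    /// ```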
10162    pub fn parse_alter_schema(&mut self) -> Result<Statement, ParserError> {
10163        self.expect_keywords(&[Keyword::ALTER, Keyword::SCHEMA])?;
10164        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10165        let name = self.parse_object_name(false)?;
10166        let operation = if self.parse_keywords(&[Keyword::SET, Keyword::OPTIONS]) {
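            // Rewind so that `parse_options` below sees the `OPTIONS` keyword again.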
10167            self.prev_token();
10168            let options = self.parse_options(Keyword::OPTIONS)?;
10169            AlterSchemaOperation::SetOptionsParens { options }
10170        } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT, Keyword::COLLATE]) {
10171            let collate = self.parse_expr()?;
10172            AlterSchemaOperation::SetDefaultCollate { collate }
10173        } else if self.parse_keywords(&[Keyword::ADD, Keyword::REPLICA]) {
10174            let replica = self.parse_identifier()?;
10175            let options = if self.peek_keyword(Keyword::OPTIONS) {
10176                Some(self.parse_options(Keyword::OPTIONS)?)
10177            } else {
10178                None
10179            };
10180            AlterSchemaOperation::AddReplica { replica, options }
10181        } else if self.parse_keywords(&[Keyword::DROP, Keyword::REPLICA]) {
10182            let replica = self.parse_identifier()?;
10183            AlterSchemaOperation::DropReplica { replica }
10184        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
10185            let new_name = self.parse_object_name(false)?;
10186            AlterSchemaOperation::Rename { name: new_name }
10187        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
10188            let owner = self.parse_owner()?;
10189            AlterSchemaOperation::OwnerTo { owner }
10190        } else {
10191            return self.expected_ref("ALTER SCHEMA operation", self.peek_token_ref());
10192        };
10193        Ok(Statement::AlterSchema(AlterSchema {
10194            name,
10195            if_exists,
10196            operations: vec![operation],
10197        }))
10198    }
10199
10200    /// Parse a `CALL procedure_name(arg1, arg2, ...)`
10201    /// or `CALL procedure_name` statement
10202    pub fn parse_call(&mut self) -> Result<Statement, ParserError> {
10203        let object_name = self.parse_object_name(false)?;
10204        if self.peek_token().token == Token::LParen {
10205            match self.parse_function(object_name)? {
10206                Expr::Function(f) => Ok(Statement::Call(f)),
10207                other => parser_err!(
10208                    format!("Expected a simple procedure call but found: {other}"),
10209                    self.peek_token().span.start
10210                ),
10211            }
10212        } else {
10213            Ok(Statement::Call(Function {
10214                name: object_name,
10215                uses_odbc_syntax: false,
10216                parameters: FunctionArguments::None,
10217                args: FunctionArguments::None,
10218                over: None,
10219                filter: None,
10220                null_treatment: None,
10221                within_group: vec![],
10222            }))
10223        }
10224    }
10225
10226    /// Parse a `COPY` statement
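    ///
    /// For example (illustrative):
    ///
    /// ```sql
    /// COPY t (a, b) FROM STDIN WITH (FORMAT csv, HEADER true)
    /// ```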
10227    pub fn parse_copy(&mut self) -> Result<Statement, ParserError> {
10228        let source;
10229        if self.consume_token(&Token::LParen) {
10230            source = CopySource::Query(self.parse_query()?);
10231            self.expect_token(&Token::RParen)?;
10232        } else {
10233            let table_name = self.parse_object_name(false)?;
10234            let columns = self.parse_parenthesized_column_list(Optional, false)?;
10235            source = CopySource::Table {
10236                table_name,
10237                columns,
10238            };
10239        }
10240        let to = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::TO]) {
10241            Some(Keyword::FROM) => false,
10242            Some(Keyword::TO) => true,
10243            _ => self.expected("FROM or TO", self.peek_token())?,
10244        };
10245        if !to {
10246            // Use a nested `if` here rather than `if !to && let ...`: let-chains are
10247            // unstable (https://github.com/rust-lang/rust/issues/53667).
10248            if let CopySource::Query(_) = source {
10249                return Err(ParserError::ParserError(
10250                    "COPY ... FROM does not support query as a source".to_string(),
10251                ));
10252            }
10253        }
10254        let target = if self.parse_keyword(Keyword::STDIN) {
10255            CopyTarget::Stdin
10256        } else if self.parse_keyword(Keyword::STDOUT) {
10257            CopyTarget::Stdout
10258        } else if self.parse_keyword(Keyword::PROGRAM) {
10259            CopyTarget::Program {
10260                command: self.parse_literal_string()?,
10261            }
10262        } else {
10263            CopyTarget::File {
10264                filename: self.parse_literal_string()?,
10265            }
10266        };
10267        let _ = self.parse_keyword(Keyword::WITH); // [ WITH ]
10268        let mut options = vec![];
10269        if self.consume_token(&Token::LParen) {
10270            options = self.parse_comma_separated(Parser::parse_copy_option)?;
10271            self.expect_token(&Token::RParen)?;
10272        }
10273        let mut legacy_options = vec![];
10274        while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
10275            legacy_options.push(opt);
10276        }
10277        let values = if let CopyTarget::Stdin = target {
10278            self.expect_token(&Token::SemiColon)?;
10279            self.parse_tsv()
10280        } else {
10281            vec![]
10282        };
10283        Ok(Statement::Copy {
10284            source,
10285            to,
10286            target,
10287            options,
10288            legacy_options,
10289            values,
10290        })
10291    }
10292
10293    /// Parse [Statement::Open]
10294    fn parse_open(&mut self) -> Result<Statement, ParserError> {
10295        self.expect_keyword(Keyword::OPEN)?;
10296        Ok(Statement::Open(OpenStatement {
10297            cursor_name: self.parse_identifier()?,
10298        }))
10299    }
10300
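    /// Parse a `CLOSE` statement: either `CLOSE ALL` or `CLOSE cursor_name`.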
10301    pub fn parse_close(&mut self) -> Result<Statement, ParserError> {
10302        let cursor = if self.parse_keyword(Keyword::ALL) {
10303            CloseCursor::All
10304        } else {
10305            let name = self.parse_identifier()?;
10306
10307            CloseCursor::Specific { name }
10308        };
10309
10310        Ok(Statement::Close { cursor })
10311    }
10312
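    /// Parse a single option from the parenthesized option list of `COPY`,
    /// e.g. (illustrative) `FORMAT csv`, `DELIMITER ','`, or `FORCE_QUOTE (a, b)`.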
10313    fn parse_copy_option(&mut self) -> Result<CopyOption, ParserError> {
10314        let ret = match self.parse_one_of_keywords(&[
10315            Keyword::FORMAT,
10316            Keyword::FREEZE,
10317            Keyword::DELIMITER,
10318            Keyword::NULL,
10319            Keyword::HEADER,
10320            Keyword::QUOTE,
10321            Keyword::ESCAPE,
10322            Keyword::FORCE_QUOTE,
10323            Keyword::FORCE_NOT_NULL,
10324            Keyword::FORCE_NULL,
10325            Keyword::ENCODING,
10326        ]) {
10327            Some(Keyword::FORMAT) => CopyOption::Format(self.parse_identifier()?),
10328            Some(Keyword::FREEZE) => CopyOption::Freeze(!matches!(
10329                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
10330                Some(Keyword::FALSE)
10331            )),
10332            Some(Keyword::DELIMITER) => CopyOption::Delimiter(self.parse_literal_char()?),
10333            Some(Keyword::NULL) => CopyOption::Null(self.parse_literal_string()?),
10334            Some(Keyword::HEADER) => CopyOption::Header(!matches!(
10335                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
10336                Some(Keyword::FALSE)
10337            )),
10338            Some(Keyword::QUOTE) => CopyOption::Quote(self.parse_literal_char()?),
10339            Some(Keyword::ESCAPE) => CopyOption::Escape(self.parse_literal_char()?),
10340            Some(Keyword::FORCE_QUOTE) => {
10341                CopyOption::ForceQuote(self.parse_parenthesized_column_list(Mandatory, false)?)
10342            }
10343            Some(Keyword::FORCE_NOT_NULL) => {
10344                CopyOption::ForceNotNull(self.parse_parenthesized_column_list(Mandatory, false)?)
10345            }
10346            Some(Keyword::FORCE_NULL) => {
10347                CopyOption::ForceNull(self.parse_parenthesized_column_list(Mandatory, false)?)
10348            }
10349            Some(Keyword::ENCODING) => CopyOption::Encoding(self.parse_literal_string()?),
10350            _ => self.expected("option", self.peek_token())?,
10351        };
10352        Ok(ret)
10353    }
10354
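    /// Parse a single legacy (space-separated, unparenthesized) `COPY`/`UNLOAD`
    /// option, e.g. (illustrative) `CSV HEADER`, `DELIMITER AS ','`, or
    /// `IGNOREHEADER 1`.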
10355    fn parse_copy_legacy_option(&mut self) -> Result<CopyLegacyOption, ParserError> {
10356        // FORMAT [ AS ] is optional
10357        if self.parse_keyword(Keyword::FORMAT) {
10358            let _ = self.parse_keyword(Keyword::AS);
10359        }
10360
10361        let ret = match self.parse_one_of_keywords(&[
10362            Keyword::ACCEPTANYDATE,
10363            Keyword::ACCEPTINVCHARS,
10364            Keyword::ADDQUOTES,
10365            Keyword::ALLOWOVERWRITE,
10366            Keyword::BINARY,
10367            Keyword::BLANKSASNULL,
10368            Keyword::BZIP2,
10369            Keyword::CLEANPATH,
10370            Keyword::COMPUPDATE,
10371            Keyword::CSV,
10372            Keyword::DATEFORMAT,
10373            Keyword::DELIMITER,
10374            Keyword::EMPTYASNULL,
10375            Keyword::ENCRYPTED,
10376            Keyword::ESCAPE,
10377            Keyword::EXTENSION,
10378            Keyword::FIXEDWIDTH,
10379            Keyword::GZIP,
10380            Keyword::HEADER,
10381            Keyword::IAM_ROLE,
10382            Keyword::IGNOREHEADER,
10383            Keyword::JSON,
10384            Keyword::MANIFEST,
10385            Keyword::MAXFILESIZE,
10386            Keyword::NULL,
10387            Keyword::PARALLEL,
10388            Keyword::PARQUET,
10389            Keyword::PARTITION,
10390            Keyword::REGION,
10391            Keyword::REMOVEQUOTES,
10392            Keyword::ROWGROUPSIZE,
10393            Keyword::STATUPDATE,
10394            Keyword::TIMEFORMAT,
10395            Keyword::TRUNCATECOLUMNS,
10396            Keyword::ZSTD,
10397        ]) {
10398            Some(Keyword::ACCEPTANYDATE) => CopyLegacyOption::AcceptAnyDate,
10399            Some(Keyword::ACCEPTINVCHARS) => {
10400                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
10401                let ch = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) {
10402                    Some(self.parse_literal_string()?)
10403                } else {
10404                    None
10405                };
10406                CopyLegacyOption::AcceptInvChars(ch)
10407            }
10408            Some(Keyword::ADDQUOTES) => CopyLegacyOption::AddQuotes,
10409            Some(Keyword::ALLOWOVERWRITE) => CopyLegacyOption::AllowOverwrite,
10410            Some(Keyword::BINARY) => CopyLegacyOption::Binary,
10411            Some(Keyword::BLANKSASNULL) => CopyLegacyOption::BlankAsNull,
10412            Some(Keyword::BZIP2) => CopyLegacyOption::Bzip2,
10413            Some(Keyword::CLEANPATH) => CopyLegacyOption::CleanPath,
10414            Some(Keyword::COMPUPDATE) => {
10415                let preset = self.parse_keyword(Keyword::PRESET);
10416                let enabled = match self.parse_one_of_keywords(&[
10417                    Keyword::TRUE,
10418                    Keyword::FALSE,
10419                    Keyword::ON,
10420                    Keyword::OFF,
10421                ]) {
10422                    Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
10423                    Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
10424                    _ => None,
10425                };
10426                CopyLegacyOption::CompUpdate { preset, enabled }
10427            }
10428            Some(Keyword::CSV) => CopyLegacyOption::Csv({
10429                let mut opts = vec![];
10430                while let Some(opt) =
10431                    self.maybe_parse(|parser| parser.parse_copy_legacy_csv_option())?
10432                {
10433                    opts.push(opt);
10434                }
10435                opts
10436            }),
10437            Some(Keyword::DATEFORMAT) => {
10438                let _ = self.parse_keyword(Keyword::AS);
10439                let fmt = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) {
10440                    Some(self.parse_literal_string()?)
10441                } else {
10442                    None
10443                };
10444                CopyLegacyOption::DateFormat(fmt)
10445            }
10446            Some(Keyword::DELIMITER) => {
10447                let _ = self.parse_keyword(Keyword::AS);
10448                CopyLegacyOption::Delimiter(self.parse_literal_char()?)
10449            }
10450            Some(Keyword::EMPTYASNULL) => CopyLegacyOption::EmptyAsNull,
10451            Some(Keyword::ENCRYPTED) => {
10452                let auto = self.parse_keyword(Keyword::AUTO);
10453                CopyLegacyOption::Encrypted { auto }
10454            }
10455            Some(Keyword::ESCAPE) => CopyLegacyOption::Escape,
10456            Some(Keyword::EXTENSION) => {
10457                let ext = self.parse_literal_string()?;
10458                CopyLegacyOption::Extension(ext)
10459            }
10460            Some(Keyword::FIXEDWIDTH) => {
10461                let spec = self.parse_literal_string()?;
10462                CopyLegacyOption::FixedWidth(spec)
10463            }
10464            Some(Keyword::GZIP) => CopyLegacyOption::Gzip,
10465            Some(Keyword::HEADER) => CopyLegacyOption::Header,
10466            Some(Keyword::IAM_ROLE) => CopyLegacyOption::IamRole(self.parse_iam_role_kind()?),
10467            Some(Keyword::IGNOREHEADER) => {
10468                let _ = self.parse_keyword(Keyword::AS);
10469                let num_rows = self.parse_literal_uint()?;
10470                CopyLegacyOption::IgnoreHeader(num_rows)
10471            }
10472            Some(Keyword::JSON) => CopyLegacyOption::Json,
10473            Some(Keyword::MANIFEST) => {
10474                let verbose = self.parse_keyword(Keyword::VERBOSE);
10475                CopyLegacyOption::Manifest { verbose }
10476            }
10477            Some(Keyword::MAXFILESIZE) => {
10478                let _ = self.parse_keyword(Keyword::AS);
10479                let size = self.parse_number_value()?.value;
10480                let unit = match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
10481                    Some(Keyword::MB) => Some(FileSizeUnit::MB),
10482                    Some(Keyword::GB) => Some(FileSizeUnit::GB),
10483                    _ => None,
10484                };
10485                CopyLegacyOption::MaxFileSize(FileSize { size, unit })
10486            }
10487            Some(Keyword::NULL) => {
10488                let _ = self.parse_keyword(Keyword::AS);
10489                CopyLegacyOption::Null(self.parse_literal_string()?)
10490            }
10491            Some(Keyword::PARALLEL) => {
10492                let enabled = match self.parse_one_of_keywords(&[
10493                    Keyword::TRUE,
10494                    Keyword::FALSE,
10495                    Keyword::ON,
10496                    Keyword::OFF,
10497                ]) {
10498                    Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
10499                    Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
10500                    _ => None,
10501                };
10502                CopyLegacyOption::Parallel(enabled)
10503            }
10504            Some(Keyword::PARQUET) => CopyLegacyOption::Parquet,
10505            Some(Keyword::PARTITION) => {
10506                self.expect_keyword(Keyword::BY)?;
10507                let columns = self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?;
10508                let include = self.parse_keyword(Keyword::INCLUDE);
10509                CopyLegacyOption::PartitionBy(UnloadPartitionBy { columns, include })
10510            }
10511            Some(Keyword::REGION) => {
10512                let _ = self.parse_keyword(Keyword::AS);
10513                let region = self.parse_literal_string()?;
10514                CopyLegacyOption::Region(region)
10515            }
10516            Some(Keyword::REMOVEQUOTES) => CopyLegacyOption::RemoveQuotes,
10517            Some(Keyword::ROWGROUPSIZE) => {
10518                let _ = self.parse_keyword(Keyword::AS);
10519                let file_size = self.parse_file_size()?;
10520                CopyLegacyOption::RowGroupSize(file_size)
10521            }
10522            Some(Keyword::STATUPDATE) => {
10523                let enabled = match self.parse_one_of_keywords(&[
10524                    Keyword::TRUE,
10525                    Keyword::FALSE,
10526                    Keyword::ON,
10527                    Keyword::OFF,
10528                ]) {
10529                    Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
10530                    Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
10531                    _ => None,
10532                };
10533                CopyLegacyOption::StatUpdate(enabled)
10534            }
10535            Some(Keyword::TIMEFORMAT) => {
10536                let _ = self.parse_keyword(Keyword::AS);
10537                let fmt = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) {
10538                    Some(self.parse_literal_string()?)
10539                } else {
10540                    None
10541                };
10542                CopyLegacyOption::TimeFormat(fmt)
10543            }
10544            Some(Keyword::TRUNCATECOLUMNS) => CopyLegacyOption::TruncateColumns,
10545            Some(Keyword::ZSTD) => CopyLegacyOption::Zstd,
10546            _ => self.expected("option", self.peek_token())?,
10547        };
10548        Ok(ret)
10549    }
10550
10551    fn parse_file_size(&mut self) -> Result<FileSize, ParserError> {
10552        let size = self.parse_number_value()?.value;
10553        let unit = self.maybe_parse_file_size_unit();
10554        Ok(FileSize { size, unit })
10555    }
10556
10557    fn maybe_parse_file_size_unit(&mut self) -> Option<FileSizeUnit> {
10558        match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
10559            Some(Keyword::MB) => Some(FileSizeUnit::MB),
10560            Some(Keyword::GB) => Some(FileSizeUnit::GB),
10561            _ => None,
10562        }
10563    }
10564
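    /// Parses an IAM role specification: either the keyword `DEFAULT` or a role
    /// ARN given as a string literal, e.g. `'arn:aws:iam::123456789012:role/MyRole'`
    /// (an illustrative value).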
10565    fn parse_iam_role_kind(&mut self) -> Result<IamRoleKind, ParserError> {
10566        if self.parse_keyword(Keyword::DEFAULT) {
10567            Ok(IamRoleKind::Default)
10568        } else {
10569            let arn = self.parse_literal_string()?;
10570            Ok(IamRoleKind::Arn(arn))
10571        }
10572    }
10573
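    /// Parses one legacy CSV option of a `COPY ... CSV` statement, e.g. `HEADER`,
    /// `QUOTE AS '"'`, `FORCE NOT NULL col1, col2`, or `FORCE QUOTE col1`
    /// (illustrative fragments matching the arms below).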
10574    fn parse_copy_legacy_csv_option(&mut self) -> Result<CopyLegacyCsvOption, ParserError> {
10575        let ret = match self.parse_one_of_keywords(&[
10576            Keyword::HEADER,
10577            Keyword::QUOTE,
10578            Keyword::ESCAPE,
10579            Keyword::FORCE,
10580        ]) {
10581            Some(Keyword::HEADER) => CopyLegacyCsvOption::Header,
10582            Some(Keyword::QUOTE) => {
10583                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
10584                CopyLegacyCsvOption::Quote(self.parse_literal_char()?)
10585            }
10586            Some(Keyword::ESCAPE) => {
10587                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
10588                CopyLegacyCsvOption::Escape(self.parse_literal_char()?)
10589            }
10590            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) => {
10591                CopyLegacyCsvOption::ForceNotNull(
10592                    self.parse_comma_separated(|p| p.parse_identifier())?,
10593                )
10594            }
10595            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::QUOTE]) => {
10596                CopyLegacyCsvOption::ForceQuote(
10597                    self.parse_comma_separated(|p| p.parse_identifier())?,
10598                )
10599            }
10600            _ => self.expected("csv option", self.peek_token())?,
10601        };
10602        Ok(ret)
10603    }
10604
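    /// Parses a string literal that must contain exactly one character,
    /// e.g. the `'"'` in `QUOTE AS '"'` from the legacy CSV options above
    /// (illustrative usage).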
10605    fn parse_literal_char(&mut self) -> Result<char, ParserError> {
10606        let s = self.parse_literal_string()?;
10607        if s.len() != 1 {
10608            let loc = self
10609                .tokens
10610                .get(self.index - 1)
10611                .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
10612            return parser_err!(format!("Expected a char, found {s:?}"), loc);
10613        }
10614        Ok(s.chars().next().unwrap())
10615    }
10616
10617    /// Parse tab-separated values in a
10618    /// COPY payload.
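    ///
    /// In the payload, values are separated by tabs or newlines, `\N` denotes a
    /// NULL value, and `\.` terminates the data (a summary of the loop below,
    /// not a full specification).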
10619    pub fn parse_tsv(&mut self) -> Vec<Option<String>> {
10620        self.parse_tab_value()
10621    }
10622
10623    pub fn parse_tab_value(&mut self) -> Vec<Option<String>> {
10624        let mut values = vec![];
10625        let mut content = String::from("");
10626        while let Some(t) = self.next_token_no_skip().map(|t| &t.token) {
10627            match t {
10628                Token::Whitespace(Whitespace::Tab) => {
10629                    values.push(Some(content.to_string()));
10630                    content.clear();
10631                }
10632                Token::Whitespace(Whitespace::Newline) => {
10633                    values.push(Some(content.to_string()));
10634                    content.clear();
10635                }
10636                Token::Backslash => {
10637                    if self.consume_token(&Token::Period) {
10638                        return values;
10639                    }
10640                    if let Token::Word(w) = self.next_token().token {
10641                        if w.value == "N" {
10642                            values.push(None);
10643                        }
10644                    }
10645                }
10646                _ => {
10647                    content.push_str(&t.to_string());
10648                }
10649            }
10650        }
10651        values
10652    }
10653
10654    /// Parse a literal value (numbers, strings, date/time, booleans)
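    ///
    /// For illustration, literals such as `42`, `'hello'`, `TRUE` (when the
    /// dialect supports boolean literals), and `NULL` are handled here; the
    /// returned [`ValueWithSpan`] records the span of the consumed token.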
10655    pub fn parse_value(&mut self) -> Result<ValueWithSpan, ParserError> {
10656        let next_token = self.next_token();
10657        let span = next_token.span;
10658        let ok_value = |value: Value| Ok(value.with_span(span));
10659        match next_token.token {
10660            Token::Word(w) => match w.keyword {
10661                Keyword::TRUE if self.dialect.supports_boolean_literals() => {
10662                    ok_value(Value::Boolean(true))
10663                }
10664                Keyword::FALSE if self.dialect.supports_boolean_literals() => {
10665                    ok_value(Value::Boolean(false))
10666                }
10667                Keyword::NULL => ok_value(Value::Null),
10668                Keyword::NoKeyword if w.quote_style.is_some() => match w.quote_style {
10669                    Some('"') => ok_value(Value::DoubleQuotedString(w.value)),
10670                    Some('\'') => ok_value(Value::SingleQuotedString(w.value)),
10671                    _ => self.expected(
10672                        "A value?",
10673                        TokenWithSpan {
10674                            token: Token::Word(w),
10675                            span,
10676                        },
10677                    )?,
10678                },
10679                _ => self.expected(
10680                    "a concrete value",
10681                    TokenWithSpan {
10682                        token: Token::Word(w),
10683                        span,
10684                    },
10685                ),
10686            },
10687            // The call to n.parse() returns a bigdecimal when the
10688            // bigdecimal feature is enabled, and is otherwise a no-op
10689            // (i.e., it returns the input string).
10690            Token::Number(n, l) => ok_value(Value::Number(Self::parse(n, span.start)?, l)),
10691            Token::SingleQuotedString(ref s) => ok_value(Value::SingleQuotedString(
10692                self.maybe_concat_string_literal(s.to_string()),
10693            )),
10694            Token::DoubleQuotedString(ref s) => ok_value(Value::DoubleQuotedString(
10695                self.maybe_concat_string_literal(s.to_string()),
10696            )),
10697            Token::TripleSingleQuotedString(ref s) => {
10698                ok_value(Value::TripleSingleQuotedString(s.to_string()))
10699            }
10700            Token::TripleDoubleQuotedString(ref s) => {
10701                ok_value(Value::TripleDoubleQuotedString(s.to_string()))
10702            }
10703            Token::DollarQuotedString(ref s) => ok_value(Value::DollarQuotedString(s.clone())),
10704            Token::SingleQuotedByteStringLiteral(ref s) => {
10705                ok_value(Value::SingleQuotedByteStringLiteral(s.clone()))
10706            }
10707            Token::DoubleQuotedByteStringLiteral(ref s) => {
10708                ok_value(Value::DoubleQuotedByteStringLiteral(s.clone()))
10709            }
10710            Token::TripleSingleQuotedByteStringLiteral(ref s) => {
10711                ok_value(Value::TripleSingleQuotedByteStringLiteral(s.clone()))
10712            }
10713            Token::TripleDoubleQuotedByteStringLiteral(ref s) => {
10714                ok_value(Value::TripleDoubleQuotedByteStringLiteral(s.clone()))
10715            }
10716            Token::SingleQuotedRawStringLiteral(ref s) => {
10717                ok_value(Value::SingleQuotedRawStringLiteral(s.clone()))
10718            }
10719            Token::DoubleQuotedRawStringLiteral(ref s) => {
10720                ok_value(Value::DoubleQuotedRawStringLiteral(s.clone()))
10721            }
10722            Token::TripleSingleQuotedRawStringLiteral(ref s) => {
10723                ok_value(Value::TripleSingleQuotedRawStringLiteral(s.clone()))
10724            }
10725            Token::TripleDoubleQuotedRawStringLiteral(ref s) => {
10726                ok_value(Value::TripleDoubleQuotedRawStringLiteral(s.clone()))
10727            }
10728            Token::NationalStringLiteral(ref s) => {
10729                ok_value(Value::NationalStringLiteral(s.to_string()))
10730            }
10731            Token::EscapedStringLiteral(ref s) => {
10732                ok_value(Value::EscapedStringLiteral(s.to_string()))
10733            }
10734            Token::UnicodeStringLiteral(ref s) => {
10735                ok_value(Value::UnicodeStringLiteral(s.to_string()))
10736            }
10737            Token::HexStringLiteral(ref s) => ok_value(Value::HexStringLiteral(s.to_string())),
10738            Token::Placeholder(ref s) => ok_value(Value::Placeholder(s.to_string())),
10739            tok @ Token::Colon | tok @ Token::AtSign => {
10740                // 1. Not calling self.parse_identifier(false)?
10741                //    because only for placeholders do we want to accept
10742                //    numbers as identifiers. This is because Snowflake
10743                //    allows numbers as placeholders.
10744                // 2. Not calling self.next_token() to enforce that `tok`
10745                //    is followed immediately by a word/number, i.e.
10746                //    without any whitespace in between.
10747                let next_token = self.next_token_no_skip().unwrap_or(&EOF_TOKEN).clone();
10748                let ident = match next_token.token {
10749                    Token::Word(w) => Ok(w.into_ident(next_token.span)),
10750                    Token::Number(w, false) => Ok(Ident::with_span(next_token.span, w)),
10751                    _ => self.expected("placeholder", next_token),
10752                }?;
10753                Ok(Value::Placeholder(tok.to_string() + &ident.value)
10754                    .with_span(Span::new(span.start, ident.span.end)))
10755            }
10756            unexpected => self.expected(
10757                "a value",
10758                TokenWithSpan {
10759                    token: unexpected,
10760                    span,
10761                },
10762            ),
10763        }
10764    }
10765
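    /// If the dialect supports string literal concatenation, adjacent string
    /// literals are appended to the already-parsed one, so that e.g.
    /// `'foo' 'bar'` yields a single `'foobar'` value (illustrative example).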
10766    fn maybe_concat_string_literal(&mut self, mut str: String) -> String {
10767        if self.dialect.supports_string_literal_concatenation() {
10768            while let Token::SingleQuotedString(ref s) | Token::DoubleQuotedString(ref s) =
10769                self.peek_token_ref().token
10770            {
10771                str.push_str(s.clone().as_str());
10772                self.advance_token();
10773            }
10774        }
10775        str
10776    }
10777
10778    /// Parse an unsigned numeric literal
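    ///
    /// e.g. `10`; placeholders such as `$1` are also accepted here, in dialects
    /// where they tokenize as placeholders.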
10779    pub fn parse_number_value(&mut self) -> Result<ValueWithSpan, ParserError> {
10780        let value_wrapper = self.parse_value()?;
10781        match &value_wrapper.value {
10782            Value::Number(_, _) => Ok(value_wrapper),
10783            Value::Placeholder(_) => Ok(value_wrapper),
10784            _ => {
10785                self.prev_token();
10786                self.expected("literal number", self.peek_token())
10787            }
10788        }
10789    }
10790
10791    /// Parse a numeric literal as an expression. Returns an [`Expr::UnaryOp`] if the number is signed,
10792    /// otherwise returns an [`Expr::Value`].
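    ///
    /// For illustration, `-2` parses to a unary minus wrapping the literal `2`,
    /// while `3` parses to a plain value expression.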
10793    pub fn parse_number(&mut self) -> Result<Expr, ParserError> {
10794        let next_token = self.next_token();
10795        match next_token.token {
10796            Token::Plus => Ok(Expr::UnaryOp {
10797                op: UnaryOperator::Plus,
10798                expr: Box::new(Expr::Value(self.parse_number_value()?)),
10799            }),
10800            Token::Minus => Ok(Expr::UnaryOp {
10801                op: UnaryOperator::Minus,
10802                expr: Box::new(Expr::Value(self.parse_number_value()?)),
10803            }),
10804            _ => {
10805                self.prev_token();
10806                Ok(Expr::Value(self.parse_number_value()?))
10807            }
10808        }
10809    }
10810
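    /// Parses the string portion of an introduced string expression: a
    /// single-quoted, double-quoted, or hex string literal (e.g. `'abc'` or
    /// `X'616263'`, illustrative inputs).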
10811    fn parse_introduced_string_expr(&mut self) -> Result<Expr, ParserError> {
10812        let next_token = self.next_token();
10813        let span = next_token.span;
10814        match next_token.token {
10815            Token::SingleQuotedString(ref s) => Ok(Expr::Value(
10816                Value::SingleQuotedString(s.to_string()).with_span(span),
10817            )),
10818            Token::DoubleQuotedString(ref s) => Ok(Expr::Value(
10819                Value::DoubleQuotedString(s.to_string()).with_span(span),
10820            )),
10821            Token::HexStringLiteral(ref s) => Ok(Expr::Value(
10822                Value::HexStringLiteral(s.to_string()).with_span(span),
10823            )),
10824            unexpected => self.expected(
10825                "a string value",
10826                TokenWithSpan {
10827                    token: unexpected,
10828                    span,
10829                },
10830            ),
10831        }
10832    }
10833
10834    /// Parse an unsigned literal integer/long
10835    pub fn parse_literal_uint(&mut self) -> Result<u64, ParserError> {
10836        let next_token = self.next_token();
10837        match next_token.token {
10838            Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start),
10839            _ => self.expected("literal int", next_token),
10840        }
10841    }
10842
10843    /// Parse the body of a `CREATE FUNCTION` specified as a string.
10844    /// e.g. `CREATE FUNCTION ... AS $$ body $$`.
10845    fn parse_create_function_body_string(&mut self) -> Result<CreateFunctionBody, ParserError> {
10846        let parse_string_expr = |parser: &mut Parser| -> Result<Expr, ParserError> {
10847            let peek_token = parser.peek_token();
10848            let span = peek_token.span;
10849            match peek_token.token {
10850                Token::DollarQuotedString(s) if dialect_of!(parser is PostgreSqlDialect | GenericDialect) =>
10851                {
10852                    parser.next_token();
10853                    Ok(Expr::Value(Value::DollarQuotedString(s).with_span(span)))
10854                }
10855                _ => Ok(Expr::Value(
10856                    Value::SingleQuotedString(parser.parse_literal_string()?).with_span(span),
10857                )),
10858            }
10859        };
10860
10861        Ok(CreateFunctionBody::AsBeforeOptions {
10862            body: parse_string_expr(self)?,
10863            link_symbol: if self.consume_token(&Token::Comma) {
10864                Some(parse_string_expr(self)?)
10865            } else {
10866                None
10867            },
10868        })
10869    }
10870
10871    /// Parse a literal string
10872    pub fn parse_literal_string(&mut self) -> Result<String, ParserError> {
10873        let next_token = self.next_token();
10874        match next_token.token {
10875            Token::Word(Word {
10876                value,
10877                keyword: Keyword::NoKeyword,
10878                ..
10879            }) => Ok(value),
10880            Token::SingleQuotedString(s) => Ok(s),
10881            Token::DoubleQuotedString(s) => Ok(s),
10882            Token::EscapedStringLiteral(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
10883                Ok(s)
10884            }
10885            Token::UnicodeStringLiteral(s) => Ok(s),
10886            _ => self.expected("literal string", next_token),
10887        }
10888    }
10889
10890    /// Parse a boolean literal: the keyword `TRUE` or `FALSE`
10891    pub(crate) fn parse_boolean_string(&mut self) -> Result<bool, ParserError> {
10892        match self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]) {
10893            Some(Keyword::TRUE) => Ok(true),
10894            Some(Keyword::FALSE) => Ok(false),
10895            _ => self.expected("TRUE or FALSE", self.peek_token()),
10896        }
10897    }
10898
10899    /// Parse a literal unicode normalization clause
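    ///
    /// For illustration, this handles the `[NOT] [form] NORMALIZED` tail of
    /// expressions such as `txt IS NFC NORMALIZED` or `txt IS NOT NORMALIZED`,
    /// after the caller has already consumed the expression and the `IS` keyword.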
10900    pub fn parse_unicode_is_normalized(&mut self, expr: Expr) -> Result<Expr, ParserError> {
10901        let neg = self.parse_keyword(Keyword::NOT);
10902        let normalized_form = self.maybe_parse(|parser| {
10903            match parser.parse_one_of_keywords(&[
10904                Keyword::NFC,
10905                Keyword::NFD,
10906                Keyword::NFKC,
10907                Keyword::NFKD,
10908            ]) {
10909                Some(Keyword::NFC) => Ok(NormalizationForm::NFC),
10910                Some(Keyword::NFD) => Ok(NormalizationForm::NFD),
10911                Some(Keyword::NFKC) => Ok(NormalizationForm::NFKC),
10912                Some(Keyword::NFKD) => Ok(NormalizationForm::NFKD),
10913                _ => parser.expected("unicode normalization form", parser.peek_token()),
10914            }
10915        })?;
10916        if self.parse_keyword(Keyword::NORMALIZED) {
10917            return Ok(Expr::IsNormalized {
10918                expr: Box::new(expr),
10919                form: normalized_form,
10920                negated: neg,
10921            });
10922        }
10923        self.expected("unicode normalization form", self.peek_token())
10924    }
10925
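    /// Parses a parenthesized list of enum members, e.g. `('a', 'b')` or, with
    /// explicit values, `('a' = 1, 'b' = 2)` (illustrative member lists).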
10926    pub fn parse_enum_values(&mut self) -> Result<Vec<EnumMember>, ParserError> {
10927        self.expect_token(&Token::LParen)?;
10928        let values = self.parse_comma_separated(|parser| {
10929            let name = parser.parse_literal_string()?;
10930            let e = if parser.consume_token(&Token::Eq) {
10931                let value = parser.parse_number()?;
10932                EnumMember::NamedValue(name, value)
10933            } else {
10934                EnumMember::Name(name)
10935            };
10936            Ok(e)
10937        })?;
10938        self.expect_token(&Token::RParen)?;
10939
10940        Ok(values)
10941    }
10942
10943    /// Parse a SQL datatype (in the context of a CREATE TABLE statement for example)
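    ///
    /// For illustration, inputs such as `INT`, `VARCHAR(255)`,
    /// `TIMESTAMP WITH TIME ZONE`, or `ARRAY<INT>` (in dialects using
    /// angle-bracket array syntax) are handled here; unrecognized names are
    /// parsed as [`DataType::Custom`].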
10944    pub fn parse_data_type(&mut self) -> Result<DataType, ParserError> {
10945        let (ty, trailing_bracket) = self.parse_data_type_helper()?;
10946        if trailing_bracket.0 {
10947            return parser_err!(
10948                format!("unmatched > after parsing data type {ty}"),
10949                self.peek_token()
10950            );
10951        }
10952
10953        Ok(ty)
10954    }
10955
10956    fn parse_data_type_helper(
10957        &mut self,
10958    ) -> Result<(DataType, MatchedTrailingBracket), ParserError> {
10959        let dialect = self.dialect;
10960        self.advance_token();
10961        let next_token = self.get_current_token();
10962        let next_token_index = self.get_current_index();
10963
10964        let mut trailing_bracket: MatchedTrailingBracket = false.into();
10965        let mut data = match &next_token.token {
10966            Token::Word(w) => match w.keyword {
10967                Keyword::BOOLEAN => Ok(DataType::Boolean),
10968                Keyword::BOOL => Ok(DataType::Bool),
10969                Keyword::FLOAT => {
10970                    let precision = self.parse_exact_number_optional_precision_scale()?;
10971
10972                    if self.parse_keyword(Keyword::UNSIGNED) {
10973                        Ok(DataType::FloatUnsigned(precision))
10974                    } else {
10975                        Ok(DataType::Float(precision))
10976                    }
10977                }
10978                Keyword::REAL => {
10979                    if self.parse_keyword(Keyword::UNSIGNED) {
10980                        Ok(DataType::RealUnsigned)
10981                    } else {
10982                        Ok(DataType::Real)
10983                    }
10984                }
10985                Keyword::FLOAT4 => Ok(DataType::Float4),
10986                Keyword::FLOAT32 => Ok(DataType::Float32),
10987                Keyword::FLOAT64 => Ok(DataType::Float64),
10988                Keyword::FLOAT8 => Ok(DataType::Float8),
10989                Keyword::DOUBLE => {
10990                    if self.parse_keyword(Keyword::PRECISION) {
10991                        if self.parse_keyword(Keyword::UNSIGNED) {
10992                            Ok(DataType::DoublePrecisionUnsigned)
10993                        } else {
10994                            Ok(DataType::DoublePrecision)
10995                        }
10996                    } else {
10997                        let precision = self.parse_exact_number_optional_precision_scale()?;
10998
10999                        if self.parse_keyword(Keyword::UNSIGNED) {
11000                            Ok(DataType::DoubleUnsigned(precision))
11001                        } else {
11002                            Ok(DataType::Double(precision))
11003                        }
11004                    }
11005                }
11006                Keyword::TINYINT => {
11007                    let optional_precision = self.parse_optional_precision();
11008                    if self.parse_keyword(Keyword::UNSIGNED) {
11009                        Ok(DataType::TinyIntUnsigned(optional_precision?))
11010                    } else {
11011                        if dialect.supports_data_type_signed_suffix() {
11012                            let _ = self.parse_keyword(Keyword::SIGNED);
11013                        }
11014                        Ok(DataType::TinyInt(optional_precision?))
11015                    }
11016                }
11017                Keyword::INT2 => {
11018                    let optional_precision = self.parse_optional_precision();
11019                    if self.parse_keyword(Keyword::UNSIGNED) {
11020                        Ok(DataType::Int2Unsigned(optional_precision?))
11021                    } else {
11022                        Ok(DataType::Int2(optional_precision?))
11023                    }
11024                }
11025                Keyword::SMALLINT => {
11026                    let optional_precision = self.parse_optional_precision();
11027                    if self.parse_keyword(Keyword::UNSIGNED) {
11028                        Ok(DataType::SmallIntUnsigned(optional_precision?))
11029                    } else {
11030                        if dialect.supports_data_type_signed_suffix() {
11031                            let _ = self.parse_keyword(Keyword::SIGNED);
11032                        }
11033                        Ok(DataType::SmallInt(optional_precision?))
11034                    }
11035                }
11036                Keyword::MEDIUMINT => {
11037                    let optional_precision = self.parse_optional_precision();
11038                    if self.parse_keyword(Keyword::UNSIGNED) {
11039                        Ok(DataType::MediumIntUnsigned(optional_precision?))
11040                    } else {
11041                        if dialect.supports_data_type_signed_suffix() {
11042                            let _ = self.parse_keyword(Keyword::SIGNED);
11043                        }
11044                        Ok(DataType::MediumInt(optional_precision?))
11045                    }
11046                }
11047                Keyword::INT => {
11048                    let optional_precision = self.parse_optional_precision();
11049                    if self.parse_keyword(Keyword::UNSIGNED) {
11050                        Ok(DataType::IntUnsigned(optional_precision?))
11051                    } else {
11052                        if dialect.supports_data_type_signed_suffix() {
11053                            let _ = self.parse_keyword(Keyword::SIGNED);
11054                        }
11055                        Ok(DataType::Int(optional_precision?))
11056                    }
11057                }
11058                Keyword::INT4 => {
11059                    let optional_precision = self.parse_optional_precision();
11060                    if self.parse_keyword(Keyword::UNSIGNED) {
11061                        Ok(DataType::Int4Unsigned(optional_precision?))
11062                    } else {
11063                        Ok(DataType::Int4(optional_precision?))
11064                    }
11065                }
11066                Keyword::INT8 => {
11067                    let optional_precision = self.parse_optional_precision();
11068                    if self.parse_keyword(Keyword::UNSIGNED) {
11069                        Ok(DataType::Int8Unsigned(optional_precision?))
11070                    } else {
11071                        Ok(DataType::Int8(optional_precision?))
11072                    }
11073                }
11074                Keyword::INT16 => Ok(DataType::Int16),
11075                Keyword::INT32 => Ok(DataType::Int32),
11076                Keyword::INT64 => Ok(DataType::Int64),
11077                Keyword::INT128 => Ok(DataType::Int128),
11078                Keyword::INT256 => Ok(DataType::Int256),
11079                Keyword::INTEGER => {
11080                    let optional_precision = self.parse_optional_precision();
11081                    if self.parse_keyword(Keyword::UNSIGNED) {
11082                        Ok(DataType::IntegerUnsigned(optional_precision?))
11083                    } else {
11084                        if dialect.supports_data_type_signed_suffix() {
11085                            let _ = self.parse_keyword(Keyword::SIGNED);
11086                        }
11087                        Ok(DataType::Integer(optional_precision?))
11088                    }
11089                }
11090                Keyword::BIGINT => {
11091                    let optional_precision = self.parse_optional_precision();
11092                    if self.parse_keyword(Keyword::UNSIGNED) {
11093                        Ok(DataType::BigIntUnsigned(optional_precision?))
11094                    } else {
11095                        if dialect.supports_data_type_signed_suffix() {
11096                            let _ = self.parse_keyword(Keyword::SIGNED);
11097                        }
11098                        Ok(DataType::BigInt(optional_precision?))
11099                    }
11100                }
11101                Keyword::HUGEINT => Ok(DataType::HugeInt),
11102                Keyword::UBIGINT => Ok(DataType::UBigInt),
11103                Keyword::UHUGEINT => Ok(DataType::UHugeInt),
11104                Keyword::USMALLINT => Ok(DataType::USmallInt),
11105                Keyword::UTINYINT => Ok(DataType::UTinyInt),
11106                Keyword::UINT8 => Ok(DataType::UInt8),
11107                Keyword::UINT16 => Ok(DataType::UInt16),
11108                Keyword::UINT32 => Ok(DataType::UInt32),
11109                Keyword::UINT64 => Ok(DataType::UInt64),
11110                Keyword::UINT128 => Ok(DataType::UInt128),
11111                Keyword::UINT256 => Ok(DataType::UInt256),
11112                Keyword::VARCHAR => Ok(DataType::Varchar(self.parse_optional_character_length()?)),
11113                Keyword::NVARCHAR => {
11114                    Ok(DataType::Nvarchar(self.parse_optional_character_length()?))
11115                }
11116                Keyword::CHARACTER => {
11117                    if self.parse_keyword(Keyword::VARYING) {
11118                        Ok(DataType::CharacterVarying(
11119                            self.parse_optional_character_length()?,
11120                        ))
11121                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
11122                        Ok(DataType::CharacterLargeObject(
11123                            self.parse_optional_precision()?,
11124                        ))
11125                    } else {
11126                        Ok(DataType::Character(self.parse_optional_character_length()?))
11127                    }
11128                }
11129                Keyword::CHAR => {
11130                    if self.parse_keyword(Keyword::VARYING) {
11131                        Ok(DataType::CharVarying(
11132                            self.parse_optional_character_length()?,
11133                        ))
11134                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
11135                        Ok(DataType::CharLargeObject(self.parse_optional_precision()?))
11136                    } else {
11137                        Ok(DataType::Char(self.parse_optional_character_length()?))
11138                    }
11139                }
11140                Keyword::CLOB => Ok(DataType::Clob(self.parse_optional_precision()?)),
11141                Keyword::BINARY => Ok(DataType::Binary(self.parse_optional_precision()?)),
11142                Keyword::VARBINARY => Ok(DataType::Varbinary(self.parse_optional_binary_length()?)),
11143                Keyword::BLOB => Ok(DataType::Blob(self.parse_optional_precision()?)),
11144                Keyword::TINYBLOB => Ok(DataType::TinyBlob),
11145                Keyword::MEDIUMBLOB => Ok(DataType::MediumBlob),
11146                Keyword::LONGBLOB => Ok(DataType::LongBlob),
11147                Keyword::BYTES => Ok(DataType::Bytes(self.parse_optional_precision()?)),
11148                Keyword::BIT => {
11149                    if self.parse_keyword(Keyword::VARYING) {
11150                        Ok(DataType::BitVarying(self.parse_optional_precision()?))
11151                    } else {
11152                        Ok(DataType::Bit(self.parse_optional_precision()?))
11153                    }
11154                }
11155                Keyword::VARBIT => Ok(DataType::VarBit(self.parse_optional_precision()?)),
11156                Keyword::UUID => Ok(DataType::Uuid),
11157                Keyword::DATE => Ok(DataType::Date),
11158                Keyword::DATE32 => Ok(DataType::Date32),
11159                Keyword::DATETIME => Ok(DataType::Datetime(self.parse_optional_precision()?)),
11160                Keyword::DATETIME64 => {
11161                    self.prev_token();
11162                    let (precision, time_zone) = self.parse_datetime_64()?;
11163                    Ok(DataType::Datetime64(precision, time_zone))
11164                }
11165                Keyword::TIMESTAMP => {
11166                    let precision = self.parse_optional_precision()?;
11167                    let tz = if self.parse_keyword(Keyword::WITH) {
11168                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
11169                        TimezoneInfo::WithTimeZone
11170                    } else if self.parse_keyword(Keyword::WITHOUT) {
11171                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
11172                        TimezoneInfo::WithoutTimeZone
11173                    } else {
11174                        TimezoneInfo::None
11175                    };
11176                    Ok(DataType::Timestamp(precision, tz))
11177                }
11178                Keyword::TIMESTAMPTZ => Ok(DataType::Timestamp(
11179                    self.parse_optional_precision()?,
11180                    TimezoneInfo::Tz,
11181                )),
11182                Keyword::TIMESTAMP_NTZ => {
11183                    Ok(DataType::TimestampNtz(self.parse_optional_precision()?))
11184                }
11185                Keyword::TIME => {
11186                    let precision = self.parse_optional_precision()?;
11187                    let tz = if self.parse_keyword(Keyword::WITH) {
11188                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
11189                        TimezoneInfo::WithTimeZone
11190                    } else if self.parse_keyword(Keyword::WITHOUT) {
11191                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
11192                        TimezoneInfo::WithoutTimeZone
11193                    } else {
11194                        TimezoneInfo::None
11195                    };
11196                    Ok(DataType::Time(precision, tz))
11197                }
11198                Keyword::TIMETZ => Ok(DataType::Time(
11199                    self.parse_optional_precision()?,
11200                    TimezoneInfo::Tz,
11201                )),
11202                Keyword::INTERVAL => {
11203                    if self.dialect.supports_interval_options() {
11204                        let fields = self.maybe_parse_optional_interval_fields()?;
11205                        let precision = self.parse_optional_precision()?;
11206                        Ok(DataType::Interval { fields, precision })
11207                    } else {
11208                        Ok(DataType::Interval {
11209                            fields: None,
11210                            precision: None,
11211                        })
11212                    }
11213                }
11214                Keyword::JSON => Ok(DataType::JSON),
11215                Keyword::JSONB => Ok(DataType::JSONB),
11216                Keyword::REGCLASS => Ok(DataType::Regclass),
11217                Keyword::STRING => Ok(DataType::String(self.parse_optional_precision()?)),
11218                Keyword::FIXEDSTRING => {
11219                    self.expect_token(&Token::LParen)?;
11220                    let character_length = self.parse_literal_uint()?;
11221                    self.expect_token(&Token::RParen)?;
11222                    Ok(DataType::FixedString(character_length))
11223                }
11224                Keyword::TEXT => Ok(DataType::Text),
11225                Keyword::TINYTEXT => Ok(DataType::TinyText),
11226                Keyword::MEDIUMTEXT => Ok(DataType::MediumText),
11227                Keyword::LONGTEXT => Ok(DataType::LongText),
11228                Keyword::BYTEA => Ok(DataType::Bytea),
11229                Keyword::NUMERIC => Ok(DataType::Numeric(
11230                    self.parse_exact_number_optional_precision_scale()?,
11231                )),
11232                Keyword::DECIMAL => {
11233                    let precision = self.parse_exact_number_optional_precision_scale()?;
11234
11235                    if self.parse_keyword(Keyword::UNSIGNED) {
11236                        Ok(DataType::DecimalUnsigned(precision))
11237                    } else {
11238                        Ok(DataType::Decimal(precision))
11239                    }
11240                }
11241                Keyword::DEC => {
11242                    let precision = self.parse_exact_number_optional_precision_scale()?;
11243
11244                    if self.parse_keyword(Keyword::UNSIGNED) {
11245                        Ok(DataType::DecUnsigned(precision))
11246                    } else {
11247                        Ok(DataType::Dec(precision))
11248                    }
11249                }
11250                Keyword::BIGNUMERIC => Ok(DataType::BigNumeric(
11251                    self.parse_exact_number_optional_precision_scale()?,
11252                )),
11253                Keyword::BIGDECIMAL => Ok(DataType::BigDecimal(
11254                    self.parse_exact_number_optional_precision_scale()?,
11255                )),
11256                Keyword::ENUM => Ok(DataType::Enum(self.parse_enum_values()?, None)),
11257                Keyword::ENUM8 => Ok(DataType::Enum(self.parse_enum_values()?, Some(8))),
11258                Keyword::ENUM16 => Ok(DataType::Enum(self.parse_enum_values()?, Some(16))),
11259                Keyword::SET => Ok(DataType::Set(self.parse_string_values()?)),
11260                Keyword::ARRAY => {
11261                    if dialect_of!(self is SnowflakeDialect) {
11262                        Ok(DataType::Array(ArrayElemTypeDef::None))
11263                    } else if dialect_of!(self is ClickHouseDialect) {
11264                        Ok(self.parse_sub_type(|internal_type| {
11265                            DataType::Array(ArrayElemTypeDef::Parenthesis(internal_type))
11266                        })?)
11267                    } else {
11268                        self.expect_token(&Token::Lt)?;
11269                        let (inside_type, _trailing_bracket) = self.parse_data_type_helper()?;
11270                        trailing_bracket = self.expect_closing_angle_bracket(_trailing_bracket)?;
11271                        Ok(DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
11272                            inside_type,
11273                        ))))
11274                    }
11275                }
11276                Keyword::STRUCT if dialect_is!(dialect is DuckDbDialect) => {
11277                    self.prev_token();
11278                    let field_defs = self.parse_duckdb_struct_type_def()?;
11279                    Ok(DataType::Struct(field_defs, StructBracketKind::Parentheses))
11280                }
11281                Keyword::STRUCT if dialect_is!(dialect is BigQueryDialect | GenericDialect) => {
11282                    self.prev_token();
11283                    let (field_defs, _trailing_bracket) =
11284                        self.parse_struct_type_def(Self::parse_struct_field_def)?;
11285                    trailing_bracket = _trailing_bracket;
11286                    Ok(DataType::Struct(
11287                        field_defs,
11288                        StructBracketKind::AngleBrackets,
11289                    ))
11290                }
11291                Keyword::UNION if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
11292                    self.prev_token();
11293                    let fields = self.parse_union_type_def()?;
11294                    Ok(DataType::Union(fields))
11295                }
11296                Keyword::NULLABLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11297                    Ok(self.parse_sub_type(DataType::Nullable)?)
11298                }
11299                Keyword::LOWCARDINALITY if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11300                    Ok(self.parse_sub_type(DataType::LowCardinality)?)
11301                }
11302                Keyword::MAP if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11303                    self.prev_token();
11304                    let (key_data_type, value_data_type) = self.parse_click_house_map_def()?;
11305                    Ok(DataType::Map(
11306                        Box::new(key_data_type),
11307                        Box::new(value_data_type),
11308                    ))
11309                }
11310                Keyword::NESTED if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11311                    self.expect_token(&Token::LParen)?;
11312                    let field_defs = self.parse_comma_separated(Parser::parse_column_def)?;
11313                    self.expect_token(&Token::RParen)?;
11314                    Ok(DataType::Nested(field_defs))
11315                }
11316                Keyword::TUPLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11317                    self.prev_token();
11318                    let field_defs = self.parse_click_house_tuple_def()?;
11319                    Ok(DataType::Tuple(field_defs))
11320                }
11321                Keyword::TRIGGER => Ok(DataType::Trigger),
11322                Keyword::ANY if self.peek_keyword(Keyword::TYPE) => {
11323                    let _ = self.parse_keyword(Keyword::TYPE);
11324                    Ok(DataType::AnyType)
11325                }
11326                Keyword::TABLE => {
11327                    // an LParen after the TABLE keyword indicates that table columns are being defined
11328                    // whereas no LParen indicates an anonymous table expression will be returned
11329                    if self.peek_token() == Token::LParen {
11330                        let columns = self.parse_returns_table_columns()?;
11331                        Ok(DataType::Table(Some(columns)))
11332                    } else {
11333                        Ok(DataType::Table(None))
11334                    }
11335                }
11336                Keyword::SIGNED => {
11337                    if self.parse_keyword(Keyword::INTEGER) {
11338                        Ok(DataType::SignedInteger)
11339                    } else {
11340                        Ok(DataType::Signed)
11341                    }
11342                }
11343                Keyword::UNSIGNED => {
11344                    if self.parse_keyword(Keyword::INTEGER) {
11345                        Ok(DataType::UnsignedInteger)
11346                    } else {
11347                        Ok(DataType::Unsigned)
11348                    }
11349                }
11350                Keyword::TSVECTOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
11351                    Ok(DataType::TsVector)
11352                }
11353                Keyword::TSQUERY if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
11354                    Ok(DataType::TsQuery)
11355                }
11356                _ => {
11357                    self.prev_token();
11358                    let type_name = self.parse_object_name(false)?;
11359                    if let Some(modifiers) = self.parse_optional_type_modifiers()? {
11360                        Ok(DataType::Custom(type_name, modifiers))
11361                    } else {
11362                        Ok(DataType::Custom(type_name, vec![]))
11363                    }
11364                }
11365            },
11366            _ => self.expected_at("a data type name", next_token_index),
11367        }?;
11368
11369        if self.dialect.supports_array_typedef_with_brackets() {
11370            while self.consume_token(&Token::LBracket) {
11371                // Parse optional array data type size
11372                let size = self.maybe_parse(|p| p.parse_literal_uint())?;
11373                self.expect_token(&Token::RBracket)?;
11374                data = DataType::Array(ArrayElemTypeDef::SquareBracket(Box::new(data), size))
11375            }
11376        }
11377        Ok((data, trailing_bracket))
11378    }
11379
11380    fn parse_returns_table_column(&mut self) -> Result<ColumnDef, ParserError> {
11381        self.parse_column_def()
11382    }
11383
11384    fn parse_returns_table_columns(&mut self) -> Result<Vec<ColumnDef>, ParserError> {
11385        self.expect_token(&Token::LParen)?;
11386        let columns = self.parse_comma_separated(Parser::parse_returns_table_column)?;
11387        self.expect_token(&Token::RParen)?;
11388        Ok(columns)
11389    }
11390
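    /// Parses a parenthesized, comma-separated list of single-quoted strings,
    /// e.g. the `('a', 'b', 'c')` in a `SET('a', 'b', 'c')` data type
    /// (illustrative usage).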
11391    pub fn parse_string_values(&mut self) -> Result<Vec<String>, ParserError> {
11392        self.expect_token(&Token::LParen)?;
11393        let mut values = Vec::new();
11394        loop {
11395            let next_token = self.next_token();
11396            match next_token.token {
11397                Token::SingleQuotedString(value) => values.push(value),
11398                _ => self.expected("a string", next_token)?,
11399            }
11400            let next_token = self.next_token();
11401            match next_token.token {
11402                Token::Comma => (),
11403                Token::RParen => break,
11404                _ => self.expected(", or }", next_token)?,
11405            }
11406        }
11407        Ok(values)
11408    }
11409
11410    /// Strictly parse `identifier AS identifier`
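    /// e.g. `foo AS f` (the `AS` keyword is required here).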
11411    pub fn parse_identifier_with_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
11412        let ident = self.parse_identifier()?;
11413        self.expect_keyword_is(Keyword::AS)?;
11414        let alias = self.parse_identifier()?;
11415        Ok(IdentWithAlias { ident, alias })
11416    }
11417
11418    /// Parse `identifier [AS] identifier` where the AS keyword is optional
11419    fn parse_identifier_with_optional_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
11420        let ident = self.parse_identifier()?;
11421        let _after_as = self.parse_keyword(Keyword::AS);
11422        let alias = self.parse_identifier()?;
11423        Ok(IdentWithAlias { ident, alias })
11424    }
11425
11426    /// Parse a comma-separated list of parenthesized queries for pipe operators
11427    fn parse_pipe_operator_queries(&mut self) -> Result<Vec<Query>, ParserError> {
11428        self.parse_comma_separated(|parser| {
11429            parser.expect_token(&Token::LParen)?;
11430            let query = parser.parse_query()?;
11431            parser.expect_token(&Token::RParen)?;
11432            Ok(*query)
11433        })
11434    }
11435
11436    /// Parse the set quantifier for pipe operators that require DISTINCT, e.g. INTERSECT and EXCEPT
11437    fn parse_distinct_required_set_quantifier(
11438        &mut self,
11439        operator_name: &str,
11440    ) -> Result<SetQuantifier, ParserError> {
11441        let quantifier = self.parse_set_quantifier(&Some(SetOperator::Intersect));
11442        match quantifier {
11443            SetQuantifier::Distinct | SetQuantifier::DistinctByName => Ok(quantifier),
11444            _ => Err(ParserError::ParserError(format!(
11445                "{operator_name} pipe operator requires DISTINCT modifier",
11446            ))),
11447        }
11448    }
11449
11450    /// Parse optional identifier alias (with or without AS keyword)
11451    fn parse_identifier_optional_alias(&mut self) -> Result<Option<Ident>, ParserError> {
11452        if self.parse_keyword(Keyword::AS) {
11453            Ok(Some(self.parse_identifier()?))
11454        } else {
11455            // Check if the next token is an identifier (implicit alias)
11456            self.maybe_parse(|parser| parser.parse_identifier())
11457        }
11458    }
11459
11460    /// Optionally parses an alias for a select list item
11461    fn maybe_parse_select_item_alias(&mut self) -> Result<Option<Ident>, ParserError> {
11462        fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
11463            parser.dialect.is_select_item_alias(explicit, kw, parser)
11464        }
11465        self.parse_optional_alias_inner(None, validator)
11466    }
11467
11468    /// Optionally parses an alias for a table like in `... FROM generate_series(1, 10) AS t (col)`.
11469    /// In this case, the alias is allowed to optionally name the columns in the table, in
11470    /// addition to the table itself.
11471    pub fn maybe_parse_table_alias(&mut self) -> Result<Option<TableAlias>, ParserError> {
11472        fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
11473            parser.dialect.is_table_factor_alias(explicit, kw, parser)
11474        }
11475        let explicit = self.peek_keyword(Keyword::AS);
11476        match self.parse_optional_alias_inner(None, validator)? {
11477            Some(name) => {
11478                let columns = self.parse_table_alias_column_defs()?;
11479                Ok(Some(TableAlias {
11480                    explicit,
11481                    name,
11482                    columns,
11483                }))
11484            }
11485            None => Ok(None),
11486        }
11487    }
11488
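    /// Parses a sequence of table index hints such as `USE INDEX (idx_a)`,
    /// `IGNORE KEY FOR ORDER BY (idx_b)`, or `FORCE INDEX FOR JOIN ()`
    /// (illustrative MySQL-style fragments; the index list may be empty).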
11489    fn parse_table_index_hints(&mut self) -> Result<Vec<TableIndexHints>, ParserError> {
11490        let mut hints = vec![];
11491        while let Some(hint_type) =
11492            self.parse_one_of_keywords(&[Keyword::USE, Keyword::IGNORE, Keyword::FORCE])
11493        {
11494            let hint_type = match hint_type {
11495                Keyword::USE => TableIndexHintType::Use,
11496                Keyword::IGNORE => TableIndexHintType::Ignore,
11497                Keyword::FORCE => TableIndexHintType::Force,
11498                _ => {
11499                    return self.expected(
11500                        "expected to match USE/IGNORE/FORCE keyword",
11501                        self.peek_token(),
11502                    )
11503                }
11504            };
11505            let index_type = match self.parse_one_of_keywords(&[Keyword::INDEX, Keyword::KEY]) {
11506                Some(Keyword::INDEX) => TableIndexType::Index,
11507                Some(Keyword::KEY) => TableIndexType::Key,
11508                _ => {
11509                    return self.expected("expected to match INDEX/KEY keyword", self.peek_token())
11510                }
11511            };
11512            let for_clause = if self.parse_keyword(Keyword::FOR) {
11513                let clause = if self.parse_keyword(Keyword::JOIN) {
11514                    TableIndexHintForClause::Join
11515                } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
11516                    TableIndexHintForClause::OrderBy
11517                } else if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
11518                    TableIndexHintForClause::GroupBy
11519                } else {
11520                    return self.expected(
11521                        "expected to match FOR/ORDER BY/GROUP BY table hint in for clause",
11522                        self.peek_token(),
11523                    );
11524                };
11525                Some(clause)
11526            } else {
11527                None
11528            };
11529
11530            self.expect_token(&Token::LParen)?;
11531            let index_names = if self.peek_token().token != Token::RParen {
11532                self.parse_comma_separated(Parser::parse_identifier)?
11533            } else {
11534                vec![]
11535            };
11536            self.expect_token(&Token::RParen)?;
11537            hints.push(TableIndexHints {
11538                hint_type,
11539                index_type,
11540                for_clause,
11541                index_names,
11542            });
11543        }
11544        Ok(hints)
11545    }
11546
11547    /// Wrapper for `parse_optional_alias_inner`, kept for backwards compatibility;
11548    /// new code should prefer the context-specific methods such as `maybe_parse_select_item_alias`
11549    /// and `maybe_parse_table_alias`.
11550    pub fn parse_optional_alias(
11551        &mut self,
11552        reserved_kwds: &[Keyword],
11553    ) -> Result<Option<Ident>, ParserError> {
11554        fn validator(_explicit: bool, _kw: &Keyword, _parser: &mut Parser) -> bool {
11555            false
11556        }
11557        self.parse_optional_alias_inner(Some(reserved_kwds), validator)
11558    }
11559
11560    /// Parses an optional alias after a SQL element such as a select list item
11561    /// or a table name.
11562    ///
11563    /// This method accepts an optional list of reserved keywords or a function
11564    /// to call to validate if a keyword should be parsed as an alias, to allow
11565    /// callers to customize the parsing logic based on their context.
11566    fn parse_optional_alias_inner<F>(
11567        &mut self,
11568        reserved_kwds: Option<&[Keyword]>,
11569        validator: F,
11570    ) -> Result<Option<Ident>, ParserError>
11571    where
11572        F: Fn(bool, &Keyword, &mut Parser) -> bool,
11573    {
11574        let after_as = self.parse_keyword(Keyword::AS);
11575
11576        let next_token = self.next_token();
11577        match next_token.token {
11578            // By default, if a word is located after the `AS` keyword we consider it an alias
11579            // as long as it's not reserved.
11580            Token::Word(w)
11581                if after_as || reserved_kwds.is_some_and(|x| !x.contains(&w.keyword)) =>
11582            {
11583                Ok(Some(w.into_ident(next_token.span)))
11584            }
11585            // This pattern allows for customizing the acceptance of words as aliases based on the caller's
11586            // context, such as to what SQL element this word is a potential alias of (select item alias, table name
11587            // alias, etc.) or dialect-specific logic that goes beyond a simple list of reserved keywords.
11588            Token::Word(w) if validator(after_as, &w.keyword, self) => {
11589                Ok(Some(w.into_ident(next_token.span)))
11590            }
11591            // For backwards-compatibility, we accept quoted strings as aliases regardless of the context.
11592            Token::SingleQuotedString(s) => Ok(Some(Ident::with_quote('\'', s))),
11593            Token::DoubleQuotedString(s) => Ok(Some(Ident::with_quote('\"', s))),
11594            _ => {
11595                if after_as {
11596                    return self.expected("an identifier after AS", next_token);
11597                }
11598                self.prev_token();
11599                Ok(None) // no alias found
11600            }
11601        }
11602    }
11603
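    /// Parses an optional `GROUP BY` clause, e.g. `GROUP BY a, b`,
    /// `GROUP BY ALL`, or, in dialects with modifiers, `GROUP BY a WITH ROLLUP`
    /// (illustrative forms matching the branches below).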
11604    pub fn parse_optional_group_by(&mut self) -> Result<Option<GroupByExpr>, ParserError> {
11605        if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
11606            let expressions = if self.parse_keyword(Keyword::ALL) {
11607                None
11608            } else {
11609                Some(self.parse_comma_separated(Parser::parse_group_by_expr)?)
11610            };
11611
11612            let mut modifiers = vec![];
11613            if self.dialect.supports_group_by_with_modifier() {
11614                loop {
11615                    if !self.parse_keyword(Keyword::WITH) {
11616                        break;
11617                    }
11618                    let keyword = self.expect_one_of_keywords(&[
11619                        Keyword::ROLLUP,
11620                        Keyword::CUBE,
11621                        Keyword::TOTALS,
11622                    ])?;
11623                    modifiers.push(match keyword {
11624                        Keyword::ROLLUP => GroupByWithModifier::Rollup,
11625                        Keyword::CUBE => GroupByWithModifier::Cube,
11626                        Keyword::TOTALS => GroupByWithModifier::Totals,
11627                        _ => {
11628                            return parser_err!(
11629                                "BUG: expected to match GroupBy modifier keyword",
11630                                self.peek_token().span.start
11631                            )
11632                        }
11633                    });
11634                }
11635            }
11636            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
11637                self.expect_token(&Token::LParen)?;
11638                let result = self.parse_comma_separated(|p| {
11639                    if p.peek_token_ref().token == Token::LParen {
11640                        p.parse_tuple(true, true)
11641                    } else {
11642                        Ok(vec![p.parse_expr()?])
11643                    }
11644                })?;
11645                self.expect_token(&Token::RParen)?;
11646                modifiers.push(GroupByWithModifier::GroupingSets(Expr::GroupingSets(
11647                    result,
11648                )));
11649            };
11650            let group_by = match expressions {
11651                None => GroupByExpr::All(modifiers),
11652                Some(exprs) => GroupByExpr::Expressions(exprs, modifiers),
11653            };
11654            Ok(Some(group_by))
11655        } else {
11656            Ok(None)
11657        }
11658    }
11659
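    /// Parses an optional `ORDER BY` clause, e.g. `ORDER BY a DESC, b`, or
    /// `ORDER BY ALL` where the dialect supports it (illustrative forms).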
11660    pub fn parse_optional_order_by(&mut self) -> Result<Option<OrderBy>, ParserError> {
11661        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
11662            let order_by =
11663                if self.dialect.supports_order_by_all() && self.parse_keyword(Keyword::ALL) {
11664                    let order_by_options = self.parse_order_by_options()?;
11665                    OrderBy {
11666                        kind: OrderByKind::All(order_by_options),
11667                        interpolate: None,
11668                    }
11669                } else {
11670                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
11671                    let interpolate = if dialect_of!(self is ClickHouseDialect | GenericDialect) {
11672                        self.parse_interpolations()?
11673                    } else {
11674                        None
11675                    };
11676                    OrderBy {
11677                        kind: OrderByKind::Expressions(exprs),
11678                        interpolate,
11679                    }
11680                };
11681            Ok(Some(order_by))
11682        } else {
11683            Ok(None)
11684        }
11685    }
11686
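    /// Parses an optional limit/offset clause, covering forms such as
    /// `LIMIT 10`, `LIMIT 10 OFFSET 5`, a bare `OFFSET 5`, and `LIMIT 5, 10`
    /// (offset, then limit) where the dialect allows the comma form
    /// (illustrative forms).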
11687    fn parse_optional_limit_clause(&mut self) -> Result<Option<LimitClause>, ParserError> {
11688        let mut offset = if self.parse_keyword(Keyword::OFFSET) {
11689            Some(self.parse_offset()?)
11690        } else {
11691            None
11692        };
11693
11694        let (limit, limit_by) = if self.parse_keyword(Keyword::LIMIT) {
11695            let expr = self.parse_limit()?;
11696
11697            if self.dialect.supports_limit_comma()
11698                && offset.is_none()
11699                && expr.is_some() // ALL not supported with comma
11700                && self.consume_token(&Token::Comma)
11701            {
11702                let offset = expr.ok_or_else(|| {
11703                    ParserError::ParserError(
11704                        "Missing offset for LIMIT <offset>, <limit>".to_string(),
11705                    )
11706                })?;
11707                return Ok(Some(LimitClause::OffsetCommaLimit {
11708                    offset,
11709                    limit: self.parse_expr()?,
11710                }));
11711            }
11712
11713            let limit_by = if dialect_of!(self is ClickHouseDialect | GenericDialect)
11714                && self.parse_keyword(Keyword::BY)
11715            {
11716                Some(self.parse_comma_separated(Parser::parse_expr)?)
11717            } else {
11718                None
11719            };
11720
11721            (Some(expr), limit_by)
11722        } else {
11723            (None, None)
11724        };
11725
11726        if offset.is_none() && limit.is_some() && self.parse_keyword(Keyword::OFFSET) {
11727            offset = Some(self.parse_offset()?);
11728        }
11729
11730        if offset.is_some() || (limit.is_some() && limit != Some(None)) || limit_by.is_some() {
11731            Ok(Some(LimitClause::LimitOffset {
11732                limit: limit.unwrap_or_default(),
11733                offset,
11734                limit_by: limit_by.unwrap_or_default(),
11735            }))
11736        } else {
11737            Ok(None)
11738        }
11739    }
11740
11741    /// Parse a table object for insertion
11742    /// e.g. `some_database.some_table` or `FUNCTION some_table_func(...)`
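    ///
    /// A minimal usage sketch (assuming the `GenericDialect`, which takes the
    /// plain table-name branch; the names are illustrative):
    ///
    /// ```rust
    /// use sqlparser::ast::TableObject;
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect)
    ///     .try_with_sql("some_database.some_table")
    ///     .unwrap();
    /// let table = parser.parse_table_object().unwrap();
    /// assert!(matches!(table, TableObject::TableName(_)));
    /// ```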
11743    pub fn parse_table_object(&mut self) -> Result<TableObject, ParserError> {
11744        if self.dialect.supports_insert_table_function() && self.parse_keyword(Keyword::FUNCTION) {
11745            let fn_name = self.parse_object_name(false)?;
11746            self.parse_function_call(fn_name)
11747                .map(TableObject::TableFunction)
11748        } else {
11749            self.parse_object_name(false).map(TableObject::TableName)
11750        }
11751    }
11752
11753    /// Parse a possibly qualified, possibly quoted identifier, e.g.
11754    /// `foo` or `myschema."table"`
11755    ///
11756    /// The `in_table_clause` parameter indicates whether the object name is a table in a FROM, JOIN,
11757    /// or similar table clause. Currently, this is used only to support unquoted hyphenated identifiers
11758    /// in this context on BigQuery.
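    ///
    /// A minimal usage sketch (assuming the `GenericDialect`; the names are illustrative):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("myschema.mytable").unwrap();
    /// let name = parser.parse_object_name(false).unwrap();
    /// assert_eq!(name.to_string(), "myschema.mytable");
    /// ```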
11759    pub fn parse_object_name(&mut self, in_table_clause: bool) -> Result<ObjectName, ParserError> {
11760        self.parse_object_name_inner(in_table_clause, false)
11761    }
11762
11763    /// Parse a possibly qualified, possibly quoted identifier, e.g.
11764    /// `foo` or `myschema."table"`
11765    ///
11766    /// The `in_table_clause` parameter indicates whether the object name is a table in a FROM, JOIN,
11767    /// or similar table clause. Currently, this is used only to support unquoted hyphenated identifiers
11768    /// in this context on BigQuery.
11769    ///
11770    /// The `allow_wildcards` parameter indicates whether to allow for wildcards in the object name
11771    /// e.g. *, *.*, `foo`.*, or "foo"."bar"
11772    fn parse_object_name_inner(
11773        &mut self,
11774        in_table_clause: bool,
11775        allow_wildcards: bool,
11776    ) -> Result<ObjectName, ParserError> {
11777        let mut parts = vec![];
11778        if dialect_of!(self is BigQueryDialect) && in_table_clause {
11779            loop {
11780                let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
11781                parts.push(ObjectNamePart::Identifier(ident));
11782                if !self.consume_token(&Token::Period) && !end_with_period {
11783                    break;
11784                }
11785            }
11786        } else {
11787            loop {
11788                if allow_wildcards && self.peek_token().token == Token::Mul {
11789                    let span = self.next_token().span;
11790                    parts.push(ObjectNamePart::Identifier(Ident {
11791                        value: Token::Mul.to_string(),
11792                        quote_style: None,
11793                        span,
11794                    }));
11795                } else if dialect_of!(self is BigQueryDialect) && in_table_clause {
11796                    let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
11797                    parts.push(ObjectNamePart::Identifier(ident));
11798                    if !self.consume_token(&Token::Period) && !end_with_period {
11799                        break;
11800                    }
11801                } else if self.dialect.supports_object_name_double_dot_notation()
11802                    && parts.len() == 1
11803                    && matches!(self.peek_token().token, Token::Period)
11804                {
11805                    // Empty string here means default schema
11806                    parts.push(ObjectNamePart::Identifier(Ident::new("")));
11807                } else {
11808                    let ident = self.parse_identifier()?;
11809                    let part = if self
11810                        .dialect
11811                        .is_identifier_generating_function_name(&ident, &parts)
11812                    {
11813                        self.expect_token(&Token::LParen)?;
11814                        let args: Vec<FunctionArg> =
11815                            self.parse_comma_separated0(Self::parse_function_args, Token::RParen)?;
11816                        self.expect_token(&Token::RParen)?;
11817                        ObjectNamePart::Function(ObjectNamePartFunction { name: ident, args })
11818                    } else {
11819                        ObjectNamePart::Identifier(ident)
11820                    };
11821                    parts.push(part);
11822                }
11823
11824                if !self.consume_token(&Token::Period) {
11825                    break;
11826                }
11827            }
11828        }
11829
11830        // BigQuery accepts any number of quoted identifiers of a table name.
11831        // https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#quoted_identifiers
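        // For example, a name written as the single quoted identifier `dataset.table`
        // is split below into the two parts `dataset` and `table`, each keeping the
        // original quote style (illustrative, hypothetical names).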
11832        if dialect_of!(self is BigQueryDialect)
11833            && parts.iter().any(|part| {
11834                part.as_ident()
11835                    .is_some_and(|ident| ident.value.contains('.'))
11836            })
11837        {
11838            parts = parts
11839                .into_iter()
11840                .flat_map(|part| match part.as_ident() {
11841                    Some(ident) => ident
11842                        .value
11843                        .split('.')
11844                        .map(|value| {
11845                            ObjectNamePart::Identifier(Ident {
11846                                value: value.into(),
11847                                quote_style: ident.quote_style,
11848                                span: ident.span,
11849                            })
11850                        })
11851                        .collect::<Vec<_>>(),
11852                    None => vec![part],
11853                })
11854                .collect()
11855        }
11856
11857        Ok(ObjectName(parts))
11858    }
11859
11860    /// Parse a loose list of identifiers, collecting word tokens and skipping any other tokens until EOF or `=` is reached.
11861    pub fn parse_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
11862        let mut idents = vec![];
11863        loop {
11864            match &self.peek_token_ref().token {
11865                Token::Word(w) => {
11866                    idents.push(w.clone().into_ident(self.peek_token_ref().span));
11867                }
11868                Token::EOF | Token::Eq => break,
11869                _ => {}
11870            }
11871            self.advance_token();
11872        }
11873        Ok(idents)
11874    }
11875
11876    /// Parse identifiers of the form ident1[.identN]*
11877    ///
11878    /// Similar in functionality to [parse_identifiers], the difference being that this
11879    /// function is much stricter: it only parses a valid multipart identifier and fails
11880    /// if extraneous tokens are encountered.
11881    ///
11882    /// For example:
11883    ///
11884    /// ```rust
11885    /// use sqlparser::ast::Ident;
11886    /// use sqlparser::dialect::GenericDialect;
11887    /// use sqlparser::parser::Parser;
11888    ///
11889    /// let dialect = GenericDialect {};
11890    /// let expected = vec![Ident::new("one"), Ident::new("two")];
11891    ///
11892    /// // expected usage
11893    /// let sql = "one.two";
11894    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
11895    /// let actual = parser.parse_multipart_identifier().unwrap();
11896    /// assert_eq!(&actual, &expected);
11897    ///
11898    /// // parse_identifiers is more loose on what it allows, parsing successfully
11899    /// let sql = "one + two";
11900    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
11901    /// let actual = parser.parse_identifiers().unwrap();
11902    /// assert_eq!(&actual, &expected);
11903    ///
11904    /// // expected to strictly fail due to + separator
11905    /// let sql = "one + two";
11906    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
11907    /// let actual = parser.parse_multipart_identifier().unwrap_err();
11908    /// assert_eq!(
11909    ///     actual.to_string(),
11910    ///     "sql parser error: Unexpected token in identifier: +"
11911    /// );
11912    /// ```
11913    ///
11914    /// [parse_identifiers]: Parser::parse_identifiers
11915    pub fn parse_multipart_identifier(&mut self) -> Result<Vec<Ident>, ParserError> {
11916        let mut idents = vec![];
11917
11918        // expecting at least one word for identifier
11919        let next_token = self.next_token();
11920        match next_token.token {
11921            Token::Word(w) => idents.push(w.into_ident(next_token.span)),
11922            Token::EOF => {
11923                return Err(ParserError::ParserError(
11924                    "Empty input when parsing identifier".to_string(),
11925                ))?
11926            }
11927            token => {
11928                return Err(ParserError::ParserError(format!(
11929                    "Unexpected token in identifier: {token}"
11930                )))?
11931            }
11932        };
11933
11934        // parse the optional following parts, if they exist
11935        loop {
11936            match self.next_token().token {
11937                // ensure that optional period is succeeded by another identifier
11938                Token::Period => {
11939                    let next_token = self.next_token();
11940                    match next_token.token {
11941                        Token::Word(w) => idents.push(w.into_ident(next_token.span)),
11942                        Token::EOF => {
11943                            return Err(ParserError::ParserError(
11944                                "Trailing period in identifier".to_string(),
11945                            ))?
11946                        }
11947                        token => {
11948                            return Err(ParserError::ParserError(format!(
11949                                "Unexpected token following period in identifier: {token}"
11950                            )))?
11951                        }
11952                    }
11953                }
11954                Token::EOF => break,
11955                token => {
11956                    return Err(ParserError::ParserError(format!(
11957                        "Unexpected token in identifier: {token}"
11958                    )))?;
11959                }
11960            }
11961        }
11962
11963        Ok(idents)
11964    }
11965
11966    /// Parse a simple one-word identifier (possibly quoted, possibly a keyword)
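    ///
    /// A minimal usage sketch (assuming the `GenericDialect`; the identifier is illustrative):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("\"quoted name\"").unwrap();
    /// let ident = parser.parse_identifier().unwrap();
    /// assert_eq!(ident.value, "quoted name");
    /// assert_eq!(ident.quote_style, Some('"'));
    /// ```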
11967    pub fn parse_identifier(&mut self) -> Result<Ident, ParserError> {
11968        let next_token = self.next_token();
11969        match next_token.token {
11970            Token::Word(w) => Ok(w.into_ident(next_token.span)),
11971            Token::SingleQuotedString(s) => Ok(Ident::with_quote('\'', s)),
11972            Token::DoubleQuotedString(s) => Ok(Ident::with_quote('\"', s)),
11973            _ => self.expected("identifier", next_token),
11974        }
11975    }
11976
11977    /// On BigQuery, hyphens are permitted in unquoted identifiers inside of a FROM or
11978    /// TABLE clause.
11979    ///
11980    /// The first segment must be an ordinary unquoted identifier, e.g. it must not start
11981    /// with a digit. Subsequent segments must be either valid identifiers or
11982    /// integers, e.g. foo-123 is allowed, but foo-123a is not.
11983    ///
11984    /// [BigQuery-lexical](https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical)
11985    ///
11986    /// Return a tuple of the identifier and a boolean indicating whether it ends with a period.
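    ///
    /// The behavior is observable through [`Parser::parse_object_name`]; a hedged
    /// sketch (assuming the `BigQueryDialect` and a table-clause context, with
    /// illustrative names):
    ///
    /// ```rust
    /// use sqlparser::dialect::BigQueryDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = BigQueryDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("my-project.dataset.tbl").unwrap();
    /// let name = parser.parse_object_name(true).unwrap();
    /// assert_eq!(name.to_string(), "my-project.dataset.tbl");
    /// ```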
11987    fn parse_unquoted_hyphenated_identifier(&mut self) -> Result<(Ident, bool), ParserError> {
11988        match self.peek_token().token {
11989            Token::Word(w) => {
11990                let quote_style_is_none = w.quote_style.is_none();
11991                let mut requires_whitespace = false;
11992                let mut ident = w.into_ident(self.next_token().span);
11993                if quote_style_is_none {
11994                    while matches!(self.peek_token_no_skip().token, Token::Minus) {
11995                        self.next_token();
11996                        ident.value.push('-');
11997
11998                        let token = self
11999                            .next_token_no_skip()
12000                            .cloned()
12001                            .unwrap_or(TokenWithSpan::wrap(Token::EOF));
12002                        requires_whitespace = match token.token {
12003                            Token::Word(next_word) if next_word.quote_style.is_none() => {
12004                                ident.value.push_str(&next_word.value);
12005                                false
12006                            }
12007                            Token::Number(s, false) => {
12008                                // A number token can represent a decimal value ending with a period, e.g., `Number('123.')`.
12009                                // However, for an [ObjectName], it is part of a hyphenated identifier, e.g., `foo-123.bar`.
12010                                //
12011                                // If a number token is followed by a period, it is part of an [ObjectName].
12012                                // Return the identifier with `true` if the number token is followed by a period, indicating that
12013                                // parsing should continue for the next part of the hyphenated identifier.
12014                                if s.ends_with('.') {
12015                                    let Some(s) = s.split('.').next().filter(|s| {
12016                                        !s.is_empty() && s.chars().all(|c| c.is_ascii_digit())
12017                                    }) else {
12018                                        return self.expected(
12019                                            "continuation of hyphenated identifier",
12020                                            TokenWithSpan::new(Token::Number(s, false), token.span),
12021                                        );
12022                                    };
12023                                    ident.value.push_str(s);
12024                                    return Ok((ident, true));
12025                                } else {
12026                                    ident.value.push_str(&s);
12027                                }
12028                                // If next token is period, then it is part of an ObjectName and we don't expect whitespace
12029                                // after the number.
12030                                !matches!(self.peek_token().token, Token::Period)
12031                            }
12032                            _ => {
12033                                return self
12034                                    .expected("continuation of hyphenated identifier", token);
12035                            }
12036                        }
12037                    }
12038
12039                    // If the last segment was a number, we must check that it's followed by whitespace,
12040                    // otherwise foo-123a will be parsed as `foo-123` with the alias `a`.
12041                    if requires_whitespace {
12042                        let token = self.next_token();
12043                        if !matches!(token.token, Token::EOF | Token::Whitespace(_)) {
12044                            return self
12045                                .expected("whitespace following hyphenated identifier", token);
12046                        }
12047                    }
12048                }
12049                Ok((ident, false))
12050            }
12051            _ => Ok((self.parse_identifier()?, false)),
12052        }
12053    }
12054
12055    /// Parses a parenthesized, comma-separated list of column definitions within a view.
12056    fn parse_view_columns(&mut self) -> Result<Vec<ViewColumnDef>, ParserError> {
12057        if self.consume_token(&Token::LParen) {
12058            if self.peek_token().token == Token::RParen {
12059                self.next_token();
12060                Ok(vec![])
12061            } else {
12062                let cols = self.parse_comma_separated_with_trailing_commas(
12063                    Parser::parse_view_column,
12064                    self.dialect.supports_column_definition_trailing_commas(),
12065                    Self::is_reserved_for_column_alias,
12066                )?;
12067                self.expect_token(&Token::RParen)?;
12068                Ok(cols)
12069            }
12070        } else {
12071            Ok(vec![])
12072        }
12073    }
12074
12075    /// Parses a column definition within a view.
12076    fn parse_view_column(&mut self) -> Result<ViewColumnDef, ParserError> {
12077        let name = self.parse_identifier()?;
12078        let options = self.parse_view_column_options()?;
12079        let data_type = if dialect_of!(self is ClickHouseDialect) {
12080            Some(self.parse_data_type()?)
12081        } else {
12082            None
12083        };
12084        Ok(ViewColumnDef {
12085            name,
12086            data_type,
12087            options,
12088        })
12089    }
12090
12091    fn parse_view_column_options(&mut self) -> Result<Option<ColumnOptions>, ParserError> {
12092        let mut options = Vec::new();
12093        loop {
12094            let option = self.parse_optional_column_option()?;
12095            if let Some(option) = option {
12096                options.push(option);
12097            } else {
12098                break;
12099            }
12100        }
12101        if options.is_empty() {
12102            Ok(None)
12103        } else if self.dialect.supports_space_separated_column_options() {
12104            Ok(Some(ColumnOptions::SpaceSeparated(options)))
12105        } else {
12106            Ok(Some(ColumnOptions::CommaSeparated(options)))
12107        }
12108    }
12109
12110    /// Parses a parenthesized comma-separated list of unqualified, possibly quoted identifiers.
12111    /// For example: `(col1, "col 2", ...)`
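    ///
    /// A minimal usage sketch (assuming the `GenericDialect`; the column names are illustrative):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::{IsOptional, Parser};
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("(col1, \"col 2\")").unwrap();
    /// let cols = parser
    ///     .parse_parenthesized_column_list(IsOptional::Mandatory, false)
    ///     .unwrap();
    /// assert_eq!(cols.len(), 2);
    /// assert_eq!(cols[0].value, "col1");
    /// ```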
12112    pub fn parse_parenthesized_column_list(
12113        &mut self,
12114        optional: IsOptional,
12115        allow_empty: bool,
12116    ) -> Result<Vec<Ident>, ParserError> {
12117        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| p.parse_identifier())
12118    }
12119
12120    pub fn parse_parenthesized_compound_identifier_list(
12121        &mut self,
12122        optional: IsOptional,
12123        allow_empty: bool,
12124    ) -> Result<Vec<Expr>, ParserError> {
12125        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
12126            Ok(Expr::CompoundIdentifier(
12127                p.parse_period_separated(|p| p.parse_identifier())?,
12128            ))
12129        })
12130    }
12131
12132    /// Parses a parenthesized comma-separated list of index columns, which can be arbitrary
12133    /// expressions with ordering information (and an opclass in some dialects).
12134    fn parse_parenthesized_index_column_list(&mut self) -> Result<Vec<IndexColumn>, ParserError> {
12135        self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
12136            p.parse_create_index_expr()
12137        })
12138    }
12139
12140    /// Parses a parenthesized comma-separated list of qualified, possibly quoted identifiers.
12141    /// For example: `(db1.sc1.tbl1.col1, db1.sc1.tbl1."col 2", ...)`
12142    pub fn parse_parenthesized_qualified_column_list(
12143        &mut self,
12144        optional: IsOptional,
12145        allow_empty: bool,
12146    ) -> Result<Vec<ObjectName>, ParserError> {
12147        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
12148            p.parse_object_name(true)
12149        })
12150    }
12151
12152    /// Parses a parenthesized comma-separated list of columns using
12153    /// the provided function to parse each element.
12154    fn parse_parenthesized_column_list_inner<F, T>(
12155        &mut self,
12156        optional: IsOptional,
12157        allow_empty: bool,
12158        mut f: F,
12159    ) -> Result<Vec<T>, ParserError>
12160    where
12161        F: FnMut(&mut Parser) -> Result<T, ParserError>,
12162    {
12163        if self.consume_token(&Token::LParen) {
12164            if allow_empty && self.peek_token().token == Token::RParen {
12165                self.next_token();
12166                Ok(vec![])
12167            } else {
12168                let cols = self.parse_comma_separated(|p| f(p))?;
12169                self.expect_token(&Token::RParen)?;
12170                Ok(cols)
12171            }
12172        } else if optional == Optional {
12173            Ok(vec![])
12174        } else {
12175            self.expected("a list of columns in parentheses", self.peek_token())
12176        }
12177    }
12178
12179    /// Parses a parenthesized comma-separated list of table alias column definitions.
12180    fn parse_table_alias_column_defs(&mut self) -> Result<Vec<TableAliasColumnDef>, ParserError> {
12181        if self.consume_token(&Token::LParen) {
12182            let cols = self.parse_comma_separated(|p| {
12183                let name = p.parse_identifier()?;
12184                let data_type = p.maybe_parse(|p| p.parse_data_type())?;
12185                Ok(TableAliasColumnDef { name, data_type })
12186            })?;
12187            self.expect_token(&Token::RParen)?;
12188            Ok(cols)
12189        } else {
12190            Ok(vec![])
12191        }
12192    }
12193
12194    pub fn parse_precision(&mut self) -> Result<u64, ParserError> {
12195        self.expect_token(&Token::LParen)?;
12196        let n = self.parse_literal_uint()?;
12197        self.expect_token(&Token::RParen)?;
12198        Ok(n)
12199    }
12200
12201    pub fn parse_optional_precision(&mut self) -> Result<Option<u64>, ParserError> {
12202        if self.consume_token(&Token::LParen) {
12203            let n = self.parse_literal_uint()?;
12204            self.expect_token(&Token::RParen)?;
12205            Ok(Some(n))
12206        } else {
12207            Ok(None)
12208        }
12209    }
12210
12211    fn maybe_parse_optional_interval_fields(
12212        &mut self,
12213    ) -> Result<Option<IntervalFields>, ParserError> {
12214        match self.parse_one_of_keywords(&[
12215            // Can be followed by `TO` option
12216            Keyword::YEAR,
12217            Keyword::DAY,
12218            Keyword::HOUR,
12219            Keyword::MINUTE,
12220            // No `TO` option
12221            Keyword::MONTH,
12222            Keyword::SECOND,
12223        ]) {
12224            Some(Keyword::YEAR) => {
12225                if self.peek_keyword(Keyword::TO) {
12226                    self.expect_keyword(Keyword::TO)?;
12227                    self.expect_keyword(Keyword::MONTH)?;
12228                    Ok(Some(IntervalFields::YearToMonth))
12229                } else {
12230                    Ok(Some(IntervalFields::Year))
12231                }
12232            }
12233            Some(Keyword::DAY) => {
12234                if self.peek_keyword(Keyword::TO) {
12235                    self.expect_keyword(Keyword::TO)?;
12236                    match self.expect_one_of_keywords(&[
12237                        Keyword::HOUR,
12238                        Keyword::MINUTE,
12239                        Keyword::SECOND,
12240                    ])? {
12241                        Keyword::HOUR => Ok(Some(IntervalFields::DayToHour)),
12242                        Keyword::MINUTE => Ok(Some(IntervalFields::DayToMinute)),
12243                        Keyword::SECOND => Ok(Some(IntervalFields::DayToSecond)),
12244                        _ => {
12245                            self.prev_token();
12246                            self.expected("HOUR, MINUTE, or SECOND", self.peek_token())
12247                        }
12248                    }
12249                } else {
12250                    Ok(Some(IntervalFields::Day))
12251                }
12252            }
12253            Some(Keyword::HOUR) => {
12254                if self.peek_keyword(Keyword::TO) {
12255                    self.expect_keyword(Keyword::TO)?;
12256                    match self.expect_one_of_keywords(&[Keyword::MINUTE, Keyword::SECOND])? {
12257                        Keyword::MINUTE => Ok(Some(IntervalFields::HourToMinute)),
12258                        Keyword::SECOND => Ok(Some(IntervalFields::HourToSecond)),
12259                        _ => {
12260                            self.prev_token();
12261                            self.expected("MINUTE or SECOND", self.peek_token())
12262                        }
12263                    }
12264                } else {
12265                    Ok(Some(IntervalFields::Hour))
12266                }
12267            }
12268            Some(Keyword::MINUTE) => {
12269                if self.peek_keyword(Keyword::TO) {
12270                    self.expect_keyword(Keyword::TO)?;
12271                    self.expect_keyword(Keyword::SECOND)?;
12272                    Ok(Some(IntervalFields::MinuteToSecond))
12273                } else {
12274                    Ok(Some(IntervalFields::Minute))
12275                }
12276            }
12277            Some(Keyword::MONTH) => Ok(Some(IntervalFields::Month)),
12278            Some(Keyword::SECOND) => Ok(Some(IntervalFields::Second)),
12279            Some(_) => {
12280                self.prev_token();
12281                self.expected(
12282                    "YEAR, MONTH, DAY, HOUR, MINUTE, or SECOND",
12283                    self.peek_token(),
12284                )
12285            }
12286            None => Ok(None),
12287        }
12288    }
12289
12290    /// Parse datetime64 [1]
12291    /// Syntax
12292    /// ```sql
12293    /// DateTime64(precision[, timezone])
12294    /// ```
12295    ///
12296    /// [1]: https://clickhouse.com/docs/en/sql-reference/data-types/datetime64
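    ///
    /// A minimal usage sketch; the `DATETIME64` keyword is consumed by this method,
    /// so the input starts with it (assuming the `GenericDialect` for brevity):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("DATETIME64(3, 'UTC')").unwrap();
    /// let (precision, time_zone) = parser.parse_datetime_64().unwrap();
    /// assert_eq!(precision, 3);
    /// assert_eq!(time_zone, Some("UTC".to_string()));
    /// ```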
12297    pub fn parse_datetime_64(&mut self) -> Result<(u64, Option<String>), ParserError> {
12298        self.expect_keyword_is(Keyword::DATETIME64)?;
12299        self.expect_token(&Token::LParen)?;
12300        let precision = self.parse_literal_uint()?;
12301        let time_zone = if self.consume_token(&Token::Comma) {
12302            Some(self.parse_literal_string()?)
12303        } else {
12304            None
12305        };
12306        self.expect_token(&Token::RParen)?;
12307        Ok((precision, time_zone))
12308    }
12309
12310    pub fn parse_optional_character_length(
12311        &mut self,
12312    ) -> Result<Option<CharacterLength>, ParserError> {
12313        if self.consume_token(&Token::LParen) {
12314            let character_length = self.parse_character_length()?;
12315            self.expect_token(&Token::RParen)?;
12316            Ok(Some(character_length))
12317        } else {
12318            Ok(None)
12319        }
12320    }
12321
12322    pub fn parse_optional_binary_length(&mut self) -> Result<Option<BinaryLength>, ParserError> {
12323        if self.consume_token(&Token::LParen) {
12324            let binary_length = self.parse_binary_length()?;
12325            self.expect_token(&Token::RParen)?;
12326            Ok(Some(binary_length))
12327        } else {
12328            Ok(None)
12329        }
12330    }
12331
12332    pub fn parse_character_length(&mut self) -> Result<CharacterLength, ParserError> {
12333        if self.parse_keyword(Keyword::MAX) {
12334            return Ok(CharacterLength::Max);
12335        }
12336        let length = self.parse_literal_uint()?;
12337        let unit = if self.parse_keyword(Keyword::CHARACTERS) {
12338            Some(CharLengthUnits::Characters)
12339        } else if self.parse_keyword(Keyword::OCTETS) {
12340            Some(CharLengthUnits::Octets)
12341        } else {
12342            None
12343        };
12344        Ok(CharacterLength::IntegerLength { length, unit })
12345    }
12346
12347    pub fn parse_binary_length(&mut self) -> Result<BinaryLength, ParserError> {
12348        if self.parse_keyword(Keyword::MAX) {
12349            return Ok(BinaryLength::Max);
12350        }
12351        let length = self.parse_literal_uint()?;
12352        Ok(BinaryLength::IntegerLength { length })
12353    }
12354
12355    pub fn parse_optional_precision_scale(
12356        &mut self,
12357    ) -> Result<(Option<u64>, Option<u64>), ParserError> {
12358        if self.consume_token(&Token::LParen) {
12359            let n = self.parse_literal_uint()?;
12360            let scale = if self.consume_token(&Token::Comma) {
12361                Some(self.parse_literal_uint()?)
12362            } else {
12363                None
12364            };
12365            self.expect_token(&Token::RParen)?;
12366            Ok((Some(n), scale))
12367        } else {
12368            Ok((None, None))
12369        }
12370    }
12371
12372    pub fn parse_exact_number_optional_precision_scale(
12373        &mut self,
12374    ) -> Result<ExactNumberInfo, ParserError> {
12375        if self.consume_token(&Token::LParen) {
12376            let precision = self.parse_literal_uint()?;
12377            let scale = if self.consume_token(&Token::Comma) {
12378                Some(self.parse_signed_integer()?)
12379            } else {
12380                None
12381            };
12382
12383            self.expect_token(&Token::RParen)?;
12384
12385            match scale {
12386                None => Ok(ExactNumberInfo::Precision(precision)),
12387                Some(scale) => Ok(ExactNumberInfo::PrecisionAndScale(precision, scale)),
12388            }
12389        } else {
12390            Ok(ExactNumberInfo::None)
12391        }
12392    }
12393
12394    /// Parse an optionally signed integer literal.
12395    fn parse_signed_integer(&mut self) -> Result<i64, ParserError> {
12396        let is_negative = self.consume_token(&Token::Minus);
12397
12398        if !is_negative {
12399            let _ = self.consume_token(&Token::Plus);
12400        }
12401
12402        let current_token = self.peek_token_ref();
12403        match &current_token.token {
12404            Token::Number(s, _) => {
12405                let s = s.clone();
12406                let span_start = current_token.span.start;
12407                self.advance_token();
12408                let value = Self::parse::<i64>(s, span_start)?;
12409                Ok(if is_negative { -value } else { value })
12410            }
12411            _ => self.expected_ref("number", current_token),
12412        }
12413    }
12414
12415    pub fn parse_optional_type_modifiers(&mut self) -> Result<Option<Vec<String>>, ParserError> {
12416        if self.consume_token(&Token::LParen) {
12417            let mut modifiers = Vec::new();
12418            loop {
12419                let next_token = self.next_token();
12420                match next_token.token {
12421                    Token::Word(w) => modifiers.push(w.to_string()),
12422                    Token::Number(n, _) => modifiers.push(n),
12423                    Token::SingleQuotedString(s) => modifiers.push(s),
12424
12425                    Token::Comma => {
12426                        continue;
12427                    }
12428                    Token::RParen => {
12429                        break;
12430                    }
12431                    _ => self.expected("type modifiers", next_token)?,
12432                }
12433            }
12434
12435            Ok(Some(modifiers))
12436        } else {
12437            Ok(None)
12438        }
12439    }
12440
12441    /// Parse a parenthesized sub data type
12442    fn parse_sub_type<F>(&mut self, parent_type: F) -> Result<DataType, ParserError>
12443    where
12444        F: FnOnce(Box<DataType>) -> DataType,
12445    {
12446        self.expect_token(&Token::LParen)?;
12447        let inside_type = self.parse_data_type()?;
12448        self.expect_token(&Token::RParen)?;
12449        Ok(parent_type(inside_type.into()))
12450    }
12451
12452    /// Parse a DELETE statement, returning a `Box`ed SetExpr
12453    ///
12454    /// This is used to reduce the size of the stack frames in debug builds
12455    fn parse_delete_setexpr_boxed(
12456        &mut self,
12457        delete_token: TokenWithSpan,
12458    ) -> Result<Box<SetExpr>, ParserError> {
12459        Ok(Box::new(SetExpr::Delete(self.parse_delete(delete_token)?)))
12460    }
12461
12462    pub fn parse_delete(&mut self, delete_token: TokenWithSpan) -> Result<Statement, ParserError> {
12463        let (tables, with_from_keyword) = if !self.parse_keyword(Keyword::FROM) {
12464            // `FROM` keyword is optional in BigQuery SQL.
12465            // https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax#delete_statement
12466            if dialect_of!(self is BigQueryDialect | OracleDialect | GenericDialect) {
12467                (vec![], false)
12468            } else {
12469                let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
12470                self.expect_keyword_is(Keyword::FROM)?;
12471                (tables, true)
12472            }
12473        } else {
12474            (vec![], true)
12475        };
12476
12477        let from = self.parse_comma_separated(Parser::parse_table_and_joins)?;
12478        let using = if self.parse_keyword(Keyword::USING) {
12479            Some(self.parse_comma_separated(Parser::parse_table_and_joins)?)
12480        } else {
12481            None
12482        };
12483        let selection = if self.parse_keyword(Keyword::WHERE) {
12484            Some(self.parse_expr()?)
12485        } else {
12486            None
12487        };
12488        let returning = if self.parse_keyword(Keyword::RETURNING) {
12489            Some(self.parse_comma_separated(Parser::parse_select_item)?)
12490        } else {
12491            None
12492        };
12493        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
12494            self.parse_comma_separated(Parser::parse_order_by_expr)?
12495        } else {
12496            vec![]
12497        };
12498        let limit = if self.parse_keyword(Keyword::LIMIT) {
12499            self.parse_limit()?
12500        } else {
12501            None
12502        };
12503
12504        Ok(Statement::Delete(Delete {
12505            delete_token: delete_token.into(),
12506            tables,
12507            from: if with_from_keyword {
12508                FromTable::WithFromKeyword(from)
12509            } else {
12510                FromTable::WithoutKeyword(from)
12511            },
12512            using,
12513            selection,
12514            returning,
12515            order_by,
12516            limit,
12517        }))
12518    }
12519
12520    // KILL [CONNECTION | QUERY | MUTATION] processlist_id
12521    pub fn parse_kill(&mut self) -> Result<Statement, ParserError> {
12522        let modifier_keyword =
12523            self.parse_one_of_keywords(&[Keyword::CONNECTION, Keyword::QUERY, Keyword::MUTATION]);
12524
12525        let id = self.parse_literal_uint()?;
12526
12527        let modifier = match modifier_keyword {
12528            Some(Keyword::CONNECTION) => Some(KillType::Connection),
12529            Some(Keyword::QUERY) => Some(KillType::Query),
12530            Some(Keyword::MUTATION) => {
12531                if dialect_of!(self is ClickHouseDialect | GenericDialect) {
12532                    Some(KillType::Mutation)
12533                } else {
12534                    self.expected(
12535                        "Unsupported type for KILL, allowed: CONNECTION | QUERY",
12536                        self.peek_token(),
12537                    )?
12538                }
12539            }
12540            _ => None,
12541        };
12542
12543        Ok(Statement::Kill { modifier, id })
12544    }
12545
12546    pub fn parse_explain(
12547        &mut self,
12548        describe_alias: DescribeAlias,
12549    ) -> Result<Statement, ParserError> {
12550        let mut analyze = false;
12551        let mut verbose = false;
12552        let mut query_plan = false;
12553        let mut estimate = false;
12554        let mut format = None;
12555        let mut options = None;
12556
12557        // Note: DuckDB is compatible with PostgreSQL syntax for this statement,
12558        // although not all features may be implemented.
12559        if describe_alias == DescribeAlias::Explain
12560            && self.dialect.supports_explain_with_utility_options()
12561            && self.peek_token().token == Token::LParen
12562        {
12563            options = Some(self.parse_utility_options()?)
12564        } else if self.parse_keywords(&[Keyword::QUERY, Keyword::PLAN]) {
12565            query_plan = true;
12566        } else if self.parse_keyword(Keyword::ESTIMATE) {
12567            estimate = true;
12568        } else {
12569            analyze = self.parse_keyword(Keyword::ANALYZE);
12570            verbose = self.parse_keyword(Keyword::VERBOSE);
12571            if self.parse_keyword(Keyword::FORMAT) {
12572                format = Some(self.parse_analyze_format_kind()?);
12573            }
12574        }
12575
12576        match self.maybe_parse(|parser| parser.parse_statement())? {
12577            Some(Statement::Explain { .. }) | Some(Statement::ExplainTable { .. }) => Err(
12578                ParserError::ParserError("Explain must be root of the plan".to_string()),
12579            ),
12580            Some(statement) => Ok(Statement::Explain {
12581                describe_alias,
12582                analyze,
12583                verbose,
12584                query_plan,
12585                estimate,
12586                statement: Box::new(statement),
12587                format,
12588                options,
12589            }),
12590            _ => {
12591                let hive_format =
12592                    match self.parse_one_of_keywords(&[Keyword::EXTENDED, Keyword::FORMATTED]) {
12593                        Some(Keyword::EXTENDED) => Some(HiveDescribeFormat::Extended),
12594                        Some(Keyword::FORMATTED) => Some(HiveDescribeFormat::Formatted),
12595                        _ => None,
12596                    };
12597
12598                let has_table_keyword = if self.dialect.describe_requires_table_keyword() {
12599                    // only allow the TABLE keyword for DESC|DESCRIBE statements
12600                    self.parse_keyword(Keyword::TABLE)
12601                } else {
12602                    false
12603                };
12604
12605                let table_name = self.parse_object_name(false)?;
12606                Ok(Statement::ExplainTable {
12607                    describe_alias,
12608                    hive_format,
12609                    has_table_keyword,
12610                    table_name,
12611                })
12612            }
12613        }
12614    }
12615
12616    /// Parse a query expression, i.e. a `SELECT` statement optionally
12617    /// preceded by some `WITH` CTE declarations and optionally followed
12618    /// by `ORDER BY`. Unlike some other `parse_...` methods, this one doesn't
12619    /// expect the initial keyword to have already been consumed.
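    ///
    /// A minimal usage sketch (assuming the `GenericDialect`; the query is illustrative):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect)
    ///     .try_with_sql("SELECT a FROM t WHERE a > 1 ORDER BY a LIMIT 10")
    ///     .unwrap();
    /// let query = parser.parse_query().unwrap();
    /// assert_eq!(
    ///     query.to_string(),
    ///     "SELECT a FROM t WHERE a > 1 ORDER BY a LIMIT 10"
    /// );
    /// ```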
12620    pub fn parse_query(&mut self) -> Result<Box<Query>, ParserError> {
12621        let _guard = self.recursion_counter.try_decrease()?;
12622        let with = if self.parse_keyword(Keyword::WITH) {
12623            let with_token = self.get_current_token();
12624            Some(With {
12625                with_token: with_token.clone().into(),
12626                recursive: self.parse_keyword(Keyword::RECURSIVE),
12627                cte_tables: self.parse_comma_separated(Parser::parse_cte)?,
12628            })
12629        } else {
12630            None
12631        };
12632        if self.parse_keyword(Keyword::INSERT) {
12633            Ok(Query {
12634                with,
12635                body: self.parse_insert_setexpr_boxed(self.get_current_token().clone())?,
12636                order_by: None,
12637                limit_clause: None,
12638                fetch: None,
12639                locks: vec![],
12640                for_clause: None,
12641                settings: None,
12642                format_clause: None,
12643                pipe_operators: vec![],
12644            }
12645            .into())
12646        } else if self.parse_keyword(Keyword::UPDATE) {
12647            Ok(Query {
12648                with,
12649                body: self.parse_update_setexpr_boxed(self.get_current_token().clone())?,
12650                order_by: None,
12651                limit_clause: None,
12652                fetch: None,
12653                locks: vec![],
12654                for_clause: None,
12655                settings: None,
12656                format_clause: None,
12657                pipe_operators: vec![],
12658            }
12659            .into())
12660        } else if self.parse_keyword(Keyword::DELETE) {
12661            Ok(Query {
12662                with,
12663                body: self.parse_delete_setexpr_boxed(self.get_current_token().clone())?,
12664                limit_clause: None,
12665                order_by: None,
12666                fetch: None,
12667                locks: vec![],
12668                for_clause: None,
12669                settings: None,
12670                format_clause: None,
12671                pipe_operators: vec![],
12672            }
12673            .into())
12674        } else if self.parse_keyword(Keyword::MERGE) {
12675            Ok(Query {
12676                with,
12677                body: self.parse_merge_setexpr_boxed(self.get_current_token().clone())?,
12678                limit_clause: None,
12679                order_by: None,
12680                fetch: None,
12681                locks: vec![],
12682                for_clause: None,
12683                settings: None,
12684                format_clause: None,
12685                pipe_operators: vec![],
12686            }
12687            .into())
12688        } else {
12689            let body = self.parse_query_body(self.dialect.prec_unknown())?;
12690
12691            let order_by = self.parse_optional_order_by()?;
12692
12693            let limit_clause = self.parse_optional_limit_clause()?;
12694
12695            let settings = self.parse_settings()?;
12696
12697            let fetch = if self.parse_keyword(Keyword::FETCH) {
12698                Some(self.parse_fetch()?)
12699            } else {
12700                None
12701            };
12702
12703            let mut for_clause = None;
12704            let mut locks = Vec::new();
12705            while self.parse_keyword(Keyword::FOR) {
12706                if let Some(parsed_for_clause) = self.parse_for_clause()? {
12707                    for_clause = Some(parsed_for_clause);
12708                    break;
12709                } else {
12710                    locks.push(self.parse_lock()?);
12711                }
12712            }
12713            let format_clause = if dialect_of!(self is ClickHouseDialect | GenericDialect)
12714                && self.parse_keyword(Keyword::FORMAT)
12715            {
12716                if self.parse_keyword(Keyword::NULL) {
12717                    Some(FormatClause::Null)
12718                } else {
12719                    let ident = self.parse_identifier()?;
12720                    Some(FormatClause::Identifier(ident))
12721                }
12722            } else {
12723                None
12724            };
12725
12726            let pipe_operators = if self.dialect.supports_pipe_operator() {
12727                self.parse_pipe_operators()?
12728            } else {
12729                Vec::new()
12730            };
12731
12732            Ok(Query {
12733                with,
12734                body,
12735                order_by,
12736                limit_clause,
12737                fetch,
12738                locks,
12739                for_clause,
12740                settings,
12741                format_clause,
12742                pipe_operators,
12743            }
12744            .into())
12745        }
12746    }
12747
12748    fn parse_pipe_operators(&mut self) -> Result<Vec<PipeOperator>, ParserError> {
12749        let mut pipe_operators = Vec::new();
12750
12751        while self.consume_token(&Token::VerticalBarRightAngleBracket) {
12752            let kw = self.expect_one_of_keywords(&[
12753                Keyword::SELECT,
12754                Keyword::EXTEND,
12755                Keyword::SET,
12756                Keyword::DROP,
12757                Keyword::AS,
12758                Keyword::WHERE,
12759                Keyword::LIMIT,
12760                Keyword::AGGREGATE,
12761                Keyword::ORDER,
12762                Keyword::TABLESAMPLE,
12763                Keyword::RENAME,
12764                Keyword::UNION,
12765                Keyword::INTERSECT,
12766                Keyword::EXCEPT,
12767                Keyword::CALL,
12768                Keyword::PIVOT,
12769                Keyword::UNPIVOT,
12770                Keyword::JOIN,
12771                Keyword::INNER,
12772                Keyword::LEFT,
12773                Keyword::RIGHT,
12774                Keyword::FULL,
12775                Keyword::CROSS,
12776            ])?;
12777            match kw {
12778                Keyword::SELECT => {
12779                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
12780                    pipe_operators.push(PipeOperator::Select { exprs })
12781                }
12782                Keyword::EXTEND => {
12783                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
12784                    pipe_operators.push(PipeOperator::Extend { exprs })
12785                }
12786                Keyword::SET => {
12787                    let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
12788                    pipe_operators.push(PipeOperator::Set { assignments })
12789                }
12790                Keyword::DROP => {
12791                    let columns = self.parse_identifiers()?;
12792                    pipe_operators.push(PipeOperator::Drop { columns })
12793                }
12794                Keyword::AS => {
12795                    let alias = self.parse_identifier()?;
12796                    pipe_operators.push(PipeOperator::As { alias })
12797                }
12798                Keyword::WHERE => {
12799                    let expr = self.parse_expr()?;
12800                    pipe_operators.push(PipeOperator::Where { expr })
12801                }
12802                Keyword::LIMIT => {
12803                    let expr = self.parse_expr()?;
12804                    let offset = if self.parse_keyword(Keyword::OFFSET) {
12805                        Some(self.parse_expr()?)
12806                    } else {
12807                        None
12808                    };
12809                    pipe_operators.push(PipeOperator::Limit { expr, offset })
12810                }
12811                Keyword::AGGREGATE => {
12812                    let full_table_exprs = if self.peek_keyword(Keyword::GROUP) {
12813                        vec![]
12814                    } else {
12815                        self.parse_comma_separated(|parser| {
12816                            parser.parse_expr_with_alias_and_order_by()
12817                        })?
12818                    };
12819
12820                    let group_by_expr = if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
12821                        self.parse_comma_separated(|parser| {
12822                            parser.parse_expr_with_alias_and_order_by()
12823                        })?
12824                    } else {
12825                        vec![]
12826                    };
12827
12828                    pipe_operators.push(PipeOperator::Aggregate {
12829                        full_table_exprs,
12830                        group_by_expr,
12831                    })
12832                }
12833                Keyword::ORDER => {
12834                    self.expect_one_of_keywords(&[Keyword::BY])?;
12835                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
12836                    pipe_operators.push(PipeOperator::OrderBy { exprs })
12837                }
12838                Keyword::TABLESAMPLE => {
12839                    let sample = self.parse_table_sample(TableSampleModifier::TableSample)?;
12840                    pipe_operators.push(PipeOperator::TableSample { sample });
12841                }
12842                Keyword::RENAME => {
12843                    let mappings =
12844                        self.parse_comma_separated(Parser::parse_identifier_with_optional_alias)?;
12845                    pipe_operators.push(PipeOperator::Rename { mappings });
12846                }
12847                Keyword::UNION => {
12848                    let set_quantifier = self.parse_set_quantifier(&Some(SetOperator::Union));
12849                    let queries = self.parse_pipe_operator_queries()?;
12850                    pipe_operators.push(PipeOperator::Union {
12851                        set_quantifier,
12852                        queries,
12853                    });
12854                }
12855                Keyword::INTERSECT => {
12856                    let set_quantifier =
12857                        self.parse_distinct_required_set_quantifier("INTERSECT")?;
12858                    let queries = self.parse_pipe_operator_queries()?;
12859                    pipe_operators.push(PipeOperator::Intersect {
12860                        set_quantifier,
12861                        queries,
12862                    });
12863                }
12864                Keyword::EXCEPT => {
12865                    let set_quantifier = self.parse_distinct_required_set_quantifier("EXCEPT")?;
12866                    let queries = self.parse_pipe_operator_queries()?;
12867                    pipe_operators.push(PipeOperator::Except {
12868                        set_quantifier,
12869                        queries,
12870                    });
12871                }
12872                Keyword::CALL => {
12873                    let function_name = self.parse_object_name(false)?;
12874                    let function_expr = self.parse_function(function_name)?;
12875                    if let Expr::Function(function) = function_expr {
12876                        let alias = self.parse_identifier_optional_alias()?;
12877                        pipe_operators.push(PipeOperator::Call { function, alias });
12878                    } else {
12879                        return Err(ParserError::ParserError(
12880                            "Expected function call after CALL".to_string(),
12881                        ));
12882                    }
12883                }
12884                Keyword::PIVOT => {
12885                    self.expect_token(&Token::LParen)?;
12886                    let aggregate_functions =
12887                        self.parse_comma_separated(Self::parse_aliased_function_call)?;
12888                    self.expect_keyword_is(Keyword::FOR)?;
12889                    let value_column = self.parse_period_separated(|p| p.parse_identifier())?;
12890                    self.expect_keyword_is(Keyword::IN)?;
12891
12892                    self.expect_token(&Token::LParen)?;
12893                    let value_source = if self.parse_keyword(Keyword::ANY) {
12894                        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
12895                            self.parse_comma_separated(Parser::parse_order_by_expr)?
12896                        } else {
12897                            vec![]
12898                        };
12899                        PivotValueSource::Any(order_by)
12900                    } else if self.peek_sub_query() {
12901                        PivotValueSource::Subquery(self.parse_query()?)
12902                    } else {
12903                        PivotValueSource::List(
12904                            self.parse_comma_separated(Self::parse_expr_with_alias)?,
12905                        )
12906                    };
12907                    self.expect_token(&Token::RParen)?;
12908                    self.expect_token(&Token::RParen)?;
12909
12910                    let alias = self.parse_identifier_optional_alias()?;
12911
12912                    pipe_operators.push(PipeOperator::Pivot {
12913                        aggregate_functions,
12914                        value_column,
12915                        value_source,
12916                        alias,
12917                    });
12918                }
12919                Keyword::UNPIVOT => {
12920                    self.expect_token(&Token::LParen)?;
12921                    let value_column = self.parse_identifier()?;
12922                    self.expect_keyword(Keyword::FOR)?;
12923                    let name_column = self.parse_identifier()?;
12924                    self.expect_keyword(Keyword::IN)?;
12925
12926                    self.expect_token(&Token::LParen)?;
12927                    let unpivot_columns = self.parse_comma_separated(Parser::parse_identifier)?;
12928                    self.expect_token(&Token::RParen)?;
12929
12930                    self.expect_token(&Token::RParen)?;
12931
12932                    let alias = self.parse_identifier_optional_alias()?;
12933
12934                    pipe_operators.push(PipeOperator::Unpivot {
12935                        value_column,
12936                        name_column,
12937                        unpivot_columns,
12938                        alias,
12939                    });
12940                }
12941                Keyword::JOIN
12942                | Keyword::INNER
12943                | Keyword::LEFT
12944                | Keyword::RIGHT
12945                | Keyword::FULL
12946                | Keyword::CROSS => {
12947                    self.prev_token();
12948                    let mut joins = self.parse_joins()?;
12949                    if joins.len() != 1 {
12950                        return Err(ParserError::ParserError(
12951                            "Join pipe operator must have a single join".to_string(),
12952                        ));
12953                    }
12954                    let join = joins.swap_remove(0);
12955                    pipe_operators.push(PipeOperator::Join(join))
12956                }
12957                unhandled => {
12958                    return Err(ParserError::ParserError(format!(
12959                    "`expect_one_of_keywords` further up allowed unhandled keyword: {unhandled:?}"
12960                )))
12961                }
12962            }
12963        }
12964        Ok(pipe_operators)
12965    }
12966
12967    fn parse_settings(&mut self) -> Result<Option<Vec<Setting>>, ParserError> {
12968        let settings = if dialect_of!(self is ClickHouseDialect|GenericDialect)
12969            && self.parse_keyword(Keyword::SETTINGS)
12970        {
12971            let key_values = self.parse_comma_separated(|p| {
12972                let key = p.parse_identifier()?;
12973                p.expect_token(&Token::Eq)?;
12974                let value = p.parse_expr()?;
12975                Ok(Setting { key, value })
12976            })?;
12977            Some(key_values)
12978        } else {
12979            None
12980        };
12981        Ok(settings)
12982    }
12983
12984    /// Parse an MSSQL `FOR [XML | JSON | BROWSE]` clause
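    ///
    /// Illustrative examples of the three forms (the leading `FOR` has already
    /// been consumed by the caller; table names are placeholders):
    ///
    /// ```sql
    /// SELECT * FROM t FOR XML AUTO
    /// SELECT * FROM t FOR JSON PATH
    /// SELECT * FROM t FOR BROWSE
    /// ```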
12985    pub fn parse_for_clause(&mut self) -> Result<Option<ForClause>, ParserError> {
12986        if self.parse_keyword(Keyword::XML) {
12987            Ok(Some(self.parse_for_xml()?))
12988        } else if self.parse_keyword(Keyword::JSON) {
12989            Ok(Some(self.parse_for_json()?))
12990        } else if self.parse_keyword(Keyword::BROWSE) {
12991            Ok(Some(ForClause::Browse))
12992        } else {
12993            Ok(None)
12994        }
12995    }
12996
12997    /// Parse an MSSQL `FOR XML` clause
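    ///
    /// Illustrative example including the optional trailing options parsed by
    /// this function (element and table names are placeholders):
    ///
    /// ```sql
    /// SELECT id, name FROM t FOR XML PATH('row'), ROOT('rows'), ELEMENTS, BINARY BASE64
    /// ```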
12998    pub fn parse_for_xml(&mut self) -> Result<ForClause, ParserError> {
12999        let for_xml = if self.parse_keyword(Keyword::RAW) {
13000            let mut element_name = None;
13001            if self.peek_token().token == Token::LParen {
13002                self.expect_token(&Token::LParen)?;
13003                element_name = Some(self.parse_literal_string()?);
13004                self.expect_token(&Token::RParen)?;
13005            }
13006            ForXml::Raw(element_name)
13007        } else if self.parse_keyword(Keyword::AUTO) {
13008            ForXml::Auto
13009        } else if self.parse_keyword(Keyword::EXPLICIT) {
13010            ForXml::Explicit
13011        } else if self.parse_keyword(Keyword::PATH) {
13012            let mut element_name = None;
13013            if self.peek_token().token == Token::LParen {
13014                self.expect_token(&Token::LParen)?;
13015                element_name = Some(self.parse_literal_string()?);
13016                self.expect_token(&Token::RParen)?;
13017            }
13018            ForXml::Path(element_name)
13019        } else {
13020            return Err(ParserError::ParserError(
13021                "Expected FOR XML [RAW | AUTO | EXPLICIT | PATH]".to_string(),
13022            ));
13023        };
13024        let mut elements = false;
13025        let mut binary_base64 = false;
13026        let mut root = None;
13027        let mut r#type = false;
13028        while self.peek_token().token == Token::Comma {
13029            self.next_token();
13030            if self.parse_keyword(Keyword::ELEMENTS) {
13031                elements = true;
13032            } else if self.parse_keyword(Keyword::BINARY) {
13033                self.expect_keyword_is(Keyword::BASE64)?;
13034                binary_base64 = true;
13035            } else if self.parse_keyword(Keyword::ROOT) {
13036                self.expect_token(&Token::LParen)?;
13037                root = Some(self.parse_literal_string()?);
13038                self.expect_token(&Token::RParen)?;
13039            } else if self.parse_keyword(Keyword::TYPE) {
13040                r#type = true;
13041            }
13042        }
13043        Ok(ForClause::Xml {
13044            for_xml,
13045            elements,
13046            binary_base64,
13047            root,
13048            r#type,
13049        })
13050    }
13051
13052    /// Parse an MSSQL `FOR JSON` clause
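    ///
    /// Illustrative example including the optional trailing options parsed by
    /// this function (names are placeholders):
    ///
    /// ```sql
    /// SELECT id, name FROM t FOR JSON PATH, ROOT('rows'), INCLUDE_NULL_VALUES
    /// ```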
13053    pub fn parse_for_json(&mut self) -> Result<ForClause, ParserError> {
13054        let for_json = if self.parse_keyword(Keyword::AUTO) {
13055            ForJson::Auto
13056        } else if self.parse_keyword(Keyword::PATH) {
13057            ForJson::Path
13058        } else {
13059            return Err(ParserError::ParserError(
13060                "Expected FOR JSON [AUTO | PATH]".to_string(),
13061            ));
13062        };
13063        let mut root = None;
13064        let mut include_null_values = false;
13065        let mut without_array_wrapper = false;
13066        while self.peek_token().token == Token::Comma {
13067            self.next_token();
13068            if self.parse_keyword(Keyword::ROOT) {
13069                self.expect_token(&Token::LParen)?;
13070                root = Some(self.parse_literal_string()?);
13071                self.expect_token(&Token::RParen)?;
13072            } else if self.parse_keyword(Keyword::INCLUDE_NULL_VALUES) {
13073                include_null_values = true;
13074            } else if self.parse_keyword(Keyword::WITHOUT_ARRAY_WRAPPER) {
13075                without_array_wrapper = true;
13076            }
13077        }
13078        Ok(ForClause::Json {
13079            for_json,
13080            root,
13081            include_null_values,
13082            without_array_wrapper,
13083        })
13084    }
13085
13086    /// Parse a CTE (`alias [( col1, col2, ... )] AS (subquery)`)
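    ///
    /// This is typically invoked once per comma-separated element of a `WITH`
    /// list. Illustrative example (identifiers are placeholders):
    ///
    /// ```sql
    /// WITH totals (customer, amount) AS (SELECT customer, SUM(price) FROM orders GROUP BY customer)
    /// SELECT * FROM totals
    /// ```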
13087    pub fn parse_cte(&mut self) -> Result<Cte, ParserError> {
13088        let name = self.parse_identifier()?;
13089
13090        let mut cte = if self.parse_keyword(Keyword::AS) {
13091            let mut is_materialized = None;
13092            if dialect_of!(self is PostgreSqlDialect) {
13093                if self.parse_keyword(Keyword::MATERIALIZED) {
13094                    is_materialized = Some(CteAsMaterialized::Materialized);
13095                } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
13096                    is_materialized = Some(CteAsMaterialized::NotMaterialized);
13097                }
13098            }
13099            self.expect_token(&Token::LParen)?;
13100
13101            let query = self.parse_query()?;
13102            let closing_paren_token = self.expect_token(&Token::RParen)?;
13103
13104            let alias = TableAlias {
13105                explicit: false,
13106                name,
13107                columns: vec![],
13108            };
13109            Cte {
13110                alias,
13111                query,
13112                from: None,
13113                materialized: is_materialized,
13114                closing_paren_token: closing_paren_token.into(),
13115            }
13116        } else {
13117            let columns = self.parse_table_alias_column_defs()?;
13118            self.expect_keyword_is(Keyword::AS)?;
13119            let mut is_materialized = None;
13120            if dialect_of!(self is PostgreSqlDialect) {
13121                if self.parse_keyword(Keyword::MATERIALIZED) {
13122                    is_materialized = Some(CteAsMaterialized::Materialized);
13123                } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
13124                    is_materialized = Some(CteAsMaterialized::NotMaterialized);
13125                }
13126            }
13127            self.expect_token(&Token::LParen)?;
13128
13129            let query = self.parse_query()?;
13130            let closing_paren_token = self.expect_token(&Token::RParen)?;
13131
13132            let alias = TableAlias {
13133                explicit: false,
13134                name,
13135                columns,
13136            };
13137            Cte {
13138                alias,
13139                query,
13140                from: None,
13141                materialized: is_materialized,
13142                closing_paren_token: closing_paren_token.into(),
13143            }
13144        };
13145        if self.parse_keyword(Keyword::FROM) {
13146            cte.from = Some(self.parse_identifier()?);
13147        }
13148        Ok(cte)
13149    }
13150
13151    /// Parse a "query body", which is an expression with roughly the
13152    /// following grammar:
13153    /// ```sql
13154    ///   query_body ::= restricted_select | '(' subquery ')' | set_operation
13155    ///   restricted_select ::= 'SELECT' [expr_list] [ from ] [ where ] [ groupby_having ]
13156    ///   subquery ::= query_body [ order_by_limit ]
13157    ///   set_operation ::= query_body { 'UNION' | 'EXCEPT' | 'INTERSECT' } [ 'ALL' ] query_body
13158    /// ```
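    ///
    /// For illustration, the precedence rules below mean that `INTERSECT`
    /// binds tighter than `UNION`, so a query such as the following (table
    /// names are placeholders) groups as `t1 UNION (t2 INTERSECT t3)`:
    ///
    /// ```sql
    /// SELECT a FROM t1 UNION SELECT a FROM t2 INTERSECT SELECT a FROM t3
    /// ```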
13159    pub fn parse_query_body(&mut self, precedence: u8) -> Result<Box<SetExpr>, ParserError> {
13160        // We parse the expression using a Pratt parser, as in `parse_expr()`.
13161        // Start by parsing a restricted SELECT or a `(subquery)`:
13162        let expr = if self.peek_keyword(Keyword::SELECT)
13163            || (self.peek_keyword(Keyword::FROM) && self.dialect.supports_from_first_select())
13164        {
13165            SetExpr::Select(self.parse_select().map(Box::new)?)
13166        } else if self.consume_token(&Token::LParen) {
13167            // CTEs are not allowed here, but the parser currently accepts them
13168            let subquery = self.parse_query()?;
13169            self.expect_token(&Token::RParen)?;
13170            SetExpr::Query(subquery)
13171        } else if self.parse_keyword(Keyword::VALUES) {
13172            let is_mysql = dialect_of!(self is MySqlDialect);
13173            SetExpr::Values(self.parse_values(is_mysql, false)?)
13174        } else if self.parse_keyword(Keyword::VALUE) {
13175            let is_mysql = dialect_of!(self is MySqlDialect);
13176            SetExpr::Values(self.parse_values(is_mysql, true)?)
13177        } else if self.parse_keyword(Keyword::TABLE) {
13178            SetExpr::Table(Box::new(self.parse_as_table()?))
13179        } else {
13180            return self.expected(
13181                "SELECT, VALUES, or a subquery in the query body",
13182                self.peek_token(),
13183            );
13184        };
13185
13186        self.parse_remaining_set_exprs(expr, precedence)
13187    }
13188
13189    /// Parse any extra set expressions that may be present in a query body
13190    ///
13191    /// (this is its own function to reduce required stack size in debug builds)
13192    fn parse_remaining_set_exprs(
13193        &mut self,
13194        mut expr: SetExpr,
13195        precedence: u8,
13196    ) -> Result<Box<SetExpr>, ParserError> {
13197        loop {
13198            // The query can be optionally followed by a set operator:
13199            let op = self.parse_set_operator(&self.peek_token().token);
13200            let next_precedence = match op {
13201                // UNION, EXCEPT, and MINUS have the same binding power and evaluate left-to-right
13202                Some(SetOperator::Union) | Some(SetOperator::Except) | Some(SetOperator::Minus) => {
13203                    10
13204                }
13205                // INTERSECT has higher precedence than UNION/EXCEPT
13206                Some(SetOperator::Intersect) => 20,
13207                // Unexpected token or EOF => stop parsing the query body
13208                None => break,
13209            };
13210            if precedence >= next_precedence {
13211                break;
13212            }
13213            self.next_token(); // skip past the set operator
13214            let set_quantifier = self.parse_set_quantifier(&op);
13215            expr = SetExpr::SetOperation {
13216                left: Box::new(expr),
13217                op: op.unwrap(),
13218                set_quantifier,
13219                right: self.parse_query_body(next_precedence)?,
13220            };
13221        }
13222
13223        Ok(expr.into())
13224    }
13225
13226    pub fn parse_set_operator(&mut self, token: &Token) -> Option<SetOperator> {
13227        match token {
13228            Token::Word(w) if w.keyword == Keyword::UNION => Some(SetOperator::Union),
13229            Token::Word(w) if w.keyword == Keyword::EXCEPT => Some(SetOperator::Except),
13230            Token::Word(w) if w.keyword == Keyword::INTERSECT => Some(SetOperator::Intersect),
13231            Token::Word(w) if w.keyword == Keyword::MINUS => Some(SetOperator::Minus),
13232            _ => None,
13233        }
13234    }
13235
13236    pub fn parse_set_quantifier(&mut self, op: &Option<SetOperator>) -> SetQuantifier {
13237        match op {
13238            Some(
13239                SetOperator::Except
13240                | SetOperator::Intersect
13241                | SetOperator::Union
13242                | SetOperator::Minus,
13243            ) => {
13244                if self.parse_keywords(&[Keyword::DISTINCT, Keyword::BY, Keyword::NAME]) {
13245                    SetQuantifier::DistinctByName
13246                } else if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
13247                    SetQuantifier::ByName
13248                } else if self.parse_keyword(Keyword::ALL) {
13249                    if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
13250                        SetQuantifier::AllByName
13251                    } else {
13252                        SetQuantifier::All
13253                    }
13254                } else if self.parse_keyword(Keyword::DISTINCT) {
13255                    SetQuantifier::Distinct
13256                } else {
13257                    SetQuantifier::None
13258                }
13259            }
13260            _ => SetQuantifier::None,
13261        }
13262    }
13263
13264    /// Parse a restricted `SELECT` statement (no CTEs / `UNION` / `ORDER BY`)
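    ///
    /// Dialects for which `supports_from_first_select` returns true may also
    /// accept a "FROM-first" form, e.g. (names are placeholders):
    ///
    /// ```sql
    /// FROM orders SELECT customer_id, total
    /// ```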
13265    pub fn parse_select(&mut self) -> Result<Select, ParserError> {
13266        let mut from_first = None;
13267
13268        if self.dialect.supports_from_first_select() && self.peek_keyword(Keyword::FROM) {
13269            let from_token = self.expect_keyword(Keyword::FROM)?;
13270            let from = self.parse_table_with_joins()?;
13271            if !self.peek_keyword(Keyword::SELECT) {
13272                return Ok(Select {
13273                    select_token: AttachedToken(from_token),
13274                    distinct: None,
13275                    top: None,
13276                    top_before_distinct: false,
13277                    projection: vec![],
13278                    exclude: None,
13279                    into: None,
13280                    from,
13281                    lateral_views: vec![],
13282                    prewhere: None,
13283                    selection: None,
13284                    group_by: GroupByExpr::Expressions(vec![], vec![]),
13285                    cluster_by: vec![],
13286                    distribute_by: vec![],
13287                    sort_by: vec![],
13288                    having: None,
13289                    named_window: vec![],
13290                    window_before_qualify: false,
13291                    qualify: None,
13292                    value_table_mode: None,
13293                    connect_by: None,
13294                    flavor: SelectFlavor::FromFirstNoSelect,
13295                });
13296            }
13297            from_first = Some(from);
13298        }
13299
13300        let select_token = self.expect_keyword(Keyword::SELECT)?;
13301        let value_table_mode = self.parse_value_table_mode()?;
13302
13303        let mut top_before_distinct = false;
13304        let mut top = None;
13305        if self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
13306            top = Some(self.parse_top()?);
13307            top_before_distinct = true;
13308        }
13309        let distinct = self.parse_all_or_distinct()?;
13310        if !self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
13311            top = Some(self.parse_top()?);
13312        }
13313
13314        let projection =
13315            if self.dialect.supports_empty_projections() && self.peek_keyword(Keyword::FROM) {
13316                vec![]
13317            } else {
13318                self.parse_projection()?
13319            };
13320
13321        let exclude = if self.dialect.supports_select_exclude() {
13322            self.parse_optional_select_item_exclude()?
13323        } else {
13324            None
13325        };
13326
13327        let into = if self.parse_keyword(Keyword::INTO) {
13328            Some(self.parse_select_into()?)
13329        } else {
13330            None
13331        };
13332
13333        // Note that for keywords to be properly handled here, they need to be
13334        // added to `RESERVED_FOR_COLUMN_ALIAS` / `RESERVED_FOR_TABLE_ALIAS`,
13335        // otherwise they may be parsed as an alias as part of the `projection`
13336        // or `from`.
13337
13338        let (from, from_first) = if let Some(from) = from_first.take() {
13339            (from, true)
13340        } else if self.parse_keyword(Keyword::FROM) {
13341            (self.parse_table_with_joins()?, false)
13342        } else {
13343            (vec![], false)
13344        };
13345
13346        let mut lateral_views = vec![];
13347        loop {
13348            if self.parse_keywords(&[Keyword::LATERAL, Keyword::VIEW]) {
13349                let outer = self.parse_keyword(Keyword::OUTER);
13350                let lateral_view = self.parse_expr()?;
13351                let lateral_view_name = self.parse_object_name(false)?;
13352                let lateral_col_alias = self
13353                    .parse_comma_separated(|parser| {
13354                        parser.parse_optional_alias(&[
13355                            Keyword::WHERE,
13356                            Keyword::GROUP,
13357                            Keyword::CLUSTER,
13358                            Keyword::HAVING,
13359                            Keyword::LATERAL,
13360                        ]) // these clause keywords end the alias list instead of being consumed as aliases
13361                    })?
13362                    .into_iter()
13363                    .flatten()
13364                    .collect();
13365
13366                lateral_views.push(LateralView {
13367                    lateral_view,
13368                    lateral_view_name,
13369                    lateral_col_alias,
13370                    outer,
13371                });
13372            } else {
13373                break;
13374            }
13375        }
13376
13377        let prewhere = if dialect_of!(self is ClickHouseDialect|GenericDialect)
13378            && self.parse_keyword(Keyword::PREWHERE)
13379        {
13380            Some(self.parse_expr()?)
13381        } else {
13382            None
13383        };
13384
13385        let selection = if self.parse_keyword(Keyword::WHERE) {
13386            Some(self.parse_expr()?)
13387        } else {
13388            None
13389        };
13390
13391        let group_by = self
13392            .parse_optional_group_by()?
13393            .unwrap_or_else(|| GroupByExpr::Expressions(vec![], vec![]));
13394
13395        let cluster_by = if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
13396            self.parse_comma_separated(Parser::parse_expr)?
13397        } else {
13398            vec![]
13399        };
13400
13401        let distribute_by = if self.parse_keywords(&[Keyword::DISTRIBUTE, Keyword::BY]) {
13402            self.parse_comma_separated(Parser::parse_expr)?
13403        } else {
13404            vec![]
13405        };
13406
13407        let sort_by = if self.parse_keywords(&[Keyword::SORT, Keyword::BY]) {
13408            self.parse_comma_separated(Parser::parse_order_by_expr)?
13409        } else {
13410            vec![]
13411        };
13412
13413        let having = if self.parse_keyword(Keyword::HAVING) {
13414            Some(self.parse_expr()?)
13415        } else {
13416            None
13417        };
13418
13419        // Accept QUALIFY and WINDOW in any order and flag accordingly.
13420        let (named_windows, qualify, window_before_qualify) = if self.parse_keyword(Keyword::WINDOW)
13421        {
13422            let named_windows = self.parse_comma_separated(Parser::parse_named_window)?;
13423            if self.parse_keyword(Keyword::QUALIFY) {
13424                (named_windows, Some(self.parse_expr()?), true)
13425            } else {
13426                (named_windows, None, true)
13427            }
13428        } else if self.parse_keyword(Keyword::QUALIFY) {
13429            let qualify = Some(self.parse_expr()?);
13430            if self.parse_keyword(Keyword::WINDOW) {
13431                (
13432                    self.parse_comma_separated(Parser::parse_named_window)?,
13433                    qualify,
13434                    false,
13435                )
13436            } else {
13437                (Default::default(), qualify, false)
13438            }
13439        } else {
13440            Default::default()
13441        };
13442
13443        let connect_by = if self.dialect.supports_connect_by()
13444            && self
13445                .parse_one_of_keywords(&[Keyword::START, Keyword::CONNECT])
13446                .is_some()
13447        {
13448            self.prev_token();
13449            Some(self.parse_connect_by()?)
13450        } else {
13451            None
13452        };
13453
13454        Ok(Select {
13455            select_token: AttachedToken(select_token),
13456            distinct,
13457            top,
13458            top_before_distinct,
13459            projection,
13460            exclude,
13461            into,
13462            from,
13463            lateral_views,
13464            prewhere,
13465            selection,
13466            group_by,
13467            cluster_by,
13468            distribute_by,
13469            sort_by,
13470            having,
13471            named_window: named_windows,
13472            window_before_qualify,
13473            qualify,
13474            value_table_mode,
13475            connect_by,
13476            flavor: if from_first {
13477                SelectFlavor::FromFirst
13478            } else {
13479                SelectFlavor::Standard
13480            },
13481        })
13482    }
13483
13484    fn parse_value_table_mode(&mut self) -> Result<Option<ValueTableMode>, ParserError> {
13485        if !dialect_of!(self is BigQueryDialect) {
13486            return Ok(None);
13487        }
13488
13489        let mode = if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::VALUE]) {
13490            Some(ValueTableMode::DistinctAsValue)
13491        } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::STRUCT]) {
13492            Some(ValueTableMode::DistinctAsStruct)
13493        } else if self.parse_keywords(&[Keyword::AS, Keyword::VALUE])
13494            || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::VALUE])
13495        {
13496            Some(ValueTableMode::AsValue)
13497        } else if self.parse_keywords(&[Keyword::AS, Keyword::STRUCT])
13498            || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::STRUCT])
13499        {
13500            Some(ValueTableMode::AsStruct)
13501        } else if self.parse_keyword(Keyword::AS) {
13502            self.expected("VALUE or STRUCT", self.peek_token())?
13503        } else {
13504            None
13505        };
13506
13507        Ok(mode)
13508    }
13509
13510    /// Invoke `f` after first setting the parser's `ParserState` to `state`.
13511    ///
13512    /// The parser's previous state is restored before this function returns.
13513    fn with_state<T, F>(&mut self, state: ParserState, mut f: F) -> Result<T, ParserError>
13514    where
13515        F: FnMut(&mut Parser) -> Result<T, ParserError>,
13516    {
13517        let current_state = self.state;
13518        self.state = state;
13519        let res = f(self);
13520        self.state = current_state;
13521        res
13522    }
13523
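    /// Parse a hierarchical-query clause, accepting `START WITH` and
    /// `CONNECT BY` in either order.
    ///
    /// Illustrative example (Snowflake/Oracle-style; names are placeholders):
    ///
    /// ```sql
    /// SELECT employee_id, manager_id FROM employees
    /// START WITH manager_id IS NULL
    /// CONNECT BY manager_id = PRIOR employee_id
    /// ```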
13524    pub fn parse_connect_by(&mut self) -> Result<ConnectBy, ParserError> {
13525        let (condition, relationships) = if self.parse_keywords(&[Keyword::CONNECT, Keyword::BY]) {
13526            let relationships = self.with_state(ParserState::ConnectBy, |parser| {
13527                parser.parse_comma_separated(Parser::parse_expr)
13528            })?;
13529            self.expect_keywords(&[Keyword::START, Keyword::WITH])?;
13530            let condition = self.parse_expr()?;
13531            (condition, relationships)
13532        } else {
13533            self.expect_keywords(&[Keyword::START, Keyword::WITH])?;
13534            let condition = self.parse_expr()?;
13535            self.expect_keywords(&[Keyword::CONNECT, Keyword::BY])?;
13536            let relationships = self.with_state(ParserState::ConnectBy, |parser| {
13537                parser.parse_comma_separated(Parser::parse_expr)
13538            })?;
13539            (condition, relationships)
13540        };
13541        Ok(ConnectBy {
13542            condition,
13543            relationships,
13544        })
13545    }
13546
13547    /// Parse the `TABLE <name>` form of a query body, as in `CREATE TABLE x AS TABLE y` (the `TABLE` keyword has already been consumed)
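    ///
    /// Illustrative example (table and schema names are placeholders):
    ///
    /// ```sql
    /// CREATE TABLE t2 AS TABLE schema1.t1
    /// ```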
13548    pub fn parse_as_table(&mut self) -> Result<Table, ParserError> {
13549        let token1 = self.next_token();
13550        let token2 = self.next_token();
13551        let token3 = self.next_token();
13552
13553        let table_name;
13554        let schema_name;
13555        if token2 == Token::Period {
13556            match token1.token {
13557                Token::Word(w) => {
13558                    schema_name = w.value;
13559                }
13560                _ => {
13561                    return self.expected("Schema name", token1);
13562                }
13563            }
13564            match token3.token {
13565                Token::Word(w) => {
13566                    table_name = w.value;
13567                }
13568                _ => {
13569                    return self.expected("Table name", token3);
13570                }
13571            }
13572            Ok(Table {
13573                table_name: Some(table_name),
13574                schema_name: Some(schema_name),
13575            })
13576        } else {
13577            match token1.token {
13578                Token::Word(w) => {
13579                    table_name = w.value;
13580                }
13581                _ => {
13582                    return self.expected("Table name", token1);
13583                }
13584            }
13585            Ok(Table {
13586                table_name: Some(table_name),
13587                schema_name: None,
13588            })
13589        }
13590    }
13591
13592    /// Parse a `SET ROLE` statement. Expects SET to be consumed already.
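    ///
    /// Illustrative examples (the role name is a placeholder):
    ///
    /// ```sql
    /// SET ROLE NONE
    /// SET SESSION ROLE reporting
    /// ```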
13593    fn parse_set_role(
13594        &mut self,
13595        modifier: Option<ContextModifier>,
13596    ) -> Result<Statement, ParserError> {
13597        self.expect_keyword_is(Keyword::ROLE)?;
13598
13599        let role_name = if self.parse_keyword(Keyword::NONE) {
13600            None
13601        } else {
13602            Some(self.parse_identifier()?)
13603        };
13604        Ok(Statement::Set(Set::SetRole {
13605            context_modifier: modifier,
13606            role_name,
13607        }))
13608    }
13609
13610    fn parse_set_values(
13611        &mut self,
13612        parenthesized_assignment: bool,
13613    ) -> Result<Vec<Expr>, ParserError> {
13614        let mut values = vec![];
13615
13616        if parenthesized_assignment {
13617            self.expect_token(&Token::LParen)?;
13618        }
13619
13620        loop {
13621            let value = if let Some(expr) = self.try_parse_expr_sub_query()? {
13622                expr
13623            } else if let Ok(expr) = self.parse_expr() {
13624                expr
13625            } else {
13626                self.expected("variable value", self.peek_token())?
13627            };
13628
13629            values.push(value);
13630            if self.consume_token(&Token::Comma) {
13631                continue;
13632            }
13633
13634            if parenthesized_assignment {
13635                self.expect_token(&Token::RParen)?;
13636            }
13637            return Ok(values);
13638        }
13639    }
13640
13641    fn parse_context_modifier(&mut self) -> Option<ContextModifier> {
13642        let modifier =
13643            self.parse_one_of_keywords(&[Keyword::SESSION, Keyword::LOCAL, Keyword::GLOBAL])?;
13644
13645        Self::keyword_to_modifier(modifier)
13646    }
13647
13648    /// Parse a single SET statement assignment `var = expr`.
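    ///
    /// Each assignment may carry its own scope modifier; names and values in
    /// this illustrative example are placeholders:
    ///
    /// ```sql
    /// SET GLOBAL max_connections = 1000, SESSION sql_mode = 'ANSI'
    /// ```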
13649    fn parse_set_assignment(&mut self) -> Result<SetAssignment, ParserError> {
13650        let scope = self.parse_context_modifier();
13651
13652        let name = if self.dialect.supports_parenthesized_set_variables()
13653            && self.consume_token(&Token::LParen)
13654        {
13655            // Parenthesized assignments are handled in `parse_set` after first
13656            // trying to parse a list of assignments using this function.
13657            // If a dialect supports both and we find an LParen, we exit early from this function.
13658            self.expected("Unparenthesized assignment", self.peek_token())?
13659        } else {
13660            self.parse_object_name(false)?
13661        };
13662
13663        if !(self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO)) {
13664            return self.expected("assignment operator", self.peek_token());
13665        }
13666
13667        let value = self.parse_expr()?;
13668
13669        Ok(SetAssignment { scope, name, value })
13670    }
13671
13672    fn parse_set(&mut self) -> Result<Statement, ParserError> {
13673        let hivevar = self.parse_keyword(Keyword::HIVEVAR);
13674
13675        // The modifier is either `HIVEVAR:` or a context modifier (LOCAL, SESSION, etc.), never both
13676        let scope = if !hivevar {
13677            self.parse_context_modifier()
13678        } else {
13679            None
13680        };
13681
13682        if hivevar {
13683            self.expect_token(&Token::Colon)?;
13684        }
13685
13686        if let Some(set_role_stmt) = self.maybe_parse(|parser| parser.parse_set_role(scope))? {
13687            return Ok(set_role_stmt);
13688        }
13689
13690        // Handle special cases first
13691        if self.parse_keywords(&[Keyword::TIME, Keyword::ZONE])
13692            || self.parse_keyword(Keyword::TIMEZONE)
13693        {
13694            if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
13695                return Ok(Set::SingleAssignment {
13696                    scope,
13697                    hivevar,
13698                    variable: ObjectName::from(vec!["TIMEZONE".into()]),
13699                    values: self.parse_set_values(false)?,
13700                }
13701                .into());
13702            } else {
13703                // A shorthand alias for SET TIME ZONE that doesn't require
13704                // the assignment operator. It is originally PostgreSQL-specific,
13705                // but we allow it for all dialects
13706                return Ok(Set::SetTimeZone {
13707                    local: scope == Some(ContextModifier::Local),
13708                    value: self.parse_expr()?,
13709                }
13710                .into());
13711            }
13712        } else if self.dialect.supports_set_names() && self.parse_keyword(Keyword::NAMES) {
13713            if self.parse_keyword(Keyword::DEFAULT) {
13714                return Ok(Set::SetNamesDefault {}.into());
13715            }
13716            let charset_name = self.parse_identifier()?;
13717            let collation_name = if self.parse_one_of_keywords(&[Keyword::COLLATE]).is_some() {
13718                Some(self.parse_literal_string()?)
13719            } else {
13720                None
13721            };
13722
13723            return Ok(Set::SetNames {
13724                charset_name,
13725                collation_name,
13726            }
13727            .into());
13728        } else if self.parse_keyword(Keyword::CHARACTERISTICS) {
13729            self.expect_keywords(&[Keyword::AS, Keyword::TRANSACTION])?;
13730            return Ok(Set::SetTransaction {
13731                modes: self.parse_transaction_modes()?,
13732                snapshot: None,
13733                session: true,
13734            }
13735            .into());
13736        } else if self.parse_keyword(Keyword::TRANSACTION) {
13737            if self.parse_keyword(Keyword::SNAPSHOT) {
13738                let snapshot_id = self.parse_value()?.value;
13739                return Ok(Set::SetTransaction {
13740                    modes: vec![],
13741                    snapshot: Some(snapshot_id),
13742                    session: false,
13743                }
13744                .into());
13745            }
13746            return Ok(Set::SetTransaction {
13747                modes: self.parse_transaction_modes()?,
13748                snapshot: None,
13749                session: false,
13750            }
13751            .into());
13752        } else if self.parse_keyword(Keyword::AUTHORIZATION) {
13753            let auth_value = if self.parse_keyword(Keyword::DEFAULT) {
13754                SetSessionAuthorizationParamKind::Default
13755            } else {
13756                let value = self.parse_identifier()?;
13757                SetSessionAuthorizationParamKind::User(value)
13758            };
13759            return Ok(Set::SetSessionAuthorization(SetSessionAuthorizationParam {
13760                scope: scope.expect("SET ... AUTHORIZATION must have a scope"),
13761                kind: auth_value,
13762            })
13763            .into());
13764        }
13765
13766        if self.dialect.supports_comma_separated_set_assignments() {
13767            if scope.is_some() {
13768                self.prev_token();
13769            }
13770
13771            if let Some(assignments) = self
13772                .maybe_parse(|parser| parser.parse_comma_separated(Parser::parse_set_assignment))?
13773            {
13774                return if assignments.len() > 1 {
13775                    Ok(Set::MultipleAssignments { assignments }.into())
13776                } else {
13777                    let SetAssignment { scope, name, value } =
13778                        assignments.into_iter().next().ok_or_else(|| {
13779                            ParserError::ParserError("Expected at least one assignment".to_string())
13780                        })?;
13781
13782                    Ok(Set::SingleAssignment {
13783                        scope,
13784                        hivevar,
13785                        variable: name,
13786                        values: vec![value],
13787                    }
13788                    .into())
13789                };
13790            }
13791        }
13792
13793        let variables = if self.dialect.supports_parenthesized_set_variables()
13794            && self.consume_token(&Token::LParen)
13795        {
13796            let vars = OneOrManyWithParens::Many(
13797                self.parse_comma_separated(|parser: &mut Parser<'a>| parser.parse_identifier())?
13798                    .into_iter()
13799                    .map(|ident| ObjectName::from(vec![ident]))
13800                    .collect(),
13801            );
13802            self.expect_token(&Token::RParen)?;
13803            vars
13804        } else {
13805            OneOrManyWithParens::One(self.parse_object_name(false)?)
13806        };
13807
13808        if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
13809            let stmt = match variables {
13810                OneOrManyWithParens::One(var) => Set::SingleAssignment {
13811                    scope,
13812                    hivevar,
13813                    variable: var,
13814                    values: self.parse_set_values(false)?,
13815                },
13816                OneOrManyWithParens::Many(vars) => Set::ParenthesizedAssignments {
13817                    variables: vars,
13818                    values: self.parse_set_values(true)?,
13819                },
13820            };
13821
13822            return Ok(stmt.into());
13823        }
13824
13825        if self.dialect.supports_set_stmt_without_operator() {
13826            self.prev_token();
13827            return self.parse_set_session_params();
13828        };
13829
13830        self.expected("equals sign or TO", self.peek_token())
13831    }
13832
13833    pub fn parse_set_session_params(&mut self) -> Result<Statement, ParserError> {
13834        if self.parse_keyword(Keyword::STATISTICS) {
13835            let topic = match self.parse_one_of_keywords(&[
13836                Keyword::IO,
13837                Keyword::PROFILE,
13838                Keyword::TIME,
13839                Keyword::XML,
13840            ]) {
13841                Some(Keyword::IO) => SessionParamStatsTopic::IO,
13842                Some(Keyword::PROFILE) => SessionParamStatsTopic::Profile,
13843                Some(Keyword::TIME) => SessionParamStatsTopic::Time,
13844                Some(Keyword::XML) => SessionParamStatsTopic::Xml,
13845                _ => return self.expected("IO, PROFILE, TIME or XML", self.peek_token()),
13846            };
13847            let value = self.parse_session_param_value()?;
13848            Ok(
13849                Set::SetSessionParam(SetSessionParamKind::Statistics(SetSessionParamStatistics {
13850                    topic,
13851                    value,
13852                }))
13853                .into(),
13854            )
13855        } else if self.parse_keyword(Keyword::IDENTITY_INSERT) {
13856            let obj = self.parse_object_name(false)?;
13857            let value = self.parse_session_param_value()?;
13858            Ok(Set::SetSessionParam(SetSessionParamKind::IdentityInsert(
13859                SetSessionParamIdentityInsert { obj, value },
13860            ))
13861            .into())
13862        } else if self.parse_keyword(Keyword::OFFSETS) {
13863            let keywords = self.parse_comma_separated(|parser| {
13864                let next_token = parser.next_token();
13865                match &next_token.token {
13866                    Token::Word(w) => Ok(w.to_string()),
13867                    _ => parser.expected("SQL keyword", next_token),
13868                }
13869            })?;
13870            let value = self.parse_session_param_value()?;
13871            Ok(
13872                Set::SetSessionParam(SetSessionParamKind::Offsets(SetSessionParamOffsets {
13873                    keywords,
13874                    value,
13875                }))
13876                .into(),
13877            )
13878        } else {
13879            let names = self.parse_comma_separated(|parser| {
13880                let next_token = parser.next_token();
13881                match next_token.token {
13882                    Token::Word(w) => Ok(w.to_string()),
13883                    _ => parser.expected("Session param name", next_token),
13884                }
13885            })?;
13886            let value = self.parse_expr()?.to_string();
13887            Ok(
13888                Set::SetSessionParam(SetSessionParamKind::Generic(SetSessionParamGeneric {
13889                    names,
13890                    value,
13891                }))
13892                .into(),
13893            )
13894        }
13895    }
13896
13897    fn parse_session_param_value(&mut self) -> Result<SessionParamValue, ParserError> {
13898        if self.parse_keyword(Keyword::ON) {
13899            Ok(SessionParamValue::On)
13900        } else if self.parse_keyword(Keyword::OFF) {
13901            Ok(SessionParamValue::Off)
13902        } else {
13903            self.expected("ON or OFF", self.peek_token())
13904        }
13905    }
13906
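    /// Parse the remainder of a `SHOW ...` statement (the `SHOW` keyword has
    /// already been consumed), dispatching on the object kind that follows.
    ///
    /// Illustrative examples (dialect support varies; names are placeholders):
    ///
    /// ```sql
    /// SHOW TABLES LIKE 'orders%'
    /// SHOW FULL COLUMNS FROM orders
    /// SHOW VARIABLES LIKE 'sql_mode'
    /// ```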
13907    pub fn parse_show(&mut self) -> Result<Statement, ParserError> {
13908        let terse = self.parse_keyword(Keyword::TERSE);
13909        let extended = self.parse_keyword(Keyword::EXTENDED);
13910        let full = self.parse_keyword(Keyword::FULL);
13911        let session = self.parse_keyword(Keyword::SESSION);
13912        let global = self.parse_keyword(Keyword::GLOBAL);
13913        let external = self.parse_keyword(Keyword::EXTERNAL);
13914        if self
13915            .parse_one_of_keywords(&[Keyword::COLUMNS, Keyword::FIELDS])
13916            .is_some()
13917        {
13918            Ok(self.parse_show_columns(extended, full)?)
13919        } else if self.parse_keyword(Keyword::TABLES) {
13920            Ok(self.parse_show_tables(terse, extended, full, external)?)
13921        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEWS]) {
13922            Ok(self.parse_show_views(terse, true)?)
13923        } else if self.parse_keyword(Keyword::VIEWS) {
13924            Ok(self.parse_show_views(terse, false)?)
13925        } else if self.parse_keyword(Keyword::FUNCTIONS) {
13926            Ok(self.parse_show_functions()?)
13927        } else if extended || full {
13928            Err(ParserError::ParserError(
13929                "EXTENDED/FULL are not supported with this type of SHOW query".to_string(),
13930            ))
13931        } else if self.parse_one_of_keywords(&[Keyword::CREATE]).is_some() {
13932            Ok(self.parse_show_create()?)
13933        } else if self.parse_keyword(Keyword::COLLATION) {
13934            Ok(self.parse_show_collation()?)
13935        } else if self.parse_keyword(Keyword::VARIABLES)
13936            && dialect_of!(self is MySqlDialect | GenericDialect)
13937        {
13938            Ok(Statement::ShowVariables {
13939                filter: self.parse_show_statement_filter()?,
13940                session,
13941                global,
13942            })
13943        } else if self.parse_keyword(Keyword::STATUS)
13944            && dialect_of!(self is MySqlDialect | GenericDialect)
13945        {
13946            Ok(Statement::ShowStatus {
13947                filter: self.parse_show_statement_filter()?,
13948                session,
13949                global,
13950            })
13951        } else if self.parse_keyword(Keyword::DATABASES) {
13952            self.parse_show_databases(terse)
13953        } else if self.parse_keyword(Keyword::SCHEMAS) {
13954            self.parse_show_schemas(terse)
13955        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
13956            self.parse_show_charset(false)
13957        } else if self.parse_keyword(Keyword::CHARSET) {
13958            self.parse_show_charset(true)
13959        } else {
13960            Ok(Statement::ShowVariable {
13961                variable: self.parse_identifiers()?,
13962            })
13963        }
13964    }
13965
13966    fn parse_show_charset(&mut self, is_shorthand: bool) -> Result<Statement, ParserError> {
13967        // The CHARACTER SET / CHARSET keyword(s) have already been consumed by the caller; only the optional filter remains
13968        Ok(Statement::ShowCharset(ShowCharset {
13969            is_shorthand,
13970            filter: self.parse_show_statement_filter()?,
13971        }))
13972    }
13973
13974    fn parse_show_databases(&mut self, terse: bool) -> Result<Statement, ParserError> {
13975        let history = self.parse_keyword(Keyword::HISTORY);
13976        let show_options = self.parse_show_stmt_options()?;
13977        Ok(Statement::ShowDatabases {
13978            terse,
13979            history,
13980            show_options,
13981        })
13982    }
13983
13984    fn parse_show_schemas(&mut self, terse: bool) -> Result<Statement, ParserError> {
13985        let history = self.parse_keyword(Keyword::HISTORY);
13986        let show_options = self.parse_show_stmt_options()?;
13987        Ok(Statement::ShowSchemas {
13988            terse,
13989            history,
13990            show_options,
13991        })
13992    }
13993
13994    pub fn parse_show_create(&mut self) -> Result<Statement, ParserError> {
13995        let obj_type = match self.expect_one_of_keywords(&[
13996            Keyword::TABLE,
13997            Keyword::TRIGGER,
13998            Keyword::FUNCTION,
13999            Keyword::PROCEDURE,
14000            Keyword::EVENT,
14001            Keyword::VIEW,
14002        ])? {
14003            Keyword::TABLE => Ok(ShowCreateObject::Table),
14004            Keyword::TRIGGER => Ok(ShowCreateObject::Trigger),
14005            Keyword::FUNCTION => Ok(ShowCreateObject::Function),
14006            Keyword::PROCEDURE => Ok(ShowCreateObject::Procedure),
14007            Keyword::EVENT => Ok(ShowCreateObject::Event),
14008            Keyword::VIEW => Ok(ShowCreateObject::View),
14009            keyword => Err(ParserError::ParserError(format!(
14010                "Unable to map keyword to ShowCreateObject: {keyword:?}"
14011            ))),
14012        }?;
14013
14014        let obj_name = self.parse_object_name(false)?;
14015
14016        Ok(Statement::ShowCreate { obj_type, obj_name })
14017    }
14018
14019    pub fn parse_show_columns(
14020        &mut self,
14021        extended: bool,
14022        full: bool,
14023    ) -> Result<Statement, ParserError> {
14024        let show_options = self.parse_show_stmt_options()?;
14025        Ok(Statement::ShowColumns {
14026            extended,
14027            full,
14028            show_options,
14029        })
14030    }
14031
14032    fn parse_show_tables(
14033        &mut self,
14034        terse: bool,
14035        extended: bool,
14036        full: bool,
14037        external: bool,
14038    ) -> Result<Statement, ParserError> {
14039        let history = !external && self.parse_keyword(Keyword::HISTORY);
14040        let show_options = self.parse_show_stmt_options()?;
14041        Ok(Statement::ShowTables {
14042            terse,
14043            history,
14044            extended,
14045            full,
14046            external,
14047            show_options,
14048        })
14049    }
14050
14051    fn parse_show_views(
14052        &mut self,
14053        terse: bool,
14054        materialized: bool,
14055    ) -> Result<Statement, ParserError> {
14056        let show_options = self.parse_show_stmt_options()?;
14057        Ok(Statement::ShowViews {
14058            materialized,
14059            terse,
14060            show_options,
14061        })
14062    }
14063
14064    pub fn parse_show_functions(&mut self) -> Result<Statement, ParserError> {
14065        let filter = self.parse_show_statement_filter()?;
14066        Ok(Statement::ShowFunctions { filter })
14067    }
14068
14069    pub fn parse_show_collation(&mut self) -> Result<Statement, ParserError> {
14070        let filter = self.parse_show_statement_filter()?;
14071        Ok(Statement::ShowCollation { filter })
14072    }
14073
14074    pub fn parse_show_statement_filter(
14075        &mut self,
14076    ) -> Result<Option<ShowStatementFilter>, ParserError> {
14077        if self.parse_keyword(Keyword::LIKE) {
14078            Ok(Some(ShowStatementFilter::Like(
14079                self.parse_literal_string()?,
14080            )))
14081        } else if self.parse_keyword(Keyword::ILIKE) {
14082            Ok(Some(ShowStatementFilter::ILike(
14083                self.parse_literal_string()?,
14084            )))
14085        } else if self.parse_keyword(Keyword::WHERE) {
14086            Ok(Some(ShowStatementFilter::Where(self.parse_expr()?)))
14087        } else {
14088            self.maybe_parse(|parser| -> Result<String, ParserError> {
14089                parser.parse_literal_string()
14090            })?
14091            .map_or(Ok(None), |filter| {
14092                Ok(Some(ShowStatementFilter::NoKeyword(filter)))
14093            })
14094        }
14095    }
14096
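    /// Parse the remainder of a `USE ...` statement (the `USE` keyword has
    /// already been consumed). Some dialects allow an object kind such as
    /// `DATABASE`, `SCHEMA`, `WAREHOUSE` or `ROLE` between `USE` and the name.
    ///
    /// Illustrative examples (names are placeholders):
    ///
    /// ```sql
    /// USE my_database
    /// USE WAREHOUSE my_wh
    /// USE SECONDARY ROLES ALL
    /// ```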
14097    pub fn parse_use(&mut self) -> Result<Statement, ParserError> {
14098        // Determine which keywords are recognized by the current dialect
14099        let parsed_keyword = if dialect_of!(self is HiveDialect) {
14100            // HiveDialect accepts the `USE DEFAULT;` statement without any database specified
14101            if self.parse_keyword(Keyword::DEFAULT) {
14102                return Ok(Statement::Use(Use::Default));
14103            }
14104            None // HiveDialect doesn't expect any other specific keyword after `USE`
14105        } else if dialect_of!(self is DatabricksDialect) {
14106            self.parse_one_of_keywords(&[Keyword::CATALOG, Keyword::DATABASE, Keyword::SCHEMA])
14107        } else if dialect_of!(self is SnowflakeDialect) {
14108            self.parse_one_of_keywords(&[
14109                Keyword::DATABASE,
14110                Keyword::SCHEMA,
14111                Keyword::WAREHOUSE,
14112                Keyword::ROLE,
14113                Keyword::SECONDARY,
14114            ])
14115        } else {
14116            None // No specific keywords for other dialects, including GenericDialect
14117        };
14118
14119        let result = if matches!(parsed_keyword, Some(Keyword::SECONDARY)) {
14120            self.parse_secondary_roles()?
14121        } else {
14122            let obj_name = self.parse_object_name(false)?;
14123            match parsed_keyword {
14124                Some(Keyword::CATALOG) => Use::Catalog(obj_name),
14125                Some(Keyword::DATABASE) => Use::Database(obj_name),
14126                Some(Keyword::SCHEMA) => Use::Schema(obj_name),
14127                Some(Keyword::WAREHOUSE) => Use::Warehouse(obj_name),
14128                Some(Keyword::ROLE) => Use::Role(obj_name),
14129                _ => Use::Object(obj_name),
14130            }
14131        };
14132
14133        Ok(Statement::Use(result))
14134    }
14135
14136    fn parse_secondary_roles(&mut self) -> Result<Use, ParserError> {
14137        self.expect_one_of_keywords(&[Keyword::ROLES, Keyword::ROLE])?;
14138        if self.parse_keyword(Keyword::NONE) {
14139            Ok(Use::SecondaryRoles(SecondaryRoles::None))
14140        } else if self.parse_keyword(Keyword::ALL) {
14141            Ok(Use::SecondaryRoles(SecondaryRoles::All))
14142        } else {
14143            let roles = self.parse_comma_separated(|parser| parser.parse_identifier())?;
14144            Ok(Use::SecondaryRoles(SecondaryRoles::List(roles)))
14145        }
14146    }
14147
14148    pub fn parse_table_and_joins(&mut self) -> Result<TableWithJoins, ParserError> {
14149        let relation = self.parse_table_factor()?;
14150        // Note that for keywords to be properly handled here, they need to be
14151        // added to `RESERVED_FOR_TABLE_ALIAS`, otherwise they may be parsed as
14152        // a table alias.
14153        let joins = self.parse_joins()?;
14154        Ok(TableWithJoins { relation, joins })
14155    }
14156
14157    fn parse_joins(&mut self) -> Result<Vec<Join>, ParserError> {
14158        let mut joins = vec![];
14159        loop {
14160            let global = self.parse_keyword(Keyword::GLOBAL);
14161            let join = if self.parse_keyword(Keyword::CROSS) {
14162                let join_operator = if self.parse_keyword(Keyword::JOIN) {
14163                    JoinOperator::CrossJoin(JoinConstraint::None)
14164                } else if self.parse_keyword(Keyword::APPLY) {
14165                    // MSSQL extension, similar to CROSS JOIN LATERAL
14166                    JoinOperator::CrossApply
14167                } else {
14168                    return self.expected("JOIN or APPLY after CROSS", self.peek_token());
14169                };
14170                let relation = self.parse_table_factor()?;
14171                let join_operator = if matches!(join_operator, JoinOperator::CrossJoin(_))
14172                    && self.dialect.supports_cross_join_constraint()
14173                {
14174                    let constraint = self.parse_join_constraint(false)?;
14175                    JoinOperator::CrossJoin(constraint)
14176                } else {
14177                    join_operator
14178                };
14179                Join {
14180                    relation,
14181                    global,
14182                    join_operator,
14183                }
14184            } else if self.parse_keyword(Keyword::OUTER) {
14185                // MSSQL extension, similar to LEFT JOIN LATERAL .. ON 1=1
14186                self.expect_keyword_is(Keyword::APPLY)?;
14187                Join {
14188                    relation: self.parse_table_factor()?,
14189                    global,
14190                    join_operator: JoinOperator::OuterApply,
14191                }
14192            } else if self.parse_keyword(Keyword::ASOF) {
14193                self.expect_keyword_is(Keyword::JOIN)?;
14194                let relation = self.parse_table_factor()?;
14195                self.expect_keyword_is(Keyword::MATCH_CONDITION)?;
14196                let match_condition = self.parse_parenthesized(Self::parse_expr)?;
14197                Join {
14198                    relation,
14199                    global,
14200                    join_operator: JoinOperator::AsOf {
14201                        match_condition,
14202                        constraint: self.parse_join_constraint(false)?,
14203                    },
14204                }
14205            } else {
14206                let natural = self.parse_keyword(Keyword::NATURAL);
14207                let peek_keyword = if let Token::Word(w) = self.peek_token().token {
14208                    w.keyword
14209                } else {
14210                    Keyword::NoKeyword
14211                };
14212
14213                let join_operator_type = match peek_keyword {
14214                    Keyword::INNER | Keyword::JOIN => {
14215                        let inner = self.parse_keyword(Keyword::INNER); // [ INNER ]
14216                        self.expect_keyword_is(Keyword::JOIN)?;
14217                        if inner {
14218                            JoinOperator::Inner
14219                        } else {
14220                            JoinOperator::Join
14221                        }
14222                    }
14223                    kw @ Keyword::LEFT | kw @ Keyword::RIGHT => {
14224                        let _ = self.next_token(); // consume LEFT/RIGHT
14225                        let is_left = kw == Keyword::LEFT;
14226                        let join_type = self.parse_one_of_keywords(&[
14227                            Keyword::OUTER,
14228                            Keyword::SEMI,
14229                            Keyword::ANTI,
14230                            Keyword::JOIN,
14231                        ]);
14232                        match join_type {
14233                            Some(Keyword::OUTER) => {
14234                                self.expect_keyword_is(Keyword::JOIN)?;
14235                                if is_left {
14236                                    JoinOperator::LeftOuter
14237                                } else {
14238                                    JoinOperator::RightOuter
14239                                }
14240                            }
14241                            Some(Keyword::SEMI) => {
14242                                self.expect_keyword_is(Keyword::JOIN)?;
14243                                if is_left {
14244                                    JoinOperator::LeftSemi
14245                                } else {
14246                                    JoinOperator::RightSemi
14247                                }
14248                            }
14249                            Some(Keyword::ANTI) => {
14250                                self.expect_keyword_is(Keyword::JOIN)?;
14251                                if is_left {
14252                                    JoinOperator::LeftAnti
14253                                } else {
14254                                    JoinOperator::RightAnti
14255                                }
14256                            }
14257                            Some(Keyword::JOIN) => {
14258                                if is_left {
14259                                    JoinOperator::Left
14260                                } else {
14261                                    JoinOperator::Right
14262                                }
14263                            }
14264                            _ => {
14265                                return Err(ParserError::ParserError(format!(
14266                                    "expected OUTER, SEMI, ANTI or JOIN after {kw:?}"
14267                                )))
14268                            }
14269                        }
14270                    }
14271                    Keyword::ANTI => {
14272                        let _ = self.next_token(); // consume ANTI
14273                        self.expect_keyword_is(Keyword::JOIN)?;
14274                        JoinOperator::Anti
14275                    }
14276                    Keyword::SEMI => {
14277                        let _ = self.next_token(); // consume SEMI
14278                        self.expect_keyword_is(Keyword::JOIN)?;
14279                        JoinOperator::Semi
14280                    }
14281                    Keyword::FULL => {
14282                        let _ = self.next_token(); // consume FULL
14283                        let _ = self.parse_keyword(Keyword::OUTER); // [ OUTER ]
14284                        self.expect_keyword_is(Keyword::JOIN)?;
14285                        JoinOperator::FullOuter
14286                    }
14287                    Keyword::OUTER => {
14288                        return self.expected("LEFT, RIGHT, or FULL", self.peek_token());
14289                    }
14290                    Keyword::STRAIGHT_JOIN => {
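                        // MySQL's STRAIGHT_JOIN: like JOIN, but joins the tables in the order listed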
14291                        let _ = self.next_token(); // consume STRAIGHT_JOIN
14292                        JoinOperator::StraightJoin
14293                    }
14294                    _ if natural => {
14295                        return self.expected("a join type after NATURAL", self.peek_token());
14296                    }
14297                    _ => break,
14298                };
14299                let mut relation = self.parse_table_factor()?;
14300
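                // If the dialect does not chain joins left-associatively without
                // parentheses, join keywords directly following this relation are
                // grouped with it into a nested join, i.e. `a JOIN b JOIN c` is
                // parsed as `a JOIN (b JOIN c)` rather than `(a JOIN b) JOIN c`.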
14301                if !self
14302                    .dialect
14303                    .supports_left_associative_joins_without_parens()
14304                    && self.peek_parens_less_nested_join()
14305                {
14306                    let joins = self.parse_joins()?;
14307                    relation = TableFactor::NestedJoin {
14308                        table_with_joins: Box::new(TableWithJoins { relation, joins }),
14309                        alias: None,
14310                    };
14311                }
14312
14313                let join_constraint = self.parse_join_constraint(natural)?;
14314                Join {
14315                    relation,
14316                    global,
14317                    join_operator: join_operator_type(join_constraint),
14318                }
14319            };
14320            joins.push(join);
14321        }
14322        Ok(joins)
14323    }
14324
14325    fn peek_parens_less_nested_join(&self) -> bool {
14326        matches!(
14327            self.peek_token_ref().token,
14328            Token::Word(Word {
14329                keyword: Keyword::JOIN
14330                    | Keyword::INNER
14331                    | Keyword::LEFT
14332                    | Keyword::RIGHT
14333                    | Keyword::FULL,
14334                ..
14335            })
14336        )
14337    }
14338
14339    /// A table name or a parenthesized subquery, followed by optional `[AS] alias`
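    ///
    /// Example (a minimal sketch parsing a plain table reference with the
    /// `GenericDialect`; other inputs produce other [TableFactor] variants):
    /// ```
    /// # use sqlparser::parser::{Parser, ParserError};
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::ast::TableFactor;
    /// # fn main() -> Result<(), ParserError> {
    /// let mut parser = Parser::new(&GenericDialect).try_with_sql("my_table AS t")?;
    /// let factor = parser.parse_table_factor()?;
    /// assert!(matches!(factor, TableFactor::Table { .. }));
    /// # Ok(())
    /// # }
    /// ```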
14340    pub fn parse_table_factor(&mut self) -> Result<TableFactor, ParserError> {
14341        if self.parse_keyword(Keyword::LATERAL) {
14342            // LATERAL must always be followed by a subquery or table function.
14343            if self.consume_token(&Token::LParen) {
14344                self.parse_derived_table_factor(Lateral)
14345            } else {
14346                let name = self.parse_object_name(false)?;
14347                self.expect_token(&Token::LParen)?;
14348                let args = self.parse_optional_args()?;
14349                let alias = self.maybe_parse_table_alias()?;
14350                Ok(TableFactor::Function {
14351                    lateral: true,
14352                    name,
14353                    args,
14354                    alias,
14355                })
14356            }
14357        } else if self.parse_keyword(Keyword::TABLE) {
14358            // parse table function (SELECT * FROM TABLE (<expr>) [ AS <alias> ])
14359            self.expect_token(&Token::LParen)?;
14360            let expr = self.parse_expr()?;
14361            self.expect_token(&Token::RParen)?;
14362            let alias = self.maybe_parse_table_alias()?;
14363            Ok(TableFactor::TableFunction { expr, alias })
14364        } else if self.consume_token(&Token::LParen) {
14365            // A left paren introduces either a derived table (i.e., a subquery)
14366            // or a nested join. It's nearly impossible to determine ahead of
14367            // time which it is... so we just try to parse both.
14368            //
14369            // Here's an example that demonstrates the complexity:
14370            //                     /-------------------------------------------------------\
14371            //                     | /-----------------------------------\                 |
14372            //     SELECT * FROM ( ( ( (SELECT 1) UNION (SELECT 2) ) AS t1 NATURAL JOIN t2 ) )
14373            //                   ^ ^ ^ ^
14374            //                   | | | |
14375            //                   | | | |
14376            //                   | | | (4) belongs to a SetExpr::Query inside the subquery
14377            //                   | | (3) starts a derived table (subquery)
14378            //                   | (2) starts a nested join
14379            //                   (1) an additional set of parens around a nested join
14380            //
14381
14382            // If the recently consumed '(' starts a derived table, the call to
14383            // `parse_derived_table_factor` below will return success after parsing the
14384            // subquery, followed by the closing ')', and the alias of the derived table.
14385            // In the example above this is case (3).
14386            if let Some(mut table) =
14387                self.maybe_parse(|parser| parser.parse_derived_table_factor(NotLateral))?
14388            {
14389                while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT])
14390                {
14391                    table = match kw {
14392                        Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
14393                        Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
14394                        unexpected_keyword => return Err(ParserError::ParserError(
14395                            format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in pivot/unpivot"),
14396                        )),
14397                    }
14398                }
14399                return Ok(table);
14400            }
14401
14402            // A parsing error from `parse_derived_table_factor` indicates that the '(' we've
14403            // recently consumed does not start a derived table (cases 1, 2, or 4).
14404            // `maybe_parse` will ignore such an error and rewind to be after the opening '('.
14405
14406            // Inside the parentheses we expect to find an (A) table factor
14407            // followed by some joins or (B) another level of nesting.
14408            let mut table_and_joins = self.parse_table_and_joins()?;
14409
14410            #[allow(clippy::if_same_then_else)]
14411            if !table_and_joins.joins.is_empty() {
14412                self.expect_token(&Token::RParen)?;
14413                let alias = self.maybe_parse_table_alias()?;
14414                Ok(TableFactor::NestedJoin {
14415                    table_with_joins: Box::new(table_and_joins),
14416                    alias,
14417                }) // (A)
14418            } else if let TableFactor::NestedJoin {
14419                table_with_joins: _,
14420                alias: _,
14421            } = &table_and_joins.relation
14422            {
14423                // (B): `table_and_joins` (what we found inside the parentheses)
14424                // is a nested join `(foo JOIN bar)`, not followed by other joins.
14425                self.expect_token(&Token::RParen)?;
14426                let alias = self.maybe_parse_table_alias()?;
14427                Ok(TableFactor::NestedJoin {
14428                    table_with_joins: Box::new(table_and_joins),
14429                    alias,
14430                })
14431            } else if dialect_of!(self is SnowflakeDialect | GenericDialect) {
14432                // Dialect-specific behavior: Snowflake diverges from the
14433                // standard and from most of the other implementations by
14434                // allowing extra parentheses not only around a join (B), but
14435                // around lone table names (e.g. `FROM (mytable [AS alias])`)
14436                // and around derived tables (e.g. `FROM ((SELECT ...)
14437                // [AS alias])`) as well.
14438                self.expect_token(&Token::RParen)?;
14439
14440                if let Some(outer_alias) = self.maybe_parse_table_alias()? {
14441                    // Snowflake also allows specifying an alias *after* parens
14442                    // e.g. `FROM (mytable) AS alias`
14443                    match &mut table_and_joins.relation {
14444                        TableFactor::Derived { alias, .. }
14445                        | TableFactor::Table { alias, .. }
14446                        | TableFactor::Function { alias, .. }
14447                        | TableFactor::UNNEST { alias, .. }
14448                        | TableFactor::JsonTable { alias, .. }
14449                        | TableFactor::XmlTable { alias, .. }
14450                        | TableFactor::OpenJsonTable { alias, .. }
14451                        | TableFactor::TableFunction { alias, .. }
14452                        | TableFactor::Pivot { alias, .. }
14453                        | TableFactor::Unpivot { alias, .. }
14454                        | TableFactor::MatchRecognize { alias, .. }
14455                        | TableFactor::SemanticView { alias, .. }
14456                        | TableFactor::NestedJoin { alias, .. } => {
14457                            // but not `FROM (mytable AS alias1) AS alias2`.
14458                            if let Some(inner_alias) = alias {
14459                                return Err(ParserError::ParserError(format!(
14460                                    "duplicate alias {inner_alias}"
14461                                )));
14462                            }
14463                            // Act as if the alias was specified normally next
14464                            // to the table name: `(mytable) AS alias` ->
14465                            // `(mytable AS alias)`
14466                            alias.replace(outer_alias);
14467                        }
14468                    };
14469                }
14470                // Do not store the extra set of parens in the AST
14471                Ok(table_and_joins.relation)
14472            } else {
14473                // The SQL spec prohibits derived tables and bare tables from
14474                // appearing alone in parentheses (e.g. `FROM (mytable)`)
14475                self.expected("joined table", self.peek_token())
14476            }
14477        } else if dialect_of!(self is SnowflakeDialect | DatabricksDialect | GenericDialect)
14478            && matches!(
14479                self.peek_tokens(),
14480                [
14481                    Token::Word(Word {
14482                        keyword: Keyword::VALUES,
14483                        ..
14484                    }),
14485                    Token::LParen
14486                ]
14487            )
14488        {
14489            self.expect_keyword_is(Keyword::VALUES)?;
14490
14491            // Snowflake and Databricks allow syntax like below:
14492            // SELECT * FROM VALUES (1, 'a'), (2, 'b') AS t (col1, col2)
14493            // where there are no parentheses around the VALUES clause.
14494            let values = SetExpr::Values(self.parse_values(false, false)?);
14495            let alias = self.maybe_parse_table_alias()?;
14496            Ok(TableFactor::Derived {
14497                lateral: false,
14498                subquery: Box::new(Query {
14499                    with: None,
14500                    body: Box::new(values),
14501                    order_by: None,
14502                    limit_clause: None,
14503                    fetch: None,
14504                    locks: vec![],
14505                    for_clause: None,
14506                    settings: None,
14507                    format_clause: None,
14508                    pipe_operators: vec![],
14509                }),
14510                alias,
14511            })
14512        } else if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
14513            && self.parse_keyword(Keyword::UNNEST)
14514        {
14515            self.expect_token(&Token::LParen)?;
14516            let array_exprs = self.parse_comma_separated(Parser::parse_expr)?;
14517            self.expect_token(&Token::RParen)?;
14518
14519            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
            let alias = self.maybe_parse_table_alias()?;

            let with_offset = self
                .expect_keywords(&[Keyword::WITH, Keyword::OFFSET])
                .is_ok();

            let with_offset_alias = if with_offset {
                self.parse_optional_alias(keywords::RESERVED_FOR_COLUMN_ALIAS)?
            } else {
                None
            };
14540
14541            Ok(TableFactor::UNNEST {
14542                alias,
14543                array_exprs,
14544                with_offset,
14545                with_offset_alias,
14546                with_ordinality,
14547            })
14548        } else if self.parse_keyword_with_tokens(Keyword::JSON_TABLE, &[Token::LParen]) {
14549            let json_expr = self.parse_expr()?;
14550            self.expect_token(&Token::Comma)?;
14551            let json_path = self.parse_value()?.value;
14552            self.expect_keyword_is(Keyword::COLUMNS)?;
14553            self.expect_token(&Token::LParen)?;
14554            let columns = self.parse_comma_separated(Parser::parse_json_table_column_def)?;
14555            self.expect_token(&Token::RParen)?;
14556            self.expect_token(&Token::RParen)?;
14557            let alias = self.maybe_parse_table_alias()?;
14558            Ok(TableFactor::JsonTable {
14559                json_expr,
14560                json_path,
14561                columns,
14562                alias,
14563            })
14564        } else if self.parse_keyword_with_tokens(Keyword::OPENJSON, &[Token::LParen]) {
14565            self.prev_token();
14566            self.parse_open_json_table_factor()
14567        } else if self.parse_keyword_with_tokens(Keyword::XMLTABLE, &[Token::LParen]) {
14568            self.prev_token();
14569            self.parse_xml_table_factor()
14570        } else if self.dialect.supports_semantic_view_table_factor()
14571            && self.peek_keyword_with_tokens(Keyword::SEMANTIC_VIEW, &[Token::LParen])
14572        {
14573            self.parse_semantic_view_table_factor()
14574        } else {
14575            let name = self.parse_object_name(true)?;
14576
14577            let json_path = match self.peek_token().token {
14578                Token::LBracket if self.dialect.supports_partiql() => Some(self.parse_json_path()?),
14579                _ => None,
14580            };
14581
14582            let partitions: Vec<Ident> = if dialect_of!(self is MySqlDialect | GenericDialect)
14583                && self.parse_keyword(Keyword::PARTITION)
14584            {
14585                self.parse_parenthesized_identifiers()?
14586            } else {
14587                vec![]
14588            };
14589
14590            // Parse potential version qualifier
14591            let version = self.maybe_parse_table_version()?;
14592
14593            // Postgres, MSSQL, ClickHouse: table-valued functions:
14594            let args = if self.consume_token(&Token::LParen) {
14595                Some(self.parse_table_function_args()?)
14596            } else {
14597                None
14598            };
14599
14600            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
14601
14602            let mut sample = None;
14603            if self.dialect.supports_table_sample_before_alias() {
14604                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
14605                    sample = Some(TableSampleKind::BeforeTableAlias(parsed_sample));
14606                }
14607            }
14608
14609            let alias = self.maybe_parse_table_alias()?;
14610
14611            // MySQL-specific table hints:
14612            let index_hints = if self.dialect.supports_table_hints() {
14613                self.maybe_parse(|p| p.parse_table_index_hints())?
14614                    .unwrap_or(vec![])
14615            } else {
14616                vec![]
14617            };
14618
14619            // MSSQL-specific table hints:
14620            let mut with_hints = vec![];
14621            if self.parse_keyword(Keyword::WITH) {
14622                if self.consume_token(&Token::LParen) {
14623                    with_hints = self.parse_comma_separated(Parser::parse_expr)?;
14624                    self.expect_token(&Token::RParen)?;
14625                } else {
14626                    // rewind, as WITH may belong to the next statement's CTE
14627                    self.prev_token();
14628                }
14629            };
14630
14631            if !self.dialect.supports_table_sample_before_alias() {
14632                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
14633                    sample = Some(TableSampleKind::AfterTableAlias(parsed_sample));
14634                }
14635            }
14636
14637            let mut table = TableFactor::Table {
14638                name,
14639                alias,
14640                args,
14641                with_hints,
14642                version,
14643                partitions,
14644                with_ordinality,
14645                json_path,
14646                sample,
14647                index_hints,
14648            };
14649
14650            while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT]) {
14651                table = match kw {
14652                    Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
14653                    Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
14654                    unexpected_keyword => return Err(ParserError::ParserError(
14655                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in pivot/unpivot"),
14656                    )),
14657                }
14658            }
14659
14660            if self.dialect.supports_match_recognize()
14661                && self.parse_keyword(Keyword::MATCH_RECOGNIZE)
14662            {
14663                table = self.parse_match_recognize(table)?;
14664            }
14665
14666            Ok(table)
14667        }
14668    }
14669
14670    fn maybe_parse_table_sample(&mut self) -> Result<Option<Box<TableSample>>, ParserError> {
14671        let modifier = if self.parse_keyword(Keyword::TABLESAMPLE) {
14672            TableSampleModifier::TableSample
14673        } else if self.parse_keyword(Keyword::SAMPLE) {
14674            TableSampleModifier::Sample
14675        } else {
14676            return Ok(None);
14677        };
14678        self.parse_table_sample(modifier).map(Some)
14679    }
14680
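    /// Parses the body of a `TABLESAMPLE` / `SAMPLE` clause once the modifier
    /// keyword has been consumed, e.g. `BERNOULLI (10 PERCENT)`,
    /// `(BUCKET 4 OUT OF 16 ON id)`, or `SYSTEM (100) REPEATABLE (42)`.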
14681    fn parse_table_sample(
14682        &mut self,
14683        modifier: TableSampleModifier,
14684    ) -> Result<Box<TableSample>, ParserError> {
14685        let name = match self.parse_one_of_keywords(&[
14686            Keyword::BERNOULLI,
14687            Keyword::ROW,
14688            Keyword::SYSTEM,
14689            Keyword::BLOCK,
14690        ]) {
14691            Some(Keyword::BERNOULLI) => Some(TableSampleMethod::Bernoulli),
14692            Some(Keyword::ROW) => Some(TableSampleMethod::Row),
14693            Some(Keyword::SYSTEM) => Some(TableSampleMethod::System),
14694            Some(Keyword::BLOCK) => Some(TableSampleMethod::Block),
14695            _ => None,
14696        };
14697
14698        let parenthesized = self.consume_token(&Token::LParen);
14699
14700        let (quantity, bucket) = if parenthesized && self.parse_keyword(Keyword::BUCKET) {
14701            let selected_bucket = self.parse_number_value()?.value;
14702            self.expect_keywords(&[Keyword::OUT, Keyword::OF])?;
14703            let total = self.parse_number_value()?.value;
14704            let on = if self.parse_keyword(Keyword::ON) {
14705                Some(self.parse_expr()?)
14706            } else {
14707                None
14708            };
14709            (
14710                None,
14711                Some(TableSampleBucket {
14712                    bucket: selected_bucket,
14713                    total,
14714                    on,
14715                }),
14716            )
14717        } else {
14718            let value = match self.maybe_parse(|p| p.parse_expr())? {
14719                Some(num) => num,
14720                None => {
14721                    let next_token = self.next_token();
14722                    if let Token::Word(w) = next_token.token {
14723                        Expr::Value(Value::Placeholder(w.value).with_span(next_token.span))
14724                    } else {
14725                        return parser_err!(
14726                            "Expecting number or byte length e.g. 100M",
14727                            self.peek_token().span.start
14728                        );
14729                    }
14730                }
14731            };
14732            let unit = if self.parse_keyword(Keyword::ROWS) {
14733                Some(TableSampleUnit::Rows)
14734            } else if self.parse_keyword(Keyword::PERCENT) {
14735                Some(TableSampleUnit::Percent)
14736            } else {
14737                None
14738            };
14739            (
14740                Some(TableSampleQuantity {
14741                    parenthesized,
14742                    value,
14743                    unit,
14744                }),
14745                None,
14746            )
14747        };
14748        if parenthesized {
14749            self.expect_token(&Token::RParen)?;
14750        }
14751
14752        let seed = if self.parse_keyword(Keyword::REPEATABLE) {
14753            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Repeatable)?)
14754        } else if self.parse_keyword(Keyword::SEED) {
14755            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Seed)?)
14756        } else {
14757            None
14758        };
14759
14760        let offset = if self.parse_keyword(Keyword::OFFSET) {
14761            Some(self.parse_expr()?)
14762        } else {
14763            None
14764        };
14765
14766        Ok(Box::new(TableSample {
14767            modifier,
14768            name,
14769            quantity,
14770            seed,
14771            bucket,
14772            offset,
14773        }))
14774    }
14775
14776    fn parse_table_sample_seed(
14777        &mut self,
14778        modifier: TableSampleSeedModifier,
14779    ) -> Result<TableSampleSeed, ParserError> {
14780        self.expect_token(&Token::LParen)?;
14781        let value = self.parse_number_value()?.value;
14782        self.expect_token(&Token::RParen)?;
14783        Ok(TableSampleSeed { modifier, value })
14784    }
14785
14786    /// Parses the `OPENJSON( jsonExpression [ , path ] ) [ <with_clause> ]` clause,
14787    /// assuming the `OPENJSON` keyword was already consumed.
14788    fn parse_open_json_table_factor(&mut self) -> Result<TableFactor, ParserError> {
14789        self.expect_token(&Token::LParen)?;
14790        let json_expr = self.parse_expr()?;
14791        let json_path = if self.consume_token(&Token::Comma) {
14792            Some(self.parse_value()?.value)
14793        } else {
14794            None
14795        };
14796        self.expect_token(&Token::RParen)?;
14797        let columns = if self.parse_keyword(Keyword::WITH) {
14798            self.expect_token(&Token::LParen)?;
14799            let columns = self.parse_comma_separated(Parser::parse_openjson_table_column_def)?;
14800            self.expect_token(&Token::RParen)?;
14801            columns
14802        } else {
14803            Vec::new()
14804        };
14805        let alias = self.maybe_parse_table_alias()?;
14806        Ok(TableFactor::OpenJsonTable {
14807            json_expr,
14808            json_path,
14809            columns,
14810            alias,
14811        })
14812    }
14813
14814    fn parse_xml_table_factor(&mut self) -> Result<TableFactor, ParserError> {
14815        self.expect_token(&Token::LParen)?;
14816        let namespaces = if self.parse_keyword(Keyword::XMLNAMESPACES) {
14817            self.expect_token(&Token::LParen)?;
14818            let namespaces = self.parse_comma_separated(Parser::parse_xml_namespace_definition)?;
14819            self.expect_token(&Token::RParen)?;
14820            self.expect_token(&Token::Comma)?;
14821            namespaces
14822        } else {
14823            vec![]
14824        };
14825        let row_expression = self.parse_expr()?;
14826        let passing = self.parse_xml_passing_clause()?;
14827        self.expect_keyword_is(Keyword::COLUMNS)?;
14828        let columns = self.parse_comma_separated(Parser::parse_xml_table_column)?;
14829        self.expect_token(&Token::RParen)?;
14830        let alias = self.maybe_parse_table_alias()?;
14831        Ok(TableFactor::XmlTable {
14832            namespaces,
14833            row_expression,
14834            passing,
14835            columns,
14836            alias,
14837        })
14838    }
14839
14840    fn parse_xml_namespace_definition(&mut self) -> Result<XmlNamespaceDefinition, ParserError> {
14841        let uri = self.parse_expr()?;
14842        self.expect_keyword_is(Keyword::AS)?;
14843        let name = self.parse_identifier()?;
14844        Ok(XmlNamespaceDefinition { uri, name })
14845    }
14846
14847    fn parse_xml_table_column(&mut self) -> Result<XmlTableColumn, ParserError> {
14848        let name = self.parse_identifier()?;
14849
14850        let option = if self.parse_keyword(Keyword::FOR) {
14851            self.expect_keyword(Keyword::ORDINALITY)?;
14852            XmlTableColumnOption::ForOrdinality
14853        } else {
14854            let r#type = self.parse_data_type()?;
14855            let mut path = None;
14856            let mut default = None;
14857
14858            if self.parse_keyword(Keyword::PATH) {
14859                path = Some(self.parse_expr()?);
14860            }
14861
14862            if self.parse_keyword(Keyword::DEFAULT) {
14863                default = Some(self.parse_expr()?);
14864            }
14865
14866            let not_null = self.parse_keywords(&[Keyword::NOT, Keyword::NULL]);
14867            if !not_null {
14868                // NULL is the default but can be specified explicitly
14869                let _ = self.parse_keyword(Keyword::NULL);
14870            }
14871
14872            XmlTableColumnOption::NamedInfo {
14873                r#type,
14874                path,
14875                default,
14876                nullable: !not_null,
14877            }
14878        };
14879        Ok(XmlTableColumn { name, option })
14880    }
14881
14882    fn parse_xml_passing_clause(&mut self) -> Result<XmlPassingClause, ParserError> {
14883        let mut arguments = vec![];
14884        if self.parse_keyword(Keyword::PASSING) {
14885            loop {
14886                let by_value =
14887                    self.parse_keyword(Keyword::BY) && self.expect_keyword(Keyword::VALUE).is_ok();
14888                let expr = self.parse_expr()?;
14889                let alias = if self.parse_keyword(Keyword::AS) {
14890                    Some(self.parse_identifier()?)
14891                } else {
14892                    None
14893                };
14894                arguments.push(XmlPassingArgument {
14895                    expr,
14896                    alias,
14897                    by_value,
14898                });
14899                if !self.consume_token(&Token::Comma) {
14900                    break;
14901                }
14902            }
14903        }
14904        Ok(XmlPassingClause { arguments })
14905    }
14906
14907    /// Parse a [TableFactor::SemanticView]
14908    fn parse_semantic_view_table_factor(&mut self) -> Result<TableFactor, ParserError> {
14909        self.expect_keyword(Keyword::SEMANTIC_VIEW)?;
14910        self.expect_token(&Token::LParen)?;
14911
14912        let name = self.parse_object_name(true)?;
14913
14914        // Parse DIMENSIONS, METRICS, FACTS and WHERE clauses in flexible order
14915        let mut dimensions = Vec::new();
14916        let mut metrics = Vec::new();
14917        let mut facts = Vec::new();
14918        let mut where_clause = None;
14919
14920        while self.peek_token().token != Token::RParen {
14921            if self.parse_keyword(Keyword::DIMENSIONS) {
14922                if !dimensions.is_empty() {
14923                    return Err(ParserError::ParserError(
14924                        "DIMENSIONS clause can only be specified once".to_string(),
14925                    ));
14926                }
14927                dimensions = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
14928            } else if self.parse_keyword(Keyword::METRICS) {
14929                if !metrics.is_empty() {
14930                    return Err(ParserError::ParserError(
14931                        "METRICS clause can only be specified once".to_string(),
14932                    ));
14933                }
14934                metrics = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
14935            } else if self.parse_keyword(Keyword::FACTS) {
14936                if !facts.is_empty() {
14937                    return Err(ParserError::ParserError(
14938                        "FACTS clause can only be specified once".to_string(),
14939                    ));
14940                }
14941                facts = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
14942            } else if self.parse_keyword(Keyword::WHERE) {
14943                if where_clause.is_some() {
14944                    return Err(ParserError::ParserError(
14945                        "WHERE clause can only be specified once".to_string(),
14946                    ));
14947                }
14948                where_clause = Some(self.parse_expr()?);
14949            } else {
14950                return parser_err!(
14951                    format!(
14952                        "Expected one of DIMENSIONS, METRICS, FACTS or WHERE, got {}",
14953                        self.peek_token().token
14954                    ),
14955                    self.peek_token().span.start
14956                )?;
14957            }
14958        }
14959
14960        self.expect_token(&Token::RParen)?;
14961
14962        let alias = self.maybe_parse_table_alias()?;
14963
14964        Ok(TableFactor::SemanticView {
14965            name,
14966            dimensions,
14967            metrics,
14968            facts,
14969            where_clause,
14970            alias,
14971        })
14972    }
14973
14974    fn parse_match_recognize(&mut self, table: TableFactor) -> Result<TableFactor, ParserError> {
14975        self.expect_token(&Token::LParen)?;
14976
14977        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
14978            self.parse_comma_separated(Parser::parse_expr)?
14979        } else {
14980            vec![]
14981        };
14982
14983        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
14984            self.parse_comma_separated(Parser::parse_order_by_expr)?
14985        } else {
14986            vec![]
14987        };
14988
14989        let measures = if self.parse_keyword(Keyword::MEASURES) {
14990            self.parse_comma_separated(|p| {
14991                let expr = p.parse_expr()?;
14992                let _ = p.parse_keyword(Keyword::AS);
14993                let alias = p.parse_identifier()?;
14994                Ok(Measure { expr, alias })
14995            })?
14996        } else {
14997            vec![]
14998        };
14999
15000        let rows_per_match =
15001            if self.parse_keywords(&[Keyword::ONE, Keyword::ROW, Keyword::PER, Keyword::MATCH]) {
15002                Some(RowsPerMatch::OneRow)
15003            } else if self.parse_keywords(&[
15004                Keyword::ALL,
15005                Keyword::ROWS,
15006                Keyword::PER,
15007                Keyword::MATCH,
15008            ]) {
15009                Some(RowsPerMatch::AllRows(
15010                    if self.parse_keywords(&[Keyword::SHOW, Keyword::EMPTY, Keyword::MATCHES]) {
15011                        Some(EmptyMatchesMode::Show)
15012                    } else if self.parse_keywords(&[
15013                        Keyword::OMIT,
15014                        Keyword::EMPTY,
15015                        Keyword::MATCHES,
15016                    ]) {
15017                        Some(EmptyMatchesMode::Omit)
15018                    } else if self.parse_keywords(&[
15019                        Keyword::WITH,
15020                        Keyword::UNMATCHED,
15021                        Keyword::ROWS,
15022                    ]) {
15023                        Some(EmptyMatchesMode::WithUnmatched)
15024                    } else {
15025                        None
15026                    },
15027                ))
15028            } else {
15029                None
15030            };
15031
15032        let after_match_skip =
15033            if self.parse_keywords(&[Keyword::AFTER, Keyword::MATCH, Keyword::SKIP]) {
15034                if self.parse_keywords(&[Keyword::PAST, Keyword::LAST, Keyword::ROW]) {
15035                    Some(AfterMatchSkip::PastLastRow)
15036                } else if self.parse_keywords(&[Keyword::TO, Keyword::NEXT, Keyword::ROW]) {
15037                    Some(AfterMatchSkip::ToNextRow)
15038                } else if self.parse_keywords(&[Keyword::TO, Keyword::FIRST]) {
15039                    Some(AfterMatchSkip::ToFirst(self.parse_identifier()?))
15040                } else if self.parse_keywords(&[Keyword::TO, Keyword::LAST]) {
15041                    Some(AfterMatchSkip::ToLast(self.parse_identifier()?))
15042                } else {
15043                    let found = self.next_token();
15044                    return self.expected("after match skip option", found);
15045                }
15046            } else {
15047                None
15048            };
15049
15050        self.expect_keyword_is(Keyword::PATTERN)?;
15051        let pattern = self.parse_parenthesized(Self::parse_pattern)?;
15052
15053        self.expect_keyword_is(Keyword::DEFINE)?;
15054
15055        let symbols = self.parse_comma_separated(|p| {
15056            let symbol = p.parse_identifier()?;
15057            p.expect_keyword_is(Keyword::AS)?;
15058            let definition = p.parse_expr()?;
15059            Ok(SymbolDefinition { symbol, definition })
15060        })?;
15061
15062        self.expect_token(&Token::RParen)?;
15063
15064        let alias = self.maybe_parse_table_alias()?;
15065
15066        Ok(TableFactor::MatchRecognize {
15067            table: Box::new(table),
15068            partition_by,
15069            order_by,
15070            measures,
15071            rows_per_match,
15072            after_match_skip,
15073            pattern,
15074            symbols,
15075            alias,
15076        })
15077    }
15078
15079    fn parse_base_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
15080        match self.next_token().token {
15081            Token::Caret => Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::Start)),
15082            Token::Placeholder(s) if s == "$" => {
15083                Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::End))
15084            }
15085            Token::LBrace => {
15086                self.expect_token(&Token::Minus)?;
15087                let symbol = self.parse_identifier().map(MatchRecognizeSymbol::Named)?;
15088                self.expect_token(&Token::Minus)?;
15089                self.expect_token(&Token::RBrace)?;
15090                Ok(MatchRecognizePattern::Exclude(symbol))
15091            }
15092            Token::Word(Word {
15093                value,
15094                quote_style: None,
15095                ..
15096            }) if value == "PERMUTE" => {
15097                self.expect_token(&Token::LParen)?;
15098                let symbols = self.parse_comma_separated(|p| {
15099                    p.parse_identifier().map(MatchRecognizeSymbol::Named)
15100                })?;
15101                self.expect_token(&Token::RParen)?;
15102                Ok(MatchRecognizePattern::Permute(symbols))
15103            }
15104            Token::LParen => {
15105                let pattern = self.parse_pattern()?;
15106                self.expect_token(&Token::RParen)?;
15107                Ok(MatchRecognizePattern::Group(Box::new(pattern)))
15108            }
15109            _ => {
15110                self.prev_token();
15111                self.parse_identifier()
15112                    .map(MatchRecognizeSymbol::Named)
15113                    .map(MatchRecognizePattern::Symbol)
15114            }
15115        }
15116    }
15117
15118    fn parse_repetition_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
15119        let mut pattern = self.parse_base_pattern()?;
15120        loop {
15121            let token = self.next_token();
15122            let quantifier = match token.token {
15123                Token::Mul => RepetitionQuantifier::ZeroOrMore,
15124                Token::Plus => RepetitionQuantifier::OneOrMore,
15125                Token::Placeholder(s) if s == "?" => RepetitionQuantifier::AtMostOne,
15126                Token::LBrace => {
15127                    // quantifier is a range like {n} or {n,} or {,m} or {n,m}
15128                    let token = self.next_token();
15129                    match token.token {
15130                        Token::Comma => {
15131                            let next_token = self.next_token();
15132                            let Token::Number(n, _) = next_token.token else {
15133                                return self.expected("literal number", next_token);
15134                            };
15135                            self.expect_token(&Token::RBrace)?;
15136                            RepetitionQuantifier::AtMost(Self::parse(n, token.span.start)?)
15137                        }
15138                        Token::Number(n, _) if self.consume_token(&Token::Comma) => {
15139                            let next_token = self.next_token();
15140                            match next_token.token {
15141                                Token::Number(m, _) => {
15142                                    self.expect_token(&Token::RBrace)?;
15143                                    RepetitionQuantifier::Range(
15144                                        Self::parse(n, token.span.start)?,
15145                                        Self::parse(m, token.span.start)?,
15146                                    )
15147                                }
15148                                Token::RBrace => {
15149                                    RepetitionQuantifier::AtLeast(Self::parse(n, token.span.start)?)
15150                                }
15151                                _ => {
15152                                    return self.expected("} or upper bound", next_token);
15153                                }
15154                            }
15155                        }
15156                        Token::Number(n, _) => {
15157                            self.expect_token(&Token::RBrace)?;
15158                            RepetitionQuantifier::Exactly(Self::parse(n, token.span.start)?)
15159                        }
15160                        _ => return self.expected("quantifier range", token),
15161                    }
15162                }
15163                _ => {
15164                    self.prev_token();
15165                    break;
15166                }
15167            };
15168            pattern = MatchRecognizePattern::Repetition(Box::new(pattern), quantifier);
15169        }
15170        Ok(pattern)
15171    }
15172
15173    fn parse_concat_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
15174        let mut patterns = vec![self.parse_repetition_pattern()?];
15175        while !matches!(self.peek_token().token, Token::RParen | Token::Pipe) {
15176            patterns.push(self.parse_repetition_pattern()?);
15177        }
15178        match <[MatchRecognizePattern; 1]>::try_from(patterns) {
15179            Ok([pattern]) => Ok(pattern),
15180            Err(patterns) => Ok(MatchRecognizePattern::Concat(patterns)),
15181        }
15182    }
15183
15184    fn parse_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
15185        let pattern = self.parse_concat_pattern()?;
15186        if self.consume_token(&Token::Pipe) {
15187            match self.parse_pattern()? {
15188                // flatten nested alternations
15189                MatchRecognizePattern::Alternation(mut patterns) => {
15190                    patterns.insert(0, pattern);
15191                    Ok(MatchRecognizePattern::Alternation(patterns))
15192                }
15193                next => Ok(MatchRecognizePattern::Alternation(vec![pattern, next])),
15194            }
15195        } else {
15196            Ok(pattern)
15197        }
15198    }
15199
15200    /// Parses the table version specifier used to query historical data, e.g.
    /// `FOR SYSTEM_TIME AS OF <expr>` or an `AT(...)`/`BEFORE(...)` function call,
    /// for dialects that support timestamp versioning.
15201    pub fn maybe_parse_table_version(&mut self) -> Result<Option<TableVersion>, ParserError> {
15202        if self.dialect.supports_timestamp_versioning() {
15203            if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
15204            {
15205                let expr = self.parse_expr()?;
15206                return Ok(Some(TableVersion::ForSystemTimeAsOf(expr)));
15207            } else if self.peek_keyword(Keyword::AT) || self.peek_keyword(Keyword::BEFORE) {
15208                let func_name = self.parse_object_name(true)?;
15209                let func = self.parse_function(func_name)?;
15210                return Ok(Some(TableVersion::Function(func)));
15211            }
15212        }
15213        Ok(None)
15214    }
15215
15216    /// Parses MySQL's JSON_TABLE column definition.
15217    /// For example: `id INT EXISTS PATH '$' DEFAULT '0' ON EMPTY ERROR ON ERROR`
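    ///
    /// Example (a minimal sketch using the `GenericDialect`; a plain
    /// `<name> <type> PATH '<path>'` definition yields [JsonTableColumn::Named]):
    /// ```
    /// # use sqlparser::parser::{Parser, ParserError};
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::ast::JsonTableColumn;
    /// # fn main() -> Result<(), ParserError> {
    /// let mut parser = Parser::new(&GenericDialect).try_with_sql("id INT PATH '$.id'")?;
    /// let column = parser.parse_json_table_column_def()?;
    /// assert!(matches!(column, JsonTableColumn::Named(_)));
    /// # Ok(())
    /// # }
    /// ```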
15218    pub fn parse_json_table_column_def(&mut self) -> Result<JsonTableColumn, ParserError> {
15219        if self.parse_keyword(Keyword::NESTED) {
15220            let _has_path_keyword = self.parse_keyword(Keyword::PATH);
15221            let path = self.parse_value()?.value;
15222            self.expect_keyword_is(Keyword::COLUMNS)?;
15223            let columns = self.parse_parenthesized(|p| {
15224                p.parse_comma_separated(Self::parse_json_table_column_def)
15225            })?;
15226            return Ok(JsonTableColumn::Nested(JsonTableNestedColumn {
15227                path,
15228                columns,
15229            }));
15230        }
15231        let name = self.parse_identifier()?;
15232        if self.parse_keyword(Keyword::FOR) {
15233            self.expect_keyword_is(Keyword::ORDINALITY)?;
15234            return Ok(JsonTableColumn::ForOrdinality(name));
15235        }
15236        let r#type = self.parse_data_type()?;
15237        let exists = self.parse_keyword(Keyword::EXISTS);
15238        self.expect_keyword_is(Keyword::PATH)?;
15239        let path = self.parse_value()?.value;
15240        let mut on_empty = None;
15241        let mut on_error = None;
15242        while let Some(error_handling) = self.parse_json_table_column_error_handling()? {
15243            if self.parse_keyword(Keyword::EMPTY) {
15244                on_empty = Some(error_handling);
15245            } else {
15246                self.expect_keyword_is(Keyword::ERROR)?;
15247                on_error = Some(error_handling);
15248            }
15249        }
15250        Ok(JsonTableColumn::Named(JsonTableNamedColumn {
15251            name,
15252            r#type,
15253            path,
15254            exists,
15255            on_empty,
15256            on_error,
15257        }))
15258    }
15259
15260    /// Parses MSSQL's `OPENJSON WITH` column definition.
15261    ///
15262    /// ```sql
15263    /// colName type [ column_path ] [ AS JSON ]
15264    /// ```
15265    ///
15266    /// Reference: <https://learn.microsoft.com/en-us/sql/t-sql/functions/openjson-transact-sql?view=sql-server-ver16#syntax>
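    ///
    /// Example (a minimal sketch using the `GenericDialect`; the column path and
    /// the `AS JSON` flag are both optional):
    /// ```
    /// # use sqlparser::parser::{Parser, ParserError};
    /// # use sqlparser::dialect::GenericDialect;
    /// # fn main() -> Result<(), ParserError> {
    /// let mut parser = Parser::new(&GenericDialect).try_with_sql("id INT '$.Id'")?;
    /// let column = parser.parse_openjson_table_column_def()?;
    /// assert_eq!("id", column.name.value);
    /// assert!(!column.as_json);
    /// # Ok(())
    /// # }
    /// ```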
15267    pub fn parse_openjson_table_column_def(&mut self) -> Result<OpenJsonTableColumn, ParserError> {
15268        let name = self.parse_identifier()?;
15269        let r#type = self.parse_data_type()?;
15270        let path = if let Token::SingleQuotedString(path) = self.peek_token().token {
15271            self.next_token();
15272            Some(path)
15273        } else {
15274            None
15275        };
15276        let as_json = self.parse_keyword(Keyword::AS);
15277        if as_json {
15278            self.expect_keyword_is(Keyword::JSON)?;
15279        }
15280        Ok(OpenJsonTableColumn {
15281            name,
15282            r#type,
15283            path,
15284            as_json,
15285        })
15286    }
15287
15288    fn parse_json_table_column_error_handling(
15289        &mut self,
15290    ) -> Result<Option<JsonTableColumnErrorHandling>, ParserError> {
15291        let res = if self.parse_keyword(Keyword::NULL) {
15292            JsonTableColumnErrorHandling::Null
15293        } else if self.parse_keyword(Keyword::ERROR) {
15294            JsonTableColumnErrorHandling::Error
15295        } else if self.parse_keyword(Keyword::DEFAULT) {
15296            JsonTableColumnErrorHandling::Default(self.parse_value()?.value)
15297        } else {
15298            return Ok(None);
15299        };
15300        self.expect_keyword_is(Keyword::ON)?;
15301        Ok(Some(res))
15302    }
15303
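    /// Parses the remainder of a derived table factor (a parenthesized subquery),
    /// assuming the opening `(` (and the optional `LATERAL` keyword) has already
    /// been consumed; also consumes the closing `)` and an optional trailing alias.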
15304    pub fn parse_derived_table_factor(
15305        &mut self,
15306        lateral: IsLateral,
15307    ) -> Result<TableFactor, ParserError> {
15308        let subquery = self.parse_query()?;
15309        self.expect_token(&Token::RParen)?;
15310        let alias = self.maybe_parse_table_alias()?;
15311        Ok(TableFactor::Derived {
15312            lateral: match lateral {
15313                Lateral => true,
15314                NotLateral => false,
15315            },
15316            subquery,
15317            alias,
15318        })
15319    }
15320
15321    fn parse_aliased_function_call(&mut self) -> Result<ExprWithAlias, ParserError> {
15322        let function_name = match self.next_token().token {
15323            Token::Word(w) => Ok(w.value),
15324            _ => self.expected("a function identifier", self.peek_token()),
15325        }?;
15326        let expr = self.parse_function(ObjectName::from(vec![Ident::new(function_name)]))?;
15327        let alias = if self.parse_keyword(Keyword::AS) {
15328            Some(self.parse_identifier()?)
15329        } else {
15330            None
15331        };
15332
15333        Ok(ExprWithAlias { expr, alias })
15334    }

15335    /// Parses an expression with an optional alias
15336    ///
15337    /// Examples:
15338    ///
15339    /// ```sql
15340    /// SUM(price) AS total_price
15341    /// ```
15342    /// ```sql
15343    /// SUM(price)
15344    /// ```
15345    ///
15346    /// Example
15347    /// ```
15348    /// # use sqlparser::parser::{Parser, ParserError};
15349    /// # use sqlparser::dialect::GenericDialect;
15350    /// # fn main() ->Result<(), ParserError> {
15351    /// let sql = r#"SUM("a") as "b""#;
15352    /// let mut parser = Parser::new(&GenericDialect).try_with_sql(sql)?;
15353    /// let expr_with_alias = parser.parse_expr_with_alias()?;
15354    /// assert_eq!(Some("b".to_string()), expr_with_alias.alias.map(|x|x.value));
15355    /// # Ok(())
15356    /// # }
    /// ```
15357    pub fn parse_expr_with_alias(&mut self) -> Result<ExprWithAlias, ParserError> {
15358        let expr = self.parse_expr()?;
15359        let alias = if self.parse_keyword(Keyword::AS) {
15360            Some(self.parse_identifier()?)
15361        } else {
15362            None
15363        };
15364
15365        Ok(ExprWithAlias { expr, alias })
15366    }
15367
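    /// Parses the body of a `PIVOT(...)` clause, wrapping `table` in
    /// [TableFactor::Pivot]. Expects the `PIVOT` keyword to have already been
    /// consumed.
    ///
    /// Example (a minimal sketch; the clause is reached through
    /// [Parser::parse_table_factor], which consumes the `PIVOT` keyword first):
    /// ```
    /// # use sqlparser::parser::{Parser, ParserError};
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::ast::TableFactor;
    /// # fn main() -> Result<(), ParserError> {
    /// let sql = "monthly_sales AS s PIVOT(SUM(amount) FOR month IN ('JAN', 'FEB')) AS p";
    /// let mut parser = Parser::new(&GenericDialect).try_with_sql(sql)?;
    /// let factor = parser.parse_table_factor()?;
    /// assert!(matches!(factor, TableFactor::Pivot { .. }));
    /// # Ok(())
    /// # }
    /// ```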
15368    pub fn parse_pivot_table_factor(
15369        &mut self,
15370        table: TableFactor,
15371    ) -> Result<TableFactor, ParserError> {
15372        self.expect_token(&Token::LParen)?;
15373        let aggregate_functions = self.parse_comma_separated(Self::parse_aliased_function_call)?;
15374        self.expect_keyword_is(Keyword::FOR)?;
15375        let value_column = if self.peek_token_ref().token == Token::LParen {
15376            self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
15377                p.parse_subexpr(self.dialect.prec_value(Precedence::Between))
15378            })?
15379        } else {
15380            vec![self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?]
15381        };
15382        self.expect_keyword_is(Keyword::IN)?;
15383
15384        self.expect_token(&Token::LParen)?;
15385        let value_source = if self.parse_keyword(Keyword::ANY) {
15386            let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
15387                self.parse_comma_separated(Parser::parse_order_by_expr)?
15388            } else {
15389                vec![]
15390            };
15391            PivotValueSource::Any(order_by)
15392        } else if self.peek_sub_query() {
15393            PivotValueSource::Subquery(self.parse_query()?)
15394        } else {
15395            PivotValueSource::List(self.parse_comma_separated(Self::parse_expr_with_alias)?)
15396        };
15397        self.expect_token(&Token::RParen)?;
15398
15399        let default_on_null =
15400            if self.parse_keywords(&[Keyword::DEFAULT, Keyword::ON, Keyword::NULL]) {
15401                self.expect_token(&Token::LParen)?;
15402                let expr = self.parse_expr()?;
15403                self.expect_token(&Token::RParen)?;
15404                Some(expr)
15405            } else {
15406                None
15407            };
15408
15409        self.expect_token(&Token::RParen)?;
15410        let alias = self.maybe_parse_table_alias()?;
15411        Ok(TableFactor::Pivot {
15412            table: Box::new(table),
15413            aggregate_functions,
15414            value_column,
15415            value_source,
15416            default_on_null,
15417            alias,
15418        })
15419    }
15420
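    /// Parses the body of an `UNPIVOT(...)` clause, wrapping `table` in
    /// [TableFactor::Unpivot]. Expects the `UNPIVOT` keyword to have already been
    /// consumed.
    ///
    /// Example (a minimal sketch; as with `PIVOT`, the clause is reached through
    /// [Parser::parse_table_factor]):
    /// ```
    /// # use sqlparser::parser::{Parser, ParserError};
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::ast::TableFactor;
    /// # fn main() -> Result<(), ParserError> {
    /// let sql = "sales AS s UNPIVOT(amount FOR month IN (jan, feb)) AS u";
    /// let mut parser = Parser::new(&GenericDialect).try_with_sql(sql)?;
    /// let factor = parser.parse_table_factor()?;
    /// assert!(matches!(factor, TableFactor::Unpivot { .. }));
    /// # Ok(())
    /// # }
    /// ```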
15421    pub fn parse_unpivot_table_factor(
15422        &mut self,
15423        table: TableFactor,
15424    ) -> Result<TableFactor, ParserError> {
15425        let null_inclusion = if self.parse_keyword(Keyword::INCLUDE) {
15426            self.expect_keyword_is(Keyword::NULLS)?;
15427            Some(NullInclusion::IncludeNulls)
15428        } else if self.parse_keyword(Keyword::EXCLUDE) {
15429            self.expect_keyword_is(Keyword::NULLS)?;
15430            Some(NullInclusion::ExcludeNulls)
15431        } else {
15432            None
15433        };
15434        self.expect_token(&Token::LParen)?;
15435        let value = self.parse_expr()?;
15436        self.expect_keyword_is(Keyword::FOR)?;
15437        let name = self.parse_identifier()?;
15438        self.expect_keyword_is(Keyword::IN)?;
15439        let columns = self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
15440            p.parse_expr_with_alias()
15441        })?;
15442        self.expect_token(&Token::RParen)?;
15443        let alias = self.maybe_parse_table_alias()?;
15444        Ok(TableFactor::Unpivot {
15445            table: Box::new(table),
15446            value,
15447            null_inclusion,
15448            name,
15449            columns,
15450            alias,
15451        })
15452    }
15453
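    /// Parses the constraint that follows a joined table: `ON <expr>`,
    /// `USING (<columns>)`, [JoinConstraint::Natural] when `natural` is set,
    /// or [JoinConstraint::None] when no constraint is present.
    ///
    /// Example (a minimal sketch parsing only the constraint itself with the
    /// `GenericDialect`):
    /// ```
    /// # use sqlparser::parser::{Parser, ParserError};
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::ast::JoinConstraint;
    /// # fn main() -> Result<(), ParserError> {
    /// let mut parser = Parser::new(&GenericDialect).try_with_sql("ON t1.id = t2.id")?;
    /// let constraint = parser.parse_join_constraint(false)?;
    /// assert!(matches!(constraint, JoinConstraint::On(_)));
    /// # Ok(())
    /// # }
    /// ```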
15454    pub fn parse_join_constraint(&mut self, natural: bool) -> Result<JoinConstraint, ParserError> {
15455        if natural {
15456            Ok(JoinConstraint::Natural)
15457        } else if self.parse_keyword(Keyword::ON) {
15458            let constraint = self.parse_expr()?;
15459            Ok(JoinConstraint::On(constraint))
15460        } else if self.parse_keyword(Keyword::USING) {
15461            let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
15462            Ok(JoinConstraint::Using(columns))
15463        } else {
15464            Ok(JoinConstraint::None)
15465            //self.expected("ON, or USING after JOIN", self.peek_token())
15466        }
15467    }
15468
15469    /// Parse a GRANT statement.
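    ///
    /// Example (a minimal sketch; `parse_grant` is invoked after the `GRANT`
    /// keyword has been consumed, so the input below starts at the privilege list):
    /// ```
    /// # use sqlparser::parser::{Parser, ParserError};
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::ast::Statement;
    /// # fn main() -> Result<(), ParserError> {
    /// let sql = "SELECT, INSERT ON my_table TO alice";
    /// let mut parser = Parser::new(&GenericDialect).try_with_sql(sql)?;
    /// let statement = parser.parse_grant()?;
    /// assert!(matches!(statement, Statement::Grant { .. }));
    /// # Ok(())
    /// # }
    /// ```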
15470    pub fn parse_grant(&mut self) -> Result<Statement, ParserError> {
15471        let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
15472
15473        self.expect_keyword_is(Keyword::TO)?;
15474        let grantees = self.parse_grantees()?;
15475
15476        let with_grant_option =
15477            self.parse_keywords(&[Keyword::WITH, Keyword::GRANT, Keyword::OPTION]);
15478
15479        let current_grants =
15480            if self.parse_keywords(&[Keyword::COPY, Keyword::CURRENT, Keyword::GRANTS]) {
15481                Some(CurrentGrantsKind::CopyCurrentGrants)
15482            } else if self.parse_keywords(&[Keyword::REVOKE, Keyword::CURRENT, Keyword::GRANTS]) {
15483                Some(CurrentGrantsKind::RevokeCurrentGrants)
15484            } else {
15485                None
15486            };
15487
15488        let as_grantor = if self.parse_keywords(&[Keyword::AS]) {
15489            Some(self.parse_identifier()?)
15490        } else {
15491            None
15492        };
15493
15494        let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
15495            Some(self.parse_identifier()?)
15496        } else {
15497            None
15498        };
15499
15500        Ok(Statement::Grant {
15501            privileges,
15502            objects,
15503            grantees,
15504            with_grant_option,
15505            as_grantor,
15506            granted_by,
15507            current_grants,
15508        })
15509    }
15510
15511    fn parse_grantees(&mut self) -> Result<Vec<Grantee>, ParserError> {
15512        let mut values = vec![];
15513        let mut grantee_type = GranteesType::None;
15514        loop {
15515            let new_grantee_type = if self.parse_keyword(Keyword::ROLE) {
15516                GranteesType::Role
15517            } else if self.parse_keyword(Keyword::USER) {
15518                GranteesType::User
15519            } else if self.parse_keyword(Keyword::SHARE) {
15520                GranteesType::Share
15521            } else if self.parse_keyword(Keyword::GROUP) {
15522                GranteesType::Group
15523            } else if self.parse_keyword(Keyword::PUBLIC) {
15524                GranteesType::Public
15525            } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
15526                GranteesType::DatabaseRole
15527            } else if self.parse_keywords(&[Keyword::APPLICATION, Keyword::ROLE]) {
15528                GranteesType::ApplicationRole
15529            } else if self.parse_keyword(Keyword::APPLICATION) {
15530                GranteesType::Application
15531            } else {
15532                grantee_type.clone() // keep from previous iteration, if not specified
15533            };
15534
15535            if self
15536                .dialect
15537                .get_reserved_grantees_types()
15538                .contains(&new_grantee_type)
15539            {
15540                self.prev_token();
15541            } else {
15542                grantee_type = new_grantee_type;
15543            }
15544
15545            let grantee = if grantee_type == GranteesType::Public {
15546                Grantee {
15547                    grantee_type: grantee_type.clone(),
15548                    name: None,
15549                }
15550            } else {
15551                let mut name = self.parse_grantee_name()?;
15552                if self.consume_token(&Token::Colon) {
15553                    // Redshift supports namespace prefix for external users and groups:
15554                    // <Namespace>:<GroupName> or <Namespace>:<UserName>
15555                    // https://docs.aws.amazon.com/redshift/latest/mgmt/redshift-iam-access-control-native-idp.html
15556                    let ident = self.parse_identifier()?;
15557                    if let GranteeName::ObjectName(namespace) = name {
15558                        name = GranteeName::ObjectName(ObjectName::from(vec![Ident::new(
15559                            format!("{namespace}:{ident}"),
15560                        )]));
15561                    };
15562                }
15563                Grantee {
15564                    grantee_type: grantee_type.clone(),
15565                    name: Some(name),
15566                }
15567            };
15568
15569            values.push(grantee);
15570
15571            if !self.consume_token(&Token::Comma) {
15572                break;
15573            }
15574        }
15575
15576        Ok(values)
15577    }
15578
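    /// Parse the privileges and optional `ON <objects>` portion shared by
    /// `GRANT`, `DENY` and `REVOKE` statements, i.e. everything between the
    /// statement keyword and `TO`/`FROM`.
    ///
    /// For illustration only (names are placeholders; dialect support varies):
    ///
    /// ```sql
    /// GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA myschema TO myrole;
    /// ```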
15579    pub fn parse_grant_deny_revoke_privileges_objects(
15580        &mut self,
15581    ) -> Result<(Privileges, Option<GrantObjects>), ParserError> {
15582        let privileges = if self.parse_keyword(Keyword::ALL) {
15583            Privileges::All {
15584                with_privileges_keyword: self.parse_keyword(Keyword::PRIVILEGES),
15585            }
15586        } else {
15587            let actions = self.parse_actions_list()?;
15588            Privileges::Actions(actions)
15589        };
15590
15591        let objects = if self.parse_keyword(Keyword::ON) {
15592            if self.parse_keywords(&[Keyword::ALL, Keyword::TABLES, Keyword::IN, Keyword::SCHEMA]) {
15593                Some(GrantObjects::AllTablesInSchema {
15594                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15595                })
15596            } else if self.parse_keywords(&[
15597                Keyword::ALL,
15598                Keyword::EXTERNAL,
15599                Keyword::TABLES,
15600                Keyword::IN,
15601                Keyword::SCHEMA,
15602            ]) {
15603                Some(GrantObjects::AllExternalTablesInSchema {
15604                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15605                })
15606            } else if self.parse_keywords(&[
15607                Keyword::ALL,
15608                Keyword::VIEWS,
15609                Keyword::IN,
15610                Keyword::SCHEMA,
15611            ]) {
15612                Some(GrantObjects::AllViewsInSchema {
15613                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15614                })
15615            } else if self.parse_keywords(&[
15616                Keyword::ALL,
15617                Keyword::MATERIALIZED,
15618                Keyword::VIEWS,
15619                Keyword::IN,
15620                Keyword::SCHEMA,
15621            ]) {
15622                Some(GrantObjects::AllMaterializedViewsInSchema {
15623                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15624                })
15625            } else if self.parse_keywords(&[
15626                Keyword::ALL,
15627                Keyword::FUNCTIONS,
15628                Keyword::IN,
15629                Keyword::SCHEMA,
15630            ]) {
15631                Some(GrantObjects::AllFunctionsInSchema {
15632                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15633                })
15634            } else if self.parse_keywords(&[
15635                Keyword::FUTURE,
15636                Keyword::SCHEMAS,
15637                Keyword::IN,
15638                Keyword::DATABASE,
15639            ]) {
15640                Some(GrantObjects::FutureSchemasInDatabase {
15641                    databases: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15642                })
15643            } else if self.parse_keywords(&[
15644                Keyword::FUTURE,
15645                Keyword::TABLES,
15646                Keyword::IN,
15647                Keyword::SCHEMA,
15648            ]) {
15649                Some(GrantObjects::FutureTablesInSchema {
15650                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15651                })
15652            } else if self.parse_keywords(&[
15653                Keyword::FUTURE,
15654                Keyword::EXTERNAL,
15655                Keyword::TABLES,
15656                Keyword::IN,
15657                Keyword::SCHEMA,
15658            ]) {
15659                Some(GrantObjects::FutureExternalTablesInSchema {
15660                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15661                })
15662            } else if self.parse_keywords(&[
15663                Keyword::FUTURE,
15664                Keyword::VIEWS,
15665                Keyword::IN,
15666                Keyword::SCHEMA,
15667            ]) {
15668                Some(GrantObjects::FutureViewsInSchema {
15669                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15670                })
15671            } else if self.parse_keywords(&[
15672                Keyword::FUTURE,
15673                Keyword::MATERIALIZED,
15674                Keyword::VIEWS,
15675                Keyword::IN,
15676                Keyword::SCHEMA,
15677            ]) {
15678                Some(GrantObjects::FutureMaterializedViewsInSchema {
15679                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15680                })
15681            } else if self.parse_keywords(&[
15682                Keyword::ALL,
15683                Keyword::SEQUENCES,
15684                Keyword::IN,
15685                Keyword::SCHEMA,
15686            ]) {
15687                Some(GrantObjects::AllSequencesInSchema {
15688                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15689                })
15690            } else if self.parse_keywords(&[
15691                Keyword::FUTURE,
15692                Keyword::SEQUENCES,
15693                Keyword::IN,
15694                Keyword::SCHEMA,
15695            ]) {
15696                Some(GrantObjects::FutureSequencesInSchema {
15697                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15698                })
15699            } else if self.parse_keywords(&[Keyword::RESOURCE, Keyword::MONITOR]) {
15700                Some(GrantObjects::ResourceMonitors(
15701                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
15702                ))
15703            } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
15704                Some(GrantObjects::ComputePools(
15705                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
15706                ))
15707            } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
15708                Some(GrantObjects::FailoverGroup(
15709                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
15710                ))
15711            } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
15712                Some(GrantObjects::ReplicationGroup(
15713                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
15714                ))
15715            } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
15716                Some(GrantObjects::ExternalVolumes(
15717                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
15718                ))
15719            } else {
15720                let object_type = self.parse_one_of_keywords(&[
15721                    Keyword::SEQUENCE,
15722                    Keyword::DATABASE,
15723                    Keyword::SCHEMA,
15724                    Keyword::TABLE,
15725                    Keyword::VIEW,
15726                    Keyword::WAREHOUSE,
15727                    Keyword::INTEGRATION,
15731                    Keyword::USER,
15732                    Keyword::CONNECTION,
15733                    Keyword::PROCEDURE,
15734                    Keyword::FUNCTION,
15735                ]);
15736                let objects =
15737                    self.parse_comma_separated(|p| p.parse_object_name_inner(false, true));
15738                match object_type {
15739                    Some(Keyword::DATABASE) => Some(GrantObjects::Databases(objects?)),
15740                    Some(Keyword::SCHEMA) => Some(GrantObjects::Schemas(objects?)),
15741                    Some(Keyword::SEQUENCE) => Some(GrantObjects::Sequences(objects?)),
15742                    Some(Keyword::WAREHOUSE) => Some(GrantObjects::Warehouses(objects?)),
15743                    Some(Keyword::INTEGRATION) => Some(GrantObjects::Integrations(objects?)),
15744                    Some(Keyword::VIEW) => Some(GrantObjects::Views(objects?)),
15745                    Some(Keyword::USER) => Some(GrantObjects::Users(objects?)),
15746                    Some(Keyword::CONNECTION) => Some(GrantObjects::Connections(objects?)),
15747                    kw @ (Some(Keyword::PROCEDURE) | Some(Keyword::FUNCTION)) => {
15748                        if let Some(name) = objects?.first() {
15749                            self.parse_grant_procedure_or_function(name, &kw)?
15750                        } else {
15751                            self.expected("procedure or function name", self.peek_token())?
15752                        }
15753                    }
15754                    Some(Keyword::TABLE) | None => Some(GrantObjects::Tables(objects?)),
15755                    Some(unexpected_keyword) => return Err(ParserError::ParserError(
15756                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in grant objects"),
15757                    )),
15758                }
15759            }
15760        } else {
15761            None
15762        };
15763
15764        Ok((privileges, objects))
15765    }
15766
15767    fn parse_grant_procedure_or_function(
15768        &mut self,
15769        name: &ObjectName,
15770        kw: &Option<Keyword>,
15771    ) -> Result<Option<GrantObjects>, ParserError> {
15772        let arg_types = if self.consume_token(&Token::LParen) {
15773            let list = self.parse_comma_separated0(Self::parse_data_type, Token::RParen)?;
15774            self.expect_token(&Token::RParen)?;
15775            list
15776        } else {
15777            vec![]
15778        };
15779        match kw {
15780            Some(Keyword::PROCEDURE) => Ok(Some(GrantObjects::Procedure {
15781                name: name.clone(),
15782                arg_types,
15783            })),
15784            Some(Keyword::FUNCTION) => Ok(Some(GrantObjects::Function {
15785                name: name.clone(),
15786                arg_types,
15787            })),
15788            _ => self.expected("procedure or function keywords", self.peek_token())?,
15789        }
15790    }
15791
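    /// Parse a single privilege (action) from a `GRANT`/`REVOKE` privilege list.
    ///
    /// For illustration only (column names are placeholders; many privileges
    /// are dialect specific), this parses the `SELECT (col1, col2)` part of:
    ///
    /// ```sql
    /// GRANT SELECT (col1, col2) ON mytable TO myuser;
    /// ```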
15792    pub fn parse_grant_permission(&mut self) -> Result<Action, ParserError> {
15793        fn parse_columns(parser: &mut Parser) -> Result<Option<Vec<Ident>>, ParserError> {
15794            let columns = parser.parse_parenthesized_column_list(Optional, false)?;
15795            if columns.is_empty() {
15796                Ok(None)
15797            } else {
15798                Ok(Some(columns))
15799            }
15800        }
15801
15802        // Multi-word privileges
15803        if self.parse_keywords(&[Keyword::IMPORTED, Keyword::PRIVILEGES]) {
15804            Ok(Action::ImportedPrivileges)
15805        } else if self.parse_keywords(&[Keyword::ADD, Keyword::SEARCH, Keyword::OPTIMIZATION]) {
15806            Ok(Action::AddSearchOptimization)
15807        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::LISTING]) {
15808            Ok(Action::AttachListing)
15809        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::POLICY]) {
15810            Ok(Action::AttachPolicy)
15811        } else if self.parse_keywords(&[Keyword::BIND, Keyword::SERVICE, Keyword::ENDPOINT]) {
15812            Ok(Action::BindServiceEndpoint)
15813        } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
15814            let role = self.parse_object_name(false)?;
15815            Ok(Action::DatabaseRole { role })
15816        } else if self.parse_keywords(&[Keyword::EVOLVE, Keyword::SCHEMA]) {
15817            Ok(Action::EvolveSchema)
15818        } else if self.parse_keywords(&[Keyword::IMPORT, Keyword::SHARE]) {
15819            Ok(Action::ImportShare)
15820        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::VERSIONS]) {
15821            Ok(Action::ManageVersions)
15822        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::RELEASES]) {
15823            Ok(Action::ManageReleases)
15824        } else if self.parse_keywords(&[Keyword::OVERRIDE, Keyword::SHARE, Keyword::RESTRICTIONS]) {
15825            Ok(Action::OverrideShareRestrictions)
15826        } else if self.parse_keywords(&[
15827            Keyword::PURCHASE,
15828            Keyword::DATA,
15829            Keyword::EXCHANGE,
15830            Keyword::LISTING,
15831        ]) {
15832            Ok(Action::PurchaseDataExchangeListing)
15833        } else if self.parse_keywords(&[Keyword::RESOLVE, Keyword::ALL]) {
15834            Ok(Action::ResolveAll)
15835        } else if self.parse_keywords(&[Keyword::READ, Keyword::SESSION]) {
15836            Ok(Action::ReadSession)
15837
15838        // Single-word privileges
15839        } else if self.parse_keyword(Keyword::APPLY) {
15840            let apply_type = self.parse_action_apply_type()?;
15841            Ok(Action::Apply { apply_type })
15842        } else if self.parse_keyword(Keyword::APPLYBUDGET) {
15843            Ok(Action::ApplyBudget)
15844        } else if self.parse_keyword(Keyword::AUDIT) {
15845            Ok(Action::Audit)
15846        } else if self.parse_keyword(Keyword::CONNECT) {
15847            Ok(Action::Connect)
15848        } else if self.parse_keyword(Keyword::CREATE) {
15849            let obj_type = self.maybe_parse_action_create_object_type();
15850            Ok(Action::Create { obj_type })
15851        } else if self.parse_keyword(Keyword::DELETE) {
15852            Ok(Action::Delete)
15853        } else if self.parse_keyword(Keyword::EXEC) {
15854            let obj_type = self.maybe_parse_action_execute_obj_type();
15855            Ok(Action::Exec { obj_type })
15856        } else if self.parse_keyword(Keyword::EXECUTE) {
15857            let obj_type = self.maybe_parse_action_execute_obj_type();
15858            Ok(Action::Execute { obj_type })
15859        } else if self.parse_keyword(Keyword::FAILOVER) {
15860            Ok(Action::Failover)
15861        } else if self.parse_keyword(Keyword::INSERT) {
15862            Ok(Action::Insert {
15863                columns: parse_columns(self)?,
15864            })
15865        } else if self.parse_keyword(Keyword::MANAGE) {
15866            let manage_type = self.parse_action_manage_type()?;
15867            Ok(Action::Manage { manage_type })
15868        } else if self.parse_keyword(Keyword::MODIFY) {
15869            let modify_type = self.parse_action_modify_type();
15870            Ok(Action::Modify { modify_type })
15871        } else if self.parse_keyword(Keyword::MONITOR) {
15872            let monitor_type = self.parse_action_monitor_type();
15873            Ok(Action::Monitor { monitor_type })
15874        } else if self.parse_keyword(Keyword::OPERATE) {
15875            Ok(Action::Operate)
15876        } else if self.parse_keyword(Keyword::REFERENCES) {
15877            Ok(Action::References {
15878                columns: parse_columns(self)?,
15879            })
15880        } else if self.parse_keyword(Keyword::READ) {
15881            Ok(Action::Read)
15882        } else if self.parse_keyword(Keyword::REPLICATE) {
15883            Ok(Action::Replicate)
15884        } else if self.parse_keyword(Keyword::ROLE) {
15885            let role = self.parse_object_name(false)?;
15886            Ok(Action::Role { role })
15887        } else if self.parse_keyword(Keyword::SELECT) {
15888            Ok(Action::Select {
15889                columns: parse_columns(self)?,
15890            })
15891        } else if self.parse_keyword(Keyword::TEMPORARY) {
15892            Ok(Action::Temporary)
15893        } else if self.parse_keyword(Keyword::TRIGGER) {
15894            Ok(Action::Trigger)
15895        } else if self.parse_keyword(Keyword::TRUNCATE) {
15896            Ok(Action::Truncate)
15897        } else if self.parse_keyword(Keyword::UPDATE) {
15898            Ok(Action::Update {
15899                columns: parse_columns(self)?,
15900            })
15901        } else if self.parse_keyword(Keyword::USAGE) {
15902            Ok(Action::Usage)
15903        } else if self.parse_keyword(Keyword::OWNERSHIP) {
15904            Ok(Action::Ownership)
15905        } else if self.parse_keyword(Keyword::DROP) {
15906            Ok(Action::Drop)
15907        } else {
15908            self.expected("a privilege keyword", self.peek_token())?
15909        }
15910    }
15911
15912    fn maybe_parse_action_create_object_type(&mut self) -> Option<ActionCreateObjectType> {
15913        // Multi-word object types
15914        if self.parse_keywords(&[Keyword::APPLICATION, Keyword::PACKAGE]) {
15915            Some(ActionCreateObjectType::ApplicationPackage)
15916        } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
15917            Some(ActionCreateObjectType::ComputePool)
15918        } else if self.parse_keywords(&[Keyword::DATA, Keyword::EXCHANGE, Keyword::LISTING]) {
15919            Some(ActionCreateObjectType::DataExchangeListing)
15920        } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
15921            Some(ActionCreateObjectType::ExternalVolume)
15922        } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
15923            Some(ActionCreateObjectType::FailoverGroup)
15924        } else if self.parse_keywords(&[Keyword::NETWORK, Keyword::POLICY]) {
15925            Some(ActionCreateObjectType::NetworkPolicy)
15926        } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::LISTING]) {
15927            Some(ActionCreateObjectType::OrganiationListing)
15928        } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
15929            Some(ActionCreateObjectType::ReplicationGroup)
15930        }
15931        // Single-word object types
15932        else if self.parse_keyword(Keyword::ACCOUNT) {
15933            Some(ActionCreateObjectType::Account)
15934        } else if self.parse_keyword(Keyword::APPLICATION) {
15935            Some(ActionCreateObjectType::Application)
15936        } else if self.parse_keyword(Keyword::DATABASE) {
15937            Some(ActionCreateObjectType::Database)
15938        } else if self.parse_keyword(Keyword::INTEGRATION) {
15939            Some(ActionCreateObjectType::Integration)
15940        } else if self.parse_keyword(Keyword::ROLE) {
15941            Some(ActionCreateObjectType::Role)
15942        } else if self.parse_keyword(Keyword::SCHEMA) {
15943            Some(ActionCreateObjectType::Schema)
15944        } else if self.parse_keyword(Keyword::SHARE) {
15945            Some(ActionCreateObjectType::Share)
15946        } else if self.parse_keyword(Keyword::USER) {
15947            Some(ActionCreateObjectType::User)
15948        } else if self.parse_keyword(Keyword::WAREHOUSE) {
15949            Some(ActionCreateObjectType::Warehouse)
15950        } else {
15951            None
15952        }
15953    }
15954
15955    fn parse_action_apply_type(&mut self) -> Result<ActionApplyType, ParserError> {
15956        if self.parse_keywords(&[Keyword::AGGREGATION, Keyword::POLICY]) {
15957            Ok(ActionApplyType::AggregationPolicy)
15958        } else if self.parse_keywords(&[Keyword::AUTHENTICATION, Keyword::POLICY]) {
15959            Ok(ActionApplyType::AuthenticationPolicy)
15960        } else if self.parse_keywords(&[Keyword::JOIN, Keyword::POLICY]) {
15961            Ok(ActionApplyType::JoinPolicy)
15962        } else if self.parse_keywords(&[Keyword::MASKING, Keyword::POLICY]) {
15963            Ok(ActionApplyType::MaskingPolicy)
15964        } else if self.parse_keywords(&[Keyword::PACKAGES, Keyword::POLICY]) {
15965            Ok(ActionApplyType::PackagesPolicy)
15966        } else if self.parse_keywords(&[Keyword::PASSWORD, Keyword::POLICY]) {
15967            Ok(ActionApplyType::PasswordPolicy)
15968        } else if self.parse_keywords(&[Keyword::PROJECTION, Keyword::POLICY]) {
15969            Ok(ActionApplyType::ProjectionPolicy)
15970        } else if self.parse_keywords(&[Keyword::ROW, Keyword::ACCESS, Keyword::POLICY]) {
15971            Ok(ActionApplyType::RowAccessPolicy)
15972        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::POLICY]) {
15973            Ok(ActionApplyType::SessionPolicy)
15974        } else if self.parse_keyword(Keyword::TAG) {
15975            Ok(ActionApplyType::Tag)
15976        } else {
15977            self.expected("GRANT APPLY type", self.peek_token())
15978        }
15979    }
15980
15981    fn maybe_parse_action_execute_obj_type(&mut self) -> Option<ActionExecuteObjectType> {
15982        if self.parse_keywords(&[Keyword::DATA, Keyword::METRIC, Keyword::FUNCTION]) {
15983            Some(ActionExecuteObjectType::DataMetricFunction)
15984        } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::ALERT]) {
15985            Some(ActionExecuteObjectType::ManagedAlert)
15986        } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::TASK]) {
15987            Some(ActionExecuteObjectType::ManagedTask)
15988        } else if self.parse_keyword(Keyword::ALERT) {
15989            Some(ActionExecuteObjectType::Alert)
15990        } else if self.parse_keyword(Keyword::TASK) {
15991            Some(ActionExecuteObjectType::Task)
15992        } else {
15993            None
15994        }
15995    }
15996
15997    fn parse_action_manage_type(&mut self) -> Result<ActionManageType, ParserError> {
15998        if self.parse_keywords(&[Keyword::ACCOUNT, Keyword::SUPPORT, Keyword::CASES]) {
15999            Ok(ActionManageType::AccountSupportCases)
16000        } else if self.parse_keywords(&[Keyword::EVENT, Keyword::SHARING]) {
16001            Ok(ActionManageType::EventSharing)
16002        } else if self.parse_keywords(&[Keyword::LISTING, Keyword::AUTO, Keyword::FULFILLMENT]) {
16003            Ok(ActionManageType::ListingAutoFulfillment)
16004        } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::SUPPORT, Keyword::CASES]) {
16005            Ok(ActionManageType::OrganizationSupportCases)
16006        } else if self.parse_keywords(&[Keyword::USER, Keyword::SUPPORT, Keyword::CASES]) {
16007            Ok(ActionManageType::UserSupportCases)
16008        } else if self.parse_keyword(Keyword::GRANTS) {
16009            Ok(ActionManageType::Grants)
16010        } else if self.parse_keyword(Keyword::WAREHOUSES) {
16011            Ok(ActionManageType::Warehouses)
16012        } else {
16013            self.expected("GRANT MANAGE type", self.peek_token())
16014        }
16015    }
16016
16017    fn parse_action_modify_type(&mut self) -> Option<ActionModifyType> {
16018        if self.parse_keywords(&[Keyword::LOG, Keyword::LEVEL]) {
16019            Some(ActionModifyType::LogLevel)
16020        } else if self.parse_keywords(&[Keyword::TRACE, Keyword::LEVEL]) {
16021            Some(ActionModifyType::TraceLevel)
16022        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::LOG, Keyword::LEVEL]) {
16023            Some(ActionModifyType::SessionLogLevel)
16024        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::TRACE, Keyword::LEVEL]) {
16025            Some(ActionModifyType::SessionTraceLevel)
16026        } else {
16027            None
16028        }
16029    }
16030
16031    fn parse_action_monitor_type(&mut self) -> Option<ActionMonitorType> {
16032        if self.parse_keyword(Keyword::EXECUTION) {
16033            Some(ActionMonitorType::Execution)
16034        } else if self.parse_keyword(Keyword::SECURITY) {
16035            Some(ActionMonitorType::Security)
16036        } else if self.parse_keyword(Keyword::USAGE) {
16037            Some(ActionMonitorType::Usage)
16038        } else {
16039            None
16040        }
16041    }
16042
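    /// Parse the name of a grantee: a plain object name or, for dialects that
    /// support it, a MySQL-style `user@host` pair.
    ///
    /// For illustration only (names are placeholders), this parses the
    /// `myuser@localhost` part of:
    ///
    /// ```sql
    /// GRANT SELECT ON db.tbl TO myuser@localhost;
    /// ```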
16043    pub fn parse_grantee_name(&mut self) -> Result<GranteeName, ParserError> {
16044        let mut name = self.parse_object_name(false)?;
16045        if self.dialect.supports_user_host_grantee()
16046            && name.0.len() == 1
16047            && name.0[0].as_ident().is_some()
16048            && self.consume_token(&Token::AtSign)
16049        {
16050            let user = name.0.pop().unwrap().as_ident().unwrap().clone();
16051            let host = self.parse_identifier()?;
16052            Ok(GranteeName::UserHost { user, host })
16053        } else {
16054            Ok(GranteeName::ObjectName(name))
16055        }
16056    }
16057
16058    /// Parse [`Statement::Deny`]
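    ///
    /// For illustration only (names are placeholders; `DENY` is primarily a
    /// T-SQL construct):
    ///
    /// ```sql
    /// DENY SELECT ON mytable TO myuser;
    /// ```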
16059    pub fn parse_deny(&mut self) -> Result<Statement, ParserError> {
16060        self.expect_keyword(Keyword::DENY)?;
16061
16062        let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
16063        let objects = match objects {
16064            Some(o) => o,
16065            None => {
16066                return parser_err!(
16067                    "DENY statements must specify an object",
16068                    self.peek_token().span.start
16069                )
16070            }
16071        };
16072
16073        self.expect_keyword_is(Keyword::TO)?;
16074        let grantees = self.parse_grantees()?;
16075        let cascade = self.parse_cascade_option();
16076        let granted_by = if self.parse_keywords(&[Keyword::AS]) {
16077            Some(self.parse_identifier()?)
16078        } else {
16079            None
16080        };
16081
16082        Ok(Statement::Deny(DenyStatement {
16083            privileges,
16084            objects,
16085            grantees,
16086            cascade,
16087            granted_by,
16088        }))
16089    }
16090
16091    /// Parse a REVOKE statement
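    ///
    /// For illustration only (names are placeholders; dialect support varies):
    ///
    /// ```sql
    /// REVOKE SELECT, INSERT ON mytable FROM myuser CASCADE;
    /// ```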
16092    pub fn parse_revoke(&mut self) -> Result<Statement, ParserError> {
16093        let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
16094
16095        self.expect_keyword_is(Keyword::FROM)?;
16096        let grantees = self.parse_grantees()?;
16097
16098        let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
16099            Some(self.parse_identifier()?)
16100        } else {
16101            None
16102        };
16103
16104        let cascade = self.parse_cascade_option();
16105
16106        Ok(Statement::Revoke {
16107            privileges,
16108            objects,
16109            grantees,
16110            granted_by,
16111            cascade,
16112        })
16113    }
16114
16115    /// Parse a REPLACE statement
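    ///
    /// For illustration only (MySQL syntax; names are placeholders):
    ///
    /// ```sql
    /// REPLACE INTO mytable (a, b) VALUES (1, 2);
    /// ```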
16116    pub fn parse_replace(
16117        &mut self,
16118        replace_token: TokenWithSpan,
16119    ) -> Result<Statement, ParserError> {
16120        if !dialect_of!(self is MySqlDialect | GenericDialect) {
16121            return parser_err!(
16122                "Unsupported statement REPLACE",
16123                self.peek_token().span.start
16124            );
16125        }
16126
16127        let mut insert = self.parse_insert(replace_token)?;
16128        if let Statement::Insert(Insert { replace_into, .. }) = &mut insert {
16129            *replace_into = true;
16130        }
16131
16132        Ok(insert)
16133    }
16134
16135    /// Parse an INSERT statement, returning a `Box`ed SetExpr
16136    ///
16137    /// This is used to reduce the size of the stack frames in debug builds
16138    fn parse_insert_setexpr_boxed(
16139        &mut self,
16140        insert_token: TokenWithSpan,
16141    ) -> Result<Box<SetExpr>, ParserError> {
16142        Ok(Box::new(SetExpr::Insert(self.parse_insert(insert_token)?)))
16143    }
16144
16145    /// Parse an INSERT statement
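    ///
    /// For illustration only (names are placeholders; clauses such as
    /// `ON CONFLICT` are dialect specific):
    ///
    /// ```sql
    /// INSERT INTO mytable (a, b) VALUES (1, 2) ON CONFLICT (a) DO NOTHING;
    /// ```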
16146    pub fn parse_insert(&mut self, insert_token: TokenWithSpan) -> Result<Statement, ParserError> {
16147        let or = self.parse_conflict_clause();
16148        let priority = if !dialect_of!(self is MySqlDialect | GenericDialect) {
16149            None
16150        } else if self.parse_keyword(Keyword::LOW_PRIORITY) {
16151            Some(MysqlInsertPriority::LowPriority)
16152        } else if self.parse_keyword(Keyword::DELAYED) {
16153            Some(MysqlInsertPriority::Delayed)
16154        } else if self.parse_keyword(Keyword::HIGH_PRIORITY) {
16155            Some(MysqlInsertPriority::HighPriority)
16156        } else {
16157            None
16158        };
16159
16160        let ignore = dialect_of!(self is MySqlDialect | GenericDialect)
16161            && self.parse_keyword(Keyword::IGNORE);
16162
16163        let replace_into = false;
16164
16165        let overwrite = self.parse_keyword(Keyword::OVERWRITE);
16166        let into = self.parse_keyword(Keyword::INTO);
16167
16168        let local = self.parse_keyword(Keyword::LOCAL);
16169
16170        if self.parse_keyword(Keyword::DIRECTORY) {
16171            let path = self.parse_literal_string()?;
16172            let file_format = if self.parse_keywords(&[Keyword::STORED, Keyword::AS]) {
16173                Some(self.parse_file_format()?)
16174            } else {
16175                None
16176            };
16177            let source = self.parse_query()?;
16178            Ok(Statement::Directory {
16179                local,
16180                path,
16181                overwrite,
16182                file_format,
16183                source,
16184            })
16185        } else {
16186            // Hive lets you put the TABLE keyword here regardless
16187            let table = self.parse_keyword(Keyword::TABLE);
16188            let table_object = self.parse_table_object()?;
16189
16190            let table_alias =
16191                if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::AS) {
16192                    Some(self.parse_identifier()?)
16193                } else {
16194                    None
16195                };
16196
16197            let is_mysql = dialect_of!(self is MySqlDialect);
16198
16199            let (columns, partitioned, after_columns, source, assignments) = if self
16200                .parse_keywords(&[Keyword::DEFAULT, Keyword::VALUES])
16201            {
16202                (vec![], None, vec![], None, vec![])
16203            } else {
16204                let (columns, partitioned, after_columns) = if !self.peek_subquery_start() {
16205                    let columns = self.parse_parenthesized_column_list(Optional, is_mysql)?;
16206
16207                    let partitioned = self.parse_insert_partition()?;
16208                    // Hive allows you to specify columns after partitions as well if you want.
16209                    let after_columns = if dialect_of!(self is HiveDialect) {
16210                        self.parse_parenthesized_column_list(Optional, false)?
16211                    } else {
16212                        vec![]
16213                    };
16214                    (columns, partitioned, after_columns)
16215                } else {
16216                    Default::default()
16217                };
16218
16219                let (source, assignments) = if self.peek_keyword(Keyword::FORMAT)
16220                    || self.peek_keyword(Keyword::SETTINGS)
16221                {
16222                    (None, vec![])
16223                } else if self.dialect.supports_insert_set() && self.parse_keyword(Keyword::SET) {
16224                    (None, self.parse_comma_separated(Parser::parse_assignment)?)
16225                } else {
16226                    (Some(self.parse_query()?), vec![])
16227                };
16228
16229                (columns, partitioned, after_columns, source, assignments)
16230            };
16231
16232            let (format_clause, settings) = if self.dialect.supports_insert_format() {
16233                // The `SETTINGS` clause always comes before `FORMAT` for ClickHouse:
16234                // <https://clickhouse.com/docs/en/sql-reference/statements/insert-into>
16235                let settings = self.parse_settings()?;
16236
16237                let format = if self.parse_keyword(Keyword::FORMAT) {
16238                    Some(self.parse_input_format_clause()?)
16239                } else {
16240                    None
16241                };
16242
16243                (format, settings)
16244            } else {
16245                Default::default()
16246            };
16247
16248            let insert_alias = if dialect_of!(self is MySqlDialect | GenericDialect)
16249                && self.parse_keyword(Keyword::AS)
16250            {
16251                let row_alias = self.parse_object_name(false)?;
16252                let col_aliases = Some(self.parse_parenthesized_column_list(Optional, false)?);
16253                Some(InsertAliases {
16254                    row_alias,
16255                    col_aliases,
16256                })
16257            } else {
16258                None
16259            };
16260
16261            let on = if self.parse_keyword(Keyword::ON) {
16262                if self.parse_keyword(Keyword::CONFLICT) {
16263                    let conflict_target =
16264                        if self.parse_keywords(&[Keyword::ON, Keyword::CONSTRAINT]) {
16265                            Some(ConflictTarget::OnConstraint(self.parse_object_name(false)?))
16266                        } else if self.peek_token() == Token::LParen {
16267                            Some(ConflictTarget::Columns(
16268                                self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
16269                            ))
16270                        } else {
16271                            None
16272                        };
16273
16274                    self.expect_keyword_is(Keyword::DO)?;
16275                    let action = if self.parse_keyword(Keyword::NOTHING) {
16276                        OnConflictAction::DoNothing
16277                    } else {
16278                        self.expect_keyword_is(Keyword::UPDATE)?;
16279                        self.expect_keyword_is(Keyword::SET)?;
16280                        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
16281                        let selection = if self.parse_keyword(Keyword::WHERE) {
16282                            Some(self.parse_expr()?)
16283                        } else {
16284                            None
16285                        };
16286                        OnConflictAction::DoUpdate(DoUpdate {
16287                            assignments,
16288                            selection,
16289                        })
16290                    };
16291
16292                    Some(OnInsert::OnConflict(OnConflict {
16293                        conflict_target,
16294                        action,
16295                    }))
16296                } else {
16297                    self.expect_keyword_is(Keyword::DUPLICATE)?;
16298                    self.expect_keyword_is(Keyword::KEY)?;
16299                    self.expect_keyword_is(Keyword::UPDATE)?;
16300                    let l = self.parse_comma_separated(Parser::parse_assignment)?;
16301
16302                    Some(OnInsert::DuplicateKeyUpdate(l))
16303                }
16304            } else {
16305                None
16306            };
16307
16308            let returning = if self.parse_keyword(Keyword::RETURNING) {
16309                Some(self.parse_comma_separated(Parser::parse_select_item)?)
16310            } else {
16311                None
16312            };
16313
16314            Ok(Statement::Insert(Insert {
16315                insert_token: insert_token.into(),
16316                or,
16317                table: table_object,
16318                table_alias,
16319                ignore,
16320                into,
16321                overwrite,
16322                partitioned,
16323                columns,
16324                after_columns,
16325                source,
16326                assignments,
16327                has_table_keyword: table,
16328                on,
16329                returning,
16330                replace_into,
16331                priority,
16332                insert_alias,
16333                settings,
16334                format_clause,
16335            }))
16336        }
16337    }
16338
16339    // Parses the input format clause used by [ClickHouse].
16340    //
16341    // <https://clickhouse.com/docs/en/interfaces/formats>
16342    pub fn parse_input_format_clause(&mut self) -> Result<InputFormatClause, ParserError> {
16343        let ident = self.parse_identifier()?;
16344        let values = self
16345            .maybe_parse(|p| p.parse_comma_separated(|p| p.parse_expr()))?
16346            .unwrap_or_default();
16347
16348        Ok(InputFormatClause { ident, values })
16349    }
16350
16351    /// Returns true if the immediate tokens look like the
16352    /// beginning of a subquery. `(SELECT ...`
16353    fn peek_subquery_start(&mut self) -> bool {
16354        let [maybe_lparen, maybe_select] = self.peek_tokens();
16355        Token::LParen == maybe_lparen
16356            && matches!(maybe_select, Token::Word(w) if w.keyword == Keyword::SELECT)
16357    }
16358
16359    fn parse_conflict_clause(&mut self) -> Option<SqliteOnConflict> {
16360        if self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]) {
16361            Some(SqliteOnConflict::Replace)
16362        } else if self.parse_keywords(&[Keyword::OR, Keyword::ROLLBACK]) {
16363            Some(SqliteOnConflict::Rollback)
16364        } else if self.parse_keywords(&[Keyword::OR, Keyword::ABORT]) {
16365            Some(SqliteOnConflict::Abort)
16366        } else if self.parse_keywords(&[Keyword::OR, Keyword::FAIL]) {
16367            Some(SqliteOnConflict::Fail)
16368        } else if self.parse_keywords(&[Keyword::OR, Keyword::IGNORE]) {
16369            Some(SqliteOnConflict::Ignore)
16370        } else if self.parse_keyword(Keyword::REPLACE) {
16371            Some(SqliteOnConflict::Replace)
16372        } else {
16373            None
16374        }
16375    }
16376
16377    pub fn parse_insert_partition(&mut self) -> Result<Option<Vec<Expr>>, ParserError> {
16378        if self.parse_keyword(Keyword::PARTITION) {
16379            self.expect_token(&Token::LParen)?;
16380            let partition_cols = Some(self.parse_comma_separated(Parser::parse_expr)?);
16381            self.expect_token(&Token::RParen)?;
16382            Ok(partition_cols)
16383        } else {
16384            Ok(None)
16385        }
16386    }
16387
16388    pub fn parse_load_data_table_format(
16389        &mut self,
16390    ) -> Result<Option<HiveLoadDataFormat>, ParserError> {
16391        if self.parse_keyword(Keyword::INPUTFORMAT) {
16392            let input_format = self.parse_expr()?;
16393            self.expect_keyword_is(Keyword::SERDE)?;
16394            let serde = self.parse_expr()?;
16395            Ok(Some(HiveLoadDataFormat {
16396                input_format,
16397                serde,
16398            }))
16399        } else {
16400            Ok(None)
16401        }
16402    }
16403
16404    /// Parse an UPDATE statement, returning a `Box`ed SetExpr
16405    ///
16406    /// This is used to reduce the size of the stack frames in debug builds
16407    fn parse_update_setexpr_boxed(
16408        &mut self,
16409        update_token: TokenWithSpan,
16410    ) -> Result<Box<SetExpr>, ParserError> {
16411        Ok(Box::new(SetExpr::Update(self.parse_update(update_token)?)))
16412    }
16413
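    /// Parse an UPDATE statement.
    ///
    /// For illustration only (names are placeholders; `RETURNING` and `LIMIT`
    /// are dialect specific):
    ///
    /// ```sql
    /// UPDATE mytable SET a = 1, b = 2 WHERE id = 3 RETURNING a;
    /// ```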
16414    pub fn parse_update(&mut self, update_token: TokenWithSpan) -> Result<Statement, ParserError> {
16415        let or = self.parse_conflict_clause();
16416        let table = self.parse_table_and_joins()?;
16417        let from_before_set = if self.parse_keyword(Keyword::FROM) {
16418            Some(UpdateTableFromKind::BeforeSet(
16419                self.parse_table_with_joins()?,
16420            ))
16421        } else {
16422            None
16423        };
16424        self.expect_keyword(Keyword::SET)?;
16425        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
16426        let from = if from_before_set.is_none() && self.parse_keyword(Keyword::FROM) {
16427            Some(UpdateTableFromKind::AfterSet(
16428                self.parse_table_with_joins()?,
16429            ))
16430        } else {
16431            from_before_set
16432        };
16433        let selection = if self.parse_keyword(Keyword::WHERE) {
16434            Some(self.parse_expr()?)
16435        } else {
16436            None
16437        };
16438        let returning = if self.parse_keyword(Keyword::RETURNING) {
16439            Some(self.parse_comma_separated(Parser::parse_select_item)?)
16440        } else {
16441            None
16442        };
16443        let limit = if self.parse_keyword(Keyword::LIMIT) {
16444            Some(self.parse_expr()?)
16445        } else {
16446            None
16447        };
16448        Ok(Update {
16449            update_token: update_token.into(),
16450            table,
16451            assignments,
16452            from,
16453            selection,
16454            returning,
16455            or,
16456            limit,
16457        }
16458        .into())
16459    }
16460
16461    /// Parse a `var = expr` assignment, used in an UPDATE statement
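    ///
    /// For illustration only, this parses the `a = 1` part of:
    ///
    /// ```sql
    /// UPDATE mytable SET a = 1;
    /// ```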
16462    pub fn parse_assignment(&mut self) -> Result<Assignment, ParserError> {
16463        let target = self.parse_assignment_target()?;
16464        self.expect_token(&Token::Eq)?;
16465        let value = self.parse_expr()?;
16466        Ok(Assignment { target, value })
16467    }
16468
16469    /// Parse the left-hand side of an assignment, used in an UPDATE statement
16470    pub fn parse_assignment_target(&mut self) -> Result<AssignmentTarget, ParserError> {
16471        if self.consume_token(&Token::LParen) {
16472            let columns = self.parse_comma_separated(|p| p.parse_object_name(false))?;
16473            self.expect_token(&Token::RParen)?;
16474            Ok(AssignmentTarget::Tuple(columns))
16475        } else {
16476            let column = self.parse_object_name(false)?;
16477            Ok(AssignmentTarget::ColumnName(column))
16478        }
16479    }
16480
16481    pub fn parse_function_args(&mut self) -> Result<FunctionArg, ParserError> {
16482        let arg = if self.dialect.supports_named_fn_args_with_expr_name() {
16483            self.maybe_parse(|p| {
16484                let name = p.parse_expr()?;
16485                let operator = p.parse_function_named_arg_operator()?;
16486                let arg = p.parse_wildcard_expr()?.into();
16487                Ok(FunctionArg::ExprNamed {
16488                    name,
16489                    arg,
16490                    operator,
16491                })
16492            })?
16493        } else {
16494            self.maybe_parse(|p| {
16495                let name = p.parse_identifier()?;
16496                let operator = p.parse_function_named_arg_operator()?;
16497                let arg = p.parse_wildcard_expr()?.into();
16498                Ok(FunctionArg::Named {
16499                    name,
16500                    arg,
16501                    operator,
16502                })
16503            })?
16504        };
16505        if let Some(arg) = arg {
16506            return Ok(arg);
16507        }
16508        Ok(FunctionArg::Unnamed(self.parse_wildcard_expr()?.into()))
16509    }
16510
16511    fn parse_function_named_arg_operator(&mut self) -> Result<FunctionArgOperator, ParserError> {
16512        if self.parse_keyword(Keyword::VALUE) {
16513            return Ok(FunctionArgOperator::Value);
16514        }
16515        let tok = self.next_token();
16516        match tok.token {
16517            Token::RArrow if self.dialect.supports_named_fn_args_with_rarrow_operator() => {
16518                Ok(FunctionArgOperator::RightArrow)
16519            }
16520            Token::Eq if self.dialect.supports_named_fn_args_with_eq_operator() => {
16521                Ok(FunctionArgOperator::Equals)
16522            }
16523            Token::Assignment
16524                if self
16525                    .dialect
16526                    .supports_named_fn_args_with_assignment_operator() =>
16527            {
16528                Ok(FunctionArgOperator::Assignment)
16529            }
16530            Token::Colon if self.dialect.supports_named_fn_args_with_colon_operator() => {
16531                Ok(FunctionArgOperator::Colon)
16532            }
16533            _ => {
16534                self.prev_token();
16535                self.expected("argument operator", tok)
16536            }
16537        }
16538    }
16539
16540    pub fn parse_optional_args(&mut self) -> Result<Vec<FunctionArg>, ParserError> {
16541        if self.consume_token(&Token::RParen) {
16542            Ok(vec![])
16543        } else {
16544            let args = self.parse_comma_separated(Parser::parse_function_args)?;
16545            self.expect_token(&Token::RParen)?;
16546            Ok(args)
16547        }
16548    }
16549
16550    fn parse_table_function_args(&mut self) -> Result<TableFunctionArgs, ParserError> {
16551        if self.consume_token(&Token::RParen) {
16552            return Ok(TableFunctionArgs {
16553                args: vec![],
16554                settings: None,
16555            });
16556        }
16557        let mut args = vec![];
16558        let settings = loop {
16559            if let Some(settings) = self.parse_settings()? {
16560                break Some(settings);
16561            }
16562            args.push(self.parse_function_args()?);
16563            if self.is_parse_comma_separated_end() {
16564                break None;
16565            }
16566        };
16567        self.expect_token(&Token::RParen)?;
16568        Ok(TableFunctionArgs { args, settings })
16569    }
16570
16571    /// Parses a potentially empty list of arguments to a function
16572    /// (including the closing parenthesis).
16573    ///
16574    /// Examples:
16575    /// ```sql
16576    /// FIRST_VALUE(x ORDER BY 1,2,3);
16577    /// FIRST_VALUE(x IGNORE NULLS);
16578    /// ```
16579    fn parse_function_argument_list(&mut self) -> Result<FunctionArgumentList, ParserError> {
16580        let mut clauses = vec![];
16581
16582        // Handle clauses that may exist with an empty argument list
16583
16584        if let Some(null_clause) = self.parse_json_null_clause() {
16585            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
16586        }
16587
16588        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
16589            clauses.push(FunctionArgumentClause::JsonReturningClause(
16590                json_returning_clause,
16591            ));
16592        }
16593
16594        if self.consume_token(&Token::RParen) {
16595            return Ok(FunctionArgumentList {
16596                duplicate_treatment: None,
16597                args: vec![],
16598                clauses,
16599            });
16600        }
16601
16602        let duplicate_treatment = self.parse_duplicate_treatment()?;
16603        let args = self.parse_comma_separated(Parser::parse_function_args)?;
16604
16605        if self.dialect.supports_window_function_null_treatment_arg() {
16606            if let Some(null_treatment) = self.parse_null_treatment()? {
16607                clauses.push(FunctionArgumentClause::IgnoreOrRespectNulls(null_treatment));
16608            }
16609        }
16610
16611        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
16612            clauses.push(FunctionArgumentClause::OrderBy(
16613                self.parse_comma_separated(Parser::parse_order_by_expr)?,
16614            ));
16615        }
16616
16617        if self.parse_keyword(Keyword::LIMIT) {
16618            clauses.push(FunctionArgumentClause::Limit(self.parse_expr()?));
16619        }
16620
16621        if dialect_of!(self is GenericDialect | BigQueryDialect)
16622            && self.parse_keyword(Keyword::HAVING)
16623        {
16624            let kind = match self.expect_one_of_keywords(&[Keyword::MIN, Keyword::MAX])? {
16625                Keyword::MIN => HavingBoundKind::Min,
16626                Keyword::MAX => HavingBoundKind::Max,
16627                unexpected_keyword => return Err(ParserError::ParserError(
16628                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in having bound"),
16629                )),
16630            };
16631            clauses.push(FunctionArgumentClause::Having(HavingBound(
16632                kind,
16633                self.parse_expr()?,
16634            )))
16635        }
16636
16637        if dialect_of!(self is GenericDialect | MySqlDialect)
16638            && self.parse_keyword(Keyword::SEPARATOR)
16639        {
16640            clauses.push(FunctionArgumentClause::Separator(self.parse_value()?.value));
16641        }
16642
16643        if let Some(on_overflow) = self.parse_listagg_on_overflow()? {
16644            clauses.push(FunctionArgumentClause::OnOverflow(on_overflow));
16645        }
16646
16647        if let Some(null_clause) = self.parse_json_null_clause() {
16648            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
16649        }
16650
16651        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
16652            clauses.push(FunctionArgumentClause::JsonReturningClause(
16653                json_returning_clause,
16654            ));
16655        }
16656
16657        self.expect_token(&Token::RParen)?;
16658        Ok(FunctionArgumentList {
16659            duplicate_treatment,
16660            args,
16661            clauses,
16662        })
16663    }
16664
16665    fn parse_json_null_clause(&mut self) -> Option<JsonNullClause> {
16666        if self.parse_keywords(&[Keyword::ABSENT, Keyword::ON, Keyword::NULL]) {
16667            Some(JsonNullClause::AbsentOnNull)
16668        } else if self.parse_keywords(&[Keyword::NULL, Keyword::ON, Keyword::NULL]) {
16669            Some(JsonNullClause::NullOnNull)
16670        } else {
16671            None
16672        }
16673    }
16674
16675    fn maybe_parse_json_returning_clause(
16676        &mut self,
16677    ) -> Result<Option<JsonReturningClause>, ParserError> {
16678        if self.parse_keyword(Keyword::RETURNING) {
16679            let data_type = self.parse_data_type()?;
16680            Ok(Some(JsonReturningClause { data_type }))
16681        } else {
16682            Ok(None)
16683        }
16684    }
16685
16686    fn parse_duplicate_treatment(&mut self) -> Result<Option<DuplicateTreatment>, ParserError> {
16687        let loc = self.peek_token().span.start;
16688        match (
16689            self.parse_keyword(Keyword::ALL),
16690            self.parse_keyword(Keyword::DISTINCT),
16691        ) {
16692            (true, false) => Ok(Some(DuplicateTreatment::All)),
16693            (false, true) => Ok(Some(DuplicateTreatment::Distinct)),
16694            (false, false) => Ok(None),
16695            (true, true) => parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc),
16696        }
16697    }
16698
16699    /// Parse a comma-delimited list of projections after SELECT
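    ///
    /// For illustration only (names are placeholders), each of the three
    /// projections below is parsed by one call to this method:
    ///
    /// ```sql
    /// SELECT a, t.*, b + 1 AS c FROM t;
    /// ```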
16700    pub fn parse_select_item(&mut self) -> Result<SelectItem, ParserError> {
16701        let prefix = self
16702            .parse_one_of_keywords(
16703                self.dialect
16704                    .get_reserved_keywords_for_select_item_operator(),
16705            )
16706            .map(|keyword| Ident::new(format!("{keyword:?}")));
16707
16708        match self.parse_wildcard_expr()? {
16709            Expr::QualifiedWildcard(prefix, token) => Ok(SelectItem::QualifiedWildcard(
16710                SelectItemQualifiedWildcardKind::ObjectName(prefix),
16711                self.parse_wildcard_additional_options(token.0)?,
16712            )),
16713            Expr::Wildcard(token) => Ok(SelectItem::Wildcard(
16714                self.parse_wildcard_additional_options(token.0)?,
16715            )),
16716            Expr::Identifier(v) if v.value.to_lowercase() == "from" && v.quote_style.is_none() => {
16717                parser_err!(
16718                    format!("Expected an expression, found: {}", v),
16719                    self.peek_token().span.start
16720                )
16721            }
16722            Expr::BinaryOp {
16723                left,
16724                op: BinaryOperator::Eq,
16725                right,
16726            } if self.dialect.supports_eq_alias_assignment()
16727                && matches!(left.as_ref(), Expr::Identifier(_)) =>
16728            {
16729                let Expr::Identifier(alias) = *left else {
16730                    return parser_err!(
16731                        "BUG: expected identifier expression as alias",
16732                        self.peek_token().span.start
16733                    );
16734                };
16735                Ok(SelectItem::ExprWithAlias {
16736                    expr: *right,
16737                    alias,
16738                })
16739            }
16740            expr if self.dialect.supports_select_expr_star()
16741                && self.consume_tokens(&[Token::Period, Token::Mul]) =>
16742            {
16743                let wildcard_token = self.get_previous_token().clone();
16744                Ok(SelectItem::QualifiedWildcard(
16745                    SelectItemQualifiedWildcardKind::Expr(expr),
16746                    self.parse_wildcard_additional_options(wildcard_token)?,
16747                ))
16748            }
16749            expr => self
16750                .maybe_parse_select_item_alias()
16751                .map(|alias| match alias {
16752                    Some(alias) => SelectItem::ExprWithAlias {
16753                        expr: maybe_prefixed_expr(expr, prefix),
16754                        alias,
16755                    },
16756                    None => SelectItem::UnnamedExpr(maybe_prefixed_expr(expr, prefix)),
16757                }),
16758        }
16759    }
16760
16761    /// Parse the [`WildcardAdditionalOptions`] for a wildcard select item.
16762    ///
16763    /// Options that are not present are left as `None` in the returned struct.
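    ///
    /// For illustration only (these options are dialect specific, e.g.
    /// Snowflake or BigQuery; names are placeholders):
    ///
    /// ```sql
    /// SELECT * EXCLUDE (a) REPLACE (b + 1 AS b) FROM t;
    /// ```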
16764    pub fn parse_wildcard_additional_options(
16765        &mut self,
16766        wildcard_token: TokenWithSpan,
16767    ) -> Result<WildcardAdditionalOptions, ParserError> {
16768        let opt_ilike = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
16769            self.parse_optional_select_item_ilike()?
16770        } else {
16771            None
16772        };
16773        let opt_exclude = if opt_ilike.is_none() && self.dialect.supports_select_wildcard_exclude()
16774        {
16775            self.parse_optional_select_item_exclude()?
16776        } else {
16777            None
16778        };
16779        let opt_except = if self.dialect.supports_select_wildcard_except() {
16780            self.parse_optional_select_item_except()?
16781        } else {
16782            None
16783        };
16784        let opt_replace = if dialect_of!(self is GenericDialect | BigQueryDialect | ClickHouseDialect | DuckDbDialect | SnowflakeDialect)
16785        {
16786            self.parse_optional_select_item_replace()?
16787        } else {
16788            None
16789        };
16790        let opt_rename = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
16791            self.parse_optional_select_item_rename()?
16792        } else {
16793            None
16794        };
16795
16796        Ok(WildcardAdditionalOptions {
16797            wildcard_token: wildcard_token.into(),
16798            opt_ilike,
16799            opt_exclude,
16800            opt_except,
16801            opt_rename,
16802            opt_replace,
16803        })
16804    }
16805
16806    /// Parse an optional [`Ilike`](IlikeSelectItem) clause for wildcard select items.
16807    ///
16808    /// Returns `Ok(None)` if the clause is not present.
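    ///
    /// For example (illustrative, Snowflake syntax):
    ///
    /// ```sql
    /// SELECT * ILIKE '%id%' FROM monthly_sales;
    /// ```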
16809    pub fn parse_optional_select_item_ilike(
16810        &mut self,
16811    ) -> Result<Option<IlikeSelectItem>, ParserError> {
16812        let opt_ilike = if self.parse_keyword(Keyword::ILIKE) {
16813            let next_token = self.next_token();
16814            let pattern = match next_token.token {
16815                Token::SingleQuotedString(s) => s,
16816                _ => return self.expected("ilike pattern", next_token),
16817            };
16818            Some(IlikeSelectItem { pattern })
16819        } else {
16820            None
16821        };
16822        Ok(opt_ilike)
16823    }
16824
16825    /// Parse an optional [`Exclude`](ExcludeSelectItem) clause for wildcard select items.
16826    ///
16827    /// Returns `Ok(None)` if the clause is not present.
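    ///
    /// For example (illustrative; Snowflake and DuckDB syntax):
    ///
    /// ```sql
    /// SELECT * EXCLUDE (department_id, manager_id) FROM employees;
    /// SELECT * EXCLUDE department_id FROM employees;
    /// ```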
16828    pub fn parse_optional_select_item_exclude(
16829        &mut self,
16830    ) -> Result<Option<ExcludeSelectItem>, ParserError> {
16831        let opt_exclude = if self.parse_keyword(Keyword::EXCLUDE) {
16832            if self.consume_token(&Token::LParen) {
16833                let columns = self.parse_comma_separated(|parser| parser.parse_identifier())?;
16834                self.expect_token(&Token::RParen)?;
16835                Some(ExcludeSelectItem::Multiple(columns))
16836            } else {
16837                let column = self.parse_identifier()?;
16838                Some(ExcludeSelectItem::Single(column))
16839            }
16840        } else {
16841            None
16842        };
16843
16844        Ok(opt_exclude)
16845    }
16846
16847    /// Parse an optional [`Except`](ExceptSelectItem) clause for wildcard select items.
16848    ///
16849    /// Returns `Ok(None)` if the clause is not present.
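    ///
    /// For example (illustrative; BigQuery requires the parenthesized form,
    /// ClickHouse also accepts a single bare column):
    ///
    /// ```sql
    /// SELECT * EXCEPT (order_id, item_name) FROM orders;
    /// SELECT * EXCEPT order_id FROM orders;
    /// ```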
16850    pub fn parse_optional_select_item_except(
16851        &mut self,
16852    ) -> Result<Option<ExceptSelectItem>, ParserError> {
16853        let opt_except = if self.parse_keyword(Keyword::EXCEPT) {
16854            if self.peek_token().token == Token::LParen {
16855                let idents = self.parse_parenthesized_column_list(Mandatory, false)?;
16856                match &idents[..] {
16857                    [] => {
16858                        return self.expected(
16859                            "at least one column should be parsed by the EXCEPT clause",
16860                            self.peek_token(),
16861                        )?;
16862                    }
16863                    [first, idents @ ..] => Some(ExceptSelectItem {
16864                        first_element: first.clone(),
16865                        additional_elements: idents.to_vec(),
16866                    }),
16867                }
16868            } else {
16869                // Clickhouse allows EXCEPT column_name
16870                let ident = self.parse_identifier()?;
16871                Some(ExceptSelectItem {
16872                    first_element: ident,
16873                    additional_elements: vec![],
16874                })
16875            }
16876        } else {
16877            None
16878        };
16879
16880        Ok(opt_except)
16881    }
16882
16883    /// Parse an optional [`Rename`](RenameSelectItem) clause for wildcard select items.
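    ///
    /// For example (illustrative, Snowflake syntax; both forms are accepted here):
    ///
    /// ```sql
    /// SELECT * RENAME (department_id AS dept_id, employee_id AS id) FROM employees;
    /// SELECT * RENAME department_id AS dept_id FROM employees;
    /// ```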
16884    pub fn parse_optional_select_item_rename(
16885        &mut self,
16886    ) -> Result<Option<RenameSelectItem>, ParserError> {
16887        let opt_rename = if self.parse_keyword(Keyword::RENAME) {
16888            if self.consume_token(&Token::LParen) {
16889                let idents =
16890                    self.parse_comma_separated(|parser| parser.parse_identifier_with_alias())?;
16891                self.expect_token(&Token::RParen)?;
16892                Some(RenameSelectItem::Multiple(idents))
16893            } else {
16894                let ident = self.parse_identifier_with_alias()?;
16895                Some(RenameSelectItem::Single(ident))
16896            }
16897        } else {
16898            None
16899        };
16900
16901        Ok(opt_rename)
16902    }
16903
16904    /// Parse an optional [`Replace`](ReplaceSelectItem) clause for wildcard select items.
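    ///
    /// For example (illustrative; BigQuery and DuckDB syntax):
    ///
    /// ```sql
    /// SELECT * REPLACE (quantity / 2 AS quantity) FROM orders;
    /// ```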
16905    pub fn parse_optional_select_item_replace(
16906        &mut self,
16907    ) -> Result<Option<ReplaceSelectItem>, ParserError> {
16908        let opt_replace = if self.parse_keyword(Keyword::REPLACE) {
16909            if self.consume_token(&Token::LParen) {
16910                let items = self.parse_comma_separated(|parser| {
16911                    Ok(Box::new(parser.parse_replace_elements()?))
16912                })?;
16913                self.expect_token(&Token::RParen)?;
16914                Some(ReplaceSelectItem { items })
16915            } else {
16916                let tok = self.next_token();
16917                return self.expected("( after REPLACE", tok);
16918            }
16919        } else {
16920            None
16921        };
16922
16923        Ok(opt_replace)
16924    }
16925    pub fn parse_replace_elements(&mut self) -> Result<ReplaceSelectElement, ParserError> {
16926        let expr = self.parse_expr()?;
16927        let as_keyword = self.parse_keyword(Keyword::AS);
16928        let ident = self.parse_identifier()?;
16929        Ok(ReplaceSelectElement {
16930            expr,
16931            column_name: ident,
16932            as_keyword,
16933        })
16934    }
16935
16936    /// Parse ASC or DESC, returning `Some(true)` for ASC, `Some(false)` for DESC,
16937    /// or `None` if neither keyword is present.
16938    pub fn parse_asc_desc(&mut self) -> Option<bool> {
16939        if self.parse_keyword(Keyword::ASC) {
16940            Some(true)
16941        } else if self.parse_keyword(Keyword::DESC) {
16942            Some(false)
16943        } else {
16944            None
16945        }
16946    }
16947
16948    /// Parse an [OrderByExpr] expression.
16949    pub fn parse_order_by_expr(&mut self) -> Result<OrderByExpr, ParserError> {
16950        self.parse_order_by_expr_inner(false)
16951            .map(|(order_by, _)| order_by)
16952    }
16953
16954    /// Parse an [IndexColumn].
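    ///
    /// For example, each element of the column list of (illustrative, PostgreSQL syntax):
    ///
    /// ```sql
    /// CREATE INDEX idx_name ON customers (last_name varchar_pattern_ops DESC NULLS LAST);
    /// ```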
16955    pub fn parse_create_index_expr(&mut self) -> Result<IndexColumn, ParserError> {
16956        self.parse_order_by_expr_inner(true)
16957            .map(|(column, operator_class)| IndexColumn {
16958                column,
16959                operator_class,
16960            })
16961    }
16962
16963    fn parse_order_by_expr_inner(
16964        &mut self,
16965        with_operator_class: bool,
16966    ) -> Result<(OrderByExpr, Option<Ident>), ParserError> {
16967        let expr = self.parse_expr()?;
16968
16969        let operator_class: Option<Ident> = if with_operator_class {
16970            // If none of the following keywords is present, parse an
16971            // identifier as the operator class.
16972            if self
16973                .peek_one_of_keywords(&[Keyword::ASC, Keyword::DESC, Keyword::NULLS, Keyword::WITH])
16974                .is_some()
16975            {
16976                None
16977            } else {
16978                self.maybe_parse(|parser| parser.parse_identifier())?
16979            }
16980        } else {
16981            None
16982        };
16983
16984        let options = self.parse_order_by_options()?;
16985
16986        let with_fill = if dialect_of!(self is ClickHouseDialect | GenericDialect)
16987            && self.parse_keywords(&[Keyword::WITH, Keyword::FILL])
16988        {
16989            Some(self.parse_with_fill()?)
16990        } else {
16991            None
16992        };
16993
16994        Ok((
16995            OrderByExpr {
16996                expr,
16997                options,
16998                with_fill,
16999            },
17000            operator_class,
17001        ))
17002    }
17003
17004    fn parse_order_by_options(&mut self) -> Result<OrderByOptions, ParserError> {
17005        let asc = self.parse_asc_desc();
17006
17007        let nulls_first = if self.parse_keywords(&[Keyword::NULLS, Keyword::FIRST]) {
17008            Some(true)
17009        } else if self.parse_keywords(&[Keyword::NULLS, Keyword::LAST]) {
17010            Some(false)
17011        } else {
17012            None
17013        };
17014
17015        Ok(OrderByOptions { asc, nulls_first })
17016    }
17017
17018    // Parse a WITH FILL clause (ClickHouse dialect)
17019    // that follows the WITH FILL keywords in an ORDER BY clause
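    //
    // For example (illustrative, ClickHouse syntax):
    //   SELECT n FROM t ORDER BY n WITH FILL FROM 1 TO 10 STEP 2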
17020    pub fn parse_with_fill(&mut self) -> Result<WithFill, ParserError> {
17021        let from = if self.parse_keyword(Keyword::FROM) {
17022            Some(self.parse_expr()?)
17023        } else {
17024            None
17025        };
17026
17027        let to = if self.parse_keyword(Keyword::TO) {
17028            Some(self.parse_expr()?)
17029        } else {
17030            None
17031        };
17032
17033        let step = if self.parse_keyword(Keyword::STEP) {
17034            Some(self.parse_expr()?)
17035        } else {
17036            None
17037        };
17038
17039        Ok(WithFill { from, to, step })
17040    }
17041
17042    // Parse a set of comma-separated INTERPOLATE expressions (ClickHouse dialect)
17043    // that follow the INTERPOLATE keyword in an ORDER BY clause with the WITH FILL modifier
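    //
    // For example (illustrative, ClickHouse syntax):
    //   SELECT n, x FROM t ORDER BY n WITH FILL INTERPOLATE (x AS x + 1)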
17044    pub fn parse_interpolations(&mut self) -> Result<Option<Interpolate>, ParserError> {
17045        if !self.parse_keyword(Keyword::INTERPOLATE) {
17046            return Ok(None);
17047        }
17048
17049        if self.consume_token(&Token::LParen) {
17050            let interpolations =
17051                self.parse_comma_separated0(|p| p.parse_interpolation(), Token::RParen)?;
17052            self.expect_token(&Token::RParen)?;
17053            // INTERPOLATE () and INTERPOLATE ( ... ) variants
17054            return Ok(Some(Interpolate {
17055                exprs: Some(interpolations),
17056            }));
17057        }
17058
17059        // INTERPOLATE
17060        Ok(Some(Interpolate { exprs: None }))
17061    }
17062
17063    // Parse an INTERPOLATE expression (ClickHouse dialect)
17064    pub fn parse_interpolation(&mut self) -> Result<InterpolateExpr, ParserError> {
17065        let column = self.parse_identifier()?;
17066        let expr = if self.parse_keyword(Keyword::AS) {
17067            Some(self.parse_expr()?)
17068        } else {
17069            None
17070        };
17071        Ok(InterpolateExpr { column, expr })
17072    }
17073
17074    /// Parse a TOP clause, the MSSQL equivalent of LIMIT,
17075    /// that follows `SELECT [DISTINCT]`.
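    ///
    /// For example (illustrative, MSSQL syntax):
    ///
    /// ```sql
    /// SELECT TOP 10 * FROM orders;
    /// SELECT TOP (10) PERCENT * FROM orders ORDER BY total;
    /// ```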
17076    pub fn parse_top(&mut self) -> Result<Top, ParserError> {
17077        let quantity = if self.consume_token(&Token::LParen) {
17078            let quantity = self.parse_expr()?;
17079            self.expect_token(&Token::RParen)?;
17080            Some(TopQuantity::Expr(quantity))
17081        } else {
17082            let next_token = self.next_token();
17083            let quantity = match next_token.token {
17084                Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start)?,
17085                _ => self.expected("literal int", next_token)?,
17086            };
17087            Some(TopQuantity::Constant(quantity))
17088        };
17089
17090        let percent = self.parse_keyword(Keyword::PERCENT);
17091
17092        let with_ties = self.parse_keywords(&[Keyword::WITH, Keyword::TIES]);
17093
17094        Ok(Top {
17095            with_ties,
17096            percent,
17097            quantity,
17098        })
17099    }
17100
17101    /// Parse a LIMIT clause
17102    pub fn parse_limit(&mut self) -> Result<Option<Expr>, ParserError> {
17103        if self.parse_keyword(Keyword::ALL) {
17104            Ok(None)
17105        } else {
17106            Ok(Some(self.parse_expr()?))
17107        }
17108    }
17109
17110    /// Parse an OFFSET clause
17111    pub fn parse_offset(&mut self) -> Result<Offset, ParserError> {
17112        let value = self.parse_expr()?;
17113        let rows = if self.parse_keyword(Keyword::ROW) {
17114            OffsetRows::Row
17115        } else if self.parse_keyword(Keyword::ROWS) {
17116            OffsetRows::Rows
17117        } else {
17118            OffsetRows::None
17119        };
17120        Ok(Offset { value, rows })
17121    }
17122
17123    /// Parse a FETCH clause
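    ///
    /// For example, the FETCH clause in (illustrative of the forms accepted):
    ///
    /// ```sql
    /// SELECT * FROM orders OFFSET 5 ROWS FETCH FIRST 10 ROWS ONLY;
    /// SELECT * FROM orders ORDER BY total FETCH FIRST 50 PERCENT ROWS WITH TIES;
    /// ```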
17124    pub fn parse_fetch(&mut self) -> Result<Fetch, ParserError> {
17125        let _ = self.parse_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT]);
17126
17127        let (quantity, percent) = if self
17128            .parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])
17129            .is_some()
17130        {
17131            (None, false)
17132        } else {
17133            let quantity = Expr::Value(self.parse_value()?);
17134            let percent = self.parse_keyword(Keyword::PERCENT);
17135            let _ = self.parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS]);
17136            (Some(quantity), percent)
17137        };
17138
17139        let with_ties = if self.parse_keyword(Keyword::ONLY) {
17140            false
17141        } else {
17142            self.parse_keywords(&[Keyword::WITH, Keyword::TIES])
17143        };
17144
17145        Ok(Fetch {
17146            with_ties,
17147            percent,
17148            quantity,
17149        })
17150    }
17151
17152    /// Parse a FOR UPDATE/FOR SHARE clause
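    ///
    /// For example, the locking clause at the end of (illustrative; PostgreSQL/MySQL syntax):
    ///
    /// ```sql
    /// SELECT * FROM accounts FOR UPDATE OF accounts NOWAIT;
    /// SELECT * FROM accounts FOR SHARE SKIP LOCKED;
    /// ```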
17153    pub fn parse_lock(&mut self) -> Result<LockClause, ParserError> {
17154        let lock_type = match self.expect_one_of_keywords(&[Keyword::UPDATE, Keyword::SHARE])? {
17155            Keyword::UPDATE => LockType::Update,
17156            Keyword::SHARE => LockType::Share,
17157            unexpected_keyword => return Err(ParserError::ParserError(
17158                format!("Internal parser error: expected any of {{UPDATE, SHARE}}, got {unexpected_keyword:?}"),
17159            )),
17160        };
17161        let of = if self.parse_keyword(Keyword::OF) {
17162            Some(self.parse_object_name(false)?)
17163        } else {
17164            None
17165        };
17166        let nonblock = if self.parse_keyword(Keyword::NOWAIT) {
17167            Some(NonBlock::Nowait)
17168        } else if self.parse_keywords(&[Keyword::SKIP, Keyword::LOCKED]) {
17169            Some(NonBlock::SkipLocked)
17170        } else {
17171            None
17172        };
17173        Ok(LockClause {
17174            lock_type,
17175            of,
17176            nonblock,
17177        })
17178    }
17179
17180    pub fn parse_values(
17181        &mut self,
17182        allow_empty: bool,
17183        value_keyword: bool,
17184    ) -> Result<Values, ParserError> {
17185        let mut explicit_row = false;
17186
17187        let rows = self.parse_comma_separated(|parser| {
17188            if parser.parse_keyword(Keyword::ROW) {
17189                explicit_row = true;
17190            }
17191
17192            parser.expect_token(&Token::LParen)?;
17193            if allow_empty && parser.peek_token().token == Token::RParen {
17194                parser.next_token();
17195                Ok(vec![])
17196            } else {
17197                let exprs = parser.parse_comma_separated(Parser::parse_expr)?;
17198                parser.expect_token(&Token::RParen)?;
17199                Ok(exprs)
17200            }
17201        })?;
17202        Ok(Values {
17203            explicit_row,
17204            rows,
17205            value_keyword,
17206        })
17207    }
17208
17209    pub fn parse_start_transaction(&mut self) -> Result<Statement, ParserError> {
17210        self.expect_keyword_is(Keyword::TRANSACTION)?;
17211        Ok(Statement::StartTransaction {
17212            modes: self.parse_transaction_modes()?,
17213            begin: false,
17214            transaction: Some(BeginTransactionKind::Transaction),
17215            modifier: None,
17216            statements: vec![],
17217            exception: None,
17218            has_end_keyword: false,
17219        })
17220    }
17221
17222    pub fn parse_begin(&mut self) -> Result<Statement, ParserError> {
17223        let modifier = if !self.dialect.supports_start_transaction_modifier() {
17224            None
17225        } else if self.parse_keyword(Keyword::DEFERRED) {
17226            Some(TransactionModifier::Deferred)
17227        } else if self.parse_keyword(Keyword::IMMEDIATE) {
17228            Some(TransactionModifier::Immediate)
17229        } else if self.parse_keyword(Keyword::EXCLUSIVE) {
17230            Some(TransactionModifier::Exclusive)
17231        } else if self.parse_keyword(Keyword::TRY) {
17232            Some(TransactionModifier::Try)
17233        } else if self.parse_keyword(Keyword::CATCH) {
17234            Some(TransactionModifier::Catch)
17235        } else {
17236            None
17237        };
17238        let transaction = match self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]) {
17239            Some(Keyword::TRANSACTION) => Some(BeginTransactionKind::Transaction),
17240            Some(Keyword::WORK) => Some(BeginTransactionKind::Work),
17241            _ => None,
17242        };
17243        Ok(Statement::StartTransaction {
17244            modes: self.parse_transaction_modes()?,
17245            begin: true,
17246            transaction,
17247            modifier,
17248            statements: vec![],
17249            exception: None,
17250            has_end_keyword: false,
17251        })
17252    }
17253
17254    pub fn parse_begin_exception_end(&mut self) -> Result<Statement, ParserError> {
17255        let statements = self.parse_statement_list(&[Keyword::EXCEPTION, Keyword::END])?;
17256
17257        let exception = if self.parse_keyword(Keyword::EXCEPTION) {
17258            let mut when = Vec::new();
17259
17260            // We can have multiple `WHEN` arms so we consume all cases until `END`
17261            while !self.peek_keyword(Keyword::END) {
17262                self.expect_keyword(Keyword::WHEN)?;
17263
17264                // Each `WHEN` case can have one or more conditions, e.g.
17265                // WHEN EXCEPTION_1 [OR EXCEPTION_2] THEN
17266                // So we parse identifiers until the `THEN` keyword.
17267                let mut idents = Vec::new();
17268
17269                while !self.parse_keyword(Keyword::THEN) {
17270                    let ident = self.parse_identifier()?;
17271                    idents.push(ident);
17272
17273                    self.maybe_parse(|p| p.expect_keyword(Keyword::OR))?;
17274                }
17275
17276                let statements = self.parse_statement_list(&[Keyword::WHEN, Keyword::END])?;
17277
17278                when.push(ExceptionWhen { idents, statements });
17279            }
17280
17281            Some(when)
17282        } else {
17283            None
17284        };
17285
17286        self.expect_keyword(Keyword::END)?;
17287
17288        Ok(Statement::StartTransaction {
17289            begin: true,
17290            statements,
17291            exception,
17292            has_end_keyword: true,
17293            transaction: None,
17294            modifier: None,
17295            modes: Default::default(),
17296        })
17297    }
17298
17299    pub fn parse_end(&mut self) -> Result<Statement, ParserError> {
17300        let modifier = if !self.dialect.supports_end_transaction_modifier() {
17301            None
17302        } else if self.parse_keyword(Keyword::TRY) {
17303            Some(TransactionModifier::Try)
17304        } else if self.parse_keyword(Keyword::CATCH) {
17305            Some(TransactionModifier::Catch)
17306        } else {
17307            None
17308        };
17309        Ok(Statement::Commit {
17310            chain: self.parse_commit_rollback_chain()?,
17311            end: true,
17312            modifier,
17313        })
17314    }
17315
17316    pub fn parse_transaction_modes(&mut self) -> Result<Vec<TransactionMode>, ParserError> {
17317        let mut modes = vec![];
17318        let mut required = false;
17319        loop {
17320            let mode = if self.parse_keywords(&[Keyword::ISOLATION, Keyword::LEVEL]) {
17321                let iso_level = if self.parse_keywords(&[Keyword::READ, Keyword::UNCOMMITTED]) {
17322                    TransactionIsolationLevel::ReadUncommitted
17323                } else if self.parse_keywords(&[Keyword::READ, Keyword::COMMITTED]) {
17324                    TransactionIsolationLevel::ReadCommitted
17325                } else if self.parse_keywords(&[Keyword::REPEATABLE, Keyword::READ]) {
17326                    TransactionIsolationLevel::RepeatableRead
17327                } else if self.parse_keyword(Keyword::SERIALIZABLE) {
17328                    TransactionIsolationLevel::Serializable
17329                } else if self.parse_keyword(Keyword::SNAPSHOT) {
17330                    TransactionIsolationLevel::Snapshot
17331                } else {
17332                    self.expected("isolation level", self.peek_token())?
17333                };
17334                TransactionMode::IsolationLevel(iso_level)
17335            } else if self.parse_keywords(&[Keyword::READ, Keyword::ONLY]) {
17336                TransactionMode::AccessMode(TransactionAccessMode::ReadOnly)
17337            } else if self.parse_keywords(&[Keyword::READ, Keyword::WRITE]) {
17338                TransactionMode::AccessMode(TransactionAccessMode::ReadWrite)
17339            } else if required {
17340                self.expected("transaction mode", self.peek_token())?
17341            } else {
17342                break;
17343            };
17344            modes.push(mode);
17345            // ANSI requires a comma after each transaction mode, but
17346            // PostgreSQL, for historical reasons, does not. We follow
17347            // PostgreSQL in making the comma optional, since that is strictly
17348            // more general.
17349            required = self.consume_token(&Token::Comma);
17350        }
17351        Ok(modes)
17352    }
17353
17354    pub fn parse_commit(&mut self) -> Result<Statement, ParserError> {
17355        Ok(Statement::Commit {
17356            chain: self.parse_commit_rollback_chain()?,
17357            end: false,
17358            modifier: None,
17359        })
17360    }
17361
17362    pub fn parse_rollback(&mut self) -> Result<Statement, ParserError> {
17363        let chain = self.parse_commit_rollback_chain()?;
17364        let savepoint = self.parse_rollback_savepoint()?;
17365
17366        Ok(Statement::Rollback { chain, savepoint })
17367    }
17368
17369    pub fn parse_commit_rollback_chain(&mut self) -> Result<bool, ParserError> {
17370        let _ = self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]);
17371        if self.parse_keyword(Keyword::AND) {
17372            let chain = !self.parse_keyword(Keyword::NO);
17373            self.expect_keyword_is(Keyword::CHAIN)?;
17374            Ok(chain)
17375        } else {
17376            Ok(false)
17377        }
17378    }
17379
17380    pub fn parse_rollback_savepoint(&mut self) -> Result<Option<Ident>, ParserError> {
17381        if self.parse_keyword(Keyword::TO) {
17382            let _ = self.parse_keyword(Keyword::SAVEPOINT);
17383            let savepoint = self.parse_identifier()?;
17384
17385            Ok(Some(savepoint))
17386        } else {
17387            Ok(None)
17388        }
17389    }
17390
17391    /// Parse a 'RAISERROR' statement
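    ///
    /// For example (illustrative, MSSQL syntax):
    ///
    /// ```sql
    /// RAISERROR('Row %d could not be updated.', 16, 1, 5) WITH NOWAIT;
    /// ```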
17392    pub fn parse_raiserror(&mut self) -> Result<Statement, ParserError> {
17393        self.expect_token(&Token::LParen)?;
17394        let message = Box::new(self.parse_expr()?);
17395        self.expect_token(&Token::Comma)?;
17396        let severity = Box::new(self.parse_expr()?);
17397        self.expect_token(&Token::Comma)?;
17398        let state = Box::new(self.parse_expr()?);
17399        let arguments = if self.consume_token(&Token::Comma) {
17400            self.parse_comma_separated(Parser::parse_expr)?
17401        } else {
17402            vec![]
17403        };
17404        self.expect_token(&Token::RParen)?;
17405        let options = if self.parse_keyword(Keyword::WITH) {
17406            self.parse_comma_separated(Parser::parse_raiserror_option)?
17407        } else {
17408            vec![]
17409        };
17410        Ok(Statement::RaisError {
17411            message,
17412            severity,
17413            state,
17414            arguments,
17415            options,
17416        })
17417    }
17418
17419    pub fn parse_raiserror_option(&mut self) -> Result<RaisErrorOption, ParserError> {
17420        match self.expect_one_of_keywords(&[Keyword::LOG, Keyword::NOWAIT, Keyword::SETERROR])? {
17421            Keyword::LOG => Ok(RaisErrorOption::Log),
17422            Keyword::NOWAIT => Ok(RaisErrorOption::NoWait),
17423            Keyword::SETERROR => Ok(RaisErrorOption::SetError),
17424            _ => self.expected(
17425                "LOG, NOWAIT, or SETERROR raiserror option",
17426                self.peek_token(),
17427            ),
17428        }
17429    }
17430
17431    pub fn parse_deallocate(&mut self) -> Result<Statement, ParserError> {
17432        let prepare = self.parse_keyword(Keyword::PREPARE);
17433        let name = self.parse_identifier()?;
17434        Ok(Statement::Deallocate { name, prepare })
17435    }
17436
17437    pub fn parse_execute(&mut self) -> Result<Statement, ParserError> {
17438        let name = if self.dialect.supports_execute_immediate()
17439            && self.parse_keyword(Keyword::IMMEDIATE)
17440        {
17441            None
17442        } else {
17443            let name = self.parse_object_name(false)?;
17444            Some(name)
17445        };
17446
17447        let has_parentheses = self.consume_token(&Token::LParen);
17448
17449        let end_kws = &[Keyword::USING, Keyword::OUTPUT, Keyword::DEFAULT];
17450        let end_token = match (has_parentheses, self.peek_token().token) {
17451            (true, _) => Token::RParen,
17452            (false, Token::EOF) => Token::EOF,
17453            (false, Token::Word(w)) if end_kws.contains(&w.keyword) => Token::Word(w),
17454            (false, _) => Token::SemiColon,
17455        };
17456
17457        let parameters = self.parse_comma_separated0(Parser::parse_expr, end_token)?;
17458
17459        if has_parentheses {
17460            self.expect_token(&Token::RParen)?;
17461        }
17462
17463        let into = if self.parse_keyword(Keyword::INTO) {
17464            self.parse_comma_separated(Self::parse_identifier)?
17465        } else {
17466            vec![]
17467        };
17468
17469        let using = if self.parse_keyword(Keyword::USING) {
17470            self.parse_comma_separated(Self::parse_expr_with_alias)?
17471        } else {
17472            vec![]
17473        };
17474
17475        let output = self.parse_keyword(Keyword::OUTPUT);
17476
17477        let default = self.parse_keyword(Keyword::DEFAULT);
17478
17479        Ok(Statement::Execute {
17480            immediate: name.is_none(),
17481            name,
17482            parameters,
17483            has_parentheses,
17484            into,
17485            using,
17486            output,
17487            default,
17488        })
17489    }
17490
17491    pub fn parse_prepare(&mut self) -> Result<Statement, ParserError> {
17492        let name = self.parse_identifier()?;
17493
17494        let mut data_types = vec![];
17495        if self.consume_token(&Token::LParen) {
17496            data_types = self.parse_comma_separated(Parser::parse_data_type)?;
17497            self.expect_token(&Token::RParen)?;
17498        }
17499
17500        self.expect_keyword_is(Keyword::AS)?;
17501        let statement = Box::new(self.parse_statement()?);
17502        Ok(Statement::Prepare {
17503            name,
17504            data_types,
17505            statement,
17506        })
17507    }
17508
17509    pub fn parse_unload(&mut self) -> Result<Statement, ParserError> {
17510        self.expect_keyword(Keyword::UNLOAD)?;
17511        self.expect_token(&Token::LParen)?;
17512        let (query, query_text) = if matches!(self.peek_token().token, Token::SingleQuotedString(_))
17513        {
17514            (None, Some(self.parse_literal_string()?))
17515        } else {
17516            (Some(self.parse_query()?), None)
17517        };
17518        self.expect_token(&Token::RParen)?;
17519
17520        self.expect_keyword_is(Keyword::TO)?;
17521        let to = self.parse_identifier()?;
17522        let auth = if self.parse_keyword(Keyword::IAM_ROLE) {
17523            Some(self.parse_iam_role_kind()?)
17524        } else {
17525            None
17526        };
17527        let with = self.parse_options(Keyword::WITH)?;
17528        let mut options = vec![];
17529        while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
17530            options.push(opt);
17531        }
17532        Ok(Statement::Unload {
17533            query,
17534            query_text,
17535            to,
17536            auth,
17537            with,
17538            options,
17539        })
17540    }
17541
17542    fn parse_select_into(&mut self) -> Result<SelectInto, ParserError> {
17543        let temporary = self
17544            .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
17545            .is_some();
17546        let unlogged = self.parse_keyword(Keyword::UNLOGGED);
17547        let table = self.parse_keyword(Keyword::TABLE);
17548        let name = self.parse_object_name(false)?;
17549
17550        Ok(SelectInto {
17551            temporary,
17552            unlogged,
17553            table,
17554            name,
17555        })
17556    }
17557
17558    fn parse_pragma_value(&mut self) -> Result<Value, ParserError> {
17559        match self.parse_value()?.value {
17560            v @ Value::SingleQuotedString(_) => Ok(v),
17561            v @ Value::DoubleQuotedString(_) => Ok(v),
17562            v @ Value::Number(_, _) => Ok(v),
17563            v @ Value::Placeholder(_) => Ok(v),
17564            _ => {
17565                self.prev_token();
17566                self.expected("number or string or ? placeholder", self.peek_token())
17567            }
17568        }
17569    }
17570
17571    // PRAGMA [schema-name '.'] pragma-name [('=' pragma-value) | '(' pragma-value ')']
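    //
    // For example (illustrative, SQLite syntax):
    //   PRAGMA main.cache_size = 2000;
    //   PRAGMA journal_mode('WAL');
    //   PRAGMA busy_timeout;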
17572    pub fn parse_pragma(&mut self) -> Result<Statement, ParserError> {
17573        let name = self.parse_object_name(false)?;
17574        if self.consume_token(&Token::LParen) {
17575            let value = self.parse_pragma_value()?;
17576            self.expect_token(&Token::RParen)?;
17577            Ok(Statement::Pragma {
17578                name,
17579                value: Some(value),
17580                is_eq: false,
17581            })
17582        } else if self.consume_token(&Token::Eq) {
17583            Ok(Statement::Pragma {
17584                name,
17585                value: Some(self.parse_pragma_value()?),
17586                is_eq: true,
17587            })
17588        } else {
17589            Ok(Statement::Pragma {
17590                name,
17591                value: None,
17592                is_eq: false,
17593            })
17594        }
17595    }
17596
17597    /// `INSTALL [extension_name]`
17598    pub fn parse_install(&mut self) -> Result<Statement, ParserError> {
17599        let extension_name = self.parse_identifier()?;
17600
17601        Ok(Statement::Install { extension_name })
17602    }
17603
17604    /// Parse a SQL LOAD statement
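    ///
    /// For example (illustrative; the extension form as in DuckDB, the `LOAD DATA` form as in Hive):
    ///
    /// ```sql
    /// LOAD httpfs;
    /// LOAD DATA LOCAL INPATH '/data/sales.txt' OVERWRITE INTO TABLE sales;
    /// ```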
17605    pub fn parse_load(&mut self) -> Result<Statement, ParserError> {
17606        if self.dialect.supports_load_extension() {
17607            let extension_name = self.parse_identifier()?;
17608            Ok(Statement::Load { extension_name })
17609        } else if self.parse_keyword(Keyword::DATA) && self.dialect.supports_load_data() {
17610            let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
17611            self.expect_keyword_is(Keyword::INPATH)?;
17612            let inpath = self.parse_literal_string()?;
17613            let overwrite = self.parse_one_of_keywords(&[Keyword::OVERWRITE]).is_some();
17614            self.expect_keyword_is(Keyword::INTO)?;
17615            self.expect_keyword_is(Keyword::TABLE)?;
17616            let table_name = self.parse_object_name(false)?;
17617            let partitioned = self.parse_insert_partition()?;
17618            let table_format = self.parse_load_data_table_format()?;
17619            Ok(Statement::LoadData {
17620                local,
17621                inpath,
17622                overwrite,
17623                table_name,
17624                partitioned,
17625                table_format,
17626            })
17627        } else {
17628            self.expected(
17629                "`DATA` or an extension name after `LOAD`",
17630                self.peek_token(),
17631            )
17632        }
17633    }
17634
17635    /// ```sql
17636    /// OPTIMIZE TABLE [db.]name [ON CLUSTER cluster] [PARTITION partition | PARTITION ID 'partition_id'] [FINAL] [DEDUPLICATE [BY expression]]
17637    /// ```
17638    /// [ClickHouse](https://clickhouse.com/docs/en/sql-reference/statements/optimize)
17639    pub fn parse_optimize_table(&mut self) -> Result<Statement, ParserError> {
17640        self.expect_keyword_is(Keyword::TABLE)?;
17641        let name = self.parse_object_name(false)?;
17642        let on_cluster = self.parse_optional_on_cluster()?;
17643
17644        let partition = if self.parse_keyword(Keyword::PARTITION) {
17645            if self.parse_keyword(Keyword::ID) {
17646                Some(Partition::Identifier(self.parse_identifier()?))
17647            } else {
17648                Some(Partition::Expr(self.parse_expr()?))
17649            }
17650        } else {
17651            None
17652        };
17653
17654        let include_final = self.parse_keyword(Keyword::FINAL);
17655        let deduplicate = if self.parse_keyword(Keyword::DEDUPLICATE) {
17656            if self.parse_keyword(Keyword::BY) {
17657                Some(Deduplicate::ByExpression(self.parse_expr()?))
17658            } else {
17659                Some(Deduplicate::All)
17660            }
17661        } else {
17662            None
17663        };
17664
17665        Ok(Statement::OptimizeTable {
17666            name,
17667            on_cluster,
17668            partition,
17669            include_final,
17670            deduplicate,
17671        })
17672    }
17673
17674    /// ```sql
17675    /// CREATE [ { TEMPORARY | TEMP } ] SEQUENCE [ IF NOT EXISTS ] <sequence_name>
17676    /// ```
17677    ///
17678    /// See [Postgres docs](https://www.postgresql.org/docs/current/sql-createsequence.html) for more details.
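    ///
    /// For example (illustrative, PostgreSQL syntax):
    ///
    /// ```sql
    /// CREATE TEMPORARY SEQUENCE IF NOT EXISTS order_id_seq
    ///     AS BIGINT INCREMENT BY 1 MINVALUE 1 START WITH 100 CACHE 10 NO CYCLE
    ///     OWNED BY orders.order_id;
    /// ```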
17679    pub fn parse_create_sequence(&mut self, temporary: bool) -> Result<Statement, ParserError> {
17680        //[ IF NOT EXISTS ]
17681        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
17682        //name
17683        let name = self.parse_object_name(false)?;
17684        //[ AS data_type ]
17685        let mut data_type: Option<DataType> = None;
17686        if self.parse_keywords(&[Keyword::AS]) {
17687            data_type = Some(self.parse_data_type()?)
17688        }
17689        let sequence_options = self.parse_create_sequence_options()?;
17690        // [ OWNED BY { table_name.column_name | NONE } ]
17691        let owned_by = if self.parse_keywords(&[Keyword::OWNED, Keyword::BY]) {
17692            if self.parse_keywords(&[Keyword::NONE]) {
17693                Some(ObjectName::from(vec![Ident::new("NONE")]))
17694            } else {
17695                Some(self.parse_object_name(false)?)
17696            }
17697        } else {
17698            None
17699        };
17700        Ok(Statement::CreateSequence {
17701            temporary,
17702            if_not_exists,
17703            name,
17704            data_type,
17705            sequence_options,
17706            owned_by,
17707        })
17708    }
17709
17710    fn parse_create_sequence_options(&mut self) -> Result<Vec<SequenceOptions>, ParserError> {
17711        let mut sequence_options = vec![];
17712        //[ INCREMENT [ BY ] increment ]
17713        if self.parse_keywords(&[Keyword::INCREMENT]) {
17714            if self.parse_keywords(&[Keyword::BY]) {
17715                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, true));
17716            } else {
17717                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, false));
17718            }
17719        }
17720        //[ MINVALUE minvalue | NO MINVALUE ]
17721        if self.parse_keyword(Keyword::MINVALUE) {
17722            sequence_options.push(SequenceOptions::MinValue(Some(self.parse_number()?)));
17723        } else if self.parse_keywords(&[Keyword::NO, Keyword::MINVALUE]) {
17724            sequence_options.push(SequenceOptions::MinValue(None));
17725        }
17726        //[ MAXVALUE maxvalue | NO MAXVALUE ]
17727        if self.parse_keywords(&[Keyword::MAXVALUE]) {
17728            sequence_options.push(SequenceOptions::MaxValue(Some(self.parse_number()?)));
17729        } else if self.parse_keywords(&[Keyword::NO, Keyword::MAXVALUE]) {
17730            sequence_options.push(SequenceOptions::MaxValue(None));
17731        }
17732
17733        //[ START [ WITH ] start ]
17734        if self.parse_keywords(&[Keyword::START]) {
17735            if self.parse_keywords(&[Keyword::WITH]) {
17736                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, true));
17737            } else {
17738                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, false));
17739            }
17740        }
17741        //[ CACHE cache ]
17742        if self.parse_keywords(&[Keyword::CACHE]) {
17743            sequence_options.push(SequenceOptions::Cache(self.parse_number()?));
17744        }
17745        // [ [ NO ] CYCLE ]
17746        if self.parse_keywords(&[Keyword::NO, Keyword::CYCLE]) {
17747            sequence_options.push(SequenceOptions::Cycle(true));
17748        } else if self.parse_keywords(&[Keyword::CYCLE]) {
17749            sequence_options.push(SequenceOptions::Cycle(false));
17750        }
17751
17752        Ok(sequence_options)
17753    }
17754
17755    /// Parse a `CREATE SERVER` statement.
17756    ///
17757    /// See [Statement::CreateServer]
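    ///
    /// For example (illustrative, PostgreSQL syntax):
    ///
    /// ```sql
    /// CREATE SERVER IF NOT EXISTS film_server
    ///     FOREIGN DATA WRAPPER postgres_fdw
    ///     OPTIONS (host 'localhost', dbname 'films', port '5432');
    /// ```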
17758    pub fn parse_pg_create_server(&mut self) -> Result<Statement, ParserError> {
17759        let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
17760        let name = self.parse_object_name(false)?;
17761
17762        let server_type = if self.parse_keyword(Keyword::TYPE) {
17763            Some(self.parse_identifier()?)
17764        } else {
17765            None
17766        };
17767
17768        let version = if self.parse_keyword(Keyword::VERSION) {
17769            Some(self.parse_identifier()?)
17770        } else {
17771            None
17772        };
17773
17774        self.expect_keywords(&[Keyword::FOREIGN, Keyword::DATA, Keyword::WRAPPER])?;
17775        let foreign_data_wrapper = self.parse_object_name(false)?;
17776
17777        let mut options = None;
17778        if self.parse_keyword(Keyword::OPTIONS) {
17779            self.expect_token(&Token::LParen)?;
17780            options = Some(self.parse_comma_separated(|p| {
17781                let key = p.parse_identifier()?;
17782                let value = p.parse_identifier()?;
17783                Ok(CreateServerOption { key, value })
17784            })?);
17785            self.expect_token(&Token::RParen)?;
17786        }
17787
17788        Ok(Statement::CreateServer(CreateServerStatement {
17789            name,
17790            if_not_exists: ine,
17791            server_type,
17792            version,
17793            foreign_data_wrapper,
17794            options,
17795        }))
17796    }
17797
17798    /// The index of the first unprocessed token.
17799    pub fn index(&self) -> usize {
17800        self.index
17801    }
17802
17803    pub fn parse_named_window(&mut self) -> Result<NamedWindowDefinition, ParserError> {
17804        let ident = self.parse_identifier()?;
17805        self.expect_keyword_is(Keyword::AS)?;
17806
17807        let window_expr = if self.consume_token(&Token::LParen) {
17808            NamedWindowExpr::WindowSpec(self.parse_window_spec()?)
17809        } else if self.dialect.supports_window_clause_named_window_reference() {
17810            NamedWindowExpr::NamedWindow(self.parse_identifier()?)
17811        } else {
17812            return self.expected("(", self.peek_token());
17813        };
17814
17815        Ok(NamedWindowDefinition(ident, window_expr))
17816    }
17817
17818    pub fn parse_create_procedure(&mut self, or_alter: bool) -> Result<Statement, ParserError> {
17819        let name = self.parse_object_name(false)?;
17820        let params = self.parse_optional_procedure_parameters()?;
17821
17822        let language = if self.parse_keyword(Keyword::LANGUAGE) {
17823            Some(self.parse_identifier()?)
17824        } else {
17825            None
17826        };
17827
17828        self.expect_keyword_is(Keyword::AS)?;
17829
17830        let body = self.parse_conditional_statements(&[Keyword::END])?;
17831
17832        Ok(Statement::CreateProcedure {
17833            name,
17834            or_alter,
17835            params,
17836            language,
17837            body,
17838        })
17839    }
17840
17841    pub fn parse_window_spec(&mut self) -> Result<WindowSpec, ParserError> {
17842        let window_name = match self.peek_token().token {
17843            Token::Word(word) if word.keyword == Keyword::NoKeyword => {
17844                self.parse_optional_ident()?
17845            }
17846            _ => None,
17847        };
17848
17849        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
17850            self.parse_comma_separated(Parser::parse_expr)?
17851        } else {
17852            vec![]
17853        };
17854        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
17855            self.parse_comma_separated(Parser::parse_order_by_expr)?
17856        } else {
17857            vec![]
17858        };
17859
17860        let window_frame = if !self.consume_token(&Token::RParen) {
17861            let window_frame = self.parse_window_frame()?;
17862            self.expect_token(&Token::RParen)?;
17863            Some(window_frame)
17864        } else {
17865            None
17866        };
17867        Ok(WindowSpec {
17868            window_name,
17869            partition_by,
17870            order_by,
17871            window_frame,
17872        })
17873    }
17874
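    /// Parse the body of a `CREATE TYPE` statement (after the `CREATE TYPE` keywords).
    ///
    /// For example (illustrative, PostgreSQL syntax):
    ///
    /// ```sql
    /// CREATE TYPE mood AS ENUM ('sad', 'ok', 'happy');
    /// CREATE TYPE complex AS (r DOUBLE PRECISION, i DOUBLE PRECISION);
    /// CREATE TYPE floatrange AS RANGE (SUBTYPE = FLOAT8);
    /// CREATE TYPE box (INTERNALLENGTH = 16, INPUT = my_box_in, OUTPUT = my_box_out);
    /// ```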
17875    pub fn parse_create_type(&mut self) -> Result<Statement, ParserError> {
17876        let name = self.parse_object_name(false)?;
17877
17878        // Check if we have AS keyword
17879        let has_as = self.parse_keyword(Keyword::AS);
17880
17881        if !has_as {
17882            // Two cases: CREATE TYPE name; or CREATE TYPE name (options);
17883            if self.consume_token(&Token::LParen) {
17884                // CREATE TYPE name (options) - SQL definition without AS
17885                let options = self.parse_create_type_sql_definition_options()?;
17886                self.expect_token(&Token::RParen)?;
17887                return Ok(Statement::CreateType {
17888                    name,
17889                    representation: Some(UserDefinedTypeRepresentation::SqlDefinition { options }),
17890                });
17891            }
17892
17893            // CREATE TYPE name; - no representation
17894            return Ok(Statement::CreateType {
17895                name,
17896                representation: None,
17897            });
17898        }
17899
17900        // We have AS keyword
17901        if self.parse_keyword(Keyword::ENUM) {
17902            // CREATE TYPE name AS ENUM (labels)
17903            self.parse_create_type_enum(name)
17904        } else if self.parse_keyword(Keyword::RANGE) {
17905            // CREATE TYPE name AS RANGE (options)
17906            self.parse_create_type_range(name)
17907        } else if self.consume_token(&Token::LParen) {
17908            // CREATE TYPE name AS (attributes) - Composite
17909            self.parse_create_type_composite(name)
17910        } else {
17911            self.expected("ENUM, RANGE, or '(' after AS", self.peek_token())
17912        }
17913    }
17914
17915    /// Parse remainder of `CREATE TYPE AS (attributes)` statement (composite type)
17916    ///
17917    /// See [PostgreSQL](https://www.postgresql.org/docs/current/sql-createtype.html)
17918    fn parse_create_type_composite(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
17919        if self.consume_token(&Token::RParen) {
17920            // Empty composite type
17921            return Ok(Statement::CreateType {
17922                name,
17923                representation: Some(UserDefinedTypeRepresentation::Composite {
17924                    attributes: vec![],
17925                }),
17926            });
17927        }
17928
17929        let mut attributes = vec![];
17930        loop {
17931            let attr_name = self.parse_identifier()?;
17932            let attr_data_type = self.parse_data_type()?;
17933            let attr_collation = if self.parse_keyword(Keyword::COLLATE) {
17934                Some(self.parse_object_name(false)?)
17935            } else {
17936                None
17937            };
17938            attributes.push(UserDefinedTypeCompositeAttributeDef {
17939                name: attr_name,
17940                data_type: attr_data_type,
17941                collation: attr_collation,
17942            });
17943
17944            if !self.consume_token(&Token::Comma) {
17945                break;
17946            }
17947        }
17948        self.expect_token(&Token::RParen)?;
17949
17950        Ok(Statement::CreateType {
17951            name,
17952            representation: Some(UserDefinedTypeRepresentation::Composite { attributes }),
17953        })
17954    }
17955
17956    /// Parse remainder of `CREATE TYPE AS ENUM` statement (see [Statement::CreateType] and [Self::parse_create_type])
17957    ///
17958    /// See [PostgreSQL](https://www.postgresql.org/docs/current/sql-createtype.html)
17959    pub fn parse_create_type_enum(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
17960        self.expect_token(&Token::LParen)?;
17961        let labels = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
17962        self.expect_token(&Token::RParen)?;
17963
17964        Ok(Statement::CreateType {
17965            name,
17966            representation: Some(UserDefinedTypeRepresentation::Enum { labels }),
17967        })
17968    }
17969
17970    /// Parse remainder of `CREATE TYPE AS RANGE` statement
17971    ///
17972    /// See [PostgreSQL](https://www.postgresql.org/docs/current/sql-createtype.html)
17973    fn parse_create_type_range(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
17974        self.expect_token(&Token::LParen)?;
17975        let options = self.parse_comma_separated0(|p| p.parse_range_option(), Token::RParen)?;
17976        self.expect_token(&Token::RParen)?;
17977
17978        Ok(Statement::CreateType {
17979            name,
17980            representation: Some(UserDefinedTypeRepresentation::Range { options }),
17981        })
17982    }
17983
17984    /// Parse a single range option for a `CREATE TYPE AS RANGE` statement
17985    fn parse_range_option(&mut self) -> Result<UserDefinedTypeRangeOption, ParserError> {
17986        let keyword = self.parse_one_of_keywords(&[
17987            Keyword::SUBTYPE,
17988            Keyword::SUBTYPE_OPCLASS,
17989            Keyword::COLLATION,
17990            Keyword::CANONICAL,
17991            Keyword::SUBTYPE_DIFF,
17992            Keyword::MULTIRANGE_TYPE_NAME,
17993        ]);
17994
17995        match keyword {
17996            Some(Keyword::SUBTYPE) => {
17997                self.expect_token(&Token::Eq)?;
17998                let data_type = self.parse_data_type()?;
17999                Ok(UserDefinedTypeRangeOption::Subtype(data_type))
18000            }
18001            Some(Keyword::SUBTYPE_OPCLASS) => {
18002                self.expect_token(&Token::Eq)?;
18003                let name = self.parse_object_name(false)?;
18004                Ok(UserDefinedTypeRangeOption::SubtypeOpClass(name))
18005            }
18006            Some(Keyword::COLLATION) => {
18007                self.expect_token(&Token::Eq)?;
18008                let name = self.parse_object_name(false)?;
18009                Ok(UserDefinedTypeRangeOption::Collation(name))
18010            }
18011            Some(Keyword::CANONICAL) => {
18012                self.expect_token(&Token::Eq)?;
18013                let name = self.parse_object_name(false)?;
18014                Ok(UserDefinedTypeRangeOption::Canonical(name))
18015            }
18016            Some(Keyword::SUBTYPE_DIFF) => {
18017                self.expect_token(&Token::Eq)?;
18018                let name = self.parse_object_name(false)?;
18019                Ok(UserDefinedTypeRangeOption::SubtypeDiff(name))
18020            }
18021            Some(Keyword::MULTIRANGE_TYPE_NAME) => {
18022                self.expect_token(&Token::Eq)?;
18023                let name = self.parse_object_name(false)?;
18024                Ok(UserDefinedTypeRangeOption::MultirangeTypeName(name))
18025            }
18026            _ => self.expected("range option keyword", self.peek_token()),
18027        }
18028    }
18029
18030    /// Parse SQL definition options for CREATE TYPE (options)
18031    fn parse_create_type_sql_definition_options(
18032        &mut self,
18033    ) -> Result<Vec<UserDefinedTypeSqlDefinitionOption>, ParserError> {
18034        self.parse_comma_separated0(|p| p.parse_sql_definition_option(), Token::RParen)
18035    }
18036
18037    /// Parse a single SQL definition option for CREATE TYPE (options)
18038    fn parse_sql_definition_option(
18039        &mut self,
18040    ) -> Result<UserDefinedTypeSqlDefinitionOption, ParserError> {
18041        let keyword = self.parse_one_of_keywords(&[
18042            Keyword::INPUT,
18043            Keyword::OUTPUT,
18044            Keyword::RECEIVE,
18045            Keyword::SEND,
18046            Keyword::TYPMOD_IN,
18047            Keyword::TYPMOD_OUT,
18048            Keyword::ANALYZE,
18049            Keyword::SUBSCRIPT,
18050            Keyword::INTERNALLENGTH,
18051            Keyword::PASSEDBYVALUE,
18052            Keyword::ALIGNMENT,
18053            Keyword::STORAGE,
18054            Keyword::LIKE,
18055            Keyword::CATEGORY,
18056            Keyword::PREFERRED,
18057            Keyword::DEFAULT,
18058            Keyword::ELEMENT,
18059            Keyword::DELIMITER,
18060            Keyword::COLLATABLE,
18061        ]);
18062
18063        match keyword {
18064            Some(Keyword::INPUT) => {
18065                self.expect_token(&Token::Eq)?;
18066                let name = self.parse_object_name(false)?;
18067                Ok(UserDefinedTypeSqlDefinitionOption::Input(name))
18068            }
18069            Some(Keyword::OUTPUT) => {
18070                self.expect_token(&Token::Eq)?;
18071                let name = self.parse_object_name(false)?;
18072                Ok(UserDefinedTypeSqlDefinitionOption::Output(name))
18073            }
18074            Some(Keyword::RECEIVE) => {
18075                self.expect_token(&Token::Eq)?;
18076                let name = self.parse_object_name(false)?;
18077                Ok(UserDefinedTypeSqlDefinitionOption::Receive(name))
18078            }
18079            Some(Keyword::SEND) => {
18080                self.expect_token(&Token::Eq)?;
18081                let name = self.parse_object_name(false)?;
18082                Ok(UserDefinedTypeSqlDefinitionOption::Send(name))
18083            }
18084            Some(Keyword::TYPMOD_IN) => {
18085                self.expect_token(&Token::Eq)?;
18086                let name = self.parse_object_name(false)?;
18087                Ok(UserDefinedTypeSqlDefinitionOption::TypmodIn(name))
18088            }
18089            Some(Keyword::TYPMOD_OUT) => {
18090                self.expect_token(&Token::Eq)?;
18091                let name = self.parse_object_name(false)?;
18092                Ok(UserDefinedTypeSqlDefinitionOption::TypmodOut(name))
18093            }
18094            Some(Keyword::ANALYZE) => {
18095                self.expect_token(&Token::Eq)?;
18096                let name = self.parse_object_name(false)?;
18097                Ok(UserDefinedTypeSqlDefinitionOption::Analyze(name))
18098            }
18099            Some(Keyword::SUBSCRIPT) => {
18100                self.expect_token(&Token::Eq)?;
18101                let name = self.parse_object_name(false)?;
18102                Ok(UserDefinedTypeSqlDefinitionOption::Subscript(name))
18103            }
18104            Some(Keyword::INTERNALLENGTH) => {
18105                self.expect_token(&Token::Eq)?;
18106                if self.parse_keyword(Keyword::VARIABLE) {
18107                    Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
18108                        UserDefinedTypeInternalLength::Variable,
18109                    ))
18110                } else {
18111                    let value = self.parse_literal_uint()?;
18112                    Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
18113                        UserDefinedTypeInternalLength::Fixed(value),
18114                    ))
18115                }
18116            }
18117            Some(Keyword::PASSEDBYVALUE) => Ok(UserDefinedTypeSqlDefinitionOption::PassedByValue),
18118            Some(Keyword::ALIGNMENT) => {
18119                self.expect_token(&Token::Eq)?;
18120                let align_keyword = self.parse_one_of_keywords(&[
18121                    Keyword::CHAR,
18122                    Keyword::INT2,
18123                    Keyword::INT4,
18124                    Keyword::DOUBLE,
18125                ]);
18126                match align_keyword {
18127                    Some(Keyword::CHAR) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
18128                        Alignment::Char,
18129                    )),
18130                    Some(Keyword::INT2) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
18131                        Alignment::Int2,
18132                    )),
18133                    Some(Keyword::INT4) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
18134                        Alignment::Int4,
18135                    )),
18136                    Some(Keyword::DOUBLE) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
18137                        Alignment::Double,
18138                    )),
18139                    _ => self.expected(
18140                        "alignment value (char, int2, int4, or double)",
18141                        self.peek_token(),
18142                    ),
18143                }
18144            }
18145            Some(Keyword::STORAGE) => {
18146                self.expect_token(&Token::Eq)?;
18147                let storage_keyword = self.parse_one_of_keywords(&[
18148                    Keyword::PLAIN,
18149                    Keyword::EXTERNAL,
18150                    Keyword::EXTENDED,
18151                    Keyword::MAIN,
18152                ]);
18153                match storage_keyword {
18154                    Some(Keyword::PLAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
18155                        UserDefinedTypeStorage::Plain,
18156                    )),
18157                    Some(Keyword::EXTERNAL) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
18158                        UserDefinedTypeStorage::External,
18159                    )),
18160                    Some(Keyword::EXTENDED) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
18161                        UserDefinedTypeStorage::Extended,
18162                    )),
18163                    Some(Keyword::MAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
18164                        UserDefinedTypeStorage::Main,
18165                    )),
18166                    _ => self.expected(
18167                        "storage value (plain, external, extended, or main)",
18168                        self.peek_token(),
18169                    ),
18170                }
18171            }
18172            Some(Keyword::LIKE) => {
18173                self.expect_token(&Token::Eq)?;
18174                let name = self.parse_object_name(false)?;
18175                Ok(UserDefinedTypeSqlDefinitionOption::Like(name))
18176            }
18177            Some(Keyword::CATEGORY) => {
18178                self.expect_token(&Token::Eq)?;
18179                let category_str = self.parse_literal_string()?;
18180                let category_char = category_str.chars().next().ok_or_else(|| {
18181                    ParserError::ParserError(
18182                        "CATEGORY value must be a single character".to_string(),
18183                    )
18184                })?;
18185                Ok(UserDefinedTypeSqlDefinitionOption::Category(category_char))
18186            }
18187            Some(Keyword::PREFERRED) => {
18188                self.expect_token(&Token::Eq)?;
18189                let value =
18190                    self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
18191                Ok(UserDefinedTypeSqlDefinitionOption::Preferred(value))
18192            }
18193            Some(Keyword::DEFAULT) => {
18194                self.expect_token(&Token::Eq)?;
18195                let expr = self.parse_expr()?;
18196                Ok(UserDefinedTypeSqlDefinitionOption::Default(expr))
18197            }
18198            Some(Keyword::ELEMENT) => {
18199                self.expect_token(&Token::Eq)?;
18200                let data_type = self.parse_data_type()?;
18201                Ok(UserDefinedTypeSqlDefinitionOption::Element(data_type))
18202            }
18203            Some(Keyword::DELIMITER) => {
18204                self.expect_token(&Token::Eq)?;
18205                let delimiter = self.parse_literal_string()?;
18206                Ok(UserDefinedTypeSqlDefinitionOption::Delimiter(delimiter))
18207            }
18208            Some(Keyword::COLLATABLE) => {
18209                self.expect_token(&Token::Eq)?;
18210                let value =
18211                    self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
18212                Ok(UserDefinedTypeSqlDefinitionOption::Collatable(value))
18213            }
18214            _ => self.expected("SQL definition option keyword", self.peek_token()),
18215        }
18216    }
18217
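    /// Parses a parenthesized, possibly empty, comma-separated list of
    /// identifiers, e.g. `(a, b, c)` or `()` (illustrative inputs; the empty
    /// list is allowed via `parse_comma_separated0` below).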
18218    fn parse_parenthesized_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
18219        self.expect_token(&Token::LParen)?;
18220        let idents = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
18221        self.expect_token(&Token::RParen)?;
18222        Ok(idents)
18223    }
18224
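    /// Parses a MySQL-style column position clause, i.e. `FIRST` or
    /// `AFTER <column>`, returning `None` for dialects other than MySQL and
    /// the generic dialect. Illustrative context (not parsed here in full):
    /// `ALTER TABLE t ADD COLUMN c INT AFTER a`.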
18225    fn parse_column_position(&mut self) -> Result<Option<MySQLColumnPosition>, ParserError> {
18226        if dialect_of!(self is MySqlDialect | GenericDialect) {
18227            if self.parse_keyword(Keyword::FIRST) {
18228                Ok(Some(MySQLColumnPosition::First))
18229            } else if self.parse_keyword(Keyword::AFTER) {
18230                let ident = self.parse_identifier()?;
18231                Ok(Some(MySQLColumnPosition::After(ident)))
18232            } else {
18233                Ok(None)
18234            }
18235        } else {
18236            Ok(None)
18237        }
18238    }
18239
18240    /// Parse [Statement::Print]
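    ///
    /// The message is parsed as an arbitrary expression, e.g. the T-SQL style
    /// `PRINT 'hello'` (illustrative example).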
18241    fn parse_print(&mut self) -> Result<Statement, ParserError> {
18242        Ok(Statement::Print(PrintStatement {
18243            message: Box::new(self.parse_expr()?),
18244        }))
18245    }
18246
18247    /// Parse [Statement::Return]
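    ///
    /// Handles both `RETURN` and `RETURN <expr>`; the optional expression is
    /// probed with `maybe_parse` below.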
18248    fn parse_return(&mut self) -> Result<Statement, ParserError> {
18249        match self.maybe_parse(|p| p.parse_expr())? {
18250            Some(expr) => Ok(Statement::Return(ReturnStatement {
18251                value: Some(ReturnStatementValue::Expr(expr)),
18252            })),
18253            None => Ok(Statement::Return(ReturnStatement { value: None })),
18254        }
18255    }
18256
18257    /// Parse an `EXPORT DATA` statement.
18258    ///
18259    /// See [Statement::ExportData]
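    ///
    /// Syntax sketch inferred from the parsing steps below (option names and
    /// values are placeholders):
    /// ```sql
    /// EXPORT DATA [WITH CONNECTION connection_name] OPTIONS (key = value, ...) AS query
    /// ```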
18260    fn parse_export_data(&mut self) -> Result<Statement, ParserError> {
18261        self.expect_keywords(&[Keyword::EXPORT, Keyword::DATA])?;
18262
18263        let connection = if self.parse_keywords(&[Keyword::WITH, Keyword::CONNECTION]) {
18264            Some(self.parse_object_name(false)?)
18265        } else {
18266            None
18267        };
18268        self.expect_keyword(Keyword::OPTIONS)?;
18269        self.expect_token(&Token::LParen)?;
18270        let options = self.parse_comma_separated(|p| p.parse_sql_option())?;
18271        self.expect_token(&Token::RParen)?;
18272        self.expect_keyword(Keyword::AS)?;
18273        let query = self.parse_query()?;
18274        Ok(Statement::ExportData(ExportData {
18275            options,
18276            query,
18277            connection,
18278        }))
18279    }
18280
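    /// Parses a `VACUUM` statement. Grammar sketch inferred from the keyword
    /// checks below (identifiers are placeholders):
    /// ```sql
    /// VACUUM [FULL] [SORT ONLY] [DELETE ONLY] [REINDEX] [RECLUSTER]
    ///        [table_name [TO threshold PERCENT] [BOOST]]
    /// ```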
18281    fn parse_vacuum(&mut self) -> Result<Statement, ParserError> {
18282        self.expect_keyword(Keyword::VACUUM)?;
18283        let full = self.parse_keyword(Keyword::FULL);
18284        let sort_only = self.parse_keywords(&[Keyword::SORT, Keyword::ONLY]);
18285        let delete_only = self.parse_keywords(&[Keyword::DELETE, Keyword::ONLY]);
18286        let reindex = self.parse_keyword(Keyword::REINDEX);
18287        let recluster = self.parse_keyword(Keyword::RECLUSTER);
18288        let (table_name, threshold, boost) =
18289            match self.maybe_parse(|p| p.parse_object_name(false))? {
18290                Some(table_name) => {
18291                    let threshold = if self.parse_keyword(Keyword::TO) {
18292                        let value = self.parse_value()?;
18293                        self.expect_keyword(Keyword::PERCENT)?;
18294                        Some(value.value)
18295                    } else {
18296                        None
18297                    };
18298                    let boost = self.parse_keyword(Keyword::BOOST);
18299                    (Some(table_name), threshold, boost)
18300                }
18301                _ => (None, None, false),
18302            };
18303        Ok(Statement::Vacuum(VacuumStatement {
18304            full,
18305            sort_only,
18306            delete_only,
18307            reindex,
18308            recluster,
18309            table_name,
18310            threshold,
18311            boost,
18312        }))
18313    }
18314
18315    /// Consume the parser and return its underlying token buffer
18316    pub fn into_tokens(self) -> Vec<TokenWithSpan> {
18317        self.tokens
18318    }
18319
18320    /// Returns true if the next keyword indicates a subquery, i.e. SELECT or WITH
18321    fn peek_sub_query(&mut self) -> bool {
18322        if self
18323            .parse_one_of_keywords(&[Keyword::SELECT, Keyword::WITH])
18324            .is_some()
18325        {
18326            self.prev_token();
18327            return true;
18328        }
18329        false
18330    }
18331
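    /// Parses the trailing options of a `SHOW` statement: an optional
    /// LIKE/WHERE filter, an optional `FROM`/`IN` clause, `STARTS WITH`,
    /// `LIMIT`, and a trailing `FROM <offset>`. Whether the filter is parsed
    /// before or after the `FROM`/`IN` clause depends on
    /// `supports_show_like_before_in`, as in the branches below.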
18332    pub(crate) fn parse_show_stmt_options(&mut self) -> Result<ShowStatementOptions, ParserError> {
18333        let show_in;
18334        let mut filter_position = None;
18335        if self.dialect.supports_show_like_before_in() {
18336            if let Some(filter) = self.parse_show_statement_filter()? {
18337                filter_position = Some(ShowStatementFilterPosition::Infix(filter));
18338            }
18339            show_in = self.maybe_parse_show_stmt_in()?;
18340        } else {
18341            show_in = self.maybe_parse_show_stmt_in()?;
18342            if let Some(filter) = self.parse_show_statement_filter()? {
18343                filter_position = Some(ShowStatementFilterPosition::Suffix(filter));
18344            }
18345        }
18346        let starts_with = self.maybe_parse_show_stmt_starts_with()?;
18347        let limit = self.maybe_parse_show_stmt_limit()?;
18348        let from = self.maybe_parse_show_stmt_from()?;
18349        Ok(ShowStatementOptions {
18350            filter_position,
18351            show_in,
18352            starts_with,
18353            limit,
18354            limit_from: from,
18355        })
18356    }
18357
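    /// Parses the optional `FROM`/`IN` clause of a `SHOW` statement, e.g.
    /// `IN DATABASE db1` or the MySQL-style `FROM tbl_name FROM db_name`
    /// (illustrative names, derived from the branches below).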
18358    fn maybe_parse_show_stmt_in(&mut self) -> Result<Option<ShowStatementIn>, ParserError> {
18359        let clause = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) {
18360            Some(Keyword::FROM) => ShowStatementInClause::FROM,
18361            Some(Keyword::IN) => ShowStatementInClause::IN,
18362            None => return Ok(None),
18363            _ => return self.expected("FROM or IN", self.peek_token()),
18364        };
18365
18366        let (parent_type, parent_name) = match self.parse_one_of_keywords(&[
18367            Keyword::ACCOUNT,
18368            Keyword::DATABASE,
18369            Keyword::SCHEMA,
18370            Keyword::TABLE,
18371            Keyword::VIEW,
18372        ]) {
18373            // If one of these keywords appears next, there is no parent name
18374            Some(Keyword::DATABASE)
18375                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
18376                    | self.peek_keyword(Keyword::LIMIT) =>
18377            {
18378                (Some(ShowStatementInParentType::Database), None)
18379            }
18380            Some(Keyword::SCHEMA)
18381                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
18382                    | self.peek_keyword(Keyword::LIMIT) =>
18383            {
18384                (Some(ShowStatementInParentType::Schema), None)
18385            }
18386            Some(parent_kw) => {
18387                // The parent name here is still optional, for example:
18388                // SHOW TABLES IN ACCOUNT, so parsing the object name
18389                // may fail because the statement ends.
18390                let parent_name = self.maybe_parse(|p| p.parse_object_name(false))?;
18391                match parent_kw {
18392                    Keyword::ACCOUNT => (Some(ShowStatementInParentType::Account), parent_name),
18393                    Keyword::DATABASE => (Some(ShowStatementInParentType::Database), parent_name),
18394                    Keyword::SCHEMA => (Some(ShowStatementInParentType::Schema), parent_name),
18395                    Keyword::TABLE => (Some(ShowStatementInParentType::Table), parent_name),
18396                    Keyword::VIEW => (Some(ShowStatementInParentType::View), parent_name),
18397                    _ => {
18398                        return self.expected(
18399                            "one of ACCOUNT, DATABASE, SCHEMA, TABLE or VIEW",
18400                            self.peek_token(),
18401                        )
18402                    }
18403                }
18404            }
18405            None => {
18406                // Parsing MySQL style FROM tbl_name FROM db_name
18407                // which is equivalent to FROM db_name.tbl_name
18408                let mut parent_name = self.parse_object_name(false)?;
18409                if self
18410                    .parse_one_of_keywords(&[Keyword::FROM, Keyword::IN])
18411                    .is_some()
18412                {
18413                    parent_name
18414                        .0
18415                        .insert(0, ObjectNamePart::Identifier(self.parse_identifier()?));
18416                }
18417                (None, Some(parent_name))
18418            }
18419        };
18420
18421        Ok(Some(ShowStatementIn {
18422            clause,
18423            parent_type,
18424            parent_name,
18425        }))
18426    }
18427
18428    fn maybe_parse_show_stmt_starts_with(&mut self) -> Result<Option<Value>, ParserError> {
18429        if self.parse_keywords(&[Keyword::STARTS, Keyword::WITH]) {
18430            Ok(Some(self.parse_value()?.value))
18431        } else {
18432            Ok(None)
18433        }
18434    }
18435
18436    fn maybe_parse_show_stmt_limit(&mut self) -> Result<Option<Expr>, ParserError> {
18437        if self.parse_keyword(Keyword::LIMIT) {
18438            Ok(self.parse_limit()?)
18439        } else {
18440            Ok(None)
18441        }
18442    }
18443
18444    fn maybe_parse_show_stmt_from(&mut self) -> Result<Option<Value>, ParserError> {
18445        if self.parse_keyword(Keyword::FROM) {
18446            Ok(Some(self.parse_value()?.value))
18447        } else {
18448            Ok(None)
18449        }
18450    }
18451
18452    pub(crate) fn in_column_definition_state(&self) -> bool {
18453        matches!(self.state, ColumnDefinition)
18454    }
18455
18456    /// Parses options provided in key-value format.
18457    ///
18458    /// * `parenthesized` - true if the options are enclosed in parentheses
18459    /// * `end_words` - a list of keywords, any of which indicates the end of the options section
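    ///
    /// Illustrative input shapes (option names are placeholders):
    /// `(KEY_A = 'v1', KEY_B = 2)` with `parenthesized = true`, or
    /// `KEY_A = 'v1' KEY_B = 2` when not parenthesized.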
18460    pub(crate) fn parse_key_value_options(
18461        &mut self,
18462        parenthesized: bool,
18463        end_words: &[Keyword],
18464    ) -> Result<KeyValueOptions, ParserError> {
18465        let mut options: Vec<KeyValueOption> = Vec::new();
18466        let mut delimiter = KeyValueOptionsDelimiter::Space;
18467        if parenthesized {
18468            self.expect_token(&Token::LParen)?;
18469        }
18470        loop {
18471            match self.next_token().token {
18472                Token::RParen => {
18473                    if parenthesized {
18474                        break;
18475                    } else {
18476                        return self.expected("another option or EOF", self.peek_token());
18477                    }
18478                }
18479                Token::EOF => break,
18480                Token::Comma => {
18481                    delimiter = KeyValueOptionsDelimiter::Comma;
18482                    continue;
18483                }
18484                Token::Word(w) if !end_words.contains(&w.keyword) => {
18485                    options.push(self.parse_key_value_option(&w)?)
18486                }
18487                Token::Word(w) if end_words.contains(&w.keyword) => {
18488                    self.prev_token();
18489                    break;
18490                }
18491                _ => return self.expected("another option, EOF, Comma or ')'", self.peek_token()),
18492            };
18493        }
18494
18495        Ok(KeyValueOptions { delimiter, options })
18496    }
18497
18498    /// Parses a `KEY = VALUE` construct based on the specified key
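    ///
    /// The value may be a quoted string, a boolean, a number, a bare word, a
    /// parenthesized list of values, or nested parenthesized key-value
    /// options, e.g. `KEY_A = ('v1', 'v2')` (placeholder name), mirroring the
    /// match arms below.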
18499    pub(crate) fn parse_key_value_option(
18500        &mut self,
18501        key: &Word,
18502    ) -> Result<KeyValueOption, ParserError> {
18503        self.expect_token(&Token::Eq)?;
18504        match self.peek_token().token {
18505            Token::SingleQuotedString(_) => Ok(KeyValueOption {
18506                option_name: key.value.clone(),
18507                option_value: KeyValueOptionKind::Single(self.parse_value()?.into()),
18508            }),
18509            Token::Word(word)
18510                if word.keyword == Keyword::TRUE || word.keyword == Keyword::FALSE =>
18511            {
18512                Ok(KeyValueOption {
18513                    option_name: key.value.clone(),
18514                    option_value: KeyValueOptionKind::Single(self.parse_value()?.into()),
18515                })
18516            }
18517            Token::Number(..) => Ok(KeyValueOption {
18518                option_name: key.value.clone(),
18519                option_value: KeyValueOptionKind::Single(self.parse_value()?.into()),
18520            }),
18521            Token::Word(word) => {
18522                self.next_token();
18523                Ok(KeyValueOption {
18524                    option_name: key.value.clone(),
18525                    option_value: KeyValueOptionKind::Single(Value::Placeholder(
18526                        word.value.clone(),
18527                    )),
18528                })
18529            }
18530            Token::LParen => {
18531                // Can be a list of values or a list of key value properties.
18532                // Try to parse a list of values and if that fails, try to parse
18533                // a list of key-value properties.
18534                match self.maybe_parse(|parser| {
18535                    parser.expect_token(&Token::LParen)?;
18536                    let values = parser.parse_comma_separated0(|p| p.parse_value(), Token::RParen);
18537                    parser.expect_token(&Token::RParen)?;
18538                    values
18539                })? {
18540                    Some(values) => {
18541                        let values = values.into_iter().map(|v| v.value).collect();
18542                        Ok(KeyValueOption {
18543                            option_name: key.value.clone(),
18544                            option_value: KeyValueOptionKind::Multi(values),
18545                        })
18546                    }
18547                    None => Ok(KeyValueOption {
18548                        option_name: key.value.clone(),
18549                        option_value: KeyValueOptionKind::KeyValueOptions(Box::new(
18550                            self.parse_key_value_options(true, &[])?,
18551                        )),
18552                    }),
18553                }
18554            }
18555            _ => self.expected("option value", self.peek_token()),
18556        }
18557    }
18558
18559    /// Parses a RESET statement
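    ///
    /// Accepts `RESET ALL` or `RESET <configuration_parameter>`, e.g.
    /// `RESET search_path` (illustrative parameter name).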
18560    fn parse_reset(&mut self) -> Result<Statement, ParserError> {
18561        if self.parse_keyword(Keyword::ALL) {
18562            return Ok(Statement::Reset(ResetStatement { reset: Reset::ALL }));
18563        }
18564
18565        let obj = self.parse_object_name(false)?;
18566        Ok(Statement::Reset(ResetStatement {
18567            reset: Reset::ConfigurationParameter(obj),
18568        }))
18569    }
18570}
18571
18572fn maybe_prefixed_expr(expr: Expr, prefix: Option<Ident>) -> Expr {
18573    if let Some(prefix) = prefix {
18574        Expr::Prefixed {
18575            prefix,
18576            value: Box::new(expr),
18577        }
18578    } else {
18579        expr
18580    }
18581}
18582
18583impl Word {
18584    #[deprecated(since = "0.54.0", note = "please use `into_ident` instead")]
18585    pub fn to_ident(&self, span: Span) -> Ident {
18586        Ident {
18587            value: self.value.clone(),
18588            quote_style: self.quote_style,
18589            span,
18590        }
18591    }
18592
18593    /// Convert this word into an [`Ident`] identifier
18594    pub fn into_ident(self, span: Span) -> Ident {
18595        Ident {
18596            value: self.value,
18597            quote_style: self.quote_style,
18598            span,
18599        }
18600    }
18601}
18602
18603#[cfg(test)]
18604mod tests {
18605    use crate::test_utils::{all_dialects, TestedDialects};
18606
18607    use super::*;
18608
18609    #[test]
18610    fn test_prev_index() {
18611        let sql = "SELECT version";
18612        all_dialects().run_parser_method(sql, |parser| {
18613            assert_eq!(parser.peek_token(), Token::make_keyword("SELECT"));
18614            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
18615            parser.prev_token();
18616            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
18617            assert_eq!(parser.next_token(), Token::make_word("version", None));
18618            parser.prev_token();
18619            assert_eq!(parser.peek_token(), Token::make_word("version", None));
18620            assert_eq!(parser.next_token(), Token::make_word("version", None));
18621            assert_eq!(parser.peek_token(), Token::EOF);
18622            parser.prev_token();
18623            assert_eq!(parser.next_token(), Token::make_word("version", None));
18624            assert_eq!(parser.next_token(), Token::EOF);
18625            assert_eq!(parser.next_token(), Token::EOF);
18626            parser.prev_token();
18627        });
18628    }
18629
18630    #[test]
18631    fn test_peek_tokens() {
18632        all_dialects().run_parser_method("SELECT foo AS bar FROM baz", |parser| {
18633            assert!(matches!(
18634                parser.peek_tokens(),
18635                [Token::Word(Word {
18636                    keyword: Keyword::SELECT,
18637                    ..
18638                })]
18639            ));
18640
18641            assert!(matches!(
18642                parser.peek_tokens(),
18643                [
18644                    Token::Word(Word {
18645                        keyword: Keyword::SELECT,
18646                        ..
18647                    }),
18648                    Token::Word(_),
18649                    Token::Word(Word {
18650                        keyword: Keyword::AS,
18651                        ..
18652                    }),
18653                ]
18654            ));
18655
18656            for _ in 0..4 {
18657                parser.next_token();
18658            }
18659
18660            assert!(matches!(
18661                parser.peek_tokens(),
18662                [
18663                    Token::Word(Word {
18664                        keyword: Keyword::FROM,
18665                        ..
18666                    }),
18667                    Token::Word(_),
18668                    Token::EOF,
18669                    Token::EOF,
18670                ]
18671            ))
18672        })
18673    }
18674
18675    #[cfg(test)]
18676    mod test_parse_data_type {
18677        use crate::ast::{
18678            CharLengthUnits, CharacterLength, DataType, ExactNumberInfo, ObjectName, TimezoneInfo,
18679        };
18680        use crate::dialect::{AnsiDialect, GenericDialect, PostgreSqlDialect};
18681        use crate::test_utils::TestedDialects;
18682
18683        macro_rules! test_parse_data_type {
18684            ($dialect:expr, $input:expr, $expected_type:expr $(,)?) => {{
18685                $dialect.run_parser_method(&*$input, |parser| {
18686                    let data_type = parser.parse_data_type().unwrap();
18687                    assert_eq!($expected_type, data_type);
18688                    assert_eq!($input.to_string(), data_type.to_string());
18689                });
18690            }};
18691        }
18692
18693        #[test]
18694        fn test_ansi_character_string_types() {
18695            // Character string types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#character-string-type>
18696            let dialect =
18697                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
18698
18699            test_parse_data_type!(dialect, "CHARACTER", DataType::Character(None));
18700
18701            test_parse_data_type!(
18702                dialect,
18703                "CHARACTER(20)",
18704                DataType::Character(Some(CharacterLength::IntegerLength {
18705                    length: 20,
18706                    unit: None
18707                }))
18708            );
18709
18710            test_parse_data_type!(
18711                dialect,
18712                "CHARACTER(20 CHARACTERS)",
18713                DataType::Character(Some(CharacterLength::IntegerLength {
18714                    length: 20,
18715                    unit: Some(CharLengthUnits::Characters)
18716                }))
18717            );
18718
18719            test_parse_data_type!(
18720                dialect,
18721                "CHARACTER(20 OCTETS)",
18722                DataType::Character(Some(CharacterLength::IntegerLength {
18723                    length: 20,
18724                    unit: Some(CharLengthUnits::Octets)
18725                }))
18726            );
18727
18728            test_parse_data_type!(dialect, "CHAR", DataType::Char(None));
18729
18730            test_parse_data_type!(
18731                dialect,
18732                "CHAR(20)",
18733                DataType::Char(Some(CharacterLength::IntegerLength {
18734                    length: 20,
18735                    unit: None
18736                }))
18737            );
18738
18739            test_parse_data_type!(
18740                dialect,
18741                "CHAR(20 CHARACTERS)",
18742                DataType::Char(Some(CharacterLength::IntegerLength {
18743                    length: 20,
18744                    unit: Some(CharLengthUnits::Characters)
18745                }))
18746            );
18747
18748            test_parse_data_type!(
18749                dialect,
18750                "CHAR(20 OCTETS)",
18751                DataType::Char(Some(CharacterLength::IntegerLength {
18752                    length: 20,
18753                    unit: Some(CharLengthUnits::Octets)
18754                }))
18755            );
18756
18757            test_parse_data_type!(
18758                dialect,
18759                "CHARACTER VARYING(20)",
18760                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
18761                    length: 20,
18762                    unit: None
18763                }))
18764            );
18765
18766            test_parse_data_type!(
18767                dialect,
18768                "CHARACTER VARYING(20 CHARACTERS)",
18769                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
18770                    length: 20,
18771                    unit: Some(CharLengthUnits::Characters)
18772                }))
18773            );
18774
18775            test_parse_data_type!(
18776                dialect,
18777                "CHARACTER VARYING(20 OCTETS)",
18778                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
18779                    length: 20,
18780                    unit: Some(CharLengthUnits::Octets)
18781                }))
18782            );
18783
18784            test_parse_data_type!(
18785                dialect,
18786                "CHAR VARYING(20)",
18787                DataType::CharVarying(Some(CharacterLength::IntegerLength {
18788                    length: 20,
18789                    unit: None
18790                }))
18791            );
18792
18793            test_parse_data_type!(
18794                dialect,
18795                "CHAR VARYING(20 CHARACTERS)",
18796                DataType::CharVarying(Some(CharacterLength::IntegerLength {
18797                    length: 20,
18798                    unit: Some(CharLengthUnits::Characters)
18799                }))
18800            );
18801
18802            test_parse_data_type!(
18803                dialect,
18804                "CHAR VARYING(20 OCTETS)",
18805                DataType::CharVarying(Some(CharacterLength::IntegerLength {
18806                    length: 20,
18807                    unit: Some(CharLengthUnits::Octets)
18808                }))
18809            );
18810
18811            test_parse_data_type!(
18812                dialect,
18813                "VARCHAR(20)",
18814                DataType::Varchar(Some(CharacterLength::IntegerLength {
18815                    length: 20,
18816                    unit: None
18817                }))
18818            );
18819        }
18820
18821        #[test]
18822        fn test_ansi_character_large_object_types() {
18823            // Character large object types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#character-large-object-length>
18824            let dialect =
18825                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
18826
18827            test_parse_data_type!(
18828                dialect,
18829                "CHARACTER LARGE OBJECT",
18830                DataType::CharacterLargeObject(None)
18831            );
18832            test_parse_data_type!(
18833                dialect,
18834                "CHARACTER LARGE OBJECT(20)",
18835                DataType::CharacterLargeObject(Some(20))
18836            );
18837
18838            test_parse_data_type!(
18839                dialect,
18840                "CHAR LARGE OBJECT",
18841                DataType::CharLargeObject(None)
18842            );
18843            test_parse_data_type!(
18844                dialect,
18845                "CHAR LARGE OBJECT(20)",
18846                DataType::CharLargeObject(Some(20))
18847            );
18848
18849            test_parse_data_type!(dialect, "CLOB", DataType::Clob(None));
18850            test_parse_data_type!(dialect, "CLOB(20)", DataType::Clob(Some(20)));
18851        }
18852
18853        #[test]
18854        fn test_parse_custom_types() {
18855            let dialect =
18856                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
18857
18858            test_parse_data_type!(
18859                dialect,
18860                "GEOMETRY",
18861                DataType::Custom(ObjectName::from(vec!["GEOMETRY".into()]), vec![])
18862            );
18863
18864            test_parse_data_type!(
18865                dialect,
18866                "GEOMETRY(POINT)",
18867                DataType::Custom(
18868                    ObjectName::from(vec!["GEOMETRY".into()]),
18869                    vec!["POINT".to_string()]
18870                )
18871            );
18872
18873            test_parse_data_type!(
18874                dialect,
18875                "GEOMETRY(POINT, 4326)",
18876                DataType::Custom(
18877                    ObjectName::from(vec!["GEOMETRY".into()]),
18878                    vec!["POINT".to_string(), "4326".to_string()]
18879                )
18880            );
18881        }
18882
18883        #[test]
18884        fn test_ansi_exact_numeric_types() {
18885            // Exact numeric types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#exact-numeric-type>
18886            let dialect = TestedDialects::new(vec![
18887                Box::new(GenericDialect {}),
18888                Box::new(AnsiDialect {}),
18889                Box::new(PostgreSqlDialect {}),
18890            ]);
18891
18892            test_parse_data_type!(dialect, "NUMERIC", DataType::Numeric(ExactNumberInfo::None));
18893
18894            test_parse_data_type!(
18895                dialect,
18896                "NUMERIC(2)",
18897                DataType::Numeric(ExactNumberInfo::Precision(2))
18898            );
18899
18900            test_parse_data_type!(
18901                dialect,
18902                "NUMERIC(2,10)",
18903                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(2, 10))
18904            );
18905
18906            test_parse_data_type!(dialect, "DECIMAL", DataType::Decimal(ExactNumberInfo::None));
18907
18908            test_parse_data_type!(
18909                dialect,
18910                "DECIMAL(2)",
18911                DataType::Decimal(ExactNumberInfo::Precision(2))
18912            );
18913
18914            test_parse_data_type!(
18915                dialect,
18916                "DECIMAL(2,10)",
18917                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(2, 10))
18918            );
18919
18920            test_parse_data_type!(dialect, "DEC", DataType::Dec(ExactNumberInfo::None));
18921
18922            test_parse_data_type!(
18923                dialect,
18924                "DEC(2)",
18925                DataType::Dec(ExactNumberInfo::Precision(2))
18926            );
18927
18928            test_parse_data_type!(
18929                dialect,
18930                "DEC(2,10)",
18931                DataType::Dec(ExactNumberInfo::PrecisionAndScale(2, 10))
18932            );
18933
18934            // Test negative scale values.
18935            test_parse_data_type!(
18936                dialect,
18937                "NUMERIC(10,-2)",
18938                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -2))
18939            );
18940
18941            test_parse_data_type!(
18942                dialect,
18943                "DECIMAL(1000,-10)",
18944                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(1000, -10))
18945            );
18946
18947            test_parse_data_type!(
18948                dialect,
18949                "DEC(5,-1000)",
18950                DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -1000))
18951            );
18952
18953            test_parse_data_type!(
18954                dialect,
18955                "NUMERIC(10,-5)",
18956                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -5))
18957            );
18958
18959            test_parse_data_type!(
18960                dialect,
18961                "DECIMAL(20,-10)",
18962                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(20, -10))
18963            );
18964
18965            test_parse_data_type!(
18966                dialect,
18967                "DEC(5,-2)",
18968                DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -2))
18969            );
18970
18971            dialect.run_parser_method("NUMERIC(10,+5)", |parser| {
18972                let data_type = parser.parse_data_type().unwrap();
18973                assert_eq!(
18974                    DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, 5)),
18975                    data_type
18976                );
18977                // Note: Explicit '+' sign is not preserved in output, which is correct
18978                assert_eq!("NUMERIC(10,5)", data_type.to_string());
18979            });
18980        }
18981
18982        #[test]
18983        fn test_ansi_date_type() {
18984            // Datetime types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#datetime-type>
18985            let dialect =
18986                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
18987
18988            test_parse_data_type!(dialect, "DATE", DataType::Date);
18989
18990            test_parse_data_type!(dialect, "TIME", DataType::Time(None, TimezoneInfo::None));
18991
18992            test_parse_data_type!(
18993                dialect,
18994                "TIME(6)",
18995                DataType::Time(Some(6), TimezoneInfo::None)
18996            );
18997
18998            test_parse_data_type!(
18999                dialect,
19000                "TIME WITH TIME ZONE",
19001                DataType::Time(None, TimezoneInfo::WithTimeZone)
19002            );
19003
19004            test_parse_data_type!(
19005                dialect,
19006                "TIME(6) WITH TIME ZONE",
19007                DataType::Time(Some(6), TimezoneInfo::WithTimeZone)
19008            );
19009
19010            test_parse_data_type!(
19011                dialect,
19012                "TIME WITHOUT TIME ZONE",
19013                DataType::Time(None, TimezoneInfo::WithoutTimeZone)
19014            );
19015
19016            test_parse_data_type!(
19017                dialect,
19018                "TIME(6) WITHOUT TIME ZONE",
19019                DataType::Time(Some(6), TimezoneInfo::WithoutTimeZone)
19020            );
19021
19022            test_parse_data_type!(
19023                dialect,
19024                "TIMESTAMP",
19025                DataType::Timestamp(None, TimezoneInfo::None)
19026            );
19027
19028            test_parse_data_type!(
19029                dialect,
19030                "TIMESTAMP(22)",
19031                DataType::Timestamp(Some(22), TimezoneInfo::None)
19032            );
19033
19034            test_parse_data_type!(
19035                dialect,
19036                "TIMESTAMP(22) WITH TIME ZONE",
19037                DataType::Timestamp(Some(22), TimezoneInfo::WithTimeZone)
19038            );
19039
19040            test_parse_data_type!(
19041                dialect,
19042                "TIMESTAMP(33) WITHOUT TIME ZONE",
19043                DataType::Timestamp(Some(33), TimezoneInfo::WithoutTimeZone)
19044            );
19045        }
19046    }
19047
19048    #[test]
19049    fn test_parse_schema_name() {
19050        // The parsed schema name should serialize back to exactly the input string, so the macro also checks the round-trip display against the input
19051        macro_rules! test_parse_schema_name {
19052            ($input:expr, $expected_name:expr $(,)?) => {{
19053                all_dialects().run_parser_method(&*$input, |parser| {
19054                    let schema_name = parser.parse_schema_name().unwrap();
19055                    // Validate that the structure is the same as expected
19056                    assert_eq!(schema_name, $expected_name);
19057                    // Validate that the input and the expected structure serialization are the same
19058                    assert_eq!(schema_name.to_string(), $input.to_string());
19059                });
19060            }};
19061        }
19062
19063        let dummy_name = ObjectName::from(vec![Ident::new("dummy_name")]);
19064        let dummy_authorization = Ident::new("dummy_authorization");
19065
19066        test_parse_schema_name!(
19067            format!("{dummy_name}"),
19068            SchemaName::Simple(dummy_name.clone())
19069        );
19070
19071        test_parse_schema_name!(
19072            format!("AUTHORIZATION {dummy_authorization}"),
19073            SchemaName::UnnamedAuthorization(dummy_authorization.clone()),
19074        );
19075        test_parse_schema_name!(
19076            format!("{dummy_name} AUTHORIZATION {dummy_authorization}"),
19077            SchemaName::NamedAuthorization(dummy_name.clone(), dummy_authorization.clone()),
19078        );
19079    }
19080
19081    #[test]
19082    fn mysql_parse_index_table_constraint() {
19083        macro_rules! test_parse_table_constraint {
19084            ($dialect:expr, $input:expr, $expected:expr $(,)?) => {{
19085                $dialect.run_parser_method(&*$input, |parser| {
19086                    let constraint = parser.parse_optional_table_constraint().unwrap().unwrap();
19087                    // Validate that the structure is the same as expected
19088                    assert_eq!(constraint, $expected);
19089                    // Validate that the input and the expected structure serialization are the same
19090                    assert_eq!(constraint.to_string(), $input.to_string());
19091                });
19092            }};
19093        }
19094
19095        fn mk_expected_col(name: &str) -> IndexColumn {
19096            IndexColumn {
19097                column: OrderByExpr {
19098                    expr: Expr::Identifier(name.into()),
19099                    options: OrderByOptions {
19100                        asc: None,
19101                        nulls_first: None,
19102                    },
19103                    with_fill: None,
19104                },
19105                operator_class: None,
19106            }
19107        }
19108
19109        let dialect =
19110            TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(MySqlDialect {})]);
19111
19112        test_parse_table_constraint!(
19113            dialect,
19114            "INDEX (c1)",
19115            IndexConstraint {
19116                display_as_key: false,
19117                name: None,
19118                index_type: None,
19119                columns: vec![mk_expected_col("c1")],
19120                index_options: vec![],
19121            }
19122            .into()
19123        );
19124
19125        test_parse_table_constraint!(
19126            dialect,
19127            "KEY (c1)",
19128            IndexConstraint {
19129                display_as_key: true,
19130                name: None,
19131                index_type: None,
19132                columns: vec![mk_expected_col("c1")],
19133                index_options: vec![],
19134            }
19135            .into()
19136        );
19137
19138        test_parse_table_constraint!(
19139            dialect,
19140            "INDEX 'index' (c1, c2)",
19141            TableConstraint::Index(IndexConstraint {
19142                display_as_key: false,
19143                name: Some(Ident::with_quote('\'', "index")),
19144                index_type: None,
19145                columns: vec![mk_expected_col("c1"), mk_expected_col("c2")],
19146                index_options: vec![],
19147            })
19148        );
19149
19150        test_parse_table_constraint!(
19151            dialect,
19152            "INDEX USING BTREE (c1)",
19153            IndexConstraint {
19154                display_as_key: false,
19155                name: None,
19156                index_type: Some(IndexType::BTree),
19157                columns: vec![mk_expected_col("c1")],
19158                index_options: vec![],
19159            }
19160            .into()
19161        );
19162
19163        test_parse_table_constraint!(
19164            dialect,
19165            "INDEX USING HASH (c1)",
19166            IndexConstraint {
19167                display_as_key: false,
19168                name: None,
19169                index_type: Some(IndexType::Hash),
19170                columns: vec![mk_expected_col("c1")],
19171                index_options: vec![],
19172            }
19173            .into()
19174        );
19175
19176        test_parse_table_constraint!(
19177            dialect,
19178            "INDEX idx_name USING BTREE (c1)",
19179            IndexConstraint {
19180                display_as_key: false,
19181                name: Some(Ident::new("idx_name")),
19182                index_type: Some(IndexType::BTree),
19183                columns: vec![mk_expected_col("c1")],
19184                index_options: vec![],
19185            }
19186            .into()
19187        );
19188
19189        test_parse_table_constraint!(
19190            dialect,
19191            "INDEX idx_name USING HASH (c1)",
19192            IndexConstraint {
19193                display_as_key: false,
19194                name: Some(Ident::new("idx_name")),
19195                index_type: Some(IndexType::Hash),
19196                columns: vec![mk_expected_col("c1")],
19197                index_options: vec![],
19198            }
19199            .into()
19200        );
19201    }
19202
19203    #[test]
19204    fn test_tokenizer_error_loc() {
19205        let sql = "foo '";
19206        let ast = Parser::parse_sql(&GenericDialect, sql);
19207        assert_eq!(
19208            ast,
19209            Err(ParserError::TokenizerError(
19210                "Unterminated string literal at Line: 1, Column: 5".to_string()
19211            ))
19212        );
19213    }
19214
19215    #[test]
19216    fn test_parser_error_loc() {
19217        let sql = "SELECT this is a syntax error";
19218        let ast = Parser::parse_sql(&GenericDialect, sql);
19219        assert_eq!(
19220            ast,
19221            Err(ParserError::ParserError(
19222                "Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS, found: a at Line: 1, Column: 16"
19223                    .to_string()
19224            ))
19225        );
19226    }
19227
19228    #[test]
19229    fn test_nested_explain_error() {
19230        let sql = "EXPLAIN EXPLAIN SELECT 1";
19231        let ast = Parser::parse_sql(&GenericDialect, sql);
19232        assert_eq!(
19233            ast,
19234            Err(ParserError::ParserError(
19235                "Explain must be root of the plan".to_string()
19236            ))
19237        );
19238    }
19239
19240    #[test]
19241    fn test_parse_multipart_identifier_positive() {
19242        let dialect = TestedDialects::new(vec![Box::new(GenericDialect {})]);
19243
19244        // parse multipart with quotes
19245        let expected = vec![
19246            Ident {
19247                value: "CATALOG".to_string(),
19248                quote_style: None,
19249                span: Span::empty(),
19250            },
19251            Ident {
19252                value: "F(o)o. \"bar".to_string(),
19253                quote_style: Some('"'),
19254                span: Span::empty(),
19255            },
19256            Ident {
19257                value: "table".to_string(),
19258                quote_style: None,
19259                span: Span::empty(),
19260            },
19261        ];
19262        dialect.run_parser_method(r#"CATALOG."F(o)o. ""bar".table"#, |parser| {
19263            let actual = parser.parse_multipart_identifier().unwrap();
19264            assert_eq!(expected, actual);
19265        });
19266
19267        // allow whitespace between ident parts
19268        let expected = vec![
19269            Ident {
19270                value: "CATALOG".to_string(),
19271                quote_style: None,
19272                span: Span::empty(),
19273            },
19274            Ident {
19275                value: "table".to_string(),
19276                quote_style: None,
19277                span: Span::empty(),
19278            },
19279        ];
19280        dialect.run_parser_method("CATALOG . table", |parser| {
19281            let actual = parser.parse_multipart_identifier().unwrap();
19282            assert_eq!(expected, actual);
19283        });
19284    }
19285
19286    #[test]
19287    fn test_parse_multipart_identifier_negative() {
19288        macro_rules! test_parse_multipart_identifier_error {
19289            ($input:expr, $expected_err:expr $(,)?) => {{
19290                all_dialects().run_parser_method(&*$input, |parser| {
19291                    let actual_err = parser.parse_multipart_identifier().unwrap_err();
19292                    assert_eq!(actual_err.to_string(), $expected_err);
19293                });
19294            }};
19295        }
19296
19297        test_parse_multipart_identifier_error!(
19298            "",
19299            "sql parser error: Empty input when parsing identifier",
19300        );
19301
19302        test_parse_multipart_identifier_error!(
19303            "*schema.table",
19304            "sql parser error: Unexpected token in identifier: *",
19305        );
19306
19307        test_parse_multipart_identifier_error!(
19308            "schema.table*",
19309            "sql parser error: Unexpected token in identifier: *",
19310        );
19311
19312        test_parse_multipart_identifier_error!(
19313            "schema.table.",
19314            "sql parser error: Trailing period in identifier",
19315        );
19316
19317        test_parse_multipart_identifier_error!(
19318            "schema.*",
19319            "sql parser error: Unexpected token following period in identifier: *",
19320        );
19321    }
19322
19323    #[test]
19324    fn test_mysql_partition_selection() {
19325        let sql = "SELECT * FROM employees PARTITION (p0, p2)";
19326        let expected = vec!["p0", "p2"];
19327
19328        let ast: Vec<Statement> = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
19329        assert_eq!(ast.len(), 1);
19330        if let Statement::Query(v) = &ast[0] {
19331            if let SetExpr::Select(select) = &*v.body {
19332                assert_eq!(select.from.len(), 1);
19333                let from: &TableWithJoins = &select.from[0];
19334                let table_factor = &from.relation;
19335                if let TableFactor::Table { partitions, .. } = table_factor {
19336                    let actual: Vec<&str> = partitions
19337                        .iter()
19338                        .map(|ident| ident.value.as_str())
19339                        .collect();
19340                    assert_eq!(expected, actual);
19341                }
19342            }
19343        } else {
19344            panic!("failed to parse MySQL partition selection");
19345        }
19346    }
19347
19348    #[test]
19349    fn test_replace_into_placeholders() {
19350        let sql = "REPLACE INTO t (a) VALUES (&a)";
19351
19352        assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
19353    }
19354
19355    #[test]
19356    fn test_replace_into_set_placeholder() {
19357        let sql = "REPLACE INTO t SET ?";
19358
19359        assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
19360    }
19361
19362    #[test]
19363    fn test_replace_incomplete() {
19364        let sql = r#"REPLACE"#;
19365
19366        assert!(Parser::parse_sql(&MySqlDialect {}, sql).is_err());
19367    }
19368
19369    #[test]
19370    fn test_placeholder_invalid_whitespace() {
19371        for w in ["  ", "/*invalid*/"] {
19372            let sql = format!("\nSELECT\n  :{w}fooBar");
19373            assert!(Parser::parse_sql(&GenericDialect, &sql).is_err());
19374        }
19375    }
19376}