sqlparser/parser/mod.rs

// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

//! SQL Parser

#[cfg(not(feature = "std"))]
use alloc::{
    boxed::Box,
    format,
    string::{String, ToString},
    vec,
    vec::Vec,
};
use core::{
    fmt::{self, Display},
    str::FromStr,
};
use helpers::attached_token::AttachedToken;

use log::debug;

use recursion::RecursionCounter;
use IsLateral::*;
use IsOptional::*;

use crate::ast::helpers::{
    key_value_options::{
        KeyValueOption, KeyValueOptionType, KeyValueOptions, KeyValueOptionsDelimiter,
    },
    stmt_create_table::{CreateTableBuilder, CreateTableConfiguration},
};
use crate::ast::Statement::CreatePolicy;
use crate::ast::*;
use crate::dialect::*;
use crate::keywords::{Keyword, ALL_KEYWORDS};
use crate::tokenizer::*;
use sqlparser::parser::ParserState::ColumnDefinition;

mod alter;

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ParserError {
    TokenizerError(String),
    ParserError(String),
    RecursionLimitExceeded,
}

// Use `Parser::expected` instead, if possible
macro_rules! parser_err {
    ($MSG:expr, $loc:expr) => {
        Err(ParserError::ParserError(format!("{}{}", $MSG, $loc)))
    };
}

#[cfg(feature = "std")]
/// Implementation of [`RecursionCounter`] when std is available
mod recursion {
    use std::cell::Cell;
    use std::rc::Rc;

    use super::ParserError;

    /// Tracks remaining recursion depth. This value is decremented on
    /// each call to [`RecursionCounter::try_decrease()`]; when it reaches 0,
    /// an error is returned.
    ///
    /// Note: uses an [`std::rc::Rc`] and [`std::cell::Cell`] to satisfy the Rust
    /// borrow checker, so that the automatically created [`DepthGuard`] can
    /// decrement the shared counter when it is dropped.
    ///
    /// Note: when the "recursive-protection" feature is enabled, this crate uses additional stack overflow protection
    /// for some of its recursive methods. See [`recursive::recursive`] for more information.
    pub(crate) struct RecursionCounter {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl RecursionCounter {
        /// Creates a [`RecursionCounter`] with the specified maximum
        /// depth
        pub fn new(remaining_depth: usize) -> Self {
            Self {
                remaining_depth: Rc::new(remaining_depth.into()),
            }
        }

        /// Decreases the remaining depth by 1.
        ///
        /// Returns [`Err`] if the remaining depth is already 0.
        ///
        /// Otherwise returns a [`DepthGuard`] which adds 1 back to the
        /// remaining depth when dropped.
        pub fn try_decrease(&self) -> Result<DepthGuard, ParserError> {
            let old_value = self.remaining_depth.get();
            // ran out of space
            if old_value == 0 {
                Err(ParserError::RecursionLimitExceeded)
            } else {
                self.remaining_depth.set(old_value - 1);
                Ok(DepthGuard::new(Rc::clone(&self.remaining_depth)))
            }
        }
    }

    /// Guard that increases the remaining depth by 1 on drop
    pub struct DepthGuard {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl DepthGuard {
        fn new(remaining_depth: Rc<Cell<usize>>) -> Self {
            Self { remaining_depth }
        }
    }
    impl Drop for DepthGuard {
        fn drop(&mut self) {
            let old_value = self.remaining_depth.get();
            self.remaining_depth.set(old_value + 1);
        }
    }
}

#[cfg(not(feature = "std"))]
mod recursion {
    /// Implementation of [`RecursionCounter`] when std is NOT available (it does
    /// not guard against stack overflow).
    ///
    /// Has the same API as the std [`RecursionCounter`] implementation,
    /// but does not actually limit recursion depth.
    pub(crate) struct RecursionCounter {}

    impl RecursionCounter {
        pub fn new(_remaining_depth: usize) -> Self {
            Self {}
        }
        pub fn try_decrease(&self) -> Result<DepthGuard, super::ParserError> {
            Ok(DepthGuard {})
        }
    }

    pub struct DepthGuard {}
}

#[derive(PartialEq, Eq)]
pub enum IsOptional {
    Optional,
    Mandatory,
}

pub enum IsLateral {
    Lateral,
    NotLateral,
}

pub enum WildcardExpr {
    Expr(Expr),
    QualifiedWildcard(ObjectName),
    Wildcard,
}

impl From<TokenizerError> for ParserError {
    fn from(e: TokenizerError) -> Self {
        ParserError::TokenizerError(e.to_string())
    }
}

impl fmt::Display for ParserError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "sql parser error: {}",
            match self {
                ParserError::TokenizerError(s) => s,
                ParserError::ParserError(s) => s,
                ParserError::RecursionLimitExceeded => "recursion limit exceeded",
            }
        )
    }
}

#[cfg(feature = "std")]
impl std::error::Error for ParserError {}

// By default, allow expressions to be nested up to this depth before returning an error
const DEFAULT_REMAINING_DEPTH: usize = 50;

// A constant EOF token that can be referenced.
const EOF_TOKEN: TokenWithSpan = TokenWithSpan {
    token: Token::EOF,
    span: Span {
        start: Location { line: 0, column: 0 },
        end: Location { line: 0, column: 0 },
    },
};

/// Composite type declarations using angle-bracket syntax can be arbitrarily
/// nested, so the following declaration is possible:
///      `ARRAY<ARRAY<INT>>`
/// However, the tokenizer recognizes the trailing `>>` as a single ShiftRight token.
/// We work around that limitation when parsing a data type by accepting
/// either a `>` or `>>` token in such cases, remembering which variant we
/// matched.
/// In the latter case, having matched a `>>`, the parent type does not look for
/// its own closing `>`, since that token was already consumed while parsing the
/// child type.
///
/// See [Parser::parse_data_type] for details
struct MatchedTrailingBracket(bool);

impl From<bool> for MatchedTrailingBracket {
    fn from(value: bool) -> Self {
        Self(value)
    }
}

/// Options that control how the [`Parser`] parses SQL text
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ParserOptions {
    pub trailing_commas: bool,
    /// Controls how literal values are unescaped. See
    /// [`Tokenizer::with_unescape`] for more details.
    pub unescape: bool,
    /// Controls if the parser expects a semi-colon token
    /// between statements. Default is `true`.
    pub require_semicolon_stmt_delimiter: bool,
}

impl Default for ParserOptions {
    fn default() -> Self {
        Self {
            trailing_commas: false,
            unescape: true,
            require_semicolon_stmt_delimiter: true,
        }
    }
}

impl ParserOptions {
    /// Create a new [`ParserOptions`]
    pub fn new() -> Self {
        Default::default()
    }

    /// Set if trailing commas are allowed.
    ///
    /// If this option is `false` (the default), the following SQL will
    /// not parse. If the option is `true`, the SQL will parse.
    ///
    /// ```sql
    ///  SELECT
    ///   foo,
    ///   bar,
    ///  FROM baz
    /// ```
    pub fn with_trailing_commas(mut self, trailing_commas: bool) -> Self {
        self.trailing_commas = trailing_commas;
        self
    }

    /// Set if literal values are unescaped. Defaults to true. See
    /// [`Tokenizer::with_unescape`] for more details.
    pub fn with_unescape(mut self, unescape: bool) -> Self {
        self.unescape = unescape;
        self
    }
}

#[derive(Copy, Clone)]
enum ParserState {
    /// The default state of the parser.
    Normal,
    /// The state when parsing a CONNECT BY expression. This allows parsing
    /// PRIOR expressions while still allowing prior as an identifier name
    /// in other contexts.
    ConnectBy,
    /// The state when parsing column definitions. This state prohibits
    /// NOT NULL as an alias for IS NOT NULL. For example:
    /// ```sql
    /// CREATE TABLE foo (abc BIGINT NOT NULL);
    /// ```
    ColumnDefinition,
}

/// A SQL Parser
///
/// This struct is the main entry point for parsing SQL queries.
///
/// # Functionality:
/// * Parsing SQL: see examples on [`Parser::new`] and [`Parser::parse_sql`]
/// * Controlling recursion: See [`Parser::with_recursion_limit`]
/// * Controlling parser options: See [`Parser::with_options`]
/// * Providing your own tokens: See [`Parser::with_tokens`]
///
/// # Internals
///
/// The parser uses a [`Tokenizer`] to tokenize the input SQL string into a
/// `Vec` of [`TokenWithSpan`]s and maintains an `index` to the current token
/// being processed. The token vec may contain multiple SQL statements.
///
/// * The "current" token is the token at `index - 1`
/// * The "next" token is the token at `index`
/// * The "previous" token is the token at `index - 2`
///
/// If `index` is equal to the length of the token stream, the 'next' token is
/// [`Token::EOF`].
///
/// For example, the SQL string "SELECT * FROM foo" will be tokenized into the
/// following tokens:
/// ```text
///  [
///    "SELECT", // token index 0
///    " ",      // whitespace
///    "*",
///    " ",
///    "FROM",
///    " ",
///    "foo"
///   ]
/// ```
pub struct Parser<'a> {
    /// The tokens
    tokens: Vec<TokenWithSpan>,
    /// The index of the first unprocessed token in [`Parser::tokens`].
    index: usize,
    /// The current state of the parser.
    state: ParserState,
    /// The SQL dialect to use.
    dialect: &'a dyn Dialect,
    /// Additional options that allow you to mix & match behavior
    /// otherwise constrained to certain dialects (e.g. trailing
    /// commas) and/or format of parse (e.g. unescaping).
    options: ParserOptions,
    /// Ensures the stack does not overflow by limiting recursion depth.
    recursion_counter: RecursionCounter,
}

impl<'a> Parser<'a> {
    /// Create a parser for a [`Dialect`]
    ///
    /// See also [`Parser::parse_sql`]
    ///
    /// Example:
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let statements = Parser::new(&dialect)
    ///   .try_with_sql("SELECT * FROM foo")?
    ///   .parse_statements()?;
    /// # Ok(())
    /// # }
    /// ```
    pub fn new(dialect: &'a dyn Dialect) -> Self {
        Self {
            tokens: vec![],
            index: 0,
            state: ParserState::Normal,
            dialect,
            recursion_counter: RecursionCounter::new(DEFAULT_REMAINING_DEPTH),
            options: ParserOptions::new().with_trailing_commas(dialect.supports_trailing_commas()),
        }
    }

    /// Specify the maximum recursion limit while parsing.
    ///
    /// [`Parser`] prevents stack overflows by returning
    /// [`ParserError::RecursionLimitExceeded`] if the parser exceeds
    /// this depth while processing the query.
    ///
    /// Example:
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let result = Parser::new(&dialect)
    ///   .with_recursion_limit(1)
    ///   .try_with_sql("SELECT * FROM foo WHERE (a OR (b OR (c OR d)))")?
    ///   .parse_statements();
    ///   assert_eq!(result, Err(ParserError::RecursionLimitExceeded));
    /// # Ok(())
    /// # }
    /// ```
    ///
    /// Note: when the "recursive-protection" feature is enabled, this crate uses additional stack overflow protection
    /// for some of its recursive methods. See [`recursive::recursive`] for more information.
    pub fn with_recursion_limit(mut self, recursion_limit: usize) -> Self {
        self.recursion_counter = RecursionCounter::new(recursion_limit);
        self
    }

    /// Specify additional parser options
    ///
    /// [`Parser`] supports additional options ([`ParserOptions`])
    /// that allow you to mix & match behavior otherwise constrained
    /// to certain dialects (e.g. trailing commas).
    ///
    /// Example:
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError, ParserOptions}, dialect::GenericDialect};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let options = ParserOptions::new()
    ///    .with_trailing_commas(true)
    ///    .with_unescape(false);
    /// let result = Parser::new(&dialect)
    ///   .with_options(options)
    ///   .try_with_sql("SELECT a, b, COUNT(*), FROM foo GROUP BY a, b,")?
    ///   .parse_statements();
    ///   assert!(matches!(result, Ok(_)));
    /// # Ok(())
    /// # }
    /// ```
    pub fn with_options(mut self, options: ParserOptions) -> Self {
        self.options = options;
        self
    }

    /// Reset this parser to parse the specified token stream
    pub fn with_tokens_with_locations(mut self, tokens: Vec<TokenWithSpan>) -> Self {
        self.tokens = tokens;
        self.index = 0;
        self
    }

    /// Reset this parser state to parse the specified tokens
    pub fn with_tokens(self, tokens: Vec<Token>) -> Self {
        // Put in dummy locations
        let tokens_with_locations: Vec<TokenWithSpan> = tokens
            .into_iter()
            .map(|token| TokenWithSpan {
                token,
                span: Span::empty(),
            })
            .collect();
        self.with_tokens_with_locations(tokens_with_locations)
    }

    /// Tokenizes the SQL string and sets this [`Parser`]'s state to
    /// parse the resulting tokens
    ///
    /// Returns an error if there was an error tokenizing the SQL string.
    ///
    /// See [`Parser::new()`] for an example
    pub fn try_with_sql(self, sql: &str) -> Result<Self, ParserError> {
        debug!("Parsing sql '{sql}'...");
        let tokens = Tokenizer::new(self.dialect, sql)
            .with_unescape(self.options.unescape)
            .tokenize_with_location()?;
        Ok(self.with_tokens_with_locations(tokens))
    }

    /// Parse potentially multiple statements
    ///
    /// Example
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let statements = Parser::new(&dialect)
    ///   // Parse a SQL string with 2 separate statements
    ///   .try_with_sql("SELECT * FROM foo; SELECT * FROM bar;")?
    ///   .parse_statements()?;
    /// assert_eq!(statements.len(), 2);
    /// # Ok(())
    /// # }
    /// ```
    pub fn parse_statements(&mut self) -> Result<Vec<Statement>, ParserError> {
        let mut stmts = Vec::new();
        let mut expecting_statement_delimiter = false;
        loop {
            // ignore empty statements (between successive statement delimiters)
            while self.consume_token(&Token::SemiColon) {
                expecting_statement_delimiter = false;
            }

            if !self.options.require_semicolon_stmt_delimiter {
                expecting_statement_delimiter = false;
            }

            match self.peek_token().token {
                Token::EOF => break,

                // end of statement
                Token::Word(word) => {
                    if expecting_statement_delimiter && word.keyword == Keyword::END {
                        break;
                    }
                }
                _ => {}
            }

            if expecting_statement_delimiter {
                return self.expected("end of statement", self.peek_token());
            }

            let statement = self.parse_statement()?;
            stmts.push(statement);
            expecting_statement_delimiter = true;
        }
        Ok(stmts)
    }

    /// Convenience method to parse a string with one or more SQL
    /// statements into an Abstract Syntax Tree (AST).
    ///
    /// Example
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let statements = Parser::parse_sql(
    ///   &dialect, "SELECT * FROM foo"
    /// )?;
    /// assert_eq!(statements.len(), 1);
    /// # Ok(())
    /// # }
    /// ```
    pub fn parse_sql(dialect: &dyn Dialect, sql: &str) -> Result<Vec<Statement>, ParserError> {
        Parser::new(dialect).try_with_sql(sql)?.parse_statements()
    }

    /// Parse a single top-level statement (such as SELECT, INSERT, CREATE, etc.),
    /// stopping before the statement separator, if any.
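    ///
    /// A minimal sketch of typical usage, mirroring the [`Parser::parse_statements`]
    /// example above (the `GenericDialect` is assumed here):
    ///
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// // Only the first statement is consumed; parsing stops before the `;`.
    /// let statement = Parser::new(&dialect)
    ///   .try_with_sql("SELECT * FROM foo; SELECT * FROM bar")?
    ///   .parse_statement()?;
    /// # let _ = statement;
    /// # Ok(())
    /// # }
    /// ```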
    pub fn parse_statement(&mut self) -> Result<Statement, ParserError> {
        let _guard = self.recursion_counter.try_decrease()?;

        // allow the dialect to override statement parsing
        if let Some(statement) = self.dialect.parse_statement(self) {
            return statement;
        }

        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::KILL => self.parse_kill(),
                Keyword::FLUSH => self.parse_flush(),
                Keyword::DESC => self.parse_explain(DescribeAlias::Desc),
                Keyword::DESCRIBE => self.parse_explain(DescribeAlias::Describe),
                Keyword::EXPLAIN => self.parse_explain(DescribeAlias::Explain),
                Keyword::ANALYZE => self.parse_analyze(),
                Keyword::CASE => {
                    self.prev_token();
                    self.parse_case_stmt()
                }
                Keyword::IF => {
                    self.prev_token();
                    self.parse_if_stmt()
                }
                Keyword::WHILE => {
                    self.prev_token();
                    self.parse_while()
                }
                Keyword::LOOP if self.dialect.supports_loop_end_loop() => {
                    self.parse_loop(None)
                }
                Keyword::FOR if self.dialect.supports_loop_end_loop() => {
                    self.parse_for_statement()
                }
                Keyword::REPEAT if self.dialect.supports_loop_end_loop() => {
                    self.parse_repeat_statement()
                }
                Keyword::LEAVE => {
                    let label = if self.peek_token() != Token::SemiColon {
                        Some(self.parse_identifier()?)
                    } else {
                        None
                    };
                    Ok(Statement::Leave { label })
                }
                Keyword::ITERATE => {
                    let label = if self.peek_token() != Token::SemiColon {
                        Some(self.parse_identifier()?)
                    } else {
                        None
                    };
                    Ok(Statement::Iterate { label })
                }
                Keyword::BREAK if self.dialect.supports_loop_end_loop() => {
                    let label = if self.peek_token() != Token::SemiColon {
                        Some(self.parse_identifier()?)
                    } else {
                        None
                    };
                    Ok(Statement::Break { label })
                }
                Keyword::CONTINUE if self.dialect.supports_loop_end_loop() => {
                    let label = if self.peek_token() != Token::SemiColon {
                        Some(self.parse_identifier()?)
                    } else {
                        None
                    };
                    Ok(Statement::Continue { label })
                }
                Keyword::RAISE => {
                    self.prev_token();
                    self.parse_raise_stmt()
                }
                Keyword::SELECT | Keyword::WITH | Keyword::VALUES | Keyword::FROM => {
                    self.prev_token();
                    self.parse_query().map(Statement::Query)
                }
                Keyword::TRUNCATE => self.parse_truncate(),
                Keyword::ATTACH => {
                    if dialect_of!(self is DuckDbDialect) {
                        self.parse_attach_duckdb_database()
                    } else {
                        self.parse_attach_database()
                    }
                }
                Keyword::DETACH if dialect_of!(self is DuckDbDialect | GenericDialect) => {
                    self.parse_detach_duckdb_database()
                }
                Keyword::MSCK => self.parse_msck(),
                Keyword::CREATE => self.parse_create(),
                Keyword::CACHE => self.parse_cache_table(),
                Keyword::DROP => self.parse_drop(),
                Keyword::UNDROP => self.parse_undrop(),
                Keyword::DISCARD => self.parse_discard(),
                Keyword::DECLARE => self.parse_declare(),
                Keyword::FETCH => self.parse_fetch_statement(),
                Keyword::DELETE => self.parse_delete(),
                Keyword::INSERT => self.parse_insert(),
                Keyword::REPLACE => self.parse_replace(),
                Keyword::UNCACHE => self.parse_uncache_table(),
                Keyword::UPDATE => self.parse_update(),
                Keyword::ALTER => self.parse_alter(),
                Keyword::CALL => self.parse_call(),
                Keyword::COPY => self.parse_copy(),
                Keyword::OPEN => {
                    self.prev_token();
                    self.parse_open()
                }
                Keyword::CLOSE => self.parse_close(),
                Keyword::SET => self.parse_set(),
                Keyword::SHOW => self.parse_show(),
                Keyword::USE => self.parse_use(),
                Keyword::GRANT => self.parse_grant(),
                Keyword::DENY => {
                    self.prev_token();
                    self.parse_deny()
                }
                Keyword::REVOKE => self.parse_revoke(),
                Keyword::START => self.parse_start_transaction(),
                Keyword::BEGIN => self.parse_begin(),
                Keyword::END => self.parse_end(),
                Keyword::SAVEPOINT => self.parse_savepoint(),
                Keyword::RELEASE => self.parse_release(),
                Keyword::COMMIT => self.parse_commit(),
                Keyword::RAISERROR => Ok(self.parse_raiserror()?),
                Keyword::ROLLBACK => self.parse_rollback(),
                Keyword::ASSERT => self.parse_assert(),
                // `PREPARE`, `EXECUTE` and `DEALLOCATE` are Postgres-specific
                // syntaxes. They are used for Postgres prepared statements.
                Keyword::DEALLOCATE => self.parse_deallocate(),
                Keyword::EXECUTE | Keyword::EXEC => self.parse_execute(),
                Keyword::PREPARE => self.parse_prepare(),
                Keyword::MERGE => self.parse_merge(),
                // `LISTEN`, `UNLISTEN` and `NOTIFY` are Postgres-specific
                // syntaxes, used for asynchronous notification in Postgres.
                Keyword::LISTEN if self.dialect.supports_listen_notify() => self.parse_listen(),
                Keyword::UNLISTEN if self.dialect.supports_listen_notify() => self.parse_unlisten(),
                Keyword::NOTIFY if self.dialect.supports_listen_notify() => self.parse_notify(),
                // `PRAGMA` is sqlite specific https://www.sqlite.org/pragma.html
                Keyword::PRAGMA => self.parse_pragma(),
                Keyword::UNLOAD => {
                    self.prev_token();
                    self.parse_unload()
                }
                Keyword::RENAME => self.parse_rename(),
                // `INSTALL` is duckdb specific https://duckdb.org/docs/extensions/overview
                Keyword::INSTALL if dialect_of!(self is DuckDbDialect | GenericDialect) => {
                    self.parse_install()
                }
                Keyword::LOAD => self.parse_load(),
                // `OPTIMIZE` is clickhouse specific https://clickhouse.tech/docs/en/sql-reference/statements/optimize/
                Keyword::OPTIMIZE if dialect_of!(self is ClickHouseDialect | GenericDialect) => {
                    self.parse_optimize_table()
                }
                // `COMMENT` is snowflake specific https://docs.snowflake.com/en/sql-reference/sql/comment
                Keyword::COMMENT if self.dialect.supports_comment_on() => self.parse_comment(),
                Keyword::PRINT => self.parse_print(),
                Keyword::RETURN => self.parse_return(),
                Keyword::EXPORT => {
                    self.prev_token();
                    self.parse_export_data()
                }
                Keyword::VACUUM => {
                    self.prev_token();
                    self.parse_vacuum()
                }
                _ => {
                    if self.dialect.supports_loop_end_loop() && self.peek_token() == Token::Colon {
                        let label = Ident::new(w.value.clone());
                        self.expect_token(&Token::Colon)?;
                        if self.parse_keyword(Keyword::LOOP) {
                            self.parse_loop(Some(label))
                        } else if self.parse_keyword(Keyword::WHILE) {
                            self.parse_while_with_label(Some(label))
                        } else if self.parse_keyword(Keyword::BEGIN) {
                            self.parse_begin_with_label(Some(label))
                        } else {
                            self.expected("LOOP, WHILE, or BEGIN after label", self.peek_token())
                        }
                    } else {
                        self.expected("an SQL statement", next_token)
                    }
                }
            },
            Token::LParen => {
                self.prev_token();
                self.parse_query().map(Statement::Query)
            }
            _ => self.expected("an SQL statement", next_token),
        }
    }

    /// Parse a `CASE` statement.
    ///
    /// See [Statement::Case]
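    ///
    /// For example, in dialects that support statement-level `CASE` (e.g.
    /// BigQuery-style scripting) this handles input of roughly this shape:
    ///
    /// ```sql
    /// CASE
    ///   WHEN x = 1 THEN SELECT 1;
    ///   ELSE SELECT 2;
    /// END CASE;
    /// ```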
    pub fn parse_case_stmt(&mut self) -> Result<Statement, ParserError> {
        let case_token = self.expect_keyword(Keyword::CASE)?;

        let match_expr = if self.peek_keyword(Keyword::WHEN) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        self.expect_keyword_is(Keyword::WHEN)?;
        let when_blocks = self.parse_keyword_separated(Keyword::WHEN, |parser| {
            parser.parse_conditional_statement_block(&[Keyword::WHEN, Keyword::ELSE, Keyword::END])
        })?;

        let else_block = if self.parse_keyword(Keyword::ELSE) {
            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
        } else {
            None
        };

        let mut end_case_token = self.expect_keyword(Keyword::END)?;
        if self.peek_keyword(Keyword::CASE) {
            end_case_token = self.expect_keyword(Keyword::CASE)?;
        }

        Ok(Statement::Case(CaseStatement {
            case_token: AttachedToken(case_token),
            match_expr,
            when_blocks,
            else_block,
            end_case_token: AttachedToken(end_case_token),
        }))
    }

    /// Parse an `IF` statement.
    ///
    /// See [Statement::If]
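    ///
    /// For example, the grammar handled below covers statements of roughly this
    /// shape (support and exact form vary by dialect):
    ///
    /// ```sql
    /// IF x = 1 THEN
    ///   SELECT 1;
    /// ELSEIF x = 2 THEN
    ///   SELECT 2;
    /// ELSE
    ///   SELECT 3;
    /// END IF;
    /// ```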
    pub fn parse_if_stmt(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword_is(Keyword::IF)?;
        let if_block = self.parse_conditional_statement_block(&[
            Keyword::ELSE,
            Keyword::ELSEIF,
            Keyword::END,
        ])?;

        let elseif_blocks = if self.parse_keyword(Keyword::ELSEIF) {
            self.parse_keyword_separated(Keyword::ELSEIF, |parser| {
                parser.parse_conditional_statement_block(&[
                    Keyword::ELSEIF,
                    Keyword::ELSE,
                    Keyword::END,
                ])
            })?
        } else {
            vec![]
        };

        let else_block = if self.parse_keyword(Keyword::ELSE) {
            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::END)?;
        let end_token = self.expect_keyword(Keyword::IF)?;

        Ok(Statement::If(IfStatement {
            if_block,
            elseif_blocks,
            else_block,
            end_token: Some(AttachedToken(end_token)),
        }))
    }

    /// Parse a `WHILE` statement.
    ///
    /// See [Statement::While]
    fn parse_while(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword_is(Keyword::WHILE)?;
        self.parse_while_with_label(None)
    }

    /// Parse a `WHILE` statement with an optional label.
    fn parse_while_with_label(&mut self, label: Option<Ident>) -> Result<Statement, ParserError> {
        if self.dialect.supports_while_do_end_while() {
            let condition = self.parse_expr()?;
            self.expect_keyword_is(Keyword::DO)?;
            let statements = self.parse_statement_list(&[Keyword::END])?;
            self.expect_keyword_is(Keyword::END)?;
            self.expect_keyword_is(Keyword::WHILE)?;
            if label.is_some() {
                let _ = self.parse_identifier();
            }

            let while_block = ConditionalStatementBlock {
                start_token: AttachedToken::empty(),
                condition: Some(condition),
                then_token: None,
                conditional_statements: ConditionalStatements::Sequence { statements },
            };
            return Ok(Statement::While(WhileStatement { label, while_block }));
        }

        let while_block = self.parse_conditional_statement_block(&[Keyword::END])?;

        Ok(Statement::While(WhileStatement { label, while_block }))
    }

    /// Parse a `LOOP` statement.
    ///
    /// See [Statement::Loop]
    fn parse_loop(&mut self, label: Option<Ident>) -> Result<Statement, ParserError> {
        let body = self.parse_statement_list(&[Keyword::END])?;
        self.expect_keyword_is(Keyword::END)?;
        self.expect_keyword_is(Keyword::LOOP)?;
        if let Token::Word(_) = self.peek_token().token {
            let _ = self.parse_identifier();
        }

        Ok(Statement::Loop(LoopStatement { label, body }))
    }

    /// Parse a `FOR` statement.
    ///
    /// See [Statement::For]
    fn parse_for_statement(&mut self) -> Result<Statement, ParserError> {
        let variable = self.parse_identifier()?;
        self.expect_keyword_is(Keyword::IN)?;
        self.expect_token(&Token::LParen)?;
        let query = self.parse_query()?;
        self.expect_token(&Token::RParen)?;
        self.expect_keyword_is(Keyword::DO)?;
        let body = self.parse_statement_list(&[Keyword::END])?;
        self.expect_keyword_is(Keyword::END)?;
        self.expect_keyword_is(Keyword::FOR)?;

        Ok(Statement::For(ForStatement {
            variable,
            query,
            body,
        }))
    }

    /// Parse a `REPEAT` statement.
    ///
    /// See [Statement::Repeat]
    fn parse_repeat_statement(&mut self) -> Result<Statement, ParserError> {
        let body = self.parse_statement_list(&[Keyword::UNTIL])?;
        self.expect_keyword_is(Keyword::UNTIL)?;
        let until_condition = self.parse_expr()?;
        self.expect_keyword_is(Keyword::END)?;
        self.expect_keyword_is(Keyword::REPEAT)?;

        Ok(Statement::Repeat(RepeatStatement {
            body,
            until_condition,
        }))
    }

    /// Parses an expression and the associated list of statements
    /// belonging to a conditional statement such as `IF`, `WHEN`, or `WHILE`.
    ///
    /// Example:
    /// ```sql
    /// IF condition THEN statement1; statement2;
    /// ```
    fn parse_conditional_statement_block(
        &mut self,
        terminal_keywords: &[Keyword],
    ) -> Result<ConditionalStatementBlock, ParserError> {
        let start_token = self.get_current_token().clone(); // self.expect_keyword(keyword)?;
        let mut then_token = None;

        let condition = match &start_token.token {
            Token::Word(w) if w.keyword == Keyword::ELSE => None,
            Token::Word(w) if w.keyword == Keyword::WHILE => {
                let expr = self.parse_expr()?;
                Some(expr)
            }
            _ => {
                let expr = self.parse_expr()?;
                then_token = Some(AttachedToken(self.expect_keyword(Keyword::THEN)?));
                Some(expr)
            }
        };

        let conditional_statements = self.parse_conditional_statements(terminal_keywords)?;

        Ok(ConditionalStatementBlock {
            start_token: AttachedToken(start_token),
            condition,
            then_token,
            conditional_statements,
        })
    }

    /// Parse a BEGIN/END block or a plain sequence of statements.
    /// This can appear inside a conditional (IF, CASE, WHILE, etc.) or as an object
    /// body that is optionally wrapped in BEGIN/END around one or more statements.
    pub(crate) fn parse_conditional_statements(
        &mut self,
        terminal_keywords: &[Keyword],
    ) -> Result<ConditionalStatements, ParserError> {
        let conditional_statements = if self.peek_keyword(Keyword::BEGIN) {
            let begin_token = self.expect_keyword(Keyword::BEGIN)?;
            let statements = self.parse_statement_list(terminal_keywords)?;
            let end_token = self.expect_keyword(Keyword::END)?;

            ConditionalStatements::BeginEnd(BeginEndStatements {
                begin_token: AttachedToken(begin_token),
                statements,
                end_token: AttachedToken(end_token),
            })
        } else {
            ConditionalStatements::Sequence {
                statements: self.parse_statement_list(terminal_keywords)?,
            }
        };
        Ok(conditional_statements)
    }

    /// Parse a `RAISE` statement.
    ///
    /// See [Statement::Raise]
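    ///
    /// For example, both of the forms handled below:
    ///
    /// ```sql
    /// RAISE USING MESSAGE = 'error message';
    /// RAISE 'error message';
    /// ```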
    pub fn parse_raise_stmt(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword_is(Keyword::RAISE)?;

        let value = if self.parse_keywords(&[Keyword::USING, Keyword::MESSAGE]) {
            self.expect_token(&Token::Eq)?;
            Some(RaiseStatementValue::UsingMessage(self.parse_expr()?))
        } else {
            self.maybe_parse(|parser| parser.parse_expr().map(RaiseStatementValue::Expr))?
        };

        Ok(Statement::Raise(RaiseStatement { value }))
    }

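    /// Parse a `COMMENT ON ... IS ...` statement (Postgres/Snowflake style).
    ///
    /// For example, roughly:
    ///
    /// ```sql
    /// COMMENT ON TABLE my_table IS 'a table comment';
    /// COMMENT IF EXISTS ON COLUMN my_table.my_column IS NULL;
    /// ```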
    pub fn parse_comment(&mut self) -> Result<Statement, ParserError> {
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);

        self.expect_keyword_is(Keyword::ON)?;
        let token = self.next_token();

        let (object_type, object_name) = match token.token {
            Token::Word(w) if w.keyword == Keyword::COLUMN => {
                (CommentObject::Column, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::TABLE => {
                (CommentObject::Table, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::EXTENSION => {
                (CommentObject::Extension, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::SCHEMA => {
                (CommentObject::Schema, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::DATABASE => {
                (CommentObject::Database, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::USER => {
                (CommentObject::User, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::ROLE => {
                (CommentObject::Role, self.parse_object_name(false)?)
            }
            _ => self.expected("comment object_type", token)?,
        };

        self.expect_keyword_is(Keyword::IS)?;
        let comment = if self.parse_keyword(Keyword::NULL) {
            None
        } else {
            Some(self.parse_literal_string()?)
        };
        Ok(Statement::Comment {
            object_type,
            object_name,
            comment,
            if_exists,
        })
    }

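    /// Parse a MySQL `FLUSH` statement.
    ///
    /// For example, the forms handled below include roughly:
    ///
    /// ```sql
    /// FLUSH TABLES t1, t2 WITH READ LOCK;
    /// FLUSH LOCAL RELAY LOGS FOR CHANNEL my_channel;
    /// ```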
    pub fn parse_flush(&mut self) -> Result<Statement, ParserError> {
        let mut channel = None;
        let mut tables: Vec<ObjectName> = vec![];
        let mut read_lock = false;
        let mut export = false;

        if !dialect_of!(self is MySqlDialect | GenericDialect) {
            return parser_err!("Unsupported statement FLUSH", self.peek_token().span.start);
        }

        let location = if self.parse_keyword(Keyword::NO_WRITE_TO_BINLOG) {
            Some(FlushLocation::NoWriteToBinlog)
        } else if self.parse_keyword(Keyword::LOCAL) {
            Some(FlushLocation::Local)
        } else {
            None
        };

        let object_type = if self.parse_keywords(&[Keyword::BINARY, Keyword::LOGS]) {
            FlushType::BinaryLogs
        } else if self.parse_keywords(&[Keyword::ENGINE, Keyword::LOGS]) {
            FlushType::EngineLogs
        } else if self.parse_keywords(&[Keyword::ERROR, Keyword::LOGS]) {
            FlushType::ErrorLogs
        } else if self.parse_keywords(&[Keyword::GENERAL, Keyword::LOGS]) {
            FlushType::GeneralLogs
        } else if self.parse_keywords(&[Keyword::HOSTS]) {
            FlushType::Hosts
        } else if self.parse_keyword(Keyword::PRIVILEGES) {
            FlushType::Privileges
        } else if self.parse_keyword(Keyword::OPTIMIZER_COSTS) {
            FlushType::OptimizerCosts
        } else if self.parse_keywords(&[Keyword::RELAY, Keyword::LOGS]) {
            if self.parse_keywords(&[Keyword::FOR, Keyword::CHANNEL]) {
                channel = Some(self.parse_object_name(false).unwrap().to_string());
            }
            FlushType::RelayLogs
        } else if self.parse_keywords(&[Keyword::SLOW, Keyword::LOGS]) {
            FlushType::SlowLogs
        } else if self.parse_keyword(Keyword::STATUS) {
            FlushType::Status
        } else if self.parse_keyword(Keyword::USER_RESOURCES) {
            FlushType::UserResources
        } else if self.parse_keywords(&[Keyword::LOGS]) {
            FlushType::Logs
        } else if self.parse_keywords(&[Keyword::TABLES]) {
            loop {
                let next_token = self.next_token();
                match &next_token.token {
                    Token::Word(w) => match w.keyword {
                        Keyword::WITH => {
                            read_lock = self.parse_keywords(&[Keyword::READ, Keyword::LOCK]);
                        }
                        Keyword::FOR => {
                            export = self.parse_keyword(Keyword::EXPORT);
                        }
                        Keyword::NoKeyword => {
                            self.prev_token();
                            tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
                        }
                        _ => {}
                    },
                    _ => {
                        break;
                    }
                }
            }

            FlushType::Tables
        } else {
            return self.expected(
                "BINARY LOGS, ENGINE LOGS, ERROR LOGS, GENERAL LOGS, HOSTS, LOGS, PRIVILEGES, OPTIMIZER_COSTS,\
                 RELAY LOGS [FOR CHANNEL channel], SLOW LOGS, STATUS, USER_RESOURCES",
                self.peek_token(),
            );
        };

        Ok(Statement::Flush {
            object_type,
            location,
            channel,
            read_lock,
            export,
            tables,
        })
    }

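    /// Parse a Hive `MSCK` (metastore check) statement, e.g. roughly:
    ///
    /// ```sql
    /// MSCK REPAIR TABLE my_table SYNC PARTITIONS;
    /// ```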
    pub fn parse_msck(&mut self) -> Result<Statement, ParserError> {
        let repair = self.parse_keyword(Keyword::REPAIR);
        self.expect_keyword_is(Keyword::TABLE)?;
        let table_name = self.parse_object_name(false)?;
        let partition_action = self
            .maybe_parse(|parser| {
                let pa = match parser.parse_one_of_keywords(&[
                    Keyword::ADD,
                    Keyword::DROP,
                    Keyword::SYNC,
                ]) {
                    Some(Keyword::ADD) => Some(AddDropSync::ADD),
                    Some(Keyword::DROP) => Some(AddDropSync::DROP),
                    Some(Keyword::SYNC) => Some(AddDropSync::SYNC),
                    _ => None,
                };
                parser.expect_keyword_is(Keyword::PARTITIONS)?;
                Ok(pa)
            })?
            .unwrap_or_default();
        Ok(Statement::Msck {
            repair,
            table_name,
            partition_action,
        })
    }

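    /// Parse a `TRUNCATE` statement. The optional clauses are dialect-dependent,
    /// for example:
    ///
    /// ```sql
    /// TRUNCATE TABLE foo, bar RESTART IDENTITY CASCADE;
    /// ```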
    pub fn parse_truncate(&mut self) -> Result<Statement, ParserError> {
        let table = self.parse_keyword(Keyword::TABLE);

        let table_names = self
            .parse_comma_separated(|p| {
                Ok((p.parse_keyword(Keyword::ONLY), p.parse_object_name(false)?))
            })?
            .into_iter()
            .map(|(only, name)| TruncateTableTarget { name, only })
            .collect();

        let mut partitions = None;
        if self.parse_keyword(Keyword::PARTITION) {
            self.expect_token(&Token::LParen)?;
            partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
            self.expect_token(&Token::RParen)?;
        }

        let mut identity = None;
        let mut cascade = None;

        if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
            identity = if self.parse_keywords(&[Keyword::RESTART, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Restart)
            } else if self.parse_keywords(&[Keyword::CONTINUE, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Continue)
            } else {
                None
            };

            cascade = self.parse_cascade_option();
        };

        let on_cluster = self.parse_optional_on_cluster()?;

        Ok(Statement::Truncate {
            table_names,
            partitions,
            table,
            identity,
            cascade,
            on_cluster,
        })
    }

    fn parse_cascade_option(&mut self) -> Option<CascadeOption> {
        if self.parse_keyword(Keyword::CASCADE) {
            Some(CascadeOption::Cascade)
        } else if self.parse_keyword(Keyword::RESTRICT) {
            Some(CascadeOption::Restrict)
        } else {
            None
        }
    }

    pub fn parse_attach_duckdb_database_options(
        &mut self,
    ) -> Result<Vec<AttachDuckDBDatabaseOption>, ParserError> {
        if !self.consume_token(&Token::LParen) {
            return Ok(vec![]);
        }

        let mut options = vec![];
        loop {
            if self.parse_keyword(Keyword::READ_ONLY) {
                let boolean = if self.parse_keyword(Keyword::TRUE) {
                    Some(true)
                } else if self.parse_keyword(Keyword::FALSE) {
                    Some(false)
                } else {
                    None
                };
                options.push(AttachDuckDBDatabaseOption::ReadOnly(boolean));
            } else if self.parse_keyword(Keyword::TYPE) {
                let ident = self.parse_identifier()?;
                options.push(AttachDuckDBDatabaseOption::Type(ident));
            } else {
                return self.expected("expected one of: ), READ_ONLY, TYPE", self.peek_token());
            };

            if self.consume_token(&Token::RParen) {
                return Ok(options);
            } else if self.consume_token(&Token::Comma) {
                continue;
            } else {
                return self.expected("expected one of: ')', ','", self.peek_token());
            }
        }
    }

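    /// Parse a DuckDB `ATTACH` statement, e.g. roughly:
    ///
    /// ```sql
    /// ATTACH DATABASE IF NOT EXISTS 'file.db' AS file_db (READ_ONLY, TYPE SQLITE);
    /// ```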
    pub fn parse_attach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let database_path = self.parse_identifier()?;
        let database_alias = if self.parse_keyword(Keyword::AS) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        let attach_options = self.parse_attach_duckdb_database_options()?;
        Ok(Statement::AttachDuckDBDatabase {
            if_not_exists,
            database,
            database_path,
            database_alias,
            attach_options,
        })
    }

    pub fn parse_detach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let database_alias = self.parse_identifier()?;
        Ok(Statement::DetachDuckDBDatabase {
            if_exists,
            database,
            database_alias,
        })
    }

    pub fn parse_attach_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let database_file_name = self.parse_expr()?;
        self.expect_keyword_is(Keyword::AS)?;
        let schema_name = self.parse_identifier()?;
        Ok(Statement::AttachDatabase {
            database,
            schema_name,
            database_file_name,
        })
    }

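    /// Parse an `ANALYZE [TABLE]` statement (Hive/Spark style), e.g. roughly:
    ///
    /// ```sql
    /// ANALYZE TABLE t PARTITION (ds = '2024-01-01') COMPUTE STATISTICS NOSCAN;
    /// ```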
    pub fn parse_analyze(&mut self) -> Result<Statement, ParserError> {
        let has_table_keyword = self.parse_keyword(Keyword::TABLE);
        let table_name = self.parse_object_name(false)?;
        let mut for_columns = false;
        let mut cache_metadata = false;
        let mut noscan = false;
        let mut partitions = None;
        let mut compute_statistics = false;
        let mut columns = vec![];
        loop {
            match self.parse_one_of_keywords(&[
                Keyword::PARTITION,
                Keyword::FOR,
                Keyword::CACHE,
                Keyword::NOSCAN,
                Keyword::COMPUTE,
            ]) {
                Some(Keyword::PARTITION) => {
                    self.expect_token(&Token::LParen)?;
                    partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
                    self.expect_token(&Token::RParen)?;
                }
                Some(Keyword::NOSCAN) => noscan = true,
                Some(Keyword::FOR) => {
                    self.expect_keyword_is(Keyword::COLUMNS)?;

                    columns = self
                        .maybe_parse(|parser| {
                            parser.parse_comma_separated(|p| p.parse_identifier())
                        })?
                        .unwrap_or_default();
                    for_columns = true
                }
                Some(Keyword::CACHE) => {
                    self.expect_keyword_is(Keyword::METADATA)?;
                    cache_metadata = true
                }
                Some(Keyword::COMPUTE) => {
                    self.expect_keyword_is(Keyword::STATISTICS)?;
                    compute_statistics = true
                }
                _ => break,
            }
        }

        Ok(Statement::Analyze {
            has_table_keyword,
            table_name,
            for_columns,
            columns,
            partitions,
            cache_metadata,
            noscan,
            compute_statistics,
        })
    }

    /// Parse a new expression including wildcard & qualified wildcard.
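    ///
    /// For example, the following projection items are all handled here:
    ///
    /// ```sql
    /// SELECT *, t.*, a + b FROM t;
    /// ```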
    pub fn parse_wildcard_expr(&mut self) -> Result<Expr, ParserError> {
        let index = self.index;

        let next_token = self.next_token();
        match next_token.token {
            t @ (Token::Word(_) | Token::SingleQuotedString(_)) => {
                if self.peek_token().token == Token::Period {
                    let mut id_parts: Vec<Ident> = vec![match t {
                        Token::Word(w) => w.into_ident(next_token.span),
                        Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
                        _ => unreachable!(), // We matched above
                    }];

                    while self.consume_token(&Token::Period) {
                        let next_token = self.next_token();
                        match next_token.token {
                            Token::Word(w) => id_parts.push(w.into_ident(next_token.span)),
                            Token::SingleQuotedString(s) => {
                                // SQLite has single-quoted identifiers
                                id_parts.push(Ident::with_quote('\'', s))
                            }
                            Token::Mul => {
                                return Ok(Expr::QualifiedWildcard(
                                    ObjectName::from(id_parts),
                                    AttachedToken(next_token),
                                ));
                            }
                            _ => {
                                return self
                                    .expected("an identifier or a '*' after '.'", next_token);
                            }
                        }
                    }
                }
            }
            Token::Mul => {
                return Ok(Expr::Wildcard(AttachedToken(next_token)));
            }
            _ => (),
        };

        self.index = index;
        self.parse_expr()
    }

    /// Parse a new expression.
    pub fn parse_expr(&mut self) -> Result<Expr, ParserError> {
        self.parse_subexpr(self.dialect.prec_unknown())
    }

    pub fn parse_expr_with_alias_and_order_by(
        &mut self,
    ) -> Result<ExprWithAliasAndOrderBy, ParserError> {
        let expr = self.parse_expr()?;

        fn validator(explicit: bool, kw: &Keyword, _parser: &mut Parser) -> bool {
            explicit || !&[Keyword::ASC, Keyword::DESC, Keyword::GROUP].contains(kw)
        }
        let alias = self.parse_optional_alias_inner(None, validator)?;
        let order_by = OrderByOptions {
            asc: self.parse_asc_desc(),
            nulls_first: None,
        };
        Ok(ExprWithAliasAndOrderBy {
            expr: ExprWithAlias { expr, alias },
            order_by,
        })
    }

    /// Parse tokens until the precedence changes.
    pub fn parse_subexpr(&mut self, precedence: u8) -> Result<Expr, ParserError> {
        let _guard = self.recursion_counter.try_decrease()?;
        debug!("parsing expr");
        let mut expr = self.parse_prefix()?;

        expr = self.parse_compound_expr(expr, vec![])?;

        debug!("prefix: {expr:?}");
        loop {
            let next_precedence = self.get_next_precedence()?;
            debug!("next precedence: {next_precedence:?}");

            if precedence >= next_precedence {
                break;
            }

            // The period operator is handled exclusively by the
            // compound field access parsing.
            if Token::Period == self.peek_token_ref().token {
                break;
            }

            expr = self.parse_infix(expr, next_precedence)?;
        }
        Ok(expr)
    }

    pub fn parse_assert(&mut self) -> Result<Statement, ParserError> {
        let condition = self.parse_expr()?;
        let message = if self.parse_keyword(Keyword::AS) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        Ok(Statement::Assert { condition, message })
    }

    pub fn parse_savepoint(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_identifier()?;
        Ok(Statement::Savepoint { name })
    }

    pub fn parse_release(&mut self) -> Result<Statement, ParserError> {
        let _ = self.parse_keyword(Keyword::SAVEPOINT);
        let name = self.parse_identifier()?;

        Ok(Statement::ReleaseSavepoint { name })
    }

    pub fn parse_listen(&mut self) -> Result<Statement, ParserError> {
        let channel = self.parse_identifier()?;
        Ok(Statement::LISTEN { channel })
    }

    pub fn parse_unlisten(&mut self) -> Result<Statement, ParserError> {
        let channel = if self.consume_token(&Token::Mul) {
            Ident::new(Expr::Wildcard(AttachedToken::empty()).to_string())
        } else {
            match self.parse_identifier() {
                Ok(expr) => expr,
                _ => {
                    self.prev_token();
                    return self.expected("wildcard or identifier", self.peek_token());
                }
            }
        };
        Ok(Statement::UNLISTEN { channel })
    }

    pub fn parse_notify(&mut self) -> Result<Statement, ParserError> {
        let channel = self.parse_identifier()?;
        let payload = if self.consume_token(&Token::Comma) {
            Some(self.parse_literal_string()?)
        } else {
            None
        };
        Ok(Statement::NOTIFY { channel, payload })
    }

1466    /// Parses a `RENAME TABLE` statement. See [Statement::RenameTable]
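    ///
    /// For example (an illustrative input only):
    ///
    /// ```sql
    /// RENAME TABLE old_name TO new_name, old_name2 TO new_name2
    /// ```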
1467    pub fn parse_rename(&mut self) -> Result<Statement, ParserError> {
1468        if self.peek_keyword(Keyword::TABLE) {
1469            self.expect_keyword(Keyword::TABLE)?;
1470            let rename_tables = self.parse_comma_separated(|parser| {
1471                let old_name = parser.parse_object_name(false)?;
1472                parser.expect_keyword(Keyword::TO)?;
1473                let new_name = parser.parse_object_name(false)?;
1474
1475                Ok(RenameTable { old_name, new_name })
1476            })?;
1477            Ok(Statement::RenameTable(rename_tables))
1478        } else {
1479            self.expected("KEYWORD `TABLE` after RENAME", self.peek_token())
1480        }
1481    }
1482
1483    /// Tries to parse an expression by matching the specified word to known keywords that have a special meaning in the dialect.
1484    /// Returns `None` if no match is found.
1485    fn parse_expr_prefix_by_reserved_word(
1486        &mut self,
1487        w: &Word,
1488        w_span: Span,
1489    ) -> Result<Option<Expr>, ParserError> {
1490        match w.keyword {
1491            Keyword::TRUE | Keyword::FALSE if self.dialect.supports_boolean_literals() => {
1492                self.prev_token();
1493                Ok(Some(Expr::Value(self.parse_value()?)))
1494            }
1495            Keyword::NULL => {
1496                self.prev_token();
1497                Ok(Some(Expr::Value(self.parse_value()?)))
1498            }
1499            Keyword::CURRENT_CATALOG
1500            | Keyword::CURRENT_USER
1501            | Keyword::SESSION_USER
1502            | Keyword::USER
1503            if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
1504                {
1505                    Ok(Some(Expr::Function(Function {
1506                        name: ObjectName::from(vec![w.clone().into_ident(w_span)]),
1507                        uses_odbc_syntax: false,
1508                        parameters: FunctionArguments::None,
1509                        args: FunctionArguments::None,
1510                        null_treatment: None,
1511                        filter: None,
1512                        over: None,
1513                        within_group: vec![],
1514                    })))
1515                }
1516            Keyword::CURRENT_TIMESTAMP
1517            | Keyword::CURRENT_TIME
1518            | Keyword::CURRENT_DATE
1519            | Keyword::LOCALTIME
1520            | Keyword::LOCALTIMESTAMP => {
1521                Ok(Some(self.parse_time_functions(ObjectName::from(vec![w.clone().into_ident(w_span)]))?))
1522            }
1523            Keyword::CASE => Ok(Some(self.parse_case_expr()?)),
1524            Keyword::CONVERT => Ok(Some(self.parse_convert_expr(false)?)),
1525            Keyword::TRY_CONVERT if self.dialect.supports_try_convert() => Ok(Some(self.parse_convert_expr(true)?)),
1526            Keyword::CAST => Ok(Some(self.parse_cast_expr(CastKind::Cast)?)),
1527            Keyword::TRY_CAST => Ok(Some(self.parse_cast_expr(CastKind::TryCast)?)),
1528            Keyword::SAFE_CAST => Ok(Some(self.parse_cast_expr(CastKind::SafeCast)?)),
1529            Keyword::EXISTS
1530            // Databricks has a function named `exists`; only parse the EXISTS expression when followed by a subquery.
1531            if !dialect_of!(self is DatabricksDialect)
1532                || matches!(
1533                        self.peek_nth_token_ref(1).token,
1534                        Token::Word(Word {
1535                            keyword: Keyword::SELECT | Keyword::WITH,
1536                            ..
1537                        })
1538                    ) =>
1539                {
1540                    Ok(Some(self.parse_exists_expr(false)?))
1541                }
1542            Keyword::EXTRACT => Ok(Some(self.parse_extract_expr()?)),
1543            Keyword::CEIL => Ok(Some(self.parse_ceil_floor_expr(true)?)),
1544            Keyword::FLOOR => Ok(Some(self.parse_ceil_floor_expr(false)?)),
1545            Keyword::POSITION if self.peek_token_ref().token == Token::LParen => {
1546                Ok(Some(self.parse_position_expr(w.clone().into_ident(w_span))?))
1547            }
1548            Keyword::SUBSTR | Keyword::SUBSTRING => {
1549                self.prev_token();
1550                Ok(Some(self.parse_substring()?))
1551            }
1552            Keyword::OVERLAY => Ok(Some(self.parse_overlay_expr()?)),
1553            Keyword::TRIM => Ok(Some(self.parse_trim_expr()?)),
1554            Keyword::INTERVAL => Ok(Some(self.parse_interval()?)),
1555            // Treat ARRAY[1,2,3] as an array literal [1,2,3]; otherwise try to parse a subquery or a function call
1556            Keyword::ARRAY if *self.peek_token_ref() == Token::LBracket => {
1557                self.expect_token(&Token::LBracket)?;
1558                Ok(Some(self.parse_array_expr(true)?))
1559            }
1560            // Handle typed array literals: ARRAY<type>[1, 2, 3]
1561            Keyword::ARRAY if *self.peek_token_ref() == Token::Lt => {
1562                self.expect_token(&Token::Lt)?;
1563                let (element_type, trailing_bracket) = self.parse_data_type_helper()?;
1564                self.expect_closing_angle_bracket(trailing_bracket)?;
1565                Ok(Some(self.parse_typed_array_expr(element_type)?))
1566            }
1567            Keyword::ARRAY
1568            if self.peek_token() == Token::LParen
1569                && !dialect_of!(self is ClickHouseDialect | DatabricksDialect) =>
1570                {
1571                    self.expect_token(&Token::LParen)?;
1572                    let query = self.parse_query()?;
1573                    self.expect_token(&Token::RParen)?;
1574                    Ok(Some(Expr::Function(Function {
1575                        name: ObjectName::from(vec![w.clone().into_ident(w_span)]),
1576                        uses_odbc_syntax: false,
1577                        parameters: FunctionArguments::None,
1578                        args: FunctionArguments::Subquery(query),
1579                        filter: None,
1580                        null_treatment: None,
1581                        over: None,
1582                        within_group: vec![],
1583                    })))
1584                }
1585            Keyword::NOT => Ok(Some(self.parse_not()?)),
1586            Keyword::MATCH if self.dialect.supports_match_against() => {
1587                Ok(Some(self.parse_match_against()?))
1588            }
1589            Keyword::STRUCT if self.dialect.supports_struct_literal() => {
1590                let struct_expr = self.parse_struct_literal()?;
1591                Ok(Some(struct_expr))
1592            }
1593            Keyword::PRIOR if matches!(self.state, ParserState::ConnectBy) => {
1594                let expr = self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?;
1595                Ok(Some(Expr::Prior(Box::new(expr))))
1596            }
1597            Keyword::MAP if *self.peek_token_ref() == Token::LBrace && self.dialect.support_map_literal_syntax() => {
1598                Ok(Some(self.parse_duckdb_map_literal()?))
1599            }
1600            _ if self.dialect.supports_geometric_types() => match w.keyword {
1601                Keyword::CIRCLE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Circle)?)),
1602                Keyword::BOX => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricBox)?)),
1603                Keyword::PATH => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricPath)?)),
1604                Keyword::LINE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Line)?)),
1605                Keyword::LSEG => Ok(Some(self.parse_geometric_type(GeometricTypeKind::LineSegment)?)),
1606                Keyword::POINT => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Point)?)),
1607                Keyword::POLYGON => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Polygon)?)),
1608                _ => Ok(None),
1609            },
1610            _ => Ok(None),
1611        }
1612    }
1613
1614    /// Tries to parse an expression by a word that is not known to have a special meaning in the dialect.
1615    fn parse_expr_prefix_by_unreserved_word(
1616        &mut self,
1617        w: &Word,
1618        w_span: Span,
1619    ) -> Result<Expr, ParserError> {
1620        match self.peek_token().token {
1621            Token::LParen if !self.peek_outer_join_operator() => {
1622                let id_parts = vec![w.clone().into_ident(w_span)];
1623                self.parse_function(ObjectName::from(id_parts))
1624            }
1625            // string introducer https://dev.mysql.com/doc/refman/8.0/en/charset-introducer.html
1626            Token::SingleQuotedString(_)
1627            | Token::DoubleQuotedString(_)
1628            | Token::HexStringLiteral(_)
1629                if w.value.starts_with('_') =>
1630            {
1631                Ok(Expr::Prefixed {
1632                    prefix: w.clone().into_ident(w_span),
1633                    value: self.parse_introduced_string_expr()?.into(),
1634                })
1635            }
1647            Token::Arrow if self.dialect.supports_lambda_functions() => {
1648                self.expect_token(&Token::Arrow)?;
1649                Ok(Expr::Lambda(LambdaFunction {
1650                    params: OneOrManyWithParens::One(w.clone().into_ident(w_span)),
1651                    body: Box::new(self.parse_expr()?),
1652                }))
1653            }
1654            _ => Ok(Expr::Identifier(w.clone().into_ident(w_span))),
1655        }
1656    }
1657
1658    /// Parse an expression prefix.
1659    pub fn parse_prefix(&mut self) -> Result<Expr, ParserError> {
1660        // allow the dialect to override prefix parsing
1661        if let Some(prefix) = self.dialect.parse_prefix(self) {
1662            return prefix;
1663        }
1664
1665        // PostgreSQL allows any string literal to be preceded by a type name, indicating that the
1666        // string literal represents a literal of that type. Some examples:
1667        //
1668        //      DATE '2020-05-20'
1669        //      TIMESTAMP WITH TIME ZONE '2020-05-20 7:43:54'
1670        //      BOOL 'true'
1671        //
1672        // The first two are standard SQL, while the latter is a PostgreSQL extension. Complicating
1673        // matters is the fact that INTERVAL string literals may optionally be followed by special
1674        // keywords, e.g.:
1675        //
1676        //      INTERVAL '7' DAY
1677        //
1678        // Note also that naively `SELECT date` looks like a syntax error because the `date` type
1679        // name is not followed by a string literal, but in fact in PostgreSQL it is a valid
1680        // expression that should parse as the column name "date".
1681        let loc = self.peek_token_ref().span.start;
1682        let opt_expr = self.maybe_parse(|parser| {
1683            match parser.parse_data_type()? {
1684                DataType::Interval { .. } => parser.parse_interval(),
1685                // PostgreSQL allows almost any identifier to be used as custom data type name,
1686                // and we support that in `parse_data_type()`. But unlike Postgres we don't
1687                // have a list of globally reserved keywords (since they vary across dialects),
1688                // so given `NOT 'a' LIKE 'b'`, we'd accept `NOT` as a possible custom data type
1689                // name, resulting in `NOT 'a'` being recognized as a `TypedString` instead of
1690                // an unary negation `NOT ('a' LIKE 'b')`. To solve this, we don't accept the
1691                // `type 'string'` syntax for the custom data types at all.
1692                DataType::Custom(..) => parser_err!("dummy", loc),
1693                data_type => Ok(Expr::TypedString(TypedString {
1694                    data_type,
1695                    value: parser.parse_value()?,
1696                    uses_odbc_syntax: false,
1697                })),
1698            }
1699        })?;
1700
1701        if let Some(expr) = opt_expr {
1702            return Ok(expr);
1703        }
1704
1705        // Cache some dialect properties to avoid lifetime issues with the
1706        // next_token reference.
1707
1708        let dialect = self.dialect;
1709
1710        self.advance_token();
1711        let next_token_index = self.get_current_index();
1712        let next_token = self.get_current_token();
1713        let span = next_token.span;
1714        let expr = match &next_token.token {
1715            Token::Word(w) => {
1716                // The word we consumed may fall into one of two cases: it has a special meaning, or not.
1717                // For example, in Snowflake, the word `interval` may have two meanings depending on the context:
1718                // `SELECT CURRENT_DATE() + INTERVAL '1 DAY', MAX(interval) FROM tbl;`
1719                //                          ^^^^^^^^^^^^^^^^      ^^^^^^^^
1720                //                         interval expression   identifier
1721                //
1722                // We first try to parse the word and following tokens as a special expression, and if that fails,
1723                // we rollback and try to parse it as an identifier.
1724                let w = w.clone();
1725                match self.try_parse(|parser| parser.parse_expr_prefix_by_reserved_word(&w, span)) {
1726                    // This word indicated an expression prefix and parsing was successful
1727                    Ok(Some(expr)) => Ok(expr),
1728
1729                    // No expression prefix associated with this word
1730                    Ok(None) => Ok(self.parse_expr_prefix_by_unreserved_word(&w, span)?),
1731
1732                    // If parsing of the word as a special expression failed, we are facing two options:
1733                    // 1. The statement is malformed, e.g. `SELECT INTERVAL '1 DAI` (`DAI` instead of `DAY`)
1734                    // 2. The word is used as an identifier, e.g. `SELECT MAX(interval) FROM tbl`
1735                    // We first try to parse the word as an identifier and if that fails
1736                    // we rollback and return the parsing error we got from trying to parse a
1737                    // special expression (to maintain backwards compatibility of parsing errors).
1738                    Err(e) => {
1739                        if !self.dialect.is_reserved_for_identifier(w.keyword) {
1740                            if let Ok(Some(expr)) = self.maybe_parse(|parser| {
1741                                parser.parse_expr_prefix_by_unreserved_word(&w, span)
1742                            }) {
1743                                return Ok(expr);
1744                            }
1745                        }
1746                        return Err(e);
1747                    }
1748                }
1749            } // End of Token::Word
1750            // array `[1, 2, 3]`
1751            Token::LBracket => self.parse_array_expr(false),
1752            tok @ Token::Minus | tok @ Token::Plus => {
1753                let op = if *tok == Token::Plus {
1754                    UnaryOperator::Plus
1755                } else {
1756                    UnaryOperator::Minus
1757                };
1758                Ok(Expr::UnaryOp {
1759                    op,
1760                    expr: Box::new(
1761                        self.parse_subexpr(self.dialect.prec_value(Precedence::MulDivModOp))?,
1762                    ),
1763                })
1764            }
1765            Token::ExclamationMark if dialect.supports_bang_not_operator() => Ok(Expr::UnaryOp {
1766                op: UnaryOperator::BangNot,
1767                expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
1768            }),
1769            tok @ Token::DoubleExclamationMark
1770            | tok @ Token::PGSquareRoot
1771            | tok @ Token::PGCubeRoot
1772            | tok @ Token::AtSign
1773            | tok @ Token::Tilde
1774                if dialect_is!(dialect is PostgreSqlDialect) =>
1775            {
1776                let op = match tok {
1777                    Token::DoubleExclamationMark => UnaryOperator::PGPrefixFactorial,
1778                    Token::PGSquareRoot => UnaryOperator::PGSquareRoot,
1779                    Token::PGCubeRoot => UnaryOperator::PGCubeRoot,
1780                    Token::AtSign => UnaryOperator::PGAbs,
1781                    Token::Tilde => UnaryOperator::PGBitwiseNot,
1782                    _ => unreachable!(),
1783                };
1784                Ok(Expr::UnaryOp {
1785                    op,
1786                    expr: Box::new(
1787                        self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1788                    ),
1789                })
1790            }
1791            tok @ Token::Sharp
1792            | tok @ Token::AtDashAt
1793            | tok @ Token::AtAt
1794            | tok @ Token::QuestionMarkDash
1795            | tok @ Token::QuestionPipe
1796                if self.dialect.supports_geometric_types() =>
1797            {
1798                let op = match tok {
1799                    Token::Sharp => UnaryOperator::Hash,
1800                    Token::AtDashAt => UnaryOperator::AtDashAt,
1801                    Token::AtAt => UnaryOperator::DoubleAt,
1802                    Token::QuestionMarkDash => UnaryOperator::QuestionDash,
1803                    Token::QuestionPipe => UnaryOperator::QuestionPipe,
1804                    _ => {
1805                        return Err(ParserError::ParserError(format!(
1806                            "Unexpected token in unary operator parsing: {tok:?}"
1807                        )))
1808                    }
1809                };
1810                Ok(Expr::UnaryOp {
1811                    op,
1812                    expr: Box::new(
1813                        self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1814                    ),
1815                })
1816            }
1817            Token::EscapedStringLiteral(_) if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) =>
1818            {
1819                self.prev_token();
1820                Ok(Expr::Value(self.parse_value()?))
1821            }
1822            Token::UnicodeStringLiteral(_) => {
1823                self.prev_token();
1824                Ok(Expr::Value(self.parse_value()?))
1825            }
1826            Token::Number(_, _)
1827            | Token::SingleQuotedString(_)
1828            | Token::DoubleQuotedString(_)
1829            | Token::TripleSingleQuotedString(_)
1830            | Token::TripleDoubleQuotedString(_)
1831            | Token::DollarQuotedString(_)
1832            | Token::SingleQuotedByteStringLiteral(_)
1833            | Token::DoubleQuotedByteStringLiteral(_)
1834            | Token::TripleSingleQuotedByteStringLiteral(_)
1835            | Token::TripleDoubleQuotedByteStringLiteral(_)
1836            | Token::SingleQuotedRawStringLiteral(_)
1837            | Token::DoubleQuotedRawStringLiteral(_)
1838            | Token::TripleSingleQuotedRawStringLiteral(_)
1839            | Token::TripleDoubleQuotedRawStringLiteral(_)
1840            | Token::NationalStringLiteral(_)
1841            | Token::HexStringLiteral(_) => {
1842                self.prev_token();
1843                Ok(Expr::Value(self.parse_value()?))
1844            }
1845            Token::LParen => {
1846                let expr = if let Some(expr) = self.try_parse_expr_sub_query()? {
1847                    expr
1848                } else if let Some(lambda) = self.try_parse_lambda()? {
1849                    return Ok(lambda);
1850                } else {
1851                    let exprs = self.parse_comma_separated(Parser::parse_expr)?;
1852                    match exprs.len() {
1853                        0 => unreachable!(), // parse_comma_separated ensures 1 or more
1854                        1 => Expr::Nested(Box::new(exprs.into_iter().next().unwrap())),
1855                        _ => Expr::Tuple(exprs),
1856                    }
1857                };
1858                self.expect_token(&Token::RParen)?;
1859                Ok(expr)
1860            }
1861            Token::Placeholder(_) | Token::Colon | Token::AtSign => {
1862                self.prev_token();
1863                Ok(Expr::Value(self.parse_value()?))
1864            }
1865            Token::LBrace => {
1866                self.prev_token();
1867                self.parse_lbrace_expr()
1868            }
1869            _ => self.expected_at("an expression", next_token_index),
1870        }?;
1871
1872        if !self.in_column_definition_state() && self.parse_keyword(Keyword::COLLATE) {
1873            Ok(Expr::Collate {
1874                expr: Box::new(expr),
1875                collation: self.parse_object_name(false)?,
1876            })
1877        } else {
1878            Ok(expr)
1879        }
1880    }
1881
1882    fn parse_geometric_type(&mut self, kind: GeometricTypeKind) -> Result<Expr, ParserError> {
1883        Ok(Expr::TypedString(TypedString {
1884            data_type: DataType::GeometricType(kind),
1885            value: self.parse_value()?,
1886            uses_odbc_syntax: false,
1887        }))
1888    }
1889
1890    /// Try to parse an [Expr::CompoundFieldAccess] like `a.b.c` or `a.b[1].c`.
1891    /// If all the fields are `Expr::Identifier`s, return an [Expr::CompoundIdentifier] instead.
1892    /// If only the root exists, return the root.
1893    /// Parses compound expressions which may be delimited by period
1894    /// or bracket notation.
1895    /// For example: `a.b.c`, `a.b[1]`.
1896    pub fn parse_compound_expr(
1897        &mut self,
1898        root: Expr,
1899        mut chain: Vec<AccessExpr>,
1900    ) -> Result<Expr, ParserError> {
1901        let mut ending_wildcard: Option<TokenWithSpan> = None;
1902        loop {
1903            if self.consume_token(&Token::Period) {
1904                let next_token = self.peek_token_ref();
1905                match &next_token.token {
1906                    Token::Mul => {
1907                        // Postgres explicitly allows funcnm(tablenm.*); functions
1908                        // such as array_agg take this code path.
1909                        if dialect_of!(self is PostgreSqlDialect) {
1910                            ending_wildcard = Some(self.next_token());
1911                        } else {
1912                            // Put back the consumed `.` tokens before exiting.
1913                            // If this expression is being parsed in the
1914                            // context of a projection, then the `.*` could imply
1915                            // a wildcard expansion. For example:
1916                            // `SELECT STRUCT('foo').* FROM T`
1917                            self.prev_token(); // .
1918                        }
1919
1920                        break;
1921                    }
1922                    Token::SingleQuotedString(s) => {
1923                        let expr =
1924                            Expr::Identifier(Ident::with_quote_and_span('\'', next_token.span, s));
1925                        chain.push(AccessExpr::Dot(expr));
1926                        self.advance_token(); // The consumed string
1927                    }
1928                    // Fallback to parsing an arbitrary expression.
1929                    _ => match self.parse_subexpr(self.dialect.prec_value(Precedence::Period))? {
1930                        // If we get back a compound field access or identifier,
1931                        // we flatten the nested expression.
1932                        // For example if the current root is `foo`
1933                        // and we get back a compound identifier expression `bar.baz`
1934                        // The full expression should be `foo.bar.baz` (i.e.
1935                        // a root with an access chain with 2 entries) and not
1936                        // `foo.(bar.baz)` (i.e. a root with an access chain with
1937                        // 1 entry).
1938                        Expr::CompoundFieldAccess { root, access_chain } => {
1939                            chain.push(AccessExpr::Dot(*root));
1940                            chain.extend(access_chain);
1941                        }
1942                        Expr::CompoundIdentifier(parts) => chain
1943                            .extend(parts.into_iter().map(Expr::Identifier).map(AccessExpr::Dot)),
1944                        expr => {
1945                            chain.push(AccessExpr::Dot(expr));
1946                        }
1947                    },
1948                }
1949            } else if !self.dialect.supports_partiql()
1950                && self.peek_token_ref().token == Token::LBracket
1951            {
1952                self.parse_multi_dim_subscript(&mut chain)?;
1953            } else {
1954                break;
1955            }
1956        }
1957
1958        let tok_index = self.get_current_index();
1959        if let Some(wildcard_token) = ending_wildcard {
1960            if !Self::is_all_ident(&root, &chain) {
1961                return self.expected("an identifier or a '*' after '.'", self.peek_token());
1962            };
1963            Ok(Expr::QualifiedWildcard(
1964                ObjectName::from(Self::exprs_to_idents(root, chain)?),
1965                AttachedToken(wildcard_token),
1966            ))
1967        } else if self.maybe_parse_outer_join_operator() {
1968            if !Self::is_all_ident(&root, &chain) {
1969                return self.expected_at("column identifier before (+)", tok_index);
1970            };
1971            let expr = if chain.is_empty() {
1972                root
1973            } else {
1974                Expr::CompoundIdentifier(Self::exprs_to_idents(root, chain)?)
1975            };
1976            Ok(Expr::OuterJoin(expr.into()))
1977        } else {
1978            Self::build_compound_expr(root, chain)
1979        }
1980    }
1981
1982    /// Combines a root expression and an access chain to form
1983    /// a compound expression, which may be an [Expr::CompoundFieldAccess]
1984    /// or another special-cased expression like [Expr::CompoundIdentifier]
1985    /// or [Expr::OuterJoin].
1986    fn build_compound_expr(
1987        root: Expr,
1988        mut access_chain: Vec<AccessExpr>,
1989    ) -> Result<Expr, ParserError> {
1990        if access_chain.is_empty() {
1991            return Ok(root);
1992        }
1993
1994        if Self::is_all_ident(&root, &access_chain) {
1995            return Ok(Expr::CompoundIdentifier(Self::exprs_to_idents(
1996                root,
1997                access_chain,
1998            )?));
1999        }
2000
2001        // Flatten qualified function calls.
2002        // For example, the expression `a.b.c.foo(1,2,3)` should
2003        // represent a function called `a.b.c.foo`, rather than
2004        // a composite expression.
2005        if matches!(root, Expr::Identifier(_))
2006            && matches!(
2007                access_chain.last(),
2008                Some(AccessExpr::Dot(Expr::Function(_)))
2009            )
2010            && access_chain
2011                .iter()
2012                .rev()
2013                .skip(1) // All except the Function
2014                .all(|access| matches!(access, AccessExpr::Dot(Expr::Identifier(_))))
2015        {
2016            let Some(AccessExpr::Dot(Expr::Function(mut func))) = access_chain.pop() else {
2017                return parser_err!("expected function expression", root.span().start);
2018            };
2019
2020            let compound_func_name = [root]
2021                .into_iter()
2022                .chain(access_chain.into_iter().flat_map(|access| match access {
2023                    AccessExpr::Dot(expr) => Some(expr),
2024                    _ => None,
2025                }))
2026                .flat_map(|expr| match expr {
2027                    Expr::Identifier(ident) => Some(ident),
2028                    _ => None,
2029                })
2030                .map(ObjectNamePart::Identifier)
2031                .chain(func.name.0)
2032                .collect::<Vec<_>>();
2033            func.name = ObjectName(compound_func_name);
2034
2035            return Ok(Expr::Function(func));
2036        }
2037
2038        // Flatten qualified outer join expressions.
2039        // For example, the expression `T.foo(+)` should
2040        // represent an outer join on the column name `T.foo`
2041        // rather than a composite expression.
2042        if access_chain.len() == 1
2043            && matches!(
2044                access_chain.last(),
2045                Some(AccessExpr::Dot(Expr::OuterJoin(_)))
2046            )
2047        {
2048            let Some(AccessExpr::Dot(Expr::OuterJoin(inner_expr))) = access_chain.pop() else {
2049                return parser_err!("expected (+) expression", root.span().start);
2050            };
2051
2052            if !Self::is_all_ident(&root, &[]) {
2053                return parser_err!("column identifier before (+)", root.span().start);
2054            };
2055
2056            let token_start = root.span().start;
2057            let mut idents = Self::exprs_to_idents(root, vec![])?;
2058            match *inner_expr {
2059                Expr::CompoundIdentifier(suffix) => idents.extend(suffix),
2060                Expr::Identifier(suffix) => idents.push(suffix),
2061                _ => {
2062                    return parser_err!("column identifier before (+)", token_start);
2063                }
2064            }
2065
2066            return Ok(Expr::OuterJoin(Expr::CompoundIdentifier(idents).into()));
2067        }
2068
2069        Ok(Expr::CompoundFieldAccess {
2070            root: Box::new(root),
2071            access_chain,
2072        })
2073    }
2074
2075    fn keyword_to_modifier(k: Keyword) -> Option<ContextModifier> {
2076        match k {
2077            Keyword::LOCAL => Some(ContextModifier::Local),
2078            Keyword::GLOBAL => Some(ContextModifier::Global),
2079            Keyword::SESSION => Some(ContextModifier::Session),
2080            _ => None,
2081        }
2082    }
2083
2084    /// Check if the root is an identifier and all fields are identifiers.
2085    fn is_all_ident(root: &Expr, fields: &[AccessExpr]) -> bool {
2086        if !matches!(root, Expr::Identifier(_)) {
2087            return false;
2088        }
2089        fields
2090            .iter()
2091            .all(|x| matches!(x, AccessExpr::Dot(Expr::Identifier(_))))
2092    }
2093
2094    /// Convert a root and a list of fields to a list of identifiers.
2095    fn exprs_to_idents(root: Expr, fields: Vec<AccessExpr>) -> Result<Vec<Ident>, ParserError> {
2096        let mut idents = vec![];
2097        if let Expr::Identifier(root) = root {
2098            idents.push(root);
2099            for x in fields {
2100                if let AccessExpr::Dot(Expr::Identifier(ident)) = x {
2101                    idents.push(ident);
2102                } else {
2103                    return parser_err!(
2104                        format!("Expected identifier, found: {}", x),
2105                        x.span().start
2106                    );
2107                }
2108            }
2109            Ok(idents)
2110        } else {
2111            parser_err!(
2112                format!("Expected identifier, found: {}", root),
2113                root.span().start
2114            )
2115        }
2116    }
2117
2118    /// Returns true if the next tokens indicate the outer join operator `(+)`.
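    ///
    /// `(+)` is the legacy Oracle-style outer join marker; an illustrative
    /// sketch (support is gated by `supports_outer_join_operator`):
    ///
    /// ```sql
    /// SELECT * FROM t1, t2 WHERE t1.id = t2.id (+)
    /// ```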
2119    fn peek_outer_join_operator(&mut self) -> bool {
2120        if !self.dialect.supports_outer_join_operator() {
2121            return false;
2122        }
2123
2124        let [maybe_lparen, maybe_plus, maybe_rparen] = self.peek_tokens_ref();
2125        Token::LParen == maybe_lparen.token
2126            && Token::Plus == maybe_plus.token
2127            && Token::RParen == maybe_rparen.token
2128    }
2129
2130    /// If the next tokens indicate the outer join operator `(+)`, consume
2131    /// the tokens and return true.
2132    fn maybe_parse_outer_join_operator(&mut self) -> bool {
2133        self.dialect.supports_outer_join_operator()
2134            && self.consume_tokens(&[Token::LParen, Token::Plus, Token::RParen])
2135    }
2136
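    /// Parses a parenthesized, comma-separated list of utility options, each an
    /// identifier with an optional argument expression. An illustrative example
    /// (a PostgreSQL-style `EXPLAIN` option list; other callers exist):
    ///
    /// ```sql
    /// (ANALYZE, VERBOSE true, FORMAT JSON)
    /// ```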
2137    pub fn parse_utility_options(&mut self) -> Result<Vec<UtilityOption>, ParserError> {
2138        self.expect_token(&Token::LParen)?;
2139        let options = self.parse_comma_separated(Self::parse_utility_option)?;
2140        self.expect_token(&Token::RParen)?;
2141
2142        Ok(options)
2143    }
2144
2145    fn parse_utility_option(&mut self) -> Result<UtilityOption, ParserError> {
2146        let name = self.parse_identifier()?;
2147
2148        let next_token = self.peek_token();
2149        if next_token == Token::Comma || next_token == Token::RParen {
2150            return Ok(UtilityOption { name, arg: None });
2151        }
2152        let arg = self.parse_expr()?;
2153
2154        Ok(UtilityOption {
2155            name,
2156            arg: Some(arg),
2157        })
2158    }
2159
2160    fn try_parse_expr_sub_query(&mut self) -> Result<Option<Expr>, ParserError> {
2161        if !self.peek_sub_query() {
2162            return Ok(None);
2163        }
2164
2165        Ok(Some(Expr::Subquery(self.parse_query()?)))
2166    }
2167
2168    fn try_parse_lambda(&mut self) -> Result<Option<Expr>, ParserError> {
2169        if !self.dialect.supports_lambda_functions() {
2170            return Ok(None);
2171        }
2172        self.maybe_parse(|p| {
2173            let params = p.parse_comma_separated(|p| p.parse_identifier())?;
2174            p.expect_token(&Token::RParen)?;
2175            p.expect_token(&Token::Arrow)?;
2176            let expr = p.parse_expr()?;
2177            Ok(Expr::Lambda(LambdaFunction {
2178                params: OneOrManyWithParens::Many(params),
2179                body: Box::new(expr),
2180            }))
2181        })
2182    }
2183
2184    /// Tries to parse the body of an [ODBC escaping sequence],
2185    /// i.e. without the enclosing braces. Currently implemented:
2186    /// - Scalar Function Calls
2187    /// - Date, Time, and Timestamp Literals
2188    ///
2189    /// [ODBC escaping sequence]: https://learn.microsoft.com/en-us/sql/odbc/reference/develop-app/escape-sequences-in-odbc?view=sql-server-2017
2190    fn maybe_parse_odbc_body(&mut self) -> Result<Option<Expr>, ParserError> {
2191        // Attempt 1: Try to parse it as a function.
2192        if let Some(expr) = self.maybe_parse_odbc_fn_body()? {
2193            return Ok(Some(expr));
2194        }
2195        // Attempt 2: Try to parse it as a Date, Time or Timestamp Literal
2196        self.maybe_parse_odbc_body_datetime()
2197    }
2198
2199    /// Tries to parse the body of an [ODBC Date, Time, and Timestamp Literals] call.
2200    ///
2201    /// ```sql
2202    /// {d '2025-07-17'}
2203    /// {t '14:12:01'}
2204    /// {ts '2025-07-17 14:12:01'}
2205    /// ```
2206    ///
2207    /// [ODBC Date, Time, and Timestamp Literals]:
2208    /// https://learn.microsoft.com/en-us/sql/odbc/reference/develop-app/date-time-and-timestamp-literals?view=sql-server-2017
2209    fn maybe_parse_odbc_body_datetime(&mut self) -> Result<Option<Expr>, ParserError> {
2210        self.maybe_parse(|p| {
2211            let token = p.next_token().clone();
2212            let word_string = token.token.to_string();
2213            let data_type = match word_string.as_str() {
2214                "t" => DataType::Time(None, TimezoneInfo::None),
2215                "d" => DataType::Date,
2216                "ts" => DataType::Timestamp(None, TimezoneInfo::None),
2217                _ => return p.expected("ODBC datetime keyword (t, d, or ts)", token),
2218            };
2219            let value = p.parse_value()?;
2220            Ok(Expr::TypedString(TypedString {
2221                data_type,
2222                value,
2223                uses_odbc_syntax: true,
2224            }))
2225        })
2226    }
2227
2228    /// Tries to parse the body of an [ODBC function] call,
2229    /// i.e. without the enclosing braces.
2230    ///
2231    /// ```sql
2232    /// fn myfunc(1,2,3)
2233    /// ```
2234    ///
2235    /// [ODBC function]: https://learn.microsoft.com/en-us/sql/odbc/reference/develop-app/scalar-function-calls?view=sql-server-2017
2236    fn maybe_parse_odbc_fn_body(&mut self) -> Result<Option<Expr>, ParserError> {
2237        self.maybe_parse(|p| {
2238            p.expect_keyword(Keyword::FN)?;
2239            let fn_name = p.parse_object_name(false)?;
2240            let mut fn_call = p.parse_function_call(fn_name)?;
2241            fn_call.uses_odbc_syntax = true;
2242            Ok(Expr::Function(fn_call))
2243        })
2244    }
2245
2246    pub fn parse_function(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2247        self.parse_function_call(name).map(Expr::Function)
2248    }
2249
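    /// Parses the argument list and trailing clauses of a function call, the
    /// function name having already been consumed. A rough sketch of the shapes
    /// handled below (illustrative only; not every clause is supported by every
    /// dialect):
    ///
    /// ```sql
    /// COUNT(DISTINCT x)
    /// PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY x)
    /// COUNT(*) FILTER (WHERE x > 0)
    /// ROW_NUMBER() OVER (PARTITION BY dept ORDER BY salary)
    /// ```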
2250    fn parse_function_call(&mut self, name: ObjectName) -> Result<Function, ParserError> {
2251        self.expect_token(&Token::LParen)?;
2252
2253        // Snowflake permits a subquery to be passed as an argument without
2254        // an enclosing set of parens if it's the only argument.
2255        if dialect_of!(self is SnowflakeDialect) && self.peek_sub_query() {
2256            let subquery = self.parse_query()?;
2257            self.expect_token(&Token::RParen)?;
2258            return Ok(Function {
2259                name,
2260                uses_odbc_syntax: false,
2261                parameters: FunctionArguments::None,
2262                args: FunctionArguments::Subquery(subquery),
2263                filter: None,
2264                null_treatment: None,
2265                over: None,
2266                within_group: vec![],
2267            });
2268        }
2269
2270        let mut args = self.parse_function_argument_list()?;
2271        let mut parameters = FunctionArguments::None;
2272        // ClickHouse aggregations support parametric functions like `HISTOGRAM(0.5, 0.6)(x, y)`
2273        // in which `(0.5, 0.6)` is the parameter list of the function.
2274        if dialect_of!(self is ClickHouseDialect | GenericDialect)
2275            && self.consume_token(&Token::LParen)
2276        {
2277            parameters = FunctionArguments::List(args);
2278            args = self.parse_function_argument_list()?;
2279        }
2280
2281        let within_group = if self.parse_keywords(&[Keyword::WITHIN, Keyword::GROUP]) {
2282            self.expect_token(&Token::LParen)?;
2283            self.expect_keywords(&[Keyword::ORDER, Keyword::BY])?;
2284            let order_by = self.parse_comma_separated(Parser::parse_order_by_expr)?;
2285            self.expect_token(&Token::RParen)?;
2286            order_by
2287        } else {
2288            vec![]
2289        };
2290
2291        let filter = if self.dialect.supports_filter_during_aggregation()
2292            && self.parse_keyword(Keyword::FILTER)
2293            && self.consume_token(&Token::LParen)
2294            && self.parse_keyword(Keyword::WHERE)
2295        {
2296            let filter = Some(Box::new(self.parse_expr()?));
2297            self.expect_token(&Token::RParen)?;
2298            filter
2299        } else {
2300            None
2301        };
2302
2303        // Syntax for null treatment shows up either in the args list
2304        // or after the function call, but not both.
2305        let null_treatment = if args
2306            .clauses
2307            .iter()
2308            .all(|clause| !matches!(clause, FunctionArgumentClause::IgnoreOrRespectNulls(_)))
2309        {
2310            self.parse_null_treatment()?
2311        } else {
2312            None
2313        };
2314
2315        let over = if self.parse_keyword(Keyword::OVER) {
2316            if self.consume_token(&Token::LParen) {
2317                let window_spec = self.parse_window_spec()?;
2318                Some(WindowType::WindowSpec(window_spec))
2319            } else {
2320                Some(WindowType::NamedWindow(self.parse_identifier()?))
2321            }
2322        } else {
2323            None
2324        };
2325
2326        Ok(Function {
2327            name,
2328            uses_odbc_syntax: false,
2329            parameters,
2330            args: FunctionArguments::List(args),
2331            null_treatment,
2332            filter,
2333            over,
2334            within_group,
2335        })
2336    }
2337
2338    /// Optionally parses a null treatment clause.
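    ///
    /// For example, `IGNORE NULLS` or `RESPECT NULLS` as they appear in window
    /// functions (illustrative only):
    ///
    /// ```sql
    /// LAG(x) IGNORE NULLS OVER (ORDER BY ts)
    /// ```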
2339    fn parse_null_treatment(&mut self) -> Result<Option<NullTreatment>, ParserError> {
2340        match self.parse_one_of_keywords(&[Keyword::RESPECT, Keyword::IGNORE]) {
2341            Some(keyword) => {
2342                self.expect_keyword_is(Keyword::NULLS)?;
2343
2344                Ok(match keyword {
2345                    Keyword::RESPECT => Some(NullTreatment::RespectNulls),
2346                    Keyword::IGNORE => Some(NullTreatment::IgnoreNulls),
2347                    _ => None,
2348                })
2349            }
2350            None => Ok(None),
2351        }
2352    }
2353
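    /// Parses the remainder of a datetime function such as `CURRENT_TIMESTAMP`
    /// whose name has already been consumed; the parenthesized argument list is
    /// optional. Illustrative examples (the precision argument is an assumption
    /// about the caller's dialect, not a requirement of this method):
    ///
    /// ```sql
    /// CURRENT_TIMESTAMP
    /// CURRENT_TIME(3)
    /// ```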
2354    pub fn parse_time_functions(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2355        let args = if self.consume_token(&Token::LParen) {
2356            FunctionArguments::List(self.parse_function_argument_list()?)
2357        } else {
2358            FunctionArguments::None
2359        };
2360        Ok(Expr::Function(Function {
2361            name,
2362            uses_odbc_syntax: false,
2363            parameters: FunctionArguments::None,
2364            args,
2365            filter: None,
2366            over: None,
2367            null_treatment: None,
2368            within_group: vec![],
2369        }))
2370    }
2371
2372    pub fn parse_window_frame_units(&mut self) -> Result<WindowFrameUnits, ParserError> {
2373        let next_token = self.next_token();
2374        match &next_token.token {
2375            Token::Word(w) => match w.keyword {
2376                Keyword::ROWS => Ok(WindowFrameUnits::Rows),
2377                Keyword::RANGE => Ok(WindowFrameUnits::Range),
2378                Keyword::GROUPS => Ok(WindowFrameUnits::Groups),
2379                _ => self.expected("ROWS, RANGE, GROUPS", next_token)?,
2380            },
2381            _ => self.expected("ROWS, RANGE, GROUPS", next_token),
2382        }
2383    }
2384
2385    pub fn parse_window_frame(&mut self) -> Result<WindowFrame, ParserError> {
2386        let units = self.parse_window_frame_units()?;
2387        let (start_bound, end_bound) = if self.parse_keyword(Keyword::BETWEEN) {
2388            let start_bound = self.parse_window_frame_bound()?;
2389            self.expect_keyword_is(Keyword::AND)?;
2390            let end_bound = Some(self.parse_window_frame_bound()?);
2391            (start_bound, end_bound)
2392        } else {
2393            (self.parse_window_frame_bound()?, None)
2394        };
2395        Ok(WindowFrame {
2396            units,
2397            start_bound,
2398            end_bound,
2399        })
2400    }
2401
2402    /// Parse `CURRENT ROW` or `{ <positive number> | UNBOUNDED } { PRECEDING | FOLLOWING }`
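    ///
    /// For example, the bound portions of framing clauses such as
    /// (illustrative only):
    ///
    /// ```sql
    /// ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW
    /// ROWS 5 PRECEDING
    /// ```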
2403    pub fn parse_window_frame_bound(&mut self) -> Result<WindowFrameBound, ParserError> {
2404        if self.parse_keywords(&[Keyword::CURRENT, Keyword::ROW]) {
2405            Ok(WindowFrameBound::CurrentRow)
2406        } else {
2407            let rows = if self.parse_keyword(Keyword::UNBOUNDED) {
2408                None
2409            } else {
2410                Some(Box::new(match self.peek_token().token {
2411                    Token::SingleQuotedString(_) => self.parse_interval()?,
2412                    _ => self.parse_expr()?,
2413                }))
2414            };
2415            if self.parse_keyword(Keyword::PRECEDING) {
2416                Ok(WindowFrameBound::Preceding(rows))
2417            } else if self.parse_keyword(Keyword::FOLLOWING) {
2418                Ok(WindowFrameBound::Following(rows))
2419            } else {
2420                self.expected("PRECEDING or FOLLOWING", self.peek_token())
2421            }
2422        }
2423    }
2424
2425    /// Parse a group by expr, which can be a `GROUPING SETS`, `ROLLUP`, or `CUBE` construct, or a simple expression.
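    ///
    /// For example, any single element of a `GROUP BY` list (illustrative
    /// inputs; dialect support varies):
    ///
    /// ```sql
    /// GROUP BY GROUPING SETS ((a), (a, b), ())
    /// GROUP BY ROLLUP (a, b)
    /// GROUP BY CUBE (a, b)
    /// GROUP BY a, b
    /// ```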
2426    fn parse_group_by_expr(&mut self) -> Result<Expr, ParserError> {
2427        if self.dialect.supports_group_by_expr() {
2428            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
2429                self.expect_token(&Token::LParen)?;
2430                let result = self.parse_comma_separated(|p| {
2431                    if p.parse_keyword(Keyword::ROLLUP) {
2432                        p.expect_token(&Token::LParen)?;
2433                        let rollup_exprs = p.parse_comma_separated(|p2| p2.parse_tuple(true, true))?;
2434                        p.expect_token(&Token::RParen)?;
2435                        Ok(vec![Expr::Rollup(rollup_exprs)])
2436                    } else if p.parse_keyword(Keyword::CUBE) {
2437                        p.expect_token(&Token::LParen)?;
2438                        let cube_exprs = p.parse_comma_separated(|p2| p2.parse_tuple(true, true))?;
2439                        p.expect_token(&Token::RParen)?;
2440                        Ok(vec![Expr::Cube(cube_exprs)])
2441                    } else {
2442                        p.parse_tuple(false, true)
2443                    }
2444                })?;
2445                self.expect_token(&Token::RParen)?;
2446                Ok(Expr::GroupingSets(result))
2447            } else if self.parse_keyword(Keyword::CUBE) {
2448                self.expect_token(&Token::LParen)?;
2449                let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2450                self.expect_token(&Token::RParen)?;
2451                Ok(Expr::Cube(result))
2452            } else if self.parse_keyword(Keyword::ROLLUP) {
2453                self.expect_token(&Token::LParen)?;
2454                let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2455                self.expect_token(&Token::RParen)?;
2456                Ok(Expr::Rollup(result))
2457            } else if self.consume_tokens(&[Token::LParen, Token::RParen]) {
2458                // PostgreSQL allows an empty tuple as a group by expression,
2459                // e.g. `GROUP BY (), name`. Please refer to GROUP BY Clause section in
2460                // [PostgreSQL](https://www.postgresql.org/docs/16/sql-select.html)
2461                Ok(Expr::Tuple(vec![]))
2462            } else {
2463                self.parse_expr()
2464            }
2465        } else {
2466            // TODO parse rollup for other dialects
2467            self.parse_expr()
2468        }
2469    }
2470
2471    /// Parse a tuple with `(` and `)`.
2472    /// If `lift_singleton` is true, a single expression without parentheses is accepted and lifted into a tuple of length 1; otherwise the parentheses are required.
2473    /// If `allow_empty` is true, an empty tuple `()` is allowed.
2474    fn parse_tuple(
2475        &mut self,
2476        lift_singleton: bool,
2477        allow_empty: bool,
2478    ) -> Result<Vec<Expr>, ParserError> {
2479        if lift_singleton {
2480            if self.consume_token(&Token::LParen) {
2481                let result = if allow_empty && self.consume_token(&Token::RParen) {
2482                    vec![]
2483                } else {
2484                    let result = self.parse_comma_separated(Parser::parse_expr)?;
2485                    self.expect_token(&Token::RParen)?;
2486                    result
2487                };
2488                Ok(result)
2489            } else {
2490                Ok(vec![self.parse_expr()?])
2491            }
2492        } else {
2493            self.expect_token(&Token::LParen)?;
2494            let result = if allow_empty && self.consume_token(&Token::RParen) {
2495                vec![]
2496            } else {
2497                let result = self.parse_comma_separated(Parser::parse_expr)?;
2498                self.expect_token(&Token::RParen)?;
2499                result
2500            };
2501            Ok(result)
2502        }
2503    }
2504
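    /// Parses a `CASE` expression, the `CASE` keyword itself having already
    /// been consumed. Both the searched and the simple form are handled
    /// (illustrative examples):
    ///
    /// ```sql
    /// CASE WHEN a > 0 THEN 'positive' ELSE 'non-positive' END
    /// CASE a WHEN 1 THEN 'one' WHEN 2 THEN 'two' END
    /// ```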
2505    pub fn parse_case_expr(&mut self) -> Result<Expr, ParserError> {
2506        let case_token = AttachedToken(self.get_current_token().clone());
2507        let mut operand = None;
2508        if !self.parse_keyword(Keyword::WHEN) {
2509            operand = Some(Box::new(self.parse_expr()?));
2510            self.expect_keyword_is(Keyword::WHEN)?;
2511        }
2512        let mut conditions = vec![];
2513        loop {
2514            let condition = self.parse_expr()?;
2515            self.expect_keyword_is(Keyword::THEN)?;
2516            let result = self.parse_expr()?;
2517            conditions.push(CaseWhen { condition, result });
2518            if !self.parse_keyword(Keyword::WHEN) {
2519                break;
2520            }
2521        }
2522        let else_result = if self.parse_keyword(Keyword::ELSE) {
2523            Some(Box::new(self.parse_expr()?))
2524        } else {
2525            None
2526        };
2527        let end_token = AttachedToken(self.expect_keyword(Keyword::END)?);
2528        Ok(Expr::Case {
2529            case_token,
2530            end_token,
2531            operand,
2532            conditions,
2533            else_result,
2534        })
2535    }
2536
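    /// Parses the optional `FORMAT` clause of a cast, e.g. BigQuery-style
    /// format strings with an optional time zone (illustrative examples):
    ///
    /// ```sql
    /// CAST(d AS STRING FORMAT 'YYYY-MM-DD')
    /// CAST(ts AS STRING FORMAT 'YYYY-MM-DD HH24:MI' AT TIME ZONE 'UTC')
    /// ```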
2537    pub fn parse_optional_cast_format(&mut self) -> Result<Option<CastFormat>, ParserError> {
2538        if self.parse_keyword(Keyword::FORMAT) {
2539            let value = self.parse_value()?.value;
2540            match self.parse_optional_time_zone()? {
2541                Some(tz) => Ok(Some(CastFormat::ValueAtTimeZone(value, tz))),
2542                None => Ok(Some(CastFormat::Value(value))),
2543            }
2544        } else {
2545            Ok(None)
2546        }
2547    }
2548
2549    pub fn parse_optional_time_zone(&mut self) -> Result<Option<Value>, ParserError> {
2550        if self.parse_keywords(&[Keyword::AT, Keyword::TIME, Keyword::ZONE]) {
2551            self.parse_value().map(|v| Some(v.value))
2552        } else {
2553            Ok(None)
2554        }
2555    }
2556
2557    /// Parse an MSSQL-style `CONVERT` function, where the target data type precedes the value expression.
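    ///
    /// For example (illustrative; the optional trailing argument is a style
    /// code):
    ///
    /// ```sql
    /// CONVERT(VARCHAR(10), created_at, 120)
    /// ```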
2558    fn parse_mssql_convert(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2559        self.expect_token(&Token::LParen)?;
2560        let data_type = self.parse_data_type()?;
2561        self.expect_token(&Token::Comma)?;
2562        let expr = self.parse_expr()?;
2563        let styles = if self.consume_token(&Token::Comma) {
2564            self.parse_comma_separated(Parser::parse_expr)?
2565        } else {
2566            Default::default()
2567        };
2568        self.expect_token(&Token::RParen)?;
2569        Ok(Expr::Convert {
2570            is_try,
2571            expr: Box::new(expr),
2572            data_type: Some(data_type),
2573            charset: None,
2574            target_before_value: true,
2575            styles,
2576        })
2577    }
2578
2579    /// Parse a SQL CONVERT function:
2580    ///  - `CONVERT('héhé' USING utf8mb4)` (MySQL)
2581    ///  - `CONVERT('héhé', CHAR CHARACTER SET utf8mb4)` (MySQL)
2582    ///  - `CONVERT(DECIMAL(10, 5), 42)` (MSSQL) - the type comes first
2583    pub fn parse_convert_expr(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2584        if self.dialect.convert_type_before_value() {
2585            return self.parse_mssql_convert(is_try);
2586        }
2587        self.expect_token(&Token::LParen)?;
2588        let expr = self.parse_expr()?;
2589        if self.parse_keyword(Keyword::USING) {
2590            let charset = self.parse_object_name(false)?;
2591            self.expect_token(&Token::RParen)?;
2592            return Ok(Expr::Convert {
2593                is_try,
2594                expr: Box::new(expr),
2595                data_type: None,
2596                charset: Some(charset),
2597                target_before_value: false,
2598                styles: vec![],
2599            });
2600        }
2601        self.expect_token(&Token::Comma)?;
2602        let data_type = self.parse_data_type()?;
2603        let charset = if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
2604            Some(self.parse_object_name(false)?)
2605        } else {
2606            None
2607        };
2608        self.expect_token(&Token::RParen)?;
2609        Ok(Expr::Convert {
2610            is_try,
2611            expr: Box::new(expr),
2612            data_type: Some(data_type),
2613            charset,
2614            target_before_value: false,
2615            styles: vec![],
2616        })
2617    }
2618
2619    /// Parse a SQL CAST function e.g. `CAST(expr AS FLOAT)`
2620    pub fn parse_cast_expr(&mut self, kind: CastKind) -> Result<Expr, ParserError> {
2621        self.expect_token(&Token::LParen)?;
2622        let expr = self.parse_expr()?;
2623        self.expect_keyword_is(Keyword::AS)?;
2624        let data_type = self.parse_data_type()?;
2625        let format = self.parse_optional_cast_format()?;
2626        self.expect_token(&Token::RParen)?;
2627        Ok(Expr::Cast {
2628            kind,
2629            expr: Box::new(expr),
2630            data_type,
2631            format,
2632        })
2633    }
2634
2635    /// Parse a SQL EXISTS expression e.g. `WHERE EXISTS(SELECT ...)`.
2636    pub fn parse_exists_expr(&mut self, negated: bool) -> Result<Expr, ParserError> {
2637        self.expect_token(&Token::LParen)?;
2638        let exists_node = Expr::Exists {
2639            negated,
2640            subquery: self.parse_query()?,
2641        };
2642        self.expect_token(&Token::RParen)?;
2643        Ok(exists_node)
2644    }
2645
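    /// Parses an `EXTRACT` expression after the `EXTRACT` keyword has been
    /// consumed. Illustrative examples (the comma form is only accepted for
    /// some dialects):
    ///
    /// ```sql
    /// EXTRACT(YEAR FROM order_date)
    /// EXTRACT(YEAR, order_date)
    /// ```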
2646    pub fn parse_extract_expr(&mut self) -> Result<Expr, ParserError> {
2647        self.expect_token(&Token::LParen)?;
2648        let field = self.parse_date_time_field()?;
2649
2650        let syntax = if self.parse_keyword(Keyword::FROM) {
2651            ExtractSyntax::From
2652        } else if self.consume_token(&Token::Comma)
2653            && dialect_of!(self is SnowflakeDialect | GenericDialect)
2654        {
2655            ExtractSyntax::Comma
2656        } else {
2657            return Err(ParserError::ParserError(
2658                "Expected 'FROM' or ','".to_string(),
2659            ));
2660        };
2661
2662        let expr = self.parse_expr()?;
2663        self.expect_token(&Token::RParen)?;
2664        Ok(Expr::Extract {
2665            field,
2666            expr: Box::new(expr),
2667            syntax,
2668        })
2669    }
2670
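    /// Parses a `CEIL`/`FLOOR` expression after the keyword, in one of the
    /// forms sketched below (illustrative only):
    ///
    /// ```sql
    /// CEIL(price)
    /// CEIL(order_date TO DAY)
    /// FLOOR(price, 2)
    /// ```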
2671    pub fn parse_ceil_floor_expr(&mut self, is_ceil: bool) -> Result<Expr, ParserError> {
2672        self.expect_token(&Token::LParen)?;
2673        let expr = self.parse_expr()?;
2674        // Parse `CEIL/FLOOR(expr)`
2675        let field = if self.parse_keyword(Keyword::TO) {
2676            // Parse `CEIL/FLOOR(expr TO DateTimeField)`
2677            CeilFloorKind::DateTimeField(self.parse_date_time_field()?)
2678        } else if self.consume_token(&Token::Comma) {
2679            // Parse `CEIL/FLOOR(expr, scale)`
2680            match self.parse_value()?.value {
2681                Value::Number(n, s) => CeilFloorKind::Scale(Value::Number(n, s)),
2682                _ => {
2683                    return Err(ParserError::ParserError(
2684                        "Scale field can only be of number type".to_string(),
2685                    ))
2686                }
2687            }
2688        } else {
2689            CeilFloorKind::DateTimeField(DateTimeField::NoDateTime)
2690        };
2691        self.expect_token(&Token::RParen)?;
2692        if is_ceil {
2693            Ok(Expr::Ceil {
2694                expr: Box::new(expr),
2695                field,
2696            })
2697        } else {
2698            Ok(Expr::Floor {
2699                expr: Box::new(expr),
2700                field,
2701            })
2702        }
2703    }
2704
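    /// Parse a `POSITION` expression, e.g. `POSITION('@' IN email)`.
    ///
    /// If the special `IN` form does not apply, `POSITION` is parsed as an
    /// ordinary function call (as supported by e.g. Snowflake).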
2705    pub fn parse_position_expr(&mut self, ident: Ident) -> Result<Expr, ParserError> {
2706        let between_prec = self.dialect.prec_value(Precedence::Between);
2707        let position_expr = self.maybe_parse(|p| {
2708            // Parse `POSITION('@' IN field)`
2709            p.expect_token(&Token::LParen)?;
2710
2711            // Parse the subexpr till the IN keyword
2712            let expr = p.parse_subexpr(between_prec)?;
2713            p.expect_keyword_is(Keyword::IN)?;
2714            let from = p.parse_expr()?;
2715            p.expect_token(&Token::RParen)?;
2716            Ok(Expr::Position {
2717                expr: Box::new(expr),
2718                r#in: Box::new(from),
2719            })
2720        })?;
2721        match position_expr {
2722            Some(expr) => Ok(expr),
2723            // Snowflake supports `position` as an ordinary function call
2724            // without the special `IN` syntax.
2725            None => self.parse_function(ObjectName::from(vec![ident])),
2726        }
2727    }
2728
2729    /// Parse a `{ SUBSTRING | SUBSTR }(<expr> [FROM <expr>] [FOR <expr>])` expression,
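    /// e.g. `SUBSTRING('SQL Tutorial' FROM 1 FOR 3)` or, using the comma form,
    /// `SUBSTR('SQL Tutorial', 1, 3)`.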
2730    pub fn parse_substring(&mut self) -> Result<Expr, ParserError> {
2731        let shorthand = match self.expect_one_of_keywords(&[Keyword::SUBSTR, Keyword::SUBSTRING])? {
2732            Keyword::SUBSTR => true,
2733            Keyword::SUBSTRING => false,
2734            _ => {
2735                self.prev_token();
2736                return self.expected("SUBSTR or SUBSTRING", self.peek_token());
2737            }
2738        };
2739        self.expect_token(&Token::LParen)?;
2740        let expr = self.parse_expr()?;
2741        let mut from_expr = None;
2742        let special = self.consume_token(&Token::Comma);
2743        if special || self.parse_keyword(Keyword::FROM) {
2744            from_expr = Some(self.parse_expr()?);
2745        }
2746
2747        let mut to_expr = None;
2748        if self.parse_keyword(Keyword::FOR) || self.consume_token(&Token::Comma) {
2749            to_expr = Some(self.parse_expr()?);
2750        }
2751        self.expect_token(&Token::RParen)?;
2752
2753        Ok(Expr::Substring {
2754            expr: Box::new(expr),
2755            substring_from: from_expr.map(Box::new),
2756            substring_for: to_expr.map(Box::new),
2757            special,
2758            shorthand,
2759        })
2760    }
2761
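    /// Parse an `OVERLAY` expression,
    /// e.g. `OVERLAY('abcdef' PLACING 'xyz' FROM 2 FOR 3)`.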
2762    pub fn parse_overlay_expr(&mut self) -> Result<Expr, ParserError> {
2763        // Parse `OVERLAY(<expr> PLACING <expr> FROM <expr> [FOR <expr>])`
2764        self.expect_token(&Token::LParen)?;
2765        let expr = self.parse_expr()?;
2766        self.expect_keyword_is(Keyword::PLACING)?;
2767        let what_expr = self.parse_expr()?;
2768        self.expect_keyword_is(Keyword::FROM)?;
2769        let from_expr = self.parse_expr()?;
2770        let mut for_expr = None;
2771        if self.parse_keyword(Keyword::FOR) {
2772            for_expr = Some(self.parse_expr()?);
2773        }
2774        self.expect_token(&Token::RParen)?;
2775
2776        Ok(Expr::Overlay {
2777            expr: Box::new(expr),
2778            overlay_what: Box::new(what_expr),
2779            overlay_from: Box::new(from_expr),
2780            overlay_for: for_expr.map(Box::new),
2781        })
2782    }
2783
2784    /// ```sql
2785    /// TRIM ([BOTH | LEADING | TRAILING] ['text' FROM] 'text')
2786    /// TRIM ('text')
2787    /// TRIM(<expr> [, characters]) -- only DuckDB, Snowflake, BigQuery, or the generic dialect
2788    /// ```
2789    pub fn parse_trim_expr(&mut self) -> Result<Expr, ParserError> {
2790        self.expect_token(&Token::LParen)?;
2791        let mut trim_where = None;
2792        if let Token::Word(word) = self.peek_token().token {
2793            if [Keyword::BOTH, Keyword::LEADING, Keyword::TRAILING].contains(&word.keyword) {
2794                trim_where = Some(self.parse_trim_where()?);
2795            }
2796        }
2797        let expr = self.parse_expr()?;
2798        if self.parse_keyword(Keyword::FROM) {
2799            let trim_what = Box::new(expr);
2800            let expr = self.parse_expr()?;
2801            self.expect_token(&Token::RParen)?;
2802            Ok(Expr::Trim {
2803                expr: Box::new(expr),
2804                trim_where,
2805                trim_what: Some(trim_what),
2806                trim_characters: None,
2807            })
2808        } else if self.consume_token(&Token::Comma)
2809            && dialect_of!(self is DuckDbDialect | SnowflakeDialect | BigQueryDialect | GenericDialect)
2810        {
2811            let characters = self.parse_comma_separated(Parser::parse_expr)?;
2812            self.expect_token(&Token::RParen)?;
2813            Ok(Expr::Trim {
2814                expr: Box::new(expr),
2815                trim_where: None,
2816                trim_what: None,
2817                trim_characters: Some(characters),
2818            })
2819        } else {
2820            self.expect_token(&Token::RParen)?;
2821            Ok(Expr::Trim {
2822                expr: Box::new(expr),
2823                trim_where,
2824                trim_what: None,
2825                trim_characters: None,
2826            })
2827        }
2828    }
2829
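    /// Parse the `BOTH`, `LEADING`, or `TRAILING` qualifier of a `TRIM` expression.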
2830    pub fn parse_trim_where(&mut self) -> Result<TrimWhereField, ParserError> {
2831        let next_token = self.next_token();
2832        match &next_token.token {
2833            Token::Word(w) => match w.keyword {
2834                Keyword::BOTH => Ok(TrimWhereField::Both),
2835                Keyword::LEADING => Ok(TrimWhereField::Leading),
2836                Keyword::TRAILING => Ok(TrimWhereField::Trailing),
2837                _ => self.expected("trim_where field", next_token)?,
2838            },
2839            _ => self.expected("trim_where field", next_token),
2840        }
2841    }
2842
2843    /// Parses an array expression `[ex1, ex2, ..]`
2844    /// If `named` is `true`, the array came from an expression like `ARRAY[ex1, ex2]`.
2845    pub fn parse_array_expr(&mut self, named: bool) -> Result<Expr, ParserError> {
2846        let exprs = self.parse_comma_separated0(Parser::parse_expr, Token::RBracket)?;
2847        self.expect_token(&Token::RBracket)?;
2848        Ok(Expr::Array(Array {
2849            elem: exprs,
2850            named,
2851            element_type: None,
2852        }))
2853    }
2854
2855    /// Parses a typed array expression `ARRAY<type>[ex1, ex2, ..]`
2856    pub fn parse_typed_array_expr(&mut self, element_type: DataType) -> Result<Expr, ParserError> {
2857        self.expect_token(&Token::LBracket)?;
2858        let exprs = self.parse_comma_separated0(Parser::parse_expr, Token::RBracket)?;
2859        self.expect_token(&Token::RBracket)?;
2860        Ok(Expr::Array(Array {
2861            elem: exprs,
2862            named: true,
2863            element_type: Some(Box::new(element_type)),
2864        }))
2865    }
2866
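    /// Parse the optional `ON OVERFLOW` clause of `LISTAGG`,
    /// e.g. `ON OVERFLOW ERROR` or `ON OVERFLOW TRUNCATE '...' WITH COUNT`.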
2867    pub fn parse_listagg_on_overflow(&mut self) -> Result<Option<ListAggOnOverflow>, ParserError> {
2868        if self.parse_keywords(&[Keyword::ON, Keyword::OVERFLOW]) {
2869            if self.parse_keyword(Keyword::ERROR) {
2870                Ok(Some(ListAggOnOverflow::Error))
2871            } else {
2872                self.expect_keyword_is(Keyword::TRUNCATE)?;
2873                let filler = match self.peek_token().token {
2874                    Token::Word(w)
2875                        if w.keyword == Keyword::WITH || w.keyword == Keyword::WITHOUT =>
2876                    {
2877                        None
2878                    }
2879                    Token::SingleQuotedString(_)
2880                    | Token::EscapedStringLiteral(_)
2881                    | Token::UnicodeStringLiteral(_)
2882                    | Token::NationalStringLiteral(_)
2883                    | Token::HexStringLiteral(_) => Some(Box::new(self.parse_expr()?)),
2884                    _ => self.expected(
2885                        "either filler, WITH, or WITHOUT in LISTAGG",
2886                        self.peek_token(),
2887                    )?,
2888                };
2889                let with_count = self.parse_keyword(Keyword::WITH);
2890                if !with_count && !self.parse_keyword(Keyword::WITHOUT) {
2891                    self.expected("either WITH or WITHOUT in LISTAGG", self.peek_token())?;
2892                }
2893                self.expect_keyword_is(Keyword::COUNT)?;
2894                Ok(Some(ListAggOnOverflow::Truncate { filler, with_count }))
2895            }
2896        } else {
2897            Ok(None)
2898        }
2899    }
2900
2901    // This function parses date/time fields for the EXTRACT function-like
2902    // operator, interval qualifiers, and the ceil/floor operations.
2903    // EXTRACT supports a wider set of date/time fields than interval qualifiers,
2904    // so this function may need to be split in two.
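    /// Parse a date/time field such as `YEAR`, `MONTH`, or `DAY`, e.g. the
    /// `YEAR` in `EXTRACT(YEAR FROM ts)` or `INTERVAL '1' YEAR`.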
2905    pub fn parse_date_time_field(&mut self) -> Result<DateTimeField, ParserError> {
2906        let next_token = self.next_token();
2907        match &next_token.token {
2908            Token::Word(w) => match w.keyword {
2909                Keyword::YEAR => Ok(DateTimeField::Year),
2910                Keyword::YEARS => Ok(DateTimeField::Years),
2911                Keyword::MONTH => Ok(DateTimeField::Month),
2912                Keyword::MONTHS => Ok(DateTimeField::Months),
2913                Keyword::WEEK => {
2914                    let week_day = if dialect_of!(self is BigQueryDialect | GenericDialect)
2915                        && self.consume_token(&Token::LParen)
2916                    {
2917                        let week_day = self.parse_identifier()?;
2918                        self.expect_token(&Token::RParen)?;
2919                        Some(week_day)
2920                    } else {
2921                        None
2922                    };
2923                    Ok(DateTimeField::Week(week_day))
2924                }
2925                Keyword::WEEKS => Ok(DateTimeField::Weeks),
2926                Keyword::DAY => Ok(DateTimeField::Day),
2927                Keyword::DAYOFWEEK => Ok(DateTimeField::DayOfWeek),
2928                Keyword::DAYOFYEAR => Ok(DateTimeField::DayOfYear),
2929                Keyword::DAYS => Ok(DateTimeField::Days),
2930                Keyword::DATE => Ok(DateTimeField::Date),
2931                Keyword::DATETIME => Ok(DateTimeField::Datetime),
2932                Keyword::HOUR => Ok(DateTimeField::Hour),
2933                Keyword::HOURS => Ok(DateTimeField::Hours),
2934                Keyword::MINUTE => Ok(DateTimeField::Minute),
2935                Keyword::MINUTES => Ok(DateTimeField::Minutes),
2936                Keyword::SECOND => Ok(DateTimeField::Second),
2937                Keyword::SECONDS => Ok(DateTimeField::Seconds),
2938                Keyword::CENTURY => Ok(DateTimeField::Century),
2939                Keyword::DECADE => Ok(DateTimeField::Decade),
2940                Keyword::DOY => Ok(DateTimeField::Doy),
2941                Keyword::DOW => Ok(DateTimeField::Dow),
2942                Keyword::EPOCH => Ok(DateTimeField::Epoch),
2943                Keyword::ISODOW => Ok(DateTimeField::Isodow),
2944                Keyword::ISOYEAR => Ok(DateTimeField::Isoyear),
2945                Keyword::ISOWEEK => Ok(DateTimeField::IsoWeek),
2946                Keyword::JULIAN => Ok(DateTimeField::Julian),
2947                Keyword::MICROSECOND => Ok(DateTimeField::Microsecond),
2948                Keyword::MICROSECONDS => Ok(DateTimeField::Microseconds),
2949                Keyword::MILLENIUM => Ok(DateTimeField::Millenium),
2950                Keyword::MILLENNIUM => Ok(DateTimeField::Millennium),
2951                Keyword::MILLISECOND => Ok(DateTimeField::Millisecond),
2952                Keyword::MILLISECONDS => Ok(DateTimeField::Milliseconds),
2953                Keyword::NANOSECOND => Ok(DateTimeField::Nanosecond),
2954                Keyword::NANOSECONDS => Ok(DateTimeField::Nanoseconds),
2955                Keyword::QUARTER => Ok(DateTimeField::Quarter),
2956                Keyword::TIME => Ok(DateTimeField::Time),
2957                Keyword::TIMEZONE => Ok(DateTimeField::Timezone),
2958                Keyword::TIMEZONE_ABBR => Ok(DateTimeField::TimezoneAbbr),
2959                Keyword::TIMEZONE_HOUR => Ok(DateTimeField::TimezoneHour),
2960                Keyword::TIMEZONE_MINUTE => Ok(DateTimeField::TimezoneMinute),
2961                Keyword::TIMEZONE_REGION => Ok(DateTimeField::TimezoneRegion),
2962                _ if self.dialect.allow_extract_custom() => {
2963                    self.prev_token();
2964                    let custom = self.parse_identifier()?;
2965                    Ok(DateTimeField::Custom(custom))
2966                }
2967                _ => self.expected("date/time field", next_token),
2968            },
2969            Token::SingleQuotedString(_) if self.dialect.allow_extract_single_quotes() => {
2970                self.prev_token();
2971                let custom = self.parse_identifier()?;
2972                Ok(DateTimeField::Custom(custom))
2973            }
2974            _ => self.expected("date/time field", next_token),
2975        }
2976    }
2977
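    /// Parse a `NOT` expression, e.g. `NOT EXISTS (SELECT ...)` or the unary
    /// `NOT <expr>` operator.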
2978    pub fn parse_not(&mut self) -> Result<Expr, ParserError> {
2979        match self.peek_token().token {
2980            Token::Word(w) => match w.keyword {
2981                Keyword::EXISTS => {
2982                    let negated = true;
2983                    let _ = self.parse_keyword(Keyword::EXISTS);
2984                    self.parse_exists_expr(negated)
2985                }
2986                _ => Ok(Expr::UnaryOp {
2987                    op: UnaryOperator::Not,
2988                    expr: Box::new(
2989                        self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?,
2990                    ),
2991                }),
2992            },
2993            _ => Ok(Expr::UnaryOp {
2994                op: UnaryOperator::Not,
2995                expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
2996            }),
2997        }
2998    }
2999
3000    /// Parse expression types that start with a left brace '{'.
3001    /// Examples:
3002    /// ```sql
3003    /// -- Dictionary expr.
3004    /// {'key1': 'value1', 'key2': 'value2'}
3005    ///
3006    /// -- Function call using the ODBC syntax.
3007    /// { fn CONCAT('foo', 'bar') }
3008    /// ```
3009    fn parse_lbrace_expr(&mut self) -> Result<Expr, ParserError> {
3010        let token = self.expect_token(&Token::LBrace)?;
3011
3012        if let Some(fn_expr) = self.maybe_parse_odbc_body()? {
3013            self.expect_token(&Token::RBrace)?;
3014            return Ok(fn_expr);
3015        }
3016
3017        if self.dialect.supports_dictionary_syntax() {
3018            self.prev_token(); // Put back the '{'
3019            return self.parse_dictionary();
3020        }
3021
3022        self.expected("an expression", token)
3023    }
3024
3025    /// Parses fulltext expressions [`sqlparser::ast::Expr::MatchAgainst`]
3026    ///
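    /// e.g. `MATCH (title, body) AGAINST ('search terms' IN NATURAL LANGUAGE MODE)`
    ///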
3027    /// # Errors
3028    /// This method will raise an error if the column list is empty or with invalid identifiers,
3029    /// the match expression is not a literal string, or if the search modifier is not valid.
3030    pub fn parse_match_against(&mut self) -> Result<Expr, ParserError> {
3031        let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
3032
3033        self.expect_keyword_is(Keyword::AGAINST)?;
3034
3035        self.expect_token(&Token::LParen)?;
3036
3037        // MySQL is quite permissive about the value, so we can't validate it fully at the syntax level.
3038        let match_value = self.parse_value()?.value;
3039
3040        let in_natural_language_mode_keywords = &[
3041            Keyword::IN,
3042            Keyword::NATURAL,
3043            Keyword::LANGUAGE,
3044            Keyword::MODE,
3045        ];
3046
3047        let with_query_expansion_keywords = &[Keyword::WITH, Keyword::QUERY, Keyword::EXPANSION];
3048
3049        let in_boolean_mode_keywords = &[Keyword::IN, Keyword::BOOLEAN, Keyword::MODE];
3050
3051        let opt_search_modifier = if self.parse_keywords(in_natural_language_mode_keywords) {
3052            if self.parse_keywords(with_query_expansion_keywords) {
3053                Some(SearchModifier::InNaturalLanguageModeWithQueryExpansion)
3054            } else {
3055                Some(SearchModifier::InNaturalLanguageMode)
3056            }
3057        } else if self.parse_keywords(in_boolean_mode_keywords) {
3058            Some(SearchModifier::InBooleanMode)
3059        } else if self.parse_keywords(with_query_expansion_keywords) {
3060            Some(SearchModifier::WithQueryExpansion)
3061        } else {
3062            None
3063        };
3064
3065        self.expect_token(&Token::RParen)?;
3066
3067        Ok(Expr::MatchAgainst {
3068            columns,
3069            match_value,
3070            opt_search_modifier,
3071        })
3072    }
3073
3074    /// Parse an `INTERVAL` expression.
3075    ///
3076    /// Some syntactically valid intervals:
3077    ///
3078    /// ```sql
3079    ///   1. INTERVAL '1' DAY
3080    ///   2. INTERVAL '1-1' YEAR TO MONTH
3081    ///   3. INTERVAL '1' SECOND
3082    ///   4. INTERVAL '1:1:1.1' HOUR (5) TO SECOND (5)
3083    ///   5. INTERVAL '1.1' SECOND (2, 2)
3084    ///   6. INTERVAL '1:1' HOUR (5) TO MINUTE (5)
3085    ///   7. (MySQL & BigQuery only): INTERVAL 1 DAY
3086    /// ```
3087    ///
3088    /// Note that we do not currently attempt to parse the quoted value.
3089    pub fn parse_interval(&mut self) -> Result<Expr, ParserError> {
3090        // The SQL standard allows an optional sign before the value string, but
3091        // it is not clear if any implementations support that syntax, so we
3092        // don't currently try to parse it. (The sign can instead be included
3093        // inside the value string.)
3094
3095        // to match the different flavours of INTERVAL syntax, we only allow expressions
3096        // if the dialect requires an interval qualifier,
3097        // see https://github.com/sqlparser-rs/sqlparser-rs/pull/1398 for more details
3098        let value = if self.dialect.require_interval_qualifier() {
3099            // parse a whole expression so `INTERVAL 1 + 1 DAY` is valid
3100            self.parse_expr()?
3101        } else {
3102            // parse a prefix expression so `INTERVAL 1 DAY` is valid, but `INTERVAL 1 + 1 DAY` is not
3103            // this also means that `INTERVAL '5 days' > INTERVAL '1 day'` is treated properly
3104            self.parse_prefix()?
3105        };
3106
3107        // Following the string literal is a qualifier which indicates the units
3108        // of the duration specified in the string literal.
3109        //
3110        // Note that PostgreSQL allows omitting the qualifier, so we provide
3111        // this more general implementation.
3112        let leading_field = if self.next_token_is_temporal_unit() {
3113            Some(self.parse_date_time_field()?)
3114        } else if self.dialect.require_interval_qualifier() {
3115            return parser_err!(
3116                "INTERVAL requires a unit after the literal value",
3117                self.peek_token().span.start
3118            );
3119        } else {
3120            None
3121        };
3122
3123        let (leading_precision, last_field, fsec_precision) =
3124            if leading_field == Some(DateTimeField::Second) {
3125                // SQL mandates special syntax for `SECOND TO SECOND` literals.
3126                // Instead of
3127                //     `SECOND [(<leading precision>)] TO SECOND[(<fractional seconds precision>)]`
3128                // one must use the special format:
3129                //     `SECOND [( <leading precision> [ , <fractional seconds precision>] )]`
3130                let last_field = None;
3131                let (leading_precision, fsec_precision) = self.parse_optional_precision_scale()?;
3132                (leading_precision, last_field, fsec_precision)
3133            } else {
3134                let leading_precision = self.parse_optional_precision()?;
3135                if self.parse_keyword(Keyword::TO) {
3136                    let last_field = Some(self.parse_date_time_field()?);
3137                    let fsec_precision = if last_field == Some(DateTimeField::Second) {
3138                        self.parse_optional_precision()?
3139                    } else {
3140                        None
3141                    };
3142                    (leading_precision, last_field, fsec_precision)
3143                } else {
3144                    (leading_precision, None, None)
3145                }
3146            };
3147
3148        Ok(Expr::Interval(Interval {
3149            value: Box::new(value),
3150            leading_field,
3151            leading_precision,
3152            last_field,
3153            fractional_seconds_precision: fsec_precision,
3154        }))
3155    }
3156
3157    /// Peek at the next token and determine if it is a temporal unit
3158    /// like `second`.
3159    pub fn next_token_is_temporal_unit(&mut self) -> bool {
3160        if let Token::Word(word) = self.peek_token().token {
3161            matches!(
3162                word.keyword,
3163                Keyword::YEAR
3164                    | Keyword::YEARS
3165                    | Keyword::MONTH
3166                    | Keyword::MONTHS
3167                    | Keyword::WEEK
3168                    | Keyword::WEEKS
3169                    | Keyword::DAY
3170                    | Keyword::DAYS
3171                    | Keyword::HOUR
3172                    | Keyword::HOURS
3173                    | Keyword::MINUTE
3174                    | Keyword::MINUTES
3175                    | Keyword::SECOND
3176                    | Keyword::SECONDS
3177                    | Keyword::CENTURY
3178                    | Keyword::DECADE
3179                    | Keyword::DOW
3180                    | Keyword::DOY
3181                    | Keyword::EPOCH
3182                    | Keyword::ISODOW
3183                    | Keyword::ISOYEAR
3184                    | Keyword::JULIAN
3185                    | Keyword::MICROSECOND
3186                    | Keyword::MICROSECONDS
3187                    | Keyword::MILLENIUM
3188                    | Keyword::MILLENNIUM
3189                    | Keyword::MILLISECOND
3190                    | Keyword::MILLISECONDS
3191                    | Keyword::NANOSECOND
3192                    | Keyword::NANOSECONDS
3193                    | Keyword::QUARTER
3194                    | Keyword::TIMEZONE
3195                    | Keyword::TIMEZONE_HOUR
3196                    | Keyword::TIMEZONE_MINUTE
3197            )
3198        } else {
3199            false
3200        }
3201    }
3202
3203    /// Syntax
3204    /// ```sql
3205    /// -- typed
3206    /// STRUCT<[field_name] field_type, ...>( expr1 [, ... ])
3207    /// -- typeless
3208    /// STRUCT( expr1 [AS field_name] [, ... ])
3209    /// ```
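    ///
    /// e.g. `STRUCT<x INT64, y STRING>(1, 'foo')` (typed) or
    /// `STRUCT(1 AS x, 'foo' AS y)` (typeless).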
3210    fn parse_struct_literal(&mut self) -> Result<Expr, ParserError> {
3211        // Parse the field definitions if they exist: `<[field_name] field_type, ...>`
3212        self.prev_token();
3213        let (fields, trailing_bracket) =
3214            self.parse_struct_type_def(Self::parse_struct_field_def)?;
3215        if trailing_bracket.0 {
3216            return parser_err!(
3217                "unmatched > in STRUCT literal",
3218                self.peek_token().span.start
3219            );
3220        }
3221
3222        // Parse the struct values `(expr1 [, ... ])`
3223        self.expect_token(&Token::LParen)?;
3224        let values = self
3225            .parse_comma_separated(|parser| parser.parse_struct_field_expr(!fields.is_empty()))?;
3226        self.expect_token(&Token::RParen)?;
3227
3228        Ok(Expr::Struct { values, fields })
3229    }
3230
3231    /// Parse an expression value for a struct literal
3232    /// Syntax
3233    /// ```sql
3234    /// expr [AS name]
3235    /// ```
3236    ///
3237    /// For BigQuery [1], the `typed_syntax` parameter is set to `true` if the expression
3238    /// is to be parsed as a field expression declared using the typed
3239    /// struct syntax [2], and `false` if using the typeless struct syntax [3].
3240    ///
3241    /// [1]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#constructing_a_struct
3242    /// [2]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#typed_struct_syntax
3243    /// [3]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#typeless_struct_syntax
3244    fn parse_struct_field_expr(&mut self, typed_syntax: bool) -> Result<Expr, ParserError> {
3245        let expr = self.parse_expr()?;
3246        if self.parse_keyword(Keyword::AS) {
3247            if typed_syntax {
3248                return parser_err!("Typed syntax does not allow AS", {
3249                    self.prev_token();
3250                    self.peek_token().span.start
3251                });
3252            }
3253            let field_name = self.parse_identifier()?;
3254            Ok(Expr::Named {
3255                expr: expr.into(),
3256                name: field_name,
3257            })
3258        } else {
3259            Ok(expr)
3260        }
3261    }
3262
3263    /// Parse a Struct type definition as a sequence of field-value pairs.
3264    /// The syntax of the struct elements differs by dialect, so it is customised
3265    /// by the `elem_parser` argument.
3266    ///
3267    /// Syntax
3268    /// ```sql
3269    /// Hive:
3270    /// STRUCT<field_name: field_type>
3271    ///
3272    /// BigQuery:
3273    /// STRUCT<[field_name] field_type>
3274    /// ```
3275    fn parse_struct_type_def<F>(
3276        &mut self,
3277        mut elem_parser: F,
3278    ) -> Result<(Vec<StructField>, MatchedTrailingBracket), ParserError>
3279    where
3280        F: FnMut(&mut Parser<'a>) -> Result<(StructField, MatchedTrailingBracket), ParserError>,
3281    {
3282        self.expect_keyword_is(Keyword::STRUCT)?;
3283
3284        // Nothing to do if we have no type information.
3285        if Token::Lt != self.peek_token() {
3286            return Ok((Default::default(), false.into()));
3287        }
3288        self.next_token();
3289
3290        let mut field_defs = vec![];
3291        let trailing_bracket = loop {
3292            let (def, trailing_bracket) = elem_parser(self)?;
3293            field_defs.push(def);
3294            // The list of field definitions ends when a closing `>>` is matched or there is no trailing comma.
3295            if trailing_bracket.0 || !self.consume_token(&Token::Comma) {
3296                break trailing_bracket;
3297            }
3298        };
3299
3300        Ok((
3301            field_defs,
3302            self.expect_closing_angle_bracket(trailing_bracket)?,
3303        ))
3304    }
3305
3306    /// Duckdb Struct Data Type <https://duckdb.org/docs/sql/data_types/struct.html#retrieving-from-structs>
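    ///
    /// e.g. `STRUCT(name VARCHAR, age INTEGER)`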
3307    fn parse_duckdb_struct_type_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3308        self.expect_keyword_is(Keyword::STRUCT)?;
3309        self.expect_token(&Token::LParen)?;
3310        let struct_body = self.parse_comma_separated(|parser| {
3311            let field_name = parser.parse_identifier()?;
3312            let field_type = parser.parse_data_type()?;
3313
3314            Ok(StructField {
3315                field_name: Some(field_name),
3316                field_type,
3317                not_null: false,
3318                options: None,
3319            })
3320        });
3321        self.expect_token(&Token::RParen)?;
3322        struct_body
3323    }
3324
3325    /// Parse a field definition in a [struct] or [tuple].
3326    /// Syntax:
3327    ///
3328    /// ```sql
3329    /// [field_name] field_type
3330    /// ```
3331    ///
3332    /// [struct]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#declaring_a_struct_type
3333    /// [tuple]: https://clickhouse.com/docs/en/sql-reference/data-types/tuple
3334    fn parse_struct_field_def(
3335        &mut self,
3336    ) -> Result<(StructField, MatchedTrailingBracket), ParserError> {
3337        // Look beyond the next item to infer whether both field name
3338        // and type are specified.
3339        let is_anonymous_field = !matches!(
3340            (self.peek_nth_token(0).token, self.peek_nth_token(1).token),
3341            (Token::Word(_), Token::Word(_))
3342        );
3343
3344        let field_name = if is_anonymous_field {
3345            None
3346        } else {
3347            Some(self.parse_identifier()?)
3348        };
3349
3350        let (field_type, trailing_bracket) = self.parse_data_type_helper()?;
3351
3352        let not_null = self.parse_keywords(&[Keyword::NOT, Keyword::NULL]);
3353
3354        let options = self.maybe_parse_options(Keyword::OPTIONS)?;
3355        Ok((
3356            StructField {
3357                field_name,
3358                field_type,
3359                not_null,
3360                options,
3361            },
3362            trailing_bracket,
3363        ))
3364    }
3365
3366    /// DuckDB specific: Parse a [Union][1] type definition as a sequence of field-value pairs.
3367    ///
3368    /// Syntax:
3369    ///
3370    /// ```sql
3371    /// UNION(field_name field_type[,...])
3372    /// ```
3373    ///
3374    /// [1]: https://duckdb.org/docs/sql/data_types/union.html
3375    fn parse_union_type_def(&mut self) -> Result<Vec<UnionField>, ParserError> {
3376        self.expect_keyword_is(Keyword::UNION)?;
3377
3378        self.expect_token(&Token::LParen)?;
3379
3380        let fields = self.parse_comma_separated(|p| {
3381            Ok(UnionField {
3382                field_name: p.parse_identifier()?,
3383                field_type: p.parse_data_type()?,
3384            })
3385        })?;
3386
3387        self.expect_token(&Token::RParen)?;
3388
3389        Ok(fields)
3390    }
3391
3392    /// DuckDB and ClickHouse specific: Parse a duckdb [dictionary] or a clickhouse [map] setting
3393    ///
3394    /// Syntax:
3395    ///
3396    /// ```sql
3397    /// {'field_name': expr1[, ... ]}
3398    /// ```
3399    ///
3400    /// [dictionary]: https://duckdb.org/docs/sql/data_types/struct#creating-structs
3401    /// [map]: https://clickhouse.com/docs/operations/settings/settings#additional_table_filters
3402    fn parse_dictionary(&mut self) -> Result<Expr, ParserError> {
3403        self.expect_token(&Token::LBrace)?;
3404
3405        let fields = self.parse_comma_separated0(Self::parse_dictionary_field, Token::RBrace)?;
3406
3407        self.expect_token(&Token::RBrace)?;
3408
3409        Ok(Expr::Dictionary(fields))
3410    }
3411
3412    /// Parse a field for a duckdb [dictionary] or a clickhouse [map] setting
3413    ///
3414    /// Syntax
3415    ///
3416    /// ```sql
3417    /// 'name': expr
3418    /// ```
3419    ///
3420    /// [dictionary]: https://duckdb.org/docs/sql/data_types/struct#creating-structs
3421    /// [map]: https://clickhouse.com/docs/operations/settings/settings#additional_table_filters
3422    fn parse_dictionary_field(&mut self) -> Result<DictionaryField, ParserError> {
3423        let key = self.parse_identifier()?;
3424
3425        self.expect_token(&Token::Colon)?;
3426
3427        let expr = self.parse_expr()?;
3428
3429        Ok(DictionaryField {
3430            key,
3431            value: Box::new(expr),
3432        })
3433    }
3434
3435    /// DuckDB specific: Parse a duckdb [map]
3436    ///
3437    /// Syntax:
3438    ///
3439    /// ```sql
3440    /// Map {key1: value1[, ... ]}
3441    /// ```
3442    ///
3443    /// [map]: https://duckdb.org/docs/sql/data_types/map.html#creating-maps
3444    fn parse_duckdb_map_literal(&mut self) -> Result<Expr, ParserError> {
3445        self.expect_token(&Token::LBrace)?;
3446        let fields = self.parse_comma_separated0(Self::parse_duckdb_map_field, Token::RBrace)?;
3447        self.expect_token(&Token::RBrace)?;
3448        Ok(Expr::Map(Map { entries: fields }))
3449    }
3450
3451    /// Parse a field for a duckdb [map]
3452    ///
3453    /// Syntax
3454    ///
3455    /// ```sql
3456    /// key: value
3457    /// ```
3458    ///
3459    /// [map]: https://duckdb.org/docs/sql/data_types/map.html#creating-maps
3460    fn parse_duckdb_map_field(&mut self) -> Result<MapEntry, ParserError> {
3461        let key = self.parse_expr()?;
3462
3463        self.expect_token(&Token::Colon)?;
3464
3465        let value = self.parse_expr()?;
3466
3467        Ok(MapEntry {
3468            key: Box::new(key),
3469            value: Box::new(value),
3470        })
3471    }
3472
3473    /// Parse clickhouse [map]
3474    ///
3475    /// Syntax
3476    ///
3477    /// ```sql
3478    /// Map(key_data_type, value_data_type)
3479    /// ```
3480    ///
3481    /// [map]: https://clickhouse.com/docs/en/sql-reference/data-types/map
3482    fn parse_click_house_map_def(&mut self) -> Result<(DataType, DataType), ParserError> {
3483        self.expect_keyword_is(Keyword::MAP)?;
3484        self.expect_token(&Token::LParen)?;
3485        let key_data_type = self.parse_data_type()?;
3486        self.expect_token(&Token::Comma)?;
3487        let value_data_type = self.parse_data_type()?;
3488        self.expect_token(&Token::RParen)?;
3489
3490        Ok((key_data_type, value_data_type))
3491    }
3492
3493    /// Parse clickhouse [tuple]
3494    ///
3495    /// Syntax
3496    ///
3497    /// ```sql
3498    /// Tuple([field_name] field_type, ...)
3499    /// ```
3500    ///
3501    /// [tuple]: https://clickhouse.com/docs/en/sql-reference/data-types/tuple
3502    fn parse_click_house_tuple_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3503        self.expect_keyword_is(Keyword::TUPLE)?;
3504        self.expect_token(&Token::LParen)?;
3505        let mut field_defs = vec![];
3506        loop {
3507            let (def, _) = self.parse_struct_field_def()?;
3508            field_defs.push(def);
3509            if !self.consume_token(&Token::Comma) {
3510                break;
3511            }
3512        }
3513        self.expect_token(&Token::RParen)?;
3514
3515        Ok(field_defs)
3516    }
3517
3518    /// For nested types that use the angle bracket syntax, this matches either
3519    /// `>`, `>>` or nothing depending on which variant is expected (specified by the previously
3520    /// matched `trailing_bracket` argument). It returns whether there is a trailing
3521    /// bracket left to be matched, i.e. whether `>>` was matched.
3522    fn expect_closing_angle_bracket(
3523        &mut self,
3524        trailing_bracket: MatchedTrailingBracket,
3525    ) -> Result<MatchedTrailingBracket, ParserError> {
3526        let trailing_bracket = if !trailing_bracket.0 {
3527            match self.peek_token().token {
3528                Token::Gt => {
3529                    self.next_token();
3530                    false.into()
3531                }
3532                Token::ShiftRight => {
3533                    self.next_token();
3534                    true.into()
3535                }
3536                _ => return self.expected(">", self.peek_token()),
3537            }
3538        } else {
3539            false.into()
3540        };
3541
3542        Ok(trailing_bracket)
3543    }
3544
3545    /// Parse an operator following an expression
3546    pub fn parse_infix(&mut self, expr: Expr, precedence: u8) -> Result<Expr, ParserError> {
3547        // allow the dialect to override infix parsing
3548        if let Some(infix) = self.dialect.parse_infix(self, &expr, precedence) {
3549            return infix;
3550        }
3551
3552        let dialect = self.dialect;
3553
3554        self.advance_token();
3555        let tok = self.get_current_token();
3556        debug!("infix: {tok:?}");
3557        let tok_index = self.get_current_index();
3558        let span = tok.span;
3559        let regular_binary_operator = match &tok.token {
3560            Token::Spaceship => Some(BinaryOperator::Spaceship),
3561            Token::DoubleEq => Some(BinaryOperator::Eq),
3562            Token::Assignment => Some(BinaryOperator::Assignment),
3563            Token::Eq => Some(BinaryOperator::Eq),
3564            Token::Neq => Some(BinaryOperator::NotEq),
3565            Token::Gt => Some(BinaryOperator::Gt),
3566            Token::GtEq => Some(BinaryOperator::GtEq),
3567            Token::Lt => Some(BinaryOperator::Lt),
3568            Token::LtEq => Some(BinaryOperator::LtEq),
3569            Token::Plus => Some(BinaryOperator::Plus),
3570            Token::Minus => Some(BinaryOperator::Minus),
3571            Token::Mul => Some(BinaryOperator::Multiply),
3572            Token::Mod => Some(BinaryOperator::Modulo),
3573            Token::StringConcat => Some(BinaryOperator::StringConcat),
3574            Token::Pipe => Some(BinaryOperator::BitwiseOr),
3575            Token::Caret => {
3576                // In PostgreSQL, ^ stands for the exponentiation operation,
3577                // and # stands for XOR. See https://www.postgresql.org/docs/current/functions-math.html
3578                if dialect_is!(dialect is PostgreSqlDialect) {
3579                    Some(BinaryOperator::PGExp)
3580                } else {
3581                    Some(BinaryOperator::BitwiseXor)
3582                }
3583            }
3584            Token::Ampersand => Some(BinaryOperator::BitwiseAnd),
3585            Token::Div => Some(BinaryOperator::Divide),
3586            Token::DuckIntDiv if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
3587                Some(BinaryOperator::DuckIntegerDivide)
3588            }
3589            Token::ShiftLeft if dialect_is!(dialect is PostgreSqlDialect | DuckDbDialect | GenericDialect | RedshiftSqlDialect) => {
3590                Some(BinaryOperator::PGBitwiseShiftLeft)
3591            }
3592            Token::ShiftRight if dialect_is!(dialect is PostgreSqlDialect | DuckDbDialect | GenericDialect | RedshiftSqlDialect) => {
3593                Some(BinaryOperator::PGBitwiseShiftRight)
3594            }
3595            Token::Sharp if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3596                Some(BinaryOperator::PGBitwiseXor)
3597            }
3598            Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect | GenericDialect) => {
3599                Some(BinaryOperator::PGOverlap)
3600            }
3604            Token::CaretAt if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3605                Some(BinaryOperator::PGStartsWith)
3606            }
3607            Token::Tilde => Some(BinaryOperator::PGRegexMatch),
3608            Token::TildeAsterisk => Some(BinaryOperator::PGRegexIMatch),
3609            Token::ExclamationMarkTilde => Some(BinaryOperator::PGRegexNotMatch),
3610            Token::ExclamationMarkTildeAsterisk => Some(BinaryOperator::PGRegexNotIMatch),
3611            Token::DoubleTilde => Some(BinaryOperator::PGLikeMatch),
3612            Token::DoubleTildeAsterisk => Some(BinaryOperator::PGILikeMatch),
3613            Token::ExclamationMarkDoubleTilde => Some(BinaryOperator::PGNotLikeMatch),
3614            Token::ExclamationMarkDoubleTildeAsterisk => Some(BinaryOperator::PGNotILikeMatch),
3615            Token::Arrow => Some(BinaryOperator::Arrow),
3616            Token::LongArrow => Some(BinaryOperator::LongArrow),
3617            Token::HashArrow => Some(BinaryOperator::HashArrow),
3618            Token::HashLongArrow => Some(BinaryOperator::HashLongArrow),
3619            Token::AtArrow => Some(BinaryOperator::AtArrow),
3620            Token::ArrowAt => Some(BinaryOperator::ArrowAt),
3621            Token::HashMinus => Some(BinaryOperator::HashMinus),
3622            Token::AtQuestion => Some(BinaryOperator::AtQuestion),
3623            Token::AtAt => Some(BinaryOperator::AtAt),
3624            Token::Question => Some(BinaryOperator::Question),
3625            Token::QuestionAnd => Some(BinaryOperator::QuestionAnd),
3626            Token::QuestionPipe => Some(BinaryOperator::QuestionPipe),
3627            Token::CustomBinaryOperator(s) => Some(BinaryOperator::Custom(s.clone())),
3628            Token::DoubleSharp if self.dialect.supports_geometric_types() => {
3629                Some(BinaryOperator::DoubleHash)
3630            }
3631
3632            Token::AmpersandLeftAngleBracket if self.dialect.supports_geometric_types() => {
3633                Some(BinaryOperator::AndLt)
3634            }
3635            Token::AmpersandRightAngleBracket if self.dialect.supports_geometric_types() => {
3636                Some(BinaryOperator::AndGt)
3637            }
3638            Token::QuestionMarkDash if self.dialect.supports_geometric_types() => {
3639                Some(BinaryOperator::QuestionDash)
3640            }
3641            Token::AmpersandLeftAngleBracketVerticalBar
3642                if self.dialect.supports_geometric_types() =>
3643            {
3644                Some(BinaryOperator::AndLtPipe)
3645            }
3646            Token::VerticalBarAmpersandRightAngleBracket
3647                if self.dialect.supports_geometric_types() =>
3648            {
3649                Some(BinaryOperator::PipeAndGt)
3650            }
3651            Token::TwoWayArrow if self.dialect.supports_geometric_types() => {
3652                Some(BinaryOperator::LtDashGt)
3653            }
3654            Token::LeftAngleBracketCaret if self.dialect.supports_geometric_types() => {
3655                Some(BinaryOperator::LtCaret)
3656            }
3657            Token::RightAngleBracketCaret if self.dialect.supports_geometric_types() => {
3658                Some(BinaryOperator::GtCaret)
3659            }
3660            Token::QuestionMarkSharp if self.dialect.supports_geometric_types() => {
3661                Some(BinaryOperator::QuestionHash)
3662            }
3663            Token::QuestionMarkDoubleVerticalBar if self.dialect.supports_geometric_types() => {
3664                Some(BinaryOperator::QuestionDoublePipe)
3665            }
3666            Token::QuestionMarkDashVerticalBar if self.dialect.supports_geometric_types() => {
3667                Some(BinaryOperator::QuestionDashPipe)
3668            }
3669            Token::TildeEqual if self.dialect.supports_geometric_types() => {
3670                Some(BinaryOperator::TildeEq)
3671            }
3672            Token::ShiftLeftVerticalBar if self.dialect.supports_geometric_types() => {
3673                Some(BinaryOperator::LtLtPipe)
3674            }
3675            Token::VerticalBarShiftRight if self.dialect.supports_geometric_types() => {
3676                Some(BinaryOperator::PipeGtGt)
3677            }
3678            Token::AtSign if self.dialect.supports_geometric_types() => Some(BinaryOperator::At),
3679
3680            Token::Word(w) => match w.keyword {
3681                Keyword::AND => Some(BinaryOperator::And),
3682                Keyword::OR => Some(BinaryOperator::Or),
3683                Keyword::XOR => Some(BinaryOperator::Xor),
3684                Keyword::OVERLAPS => Some(BinaryOperator::Overlaps),
3685                Keyword::OPERATOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3686                    self.expect_token(&Token::LParen)?;
3687                    // there are special rules for operator names in
3688                    // Postgres, so we cannot use 'parse_object'
3689                    // or similar.
3690                    // See https://www.postgresql.org/docs/current/sql-createoperator.html
3691                    let mut idents = vec![];
3692                    loop {
3693                        self.advance_token();
3694                        idents.push(self.get_current_token().to_string());
3695                        if !self.consume_token(&Token::Period) {
3696                            break;
3697                        }
3698                    }
3699                    self.expect_token(&Token::RParen)?;
3700                    Some(BinaryOperator::PGCustomBinaryOperator(idents))
3701                }
3702                _ => None,
3703            },
3704            _ => None,
3705        };
3706
3707        let tok = self.token_at(tok_index);
3708        if let Some(op) = regular_binary_operator {
3709            if let Some(keyword) =
3710                self.parse_one_of_keywords(&[Keyword::ANY, Keyword::ALL, Keyword::SOME])
3711            {
3712                self.expect_token(&Token::LParen)?;
3713                let right = if self.peek_sub_query() {
3714                    // We have a subquery ahead (SELECT/WITH ...), so we need to rewind and
3715                    // use the parenthesis for parsing the subquery as an expression.
3716                    self.prev_token(); // LParen
3717                    self.parse_subexpr(precedence)?
3718                } else {
3719                    // Non-subquery expression
3720                    let right = self.parse_subexpr(precedence)?;
3721                    self.expect_token(&Token::RParen)?;
3722                    right
3723                };
3724
3725                if !matches!(
3726                    op,
3727                    BinaryOperator::Gt
3728                        | BinaryOperator::Lt
3729                        | BinaryOperator::GtEq
3730                        | BinaryOperator::LtEq
3731                        | BinaryOperator::Eq
3732                        | BinaryOperator::NotEq
3733                        | BinaryOperator::PGRegexMatch
3734                        | BinaryOperator::PGRegexIMatch
3735                        | BinaryOperator::PGRegexNotMatch
3736                        | BinaryOperator::PGRegexNotIMatch
3737                        | BinaryOperator::PGLikeMatch
3738                        | BinaryOperator::PGILikeMatch
3739                        | BinaryOperator::PGNotLikeMatch
3740                        | BinaryOperator::PGNotILikeMatch
3741                ) {
3742                    return parser_err!(
3743                        format!(
3744                        "Expected one of [=, >, <, =>, =<, !=, ~, ~*, !~, !~*, ~~, ~~*, !~~, !~~*] as comparison operator, found: {op}"
3745                    ),
3746                        span.start
3747                    );
3748                };
3749
3750                Ok(match keyword {
3751                    Keyword::ALL => Expr::AllOp {
3752                        left: Box::new(expr),
3753                        compare_op: op,
3754                        right: Box::new(right),
3755                    },
3756                    Keyword::ANY | Keyword::SOME => Expr::AnyOp {
3757                        left: Box::new(expr),
3758                        compare_op: op,
3759                        right: Box::new(right),
3760                        is_some: keyword == Keyword::SOME,
3761                    },
3762                    _ => unreachable!(),
3763                })
3764            } else {
3765                Ok(Expr::BinaryOp {
3766                    left: Box::new(expr),
3767                    op,
3768                    right: Box::new(self.parse_subexpr(precedence)?),
3769                })
3770            }
3771        } else if let Token::Word(w) = &tok.token {
3772            match w.keyword {
3773                Keyword::IS => {
3774                    if self.parse_keyword(Keyword::NULL) {
3775                        Ok(Expr::IsNull(Box::new(expr)))
3776                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
3777                        Ok(Expr::IsNotNull(Box::new(expr)))
3778                    } else if self.parse_keywords(&[Keyword::TRUE]) {
3779                        Ok(Expr::IsTrue(Box::new(expr)))
3780                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::TRUE]) {
3781                        Ok(Expr::IsNotTrue(Box::new(expr)))
3782                    } else if self.parse_keywords(&[Keyword::FALSE]) {
3783                        Ok(Expr::IsFalse(Box::new(expr)))
3784                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::FALSE]) {
3785                        Ok(Expr::IsNotFalse(Box::new(expr)))
3786                    } else if self.parse_keywords(&[Keyword::UNKNOWN]) {
3787                        Ok(Expr::IsUnknown(Box::new(expr)))
3788                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::UNKNOWN]) {
3789                        Ok(Expr::IsNotUnknown(Box::new(expr)))
3790                    } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::FROM]) {
3791                        let expr2 = self.parse_expr()?;
3792                        Ok(Expr::IsDistinctFrom(Box::new(expr), Box::new(expr2)))
3793                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::DISTINCT, Keyword::FROM])
3794                    {
3795                        let expr2 = self.parse_expr()?;
3796                        Ok(Expr::IsNotDistinctFrom(Box::new(expr), Box::new(expr2)))
3797                    } else if let Ok(is_normalized) = self.parse_unicode_is_normalized(expr) {
3798                        Ok(is_normalized)
3799                    } else {
3800                        self.expected(
3801                            "[NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS",
3802                            self.peek_token(),
3803                        )
3804                    }
3805                }
3806                Keyword::AT => {
3807                    self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
3808                    Ok(Expr::AtTimeZone {
3809                        timestamp: Box::new(expr),
3810                        time_zone: Box::new(self.parse_subexpr(precedence)?),
3811                    })
3812                }
3813                Keyword::NOT
3814                | Keyword::IN
3815                | Keyword::BETWEEN
3816                | Keyword::LIKE
3817                | Keyword::ILIKE
3818                | Keyword::SIMILAR
3819                | Keyword::REGEXP
3820                | Keyword::RLIKE => {
3821                    self.prev_token();
3822                    let negated = self.parse_keyword(Keyword::NOT);
3823                    let regexp = self.parse_keyword(Keyword::REGEXP);
3824                    let rlike = self.parse_keyword(Keyword::RLIKE);
3825                    let null = if !self.in_column_definition_state() {
3826                        self.parse_keyword(Keyword::NULL)
3827                    } else {
3828                        false
3829                    };
3830                    if regexp || rlike {
3831                        Ok(Expr::RLike {
3832                            negated,
3833                            expr: Box::new(expr),
3834                            pattern: Box::new(
3835                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3836                            ),
3837                            regexp,
3838                        })
3839                    } else if negated && null {
3840                        Ok(Expr::IsNotNull(Box::new(expr)))
3841                    } else if self.parse_keyword(Keyword::IN) {
3842                        self.parse_in(expr, negated)
3843                    } else if self.parse_keyword(Keyword::BETWEEN) {
3844                        self.parse_between(expr, negated)
3845                    } else if self.parse_keyword(Keyword::LIKE) {
3846                        Ok(Expr::Like {
3847                            negated,
3848                            any: self.parse_keyword(Keyword::ANY),
3849                            expr: Box::new(expr),
3850                            pattern: Box::new(
3851                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3852                            ),
3853                            escape_char: self.parse_escape_char()?,
3854                        })
3855                    } else if self.parse_keyword(Keyword::ILIKE) {
3856                        Ok(Expr::ILike {
3857                            negated,
3858                            any: self.parse_keyword(Keyword::ANY),
3859                            expr: Box::new(expr),
3860                            pattern: Box::new(
3861                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3862                            ),
3863                            escape_char: self.parse_escape_char()?,
3864                        })
3865                    } else if self.parse_keywords(&[Keyword::SIMILAR, Keyword::TO]) {
3866                        Ok(Expr::SimilarTo {
3867                            negated,
3868                            expr: Box::new(expr),
3869                            pattern: Box::new(
3870                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3871                            ),
3872                            escape_char: self.parse_escape_char()?,
3873                        })
3874                    } else {
3875                        self.expected("IN or BETWEEN after NOT", self.peek_token())
3876                    }
3877                }
3878                Keyword::NOTNULL if dialect.supports_notnull_operator() => {
3879                    Ok(Expr::IsNotNull(Box::new(expr)))
3880                }
3881                Keyword::MEMBER => {
3882                    if self.parse_keyword(Keyword::OF) {
3883                        self.expect_token(&Token::LParen)?;
3884                        let array = self.parse_expr()?;
3885                        self.expect_token(&Token::RParen)?;
3886                        Ok(Expr::MemberOf(MemberOf {
3887                            value: Box::new(expr),
3888                            array: Box::new(array),
3889                        }))
3890                    } else {
3891                        self.expected("OF after MEMBER", self.peek_token())
3892                    }
3893                }
3894                // Can only happen if `get_next_precedence` got out of sync with this function
3895                _ => parser_err!(
3896                    format!("No infix parser for token {:?}", tok.token),
3897                    tok.span.start
3898                ),
3899            }
3900        } else if Token::DoubleColon == *tok {
3901            Ok(Expr::Cast {
3902                kind: CastKind::DoubleColon,
3903                expr: Box::new(expr),
3904                data_type: self.parse_data_type()?,
3905                format: None,
3906            })
3907        } else if Token::ExclamationMark == *tok && self.dialect.supports_factorial_operator() {
3908            Ok(Expr::UnaryOp {
3909                op: UnaryOperator::PGPostfixFactorial,
3910                expr: Box::new(expr),
3911            })
3912        } else if Token::LBracket == *tok && self.dialect.supports_partiql()
3913            || (dialect_of!(self is SnowflakeDialect | GenericDialect) && Token::Colon == *tok)
3914        {
3915            self.prev_token();
3916            self.parse_json_access(expr)
3917        } else {
3918            // Can only happen if `get_next_precedence` got out of sync with this function
3919            parser_err!(
3920                format!("No infix parser for token {:?}", tok.token),
3921                tok.span.start
3922            )
3923        }
3924    }
3925
3926    /// Parse the `ESCAPE CHAR` portion of `LIKE`, `ILIKE`, and `SIMILAR TO`
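    ///
    /// A minimal illustration (using the `GenericDialect`): the clause is
    /// optional, so nothing is consumed when `ESCAPE` is not the next keyword.
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("ESCAPE 'x'").unwrap();
    /// assert!(parser.parse_escape_char().unwrap().is_some());
    ///
    /// let mut parser = Parser::new(&dialect).try_with_sql("AND 1").unwrap();
    /// assert!(parser.parse_escape_char().unwrap().is_none());
    /// ```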
3927    pub fn parse_escape_char(&mut self) -> Result<Option<Value>, ParserError> {
3928        if self.parse_keyword(Keyword::ESCAPE) {
3929            Ok(Some(self.parse_value()?.into()))
3930        } else {
3931            Ok(None)
3932        }
3933    }
3934
3935    /// Parses an array subscript like
3936    /// * `[:]`
3937    /// * `[l]`
3938    /// * `[l:]`
3939    /// * `[:u]`
3940    /// * `[l:u]`
3941    /// * `[l:u:s]`
3942    ///
3943    /// Parser is right after `[`
3944    fn parse_subscript_inner(&mut self) -> Result<Subscript, ParserError> {
3945        // at either `<lower>:(rest)` or `:(rest)]`
3946        let lower_bound = if self.consume_token(&Token::Colon) {
3947            None
3948        } else {
3949            Some(self.parse_expr()?)
3950        };
3951
3952        // check for end
3953        if self.consume_token(&Token::RBracket) {
3954            if let Some(lower_bound) = lower_bound {
3955                return Ok(Subscript::Index { index: lower_bound });
3956            };
3957            return Ok(Subscript::Slice {
3958                lower_bound,
3959                upper_bound: None,
3960                stride: None,
3961            });
3962        }
3963
3964        // consume the `:`
3965        if lower_bound.is_some() {
3966            self.expect_token(&Token::Colon)?;
3967        }
3968
3969        // we are now at either `]` or `<upper>(rest)]`
3970        let upper_bound = if self.consume_token(&Token::RBracket) {
3971            return Ok(Subscript::Slice {
3972                lower_bound,
3973                upper_bound: None,
3974                stride: None,
3975            });
3976        } else {
3977            Some(self.parse_expr()?)
3978        };
3979
3980        // check for end
3981        if self.consume_token(&Token::RBracket) {
3982            return Ok(Subscript::Slice {
3983                lower_bound,
3984                upper_bound,
3985                stride: None,
3986            });
3987        }
3988
3989        // we are now at `:]` or `:stride]`
3990        self.expect_token(&Token::Colon)?;
3991        let stride = if self.consume_token(&Token::RBracket) {
3992            None
3993        } else {
3994            Some(self.parse_expr()?)
3995        };
3996
3997        if stride.is_some() {
3998            self.expect_token(&Token::RBracket)?;
3999        }
4000
4001        Ok(Subscript::Slice {
4002            lower_bound,
4003            upper_bound,
4004            stride,
4005        })
4006    }
4007
4008    /// Parse a multi-dimensional array access like `[1:3][1][1]`
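    ///
    /// A short sketch (using the `GenericDialect`, with the parser positioned
    /// on the first `[`); each bracketed access is pushed onto `chain`:
    /// ```rust
    /// # use sqlparser::ast::AccessExpr;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("[1][2]").unwrap();
    /// let mut chain: Vec<AccessExpr> = Vec::new();
    /// parser.parse_multi_dim_subscript(&mut chain).unwrap();
    /// assert_eq!(chain.len(), 2);
    /// ```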
4009    pub fn parse_multi_dim_subscript(
4010        &mut self,
4011        chain: &mut Vec<AccessExpr>,
4012    ) -> Result<(), ParserError> {
4013        while self.consume_token(&Token::LBracket) {
4014            self.parse_subscript(chain)?;
4015        }
4016        Ok(())
4017    }
4018
4019    /// Parses an array subscript like `[1:3]`
4020    ///
4021    /// Parser is right after `[`
4022    fn parse_subscript(&mut self, chain: &mut Vec<AccessExpr>) -> Result<(), ParserError> {
4023        let subscript = self.parse_subscript_inner()?;
4024        chain.push(AccessExpr::Subscript(subscript));
4025        Ok(())
4026    }
4027
4028    fn parse_json_path_object_key(&mut self) -> Result<JsonPathElem, ParserError> {
4029        let token = self.next_token();
4030        match token.token {
4031            Token::Word(Word {
4032                value,
4033                // path segments in Snowflake dot notation can be unquoted or double-quoted
4034                quote_style: quote_style @ (Some('"') | None),
4035                // some experimentation suggests that snowflake permits
4036                // any keyword here unquoted.
4037                keyword: _,
4038            }) => Ok(JsonPathElem::Dot {
4039                key: value,
4040                quoted: quote_style.is_some(),
4041            }),
4042
4043            // This token should never be generated on snowflake or generic
4044            // dialects, but we handle it just in case this is used on future
4045            // dialects.
4046            Token::DoubleQuotedString(key) => Ok(JsonPathElem::Dot { key, quoted: true }),
4047
4048            _ => self.expected("variant object key name", token),
4049        }
4050    }
4051
4052    fn parse_json_access(&mut self, expr: Expr) -> Result<Expr, ParserError> {
4053        let path = self.parse_json_path()?;
4054        Ok(Expr::JsonAccess {
4055            value: Box::new(expr),
4056            path,
4057        })
4058    }
4059
4060    fn parse_json_path(&mut self) -> Result<JsonPath, ParserError> {
4061        let mut path = Vec::new();
4062        loop {
4063            match self.next_token().token {
4064                Token::Colon if path.is_empty() => {
4065                    path.push(self.parse_json_path_object_key()?);
4066                }
4067                Token::Period if !path.is_empty() => {
4068                    path.push(self.parse_json_path_object_key()?);
4069                }
4070                Token::LBracket => {
4071                    let key = self.parse_expr()?;
4072                    self.expect_token(&Token::RBracket)?;
4073
4074                    path.push(JsonPathElem::Bracket { key });
4075                }
4076                _ => {
4077                    self.prev_token();
4078                    break;
4079                }
4080            };
4081        }
4082
4083        debug_assert!(!path.is_empty());
4084        Ok(JsonPath { path })
4085    }
4086
4087    /// Parses the parens following the `[ NOT ] IN` operator.
4088    pub fn parse_in(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
4089        // BigQuery allows `IN UNNEST(array_expression)`
4090        // https://cloud.google.com/bigquery/docs/reference/standard-sql/operators#in_operators
4091        if self.parse_keyword(Keyword::UNNEST) {
4092            self.expect_token(&Token::LParen)?;
4093            let array_expr = self.parse_expr()?;
4094            self.expect_token(&Token::RParen)?;
4095            return Ok(Expr::InUnnest {
4096                expr: Box::new(expr),
4097                array_expr: Box::new(array_expr),
4098                negated,
4099            });
4100        }
4101        self.expect_token(&Token::LParen)?;
4102        let in_op = match self.maybe_parse(|p| p.parse_query())? {
4103            Some(subquery) => Expr::InSubquery {
4104                expr: Box::new(expr),
4105                subquery,
4106                negated,
4107            },
4108            None => Expr::InList {
4109                expr: Box::new(expr),
4110                list: if self.dialect.supports_in_empty_list() {
4111                    self.parse_comma_separated0(Parser::parse_expr, Token::RParen)?
4112                } else {
4113                    self.parse_comma_separated(Parser::parse_expr)?
4114                },
4115                negated,
4116            },
4117        };
4118        self.expect_token(&Token::RParen)?;
4119        Ok(in_op)
4120    }
4121
4122    /// Parses `BETWEEN <low> AND <high>`, assuming the `BETWEEN` keyword was already consumed.
4123    pub fn parse_between(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
4124        // Stop parsing subexpressions for <low> and <high> on tokens with
4125        // precedence lower than that of `BETWEEN`, such as `AND`, `IS`, etc.
4126        let low = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
4127        self.expect_keyword_is(Keyword::AND)?;
4128        let high = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
4129        Ok(Expr::Between {
4130            expr: Box::new(expr),
4131            negated,
4132            low: Box::new(low),
4133            high: Box::new(high),
4134        })
4135    }
4136
4137    /// Parse a PostgreSQL casting style which is in the form of `expr::datatype`.
4138    pub fn parse_pg_cast(&mut self, expr: Expr) -> Result<Expr, ParserError> {
4139        Ok(Expr::Cast {
4140            kind: CastKind::DoubleColon,
4141            expr: Box::new(expr),
4142            data_type: self.parse_data_type()?,
4143            format: None,
4144        })
4145    }
4146
4147    /// Get the precedence of the next token
4148    pub fn get_next_precedence(&self) -> Result<u8, ParserError> {
4149        self.dialect.get_next_precedence_default(self)
4150    }
4151
4152    /// Return the token at the given index, or EOF if the index is beyond
4153    /// the end of the current set of tokens.
4154    pub fn token_at(&self, index: usize) -> &TokenWithSpan {
4155        self.tokens.get(index).unwrap_or(&EOF_TOKEN)
4156    }
4157
4158    /// Return the first non-whitespace token that has not yet been processed
4159    /// or Token::EOF
4160    ///
4161    /// See [`Self::peek_token_ref`] to avoid the copy.
4162    pub fn peek_token(&self) -> TokenWithSpan {
4163        self.peek_nth_token(0)
4164    }
4165
4166    /// Return a reference to the first non-whitespace token that has not yet
4167    /// been processed or Token::EOF
4168    pub fn peek_token_ref(&self) -> &TokenWithSpan {
4169        self.peek_nth_token_ref(0)
4170    }
4171
4172    /// Returns the `N` next non-whitespace tokens that have not yet been
4173    /// processed.
4174    ///
4175    /// Example:
4176    /// ```rust
4177    /// # use sqlparser::dialect::GenericDialect;
4178    /// # use sqlparser::parser::Parser;
4179    /// # use sqlparser::keywords::Keyword;
4180    /// # use sqlparser::tokenizer::{Token, Word};
4181    /// let dialect = GenericDialect {};
4182    /// let mut parser = Parser::new(&dialect).try_with_sql("ORDER BY foo, bar").unwrap();
4183    ///
4184    /// // Note that Rust infers the number of tokens to peek based on the
4185    /// // length of the slice pattern!
4186    /// assert!(matches!(
4187    ///     parser.peek_tokens(),
4188    ///     [
4189    ///         Token::Word(Word { keyword: Keyword::ORDER, .. }),
4190    ///         Token::Word(Word { keyword: Keyword::BY, .. }),
4191    ///     ]
4192    /// ));
4193    /// ```
4194    pub fn peek_tokens<const N: usize>(&self) -> [Token; N] {
4195        self.peek_tokens_with_location()
4196            .map(|with_loc| with_loc.token)
4197    }
4198
4199    /// Returns the `N` next non-whitespace tokens with locations that have not
4200    /// yet been processed.
4201    ///
4202    /// See [`Self::peek_tokens`] for an example.
4203    pub fn peek_tokens_with_location<const N: usize>(&self) -> [TokenWithSpan; N] {
4204        let mut index = self.index;
4205        core::array::from_fn(|_| loop {
4206            let token = self.tokens.get(index);
4207            index += 1;
4208            if let Some(TokenWithSpan {
4209                token: Token::Whitespace(_),
4210                span: _,
4211            }) = token
4212            {
4213                continue;
4214            }
4215            break token.cloned().unwrap_or(TokenWithSpan {
4216                token: Token::EOF,
4217                span: Span::empty(),
4218            });
4219        })
4220    }
4221
4222    /// Returns references to the `N` next non-whitespace tokens
4223    /// that have not yet been processed.
4224    ///
4225    /// See [`Self::peek_tokens`] for an example.
4226    pub fn peek_tokens_ref<const N: usize>(&self) -> [&TokenWithSpan; N] {
4227        let mut index = self.index;
4228        core::array::from_fn(|_| loop {
4229            let token = self.tokens.get(index);
4230            index += 1;
4231            if let Some(TokenWithSpan {
4232                token: Token::Whitespace(_),
4233                span: _,
4234            }) = token
4235            {
4236                continue;
4237            }
4238            break token.unwrap_or(&EOF_TOKEN);
4239        })
4240    }
4241
4242    /// Return nth non-whitespace token that has not yet been processed
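    ///
    /// For example (mirroring [`Self::peek_tokens`]), peeking the second
    /// non-whitespace token without consuming anything:
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::keywords::Keyword;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::tokenizer::{Token, Word};
    /// let dialect = GenericDialect {};
    /// let parser = Parser::new(&dialect).try_with_sql("ORDER BY foo").unwrap();
    /// assert!(matches!(
    ///     parser.peek_nth_token(1).token,
    ///     Token::Word(Word { keyword: Keyword::BY, .. })
    /// ));
    /// ```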
4243    pub fn peek_nth_token(&self, n: usize) -> TokenWithSpan {
4244        self.peek_nth_token_ref(n).clone()
4245    }
4246
4247    /// Return nth non-whitespace token that has not yet been processed
4248    pub fn peek_nth_token_ref(&self, mut n: usize) -> &TokenWithSpan {
4249        let mut index = self.index;
4250        loop {
4251            index += 1;
4252            match self.tokens.get(index - 1) {
4253                Some(TokenWithSpan {
4254                    token: Token::Whitespace(_),
4255                    span: _,
4256                }) => continue,
4257                non_whitespace => {
4258                    if n == 0 {
4259                        return non_whitespace.unwrap_or(&EOF_TOKEN);
4260                    }
4261                    n -= 1;
4262                }
4263            }
4264        }
4265    }
4266
4267    /// Return the first token, possibly whitespace, that has not yet been processed
4268    /// (or `Token::EOF` if the end of input has been reached).
4269    pub fn peek_token_no_skip(&self) -> TokenWithSpan {
4270        self.peek_nth_token_no_skip(0)
4271    }
4272
4273    /// Return nth token, possibly whitespace, that has not yet been processed.
4274    pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithSpan {
4275        self.tokens
4276            .get(self.index + n)
4277            .cloned()
4278            .unwrap_or(TokenWithSpan {
4279                token: Token::EOF,
4280                span: Span::empty(),
4281            })
4282    }
4283
4284    /// Return true if the next tokens exactly match the `expected` keywords
4285    ///
4286    /// Does not advance the current token.
4287    fn peek_keywords(&mut self, expected: &[Keyword]) -> bool {
4288        let index = self.index;
4289        let matched = self.parse_keywords(expected);
4290        self.index = index;
4291        matched
4292    }
4293
4294    /// Advances to the next non-whitespace token and returns a copy.
4295    ///
4296    /// Please use [`Self::advance_token`] and [`Self::get_current_token`] to
4297    /// avoid the copy.
4298    pub fn next_token(&mut self) -> TokenWithSpan {
4299        self.advance_token();
4300        self.get_current_token().clone()
4301    }
4302
4303    /// Returns the index of the current token
4304    ///
4305    /// This can be used with APIs that expect an index, such as
4306    /// [`Self::token_at`]
4307    pub fn get_current_index(&self) -> usize {
4308        self.index.saturating_sub(1)
4309    }
4310
4311    /// Return the next unprocessed token, possibly whitespace.
4312    pub fn next_token_no_skip(&mut self) -> Option<&TokenWithSpan> {
4313        self.index += 1;
4314        self.tokens.get(self.index - 1)
4315    }
4316
4317    /// Advances the current token to the next non-whitespace token
4318    ///
4319    /// See [`Self::get_current_token`] to get the current token after advancing
4320    pub fn advance_token(&mut self) {
4321        loop {
4322            self.index += 1;
4323            match self.tokens.get(self.index - 1) {
4324                Some(TokenWithSpan {
4325                    token: Token::Whitespace(_),
4326                    span: _,
4327                }) => continue,
4328                _ => break,
4329            }
4330        }
4331    }
4332
4333    /// Returns a reference to the current token
4334    ///
4335    /// Does not advance the current token.
4336    pub fn get_current_token(&self) -> &TokenWithSpan {
4337        self.token_at(self.index.saturating_sub(1))
4338    }
4339
4340    /// Returns a reference to the previous token
4341    ///
4342    /// Does not advance the current token.
4343    pub fn get_previous_token(&self) -> &TokenWithSpan {
4344        self.token_at(self.index.saturating_sub(2))
4345    }
4346
4347    /// Returns a reference to the next token
4348    ///
4349    /// Does not advance the current token.
4350    pub fn get_next_token(&self) -> &TokenWithSpan {
4351        self.token_at(self.index)
4352    }
4353
4354    /// Seek back to the last non-whitespace token.
4355    ///
4356    /// Must be called after `next_token()`, otherwise it might panic. It is
4357    /// OK to call it after `next_token()` has returned an EOF token.
4358    ///
4359    // TODO rename to backup_token and deprecate prev_token?
4360    pub fn prev_token(&mut self) {
4361        loop {
4362            assert!(self.index > 0);
4363            self.index -= 1;
4364            if let Some(TokenWithSpan {
4365                token: Token::Whitespace(_),
4366                span: _,
4367            }) = self.tokens.get(self.index)
4368            {
4369                continue;
4370            }
4371            return;
4372        }
4373    }
4374
4375    /// Report `found` was encountered instead of `expected`
4376    pub fn expected<T>(&self, expected: &str, found: TokenWithSpan) -> Result<T, ParserError> {
4377        parser_err!(
4378            format!("Expected: {expected}, found: {found}"),
4379            found.span.start
4380        )
4381    }
4382
4383    /// Report `found` was encountered instead of `expected`
4384    pub fn expected_ref<T>(&self, expected: &str, found: &TokenWithSpan) -> Result<T, ParserError> {
4385        parser_err!(
4386            format!("Expected: {expected}, found: {found}"),
4387            found.span.start
4388        )
4389    }
4390
4391    /// Report that the token at `index` was found instead of `expected`.
4392    pub fn expected_at<T>(&self, expected: &str, index: usize) -> Result<T, ParserError> {
4393        let found = self.tokens.get(index).unwrap_or(&EOF_TOKEN);
4394        parser_err!(
4395            format!("Expected: {expected}, found: {found}"),
4396            found.span.start
4397        )
4398    }
4399
4400    /// If the current token is the `expected` keyword, consume it and return
4401    /// true. Otherwise, no tokens are consumed and false is returned.
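    ///
    /// A small illustration using the `GenericDialect`:
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::keywords::Keyword;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("ORDER BY foo").unwrap();
    /// assert!(!parser.parse_keyword(Keyword::BY)); // not the current token, nothing consumed
    /// assert!(parser.parse_keyword(Keyword::ORDER)); // consumes `ORDER`
    /// assert!(parser.parse_keyword(Keyword::BY)); // `BY` is now the current token
    /// ```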
4402    #[must_use]
4403    pub fn parse_keyword(&mut self, expected: Keyword) -> bool {
4404        if self.peek_keyword(expected) {
4405            self.advance_token();
4406            true
4407        } else {
4408            false
4409        }
4410    }
4411
4412    #[must_use]
4413    pub fn peek_keyword(&self, expected: Keyword) -> bool {
4414        matches!(&self.peek_token_ref().token, Token::Word(w) if expected == w.keyword)
4415    }
4416
4417    /// If the current token is the `expected` keyword followed by the
4418    /// specified tokens, consume them and return true.
4419    /// Otherwise, no tokens are consumed and false is returned.
4420    ///
4421    /// Note that if `tokens` is long, this function is not efficient: the
4422    /// matching loop calls `peek_nth_token` for each token, and each such
4423    /// call rescans from the current position.
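    ///
    /// Sketch of typical usage (with the `GenericDialect`): the keyword and the
    /// following tokens are only consumed when the whole sequence matches.
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::keywords::Keyword;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::tokenizer::Token;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("VALUES (1)").unwrap();
    /// assert!(!parser.parse_keyword_with_tokens(Keyword::SELECT, &[Token::LParen]));
    /// assert!(parser.parse_keyword_with_tokens(Keyword::VALUES, &[Token::LParen]));
    /// ```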
4424    pub fn parse_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
4425        self.keyword_with_tokens(expected, tokens, true)
4426    }
4427
4428    /// Peeks to see if the current token is the `expected` keyword followed by specified tokens
4429    /// without consuming them.
4430    ///
4431    /// See [Self::parse_keyword_with_tokens] for details.
4432    pub(crate) fn peek_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
4433        self.keyword_with_tokens(expected, tokens, false)
4434    }
4435
4436    fn keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token], consume: bool) -> bool {
4437        match &self.peek_token_ref().token {
4438            Token::Word(w) if expected == w.keyword => {
4439                for (idx, token) in tokens.iter().enumerate() {
4440                    if self.peek_nth_token_ref(idx + 1).token != *token {
4441                        return false;
4442                    }
4443                }
4444
4445                if consume {
4446                    for _ in 0..(tokens.len() + 1) {
4447                        self.advance_token();
4448                    }
4449                }
4450
4451                true
4452            }
4453            _ => false,
4454        }
4455    }
4456
4457    /// If the current and subsequent tokens exactly match the `keywords`
4458    /// sequence, consume them and return true. Otherwise, no tokens are
4459    /// consumed and false is returned.
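    ///
    /// For example, with the `GenericDialect`, a partial match consumes nothing:
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::keywords::Keyword;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("GROUP BY x").unwrap();
    /// assert!(!parser.parse_keywords(&[Keyword::GROUP, Keyword::TO]));
    /// assert!(parser.parse_keywords(&[Keyword::GROUP, Keyword::BY]));
    /// ```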
4460    #[must_use]
4461    pub fn parse_keywords(&mut self, keywords: &[Keyword]) -> bool {
4462        let index = self.index;
4463        for &keyword in keywords {
4464            if !self.parse_keyword(keyword) {
4465                // println!("parse_keywords aborting .. did not find {:?}", keyword);
4466                // reset index and return immediately
4467                self.index = index;
4468                return false;
4469            }
4470        }
4471        true
4472    }
4473
4474    /// If the current token is one of the given `keywords`, returns the keyword
4475    /// that matches, without consuming the token. Otherwise, returns [`None`].
4476    #[must_use]
4477    pub fn peek_one_of_keywords(&self, keywords: &[Keyword]) -> Option<Keyword> {
4478        for keyword in keywords {
4479            if self.peek_keyword(*keyword) {
4480                return Some(*keyword);
4481            }
4482        }
4483        None
4484    }
4485
4486    /// If the current token is one of the given `keywords`, consume the token
4487    /// and return the keyword that matches. Otherwise, no tokens are consumed
4488    /// and [`None`] is returned.
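    ///
    /// Illustration with the `GenericDialect`:
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::keywords::Keyword;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("LIMIT 10").unwrap();
    /// let matched = parser.parse_one_of_keywords(&[Keyword::OFFSET, Keyword::LIMIT]);
    /// assert_eq!(matched, Some(Keyword::LIMIT));
    /// // the next token is `10`, which is not a keyword
    /// assert_eq!(parser.parse_one_of_keywords(&[Keyword::OFFSET, Keyword::LIMIT]), None);
    /// ```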
4489    #[must_use]
4490    pub fn parse_one_of_keywords(&mut self, keywords: &[Keyword]) -> Option<Keyword> {
4491        match &self.peek_token_ref().token {
4492            Token::Word(w) => {
4493                keywords
4494                    .iter()
4495                    .find(|keyword| **keyword == w.keyword)
4496                    .map(|keyword| {
4497                        self.advance_token();
4498                        *keyword
4499                    })
4500            }
4501            _ => None,
4502        }
4503    }
4504
4505    /// If the current token is one of the expected keywords, consume the token
4506    /// and return the keyword that matches. Otherwise, return an error.
4507    pub fn expect_one_of_keywords(&mut self, keywords: &[Keyword]) -> Result<Keyword, ParserError> {
4508        if let Some(keyword) = self.parse_one_of_keywords(keywords) {
4509            Ok(keyword)
4510        } else {
4511            let keywords: Vec<String> = keywords.iter().map(|x| format!("{x:?}")).collect();
4512            self.expected_ref(
4513                &format!("one of {}", keywords.join(" or ")),
4514                self.peek_token_ref(),
4515            )
4516        }
4517    }
4518
4519    /// If the current token is the `expected` keyword, consume the token.
4520    /// Otherwise, return an error.
4521    ///
4522    // todo deprecate in favor of expect_keyword_is
4523    pub fn expect_keyword(&mut self, expected: Keyword) -> Result<TokenWithSpan, ParserError> {
4524        if self.parse_keyword(expected) {
4525            Ok(self.get_current_token().clone())
4526        } else {
4527            self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4528        }
4529    }
4530
4531    /// If the current token is the `expected` keyword, consume the token.
4532    /// Otherwise, return an error.
4533    ///
4534    /// This differs from expect_keyword only in that the matched keyword
4535    /// token is not returned.
4536    pub fn expect_keyword_is(&mut self, expected: Keyword) -> Result<(), ParserError> {
4537        if self.parse_keyword(expected) {
4538            Ok(())
4539        } else {
4540            self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4541        }
4542    }
4543
4544    /// If the current and subsequent tokens exactly match the `keywords`
4545    /// sequence, consume them and return `Ok`. Otherwise, return an error.
4546    pub fn expect_keywords(&mut self, expected: &[Keyword]) -> Result<(), ParserError> {
4547        for &kw in expected {
4548            self.expect_keyword_is(kw)?;
4549        }
4550        Ok(())
4551    }
4552
4553    /// Consume the next token if it matches the expected token, otherwise return false
4554    ///
4555    /// See [Self::advance_token] to consume the token unconditionally
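    ///
    /// For example, using the `GenericDialect`:
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::tokenizer::Token;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("(1)").unwrap();
    /// assert!(parser.consume_token(&Token::LParen));
    /// // the current token is now `1`, so nothing is consumed here
    /// assert!(!parser.consume_token(&Token::RParen));
    /// ```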
4556    #[must_use]
4557    pub fn consume_token(&mut self, expected: &Token) -> bool {
4558        if self.peek_token_ref() == expected {
4559            self.advance_token();
4560            true
4561        } else {
4562            false
4563        }
4564    }
4565
4566    /// If the current and subsequent tokens exactly match the `tokens`
4567    /// sequence, consume them and return true. Otherwise, no tokens are
4568    /// consumed and false is returned.
4569    #[must_use]
4570    pub fn consume_tokens(&mut self, tokens: &[Token]) -> bool {
4571        let index = self.index;
4572        for token in tokens {
4573            if !self.consume_token(token) {
4574                self.index = index;
4575                return false;
4576            }
4577        }
4578        true
4579    }
4580
4581    /// Bail out if the current token is not the expected token, or consume it if it is
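    ///
    /// A short sketch with the `GenericDialect`:
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::tokenizer::Token;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("(").unwrap();
    /// assert!(parser.expect_token(&Token::LParen).is_ok());
    /// // only EOF remains, so expecting another token is an error
    /// assert!(parser.expect_token(&Token::RParen).is_err());
    /// ```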
4582    pub fn expect_token(&mut self, expected: &Token) -> Result<TokenWithSpan, ParserError> {
4583        if self.peek_token_ref() == expected {
4584            Ok(self.next_token())
4585        } else {
4586            self.expected_ref(&expected.to_string(), self.peek_token_ref())
4587        }
4588    }
4589
4590    fn parse<T: FromStr>(s: String, loc: Location) -> Result<T, ParserError>
4591    where
4592        <T as FromStr>::Err: Display,
4593    {
4594        s.parse::<T>().map_err(|e| {
4595            ParserError::ParserError(format!(
4596                "Could not parse '{s}' as {}: {e}{loc}",
4597                core::any::type_name::<T>()
4598            ))
4599        })
4600    }
4601
4602    /// Parse a comma-separated list of 1+ SelectItem
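    ///
    /// An illustrative example with the `GenericDialect` (the list ends before
    /// the `FROM` keyword, which is left unconsumed):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("a, b AS c FROM t").unwrap();
    /// let projection = parser.parse_projection().unwrap();
    /// assert_eq!(projection.len(), 2);
    /// ```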
4603    pub fn parse_projection(&mut self) -> Result<Vec<SelectItem>, ParserError> {
4604        // BigQuery and Snowflake allow trailing commas, but only in projection lists
4605        // e.g. `SELECT 1, 2, FROM t`
4606        // https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#trailing_commas
4607        // https://docs.snowflake.com/en/release-notes/2024/8_11#select-supports-trailing-commas
4608
4609        let trailing_commas =
4610            self.options.trailing_commas | self.dialect.supports_projection_trailing_commas();
4611
4612        self.parse_comma_separated_with_trailing_commas(
4613            |p| p.parse_select_item(),
4614            trailing_commas,
4615            Self::is_reserved_for_column_alias,
4616        )
4617    }
4618
4619    pub fn parse_actions_list(&mut self) -> Result<Vec<Action>, ParserError> {
4620        let mut values = vec![];
4621        loop {
4622            values.push(self.parse_grant_permission()?);
4623            if !self.consume_token(&Token::Comma) {
4624                break;
4625            } else if self.options.trailing_commas {
4626                match self.peek_token().token {
4627                    Token::Word(kw) if kw.keyword == Keyword::ON => {
4628                        break;
4629                    }
4630                    Token::RParen
4631                    | Token::SemiColon
4632                    | Token::EOF
4633                    | Token::RBracket
4634                    | Token::RBrace => break,
4635                    _ => continue,
4636                }
4637            }
4638        }
4639        Ok(values)
4640    }
4641
4642    /// Parse a list of [TableWithJoins]
4643    fn parse_table_with_joins(&mut self) -> Result<Vec<TableWithJoins>, ParserError> {
4644        let trailing_commas = self.dialect.supports_from_trailing_commas();
4645
4646        self.parse_comma_separated_with_trailing_commas(
4647            Parser::parse_table_and_joins,
4648            trailing_commas,
4649            |kw, parser| !parser.dialect.is_table_factor(kw, parser),
4650        )
4651    }
4652
4653    /// Parse the comma of a comma-separated syntax element.
4654    /// `R` is a predicate that should return true if the next
4655    /// keyword is a reserved keyword.
4656    /// Allows for control over trailing commas.
4657    ///
4658    /// Returns true if the end of the list has been reached.
4659    fn is_parse_comma_separated_end_with_trailing_commas<R>(
4660        &mut self,
4661        trailing_commas: bool,
4662        is_reserved_keyword: &R,
4663    ) -> bool
4664    where
4665        R: Fn(&Keyword, &mut Parser) -> bool,
4666    {
4667        if !self.consume_token(&Token::Comma) {
4668            true
4669        } else if trailing_commas {
4670            let token = self.next_token().token;
4671            let is_end = match token {
4672                Token::Word(ref kw) if is_reserved_keyword(&kw.keyword, self) => true,
4673                Token::RParen | Token::SemiColon | Token::EOF | Token::RBracket | Token::RBrace => {
4674                    true
4675                }
4676                _ => false,
4677            };
4678            self.prev_token();
4679
4680            is_end
4681        } else {
4682            false
4683        }
4684    }
4685
4686    /// Parse the comma of a comma-separated syntax element.
4687    /// Returns true if the end of the list has been reached.
4688    fn is_parse_comma_separated_end(&mut self) -> bool {
4689        self.is_parse_comma_separated_end_with_trailing_commas(
4690            self.options.trailing_commas,
4691            &Self::is_reserved_for_column_alias,
4692        )
4693    }
4694
4695    /// Parse a comma-separated list of 1+ items accepted by `F`
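    ///
    /// For example, parsing a comma-separated list of expressions with the
    /// `GenericDialect`:
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("1, 2, 3").unwrap();
    /// let exprs = parser.parse_comma_separated(Parser::parse_expr).unwrap();
    /// assert_eq!(exprs.len(), 3);
    /// ```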
4696    pub fn parse_comma_separated<T, F>(&mut self, f: F) -> Result<Vec<T>, ParserError>
4697    where
4698        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4699    {
4700        self.parse_comma_separated_with_trailing_commas(
4701            f,
4702            self.options.trailing_commas,
4703            Self::is_reserved_for_column_alias,
4704        )
4705    }
4706
4707    /// Parse a comma-separated list of 1+ items accepted by `F`.
4708    /// `R` is a predicate that should return true if the next
4709    /// keyword is a reserved keyword.
4710    /// Allows for control over trailing commas.
4711    fn parse_comma_separated_with_trailing_commas<T, F, R>(
4712        &mut self,
4713        mut f: F,
4714        trailing_commas: bool,
4715        is_reserved_keyword: R,
4716    ) -> Result<Vec<T>, ParserError>
4717    where
4718        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4719        R: Fn(&Keyword, &mut Parser) -> bool,
4720    {
4721        let mut values = vec![];
4722        loop {
4723            values.push(f(self)?);
4724            if self.is_parse_comma_separated_end_with_trailing_commas(
4725                trailing_commas,
4726                &is_reserved_keyword,
4727            ) {
4728                break;
4729            }
4730        }
4731        Ok(values)
4732    }
4733
4734    /// Parse a period-separated list of 1+ items accepted by `F`
4735    fn parse_period_separated<T, F>(&mut self, mut f: F) -> Result<Vec<T>, ParserError>
4736    where
4737        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4738    {
4739        let mut values = vec![];
4740        loop {
4741            values.push(f(self)?);
4742            if !self.consume_token(&Token::Period) {
4743                break;
4744            }
4745        }
4746        Ok(values)
4747    }
4748
4749    /// Parse a keyword-separated list of 1+ items accepted by `F`
4750    pub fn parse_keyword_separated<T, F>(
4751        &mut self,
4752        keyword: Keyword,
4753        mut f: F,
4754    ) -> Result<Vec<T>, ParserError>
4755    where
4756        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4757    {
4758        let mut values = vec![];
4759        loop {
4760            values.push(f(self)?);
4761            if !self.parse_keyword(keyword) {
4762                break;
4763            }
4764        }
4765        Ok(values)
4766    }
4767
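    /// Parses `f` enclosed in a required pair of parentheses, e.g. `( <f> )`.
    ///
    /// For example, with the `GenericDialect`:
    /// ```rust
    /// # use sqlparser::ast::Expr;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("(1 + 2)").unwrap();
    /// let expr = parser.parse_parenthesized(Parser::parse_expr).unwrap();
    /// assert!(matches!(expr, Expr::BinaryOp { .. }));
    /// ```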
4768    pub fn parse_parenthesized<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
4769    where
4770        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4771    {
4772        self.expect_token(&Token::LParen)?;
4773        let res = f(self)?;
4774        self.expect_token(&Token::RParen)?;
4775        Ok(res)
4776    }
4777
4778    /// Parse a comma-separated list of 0+ items accepted by `F`
4779    /// * `end_token` - expected end token for the closure (e.g. [Token::RParen], [Token::RBrace] ...)
4780    pub fn parse_comma_separated0<T, F>(
4781        &mut self,
4782        f: F,
4783        end_token: Token,
4784    ) -> Result<Vec<T>, ParserError>
4785    where
4786        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4787    {
4788        if self.peek_token().token == end_token {
4789            return Ok(vec![]);
4790        }
4791
4792        if self.options.trailing_commas && self.peek_tokens() == [Token::Comma, end_token] {
4793            let _ = self.consume_token(&Token::Comma);
4794            return Ok(vec![]);
4795        }
4796
4797        self.parse_comma_separated(f)
4798    }
4799
4800    /// Parses 0 or more statements, each followed by a semicolon.
4801    /// If the next token is any of `terminal_keywords` then no more
4802    /// statements will be parsed.
4803    pub(crate) fn parse_statement_list(
4804        &mut self,
4805        terminal_keywords: &[Keyword],
4806    ) -> Result<Vec<Statement>, ParserError> {
4807        let mut values = vec![];
4808        loop {
4809            match &self.peek_nth_token_ref(0).token {
4810                Token::EOF => break,
4811                Token::Word(w) => {
4812                    if w.quote_style.is_none() && terminal_keywords.contains(&w.keyword) {
4813                        break;
4814                    }
4815                }
4816                _ => {}
4817            }
4818
4819            values.push(self.parse_statement()?);
4820            self.expect_token(&Token::SemiColon)?;
4821        }
4822        Ok(values)
4823    }
4824
4825    /// Default implementation of a predicate that returns true if
4826    /// the specified keyword is reserved and cannot be used as a column alias.
4827    /// See [Dialect::is_column_alias]
4828    fn is_reserved_for_column_alias(kw: &Keyword, parser: &mut Parser) -> bool {
4829        !parser.dialect.is_column_alias(kw, parser)
4830    }
4831
4832    /// Run a parser method `f`, reverting back to the current position if unsuccessful.
4833    /// Returns `ParserError::RecursionLimitExceeded` if `f` returns a `RecursionLimitExceeded`.
4834    /// Returns `Ok(None)` if `f` returns any other error.
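    ///
    /// A sketch of the rewind behavior (using the `GenericDialect`):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::tokenizer::Token;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql(") AS x").unwrap();
    /// // `)` cannot start an expression, so `Ok(None)` is returned and the
    /// // parser is rewound to the `)` token.
    /// assert!(parser.maybe_parse(|p| p.parse_expr()).unwrap().is_none());
    /// assert!(parser.consume_token(&Token::RParen));
    /// ```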
4835    pub fn maybe_parse<T, F>(&mut self, f: F) -> Result<Option<T>, ParserError>
4836    where
4837        F: FnMut(&mut Parser) -> Result<T, ParserError>,
4838    {
4839        match self.try_parse(f) {
4840            Ok(t) => Ok(Some(t)),
4841            Err(ParserError::RecursionLimitExceeded) => Err(ParserError::RecursionLimitExceeded),
4842            _ => Ok(None),
4843        }
4844    }
4845
4846    /// Run a parser method `f`, reverting back to the current position if unsuccessful.
4847    pub fn try_parse<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
4848    where
4849        F: FnMut(&mut Parser) -> Result<T, ParserError>,
4850    {
4851        let index = self.index;
4852        match f(self) {
4853            Ok(t) => Ok(t),
4854            Err(e) => {
4855                // Reset the parser position on any error before returning it
4856                self.index = index;
4857                Err(e)
4858            }
4859        }
4860    }
4861
4862    /// Parse either `ALL`, `DISTINCT` or `DISTINCT ON (...)`. Returns [`None`] if `ALL` is parsed
4863    /// and results in a [`ParserError`] if both `ALL` and `DISTINCT` are found.
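    ///
    /// For instance, with the `GenericDialect`:
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("DISTINCT x").unwrap();
    /// assert!(parser.parse_all_or_distinct().unwrap().is_some());
    ///
    /// let mut parser = Parser::new(&dialect).try_with_sql("ALL x").unwrap();
    /// assert!(parser.parse_all_or_distinct().unwrap().is_none());
    /// ```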
4864    pub fn parse_all_or_distinct(&mut self) -> Result<Option<Distinct>, ParserError> {
4865        let loc = self.peek_token().span.start;
4866        let all = self.parse_keyword(Keyword::ALL);
4867        let distinct = self.parse_keyword(Keyword::DISTINCT);
4868        if !distinct {
4869            return Ok(None);
4870        }
4871        if all {
4872            return parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc);
4873        }
4874        let on = self.parse_keyword(Keyword::ON);
4875        if !on {
4876            return Ok(Some(Distinct::Distinct));
4877        }
4878
4879        self.expect_token(&Token::LParen)?;
4880        let col_names = if self.consume_token(&Token::RParen) {
4881            self.prev_token();
4882            Vec::new()
4883        } else {
4884            self.parse_comma_separated(Parser::parse_expr)?
4885        };
4886        self.expect_token(&Token::RParen)?;
4887        Ok(Some(Distinct::On(col_names)))
4888    }
4889
4890    /// Parse a SQL CREATE statement
4891    pub fn parse_create(&mut self) -> Result<Statement, ParserError> {
4892        let or_replace = self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]);
4893        let or_alter = self.parse_keywords(&[Keyword::OR, Keyword::ALTER]);
4894        let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
4895        let global = self.parse_one_of_keywords(&[Keyword::GLOBAL]).is_some();
4896        let transient = self.parse_one_of_keywords(&[Keyword::TRANSIENT]).is_some();
4897        let global: Option<bool> = if global {
4898            Some(true)
4899        } else if local {
4900            Some(false)
4901        } else {
4902            None
4903        };
4904        let temporary = self
4905            .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
4906            .is_some();
4907        let persistent = dialect_of!(self is DuckDbDialect)
4908            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
4909        let create_view_params = self.parse_create_view_params()?;
4910        if self.parse_keywords(&[Keyword::TABLE, Keyword::FUNCTION]) {
4911            self.parse_create_table_function(or_replace)
4912        } else if self.parse_keyword(Keyword::TABLE) {
4913            self.parse_create_table(or_replace, temporary, global, transient)
4914        } else if self.peek_keyword(Keyword::MATERIALIZED)
4915            || self.peek_keyword(Keyword::VIEW)
4916            || self.peek_keywords(&[Keyword::SECURE, Keyword::MATERIALIZED, Keyword::VIEW])
4917            || self.peek_keywords(&[Keyword::SECURE, Keyword::VIEW])
4918        {
4919            self.parse_create_view(or_alter, or_replace, temporary, create_view_params)
4920        } else if self.parse_keyword(Keyword::POLICY) {
4921            self.parse_create_policy()
4922        } else if self.parse_keyword(Keyword::EXTERNAL) {
4923            if self.parse_keyword(Keyword::SCHEMA) {
4924                self.parse_create_external_schema()
4925            } else {
4926                self.parse_create_external_table(or_replace)
4927            }
4928        } else if self.parse_keywords(&[Keyword::AGGREGATE, Keyword::FUNCTION]) {
4929            self.parse_create_function_with_aggregate(or_alter, or_replace, temporary, true)
4930        } else if self.parse_keyword(Keyword::FUNCTION) {
4931            self.parse_create_function_with_aggregate(or_alter, or_replace, temporary, false)
4932        } else if self.parse_keyword(Keyword::DOMAIN) {
4933            self.parse_create_domain()
4934        } else if self.parse_keyword(Keyword::TRIGGER) {
4935            self.parse_create_trigger(or_alter, or_replace, false)
4936        } else if self.parse_keywords(&[Keyword::CONSTRAINT, Keyword::TRIGGER]) {
4937            self.parse_create_trigger(or_alter, or_replace, true)
4938        } else if self.parse_keyword(Keyword::MACRO) {
4939            self.parse_create_macro(or_replace, temporary)
4940        } else if self.parse_keyword(Keyword::SECRET) {
4941            self.parse_create_secret(or_replace, temporary, persistent)
4942        } else if self.parse_keyword(Keyword::USER) {
4943            self.parse_create_user(or_replace)
4944        } else if self.parse_keywords(&[Keyword::SEARCH, Keyword::INDEX]) {
4945            self.parse_create_search_index(or_replace)
4946        } else if self.parse_keywords(&[Keyword::VECTOR, Keyword::INDEX]) {
4947            self.parse_create_vector_index(or_replace)
4948        } else if self.parse_keywords(&[Keyword::ROW, Keyword::ACCESS, Keyword::POLICY]) {
4949            self.parse_create_row_access_policy(or_replace)
4950        } else if self.parse_keyword(Keyword::SCHEMA) {
4951            self.parse_create_schema(or_replace)
4952        } else if or_replace {
4953            self.expected(
4954                "[EXTERNAL] TABLE or [MATERIALIZED] VIEW or FUNCTION or SCHEMA after CREATE OR REPLACE",
4955                self.peek_token(),
4956            )
4957        } else if self.parse_keyword(Keyword::EXTENSION) {
4958            self.parse_create_extension()
4959        } else if self.parse_keyword(Keyword::INDEX) {
4960            self.parse_create_index(false)
4961        } else if self.parse_keywords(&[Keyword::UNIQUE, Keyword::INDEX]) {
4962            self.parse_create_index(true)
4963        } else if self.parse_keyword(Keyword::VIRTUAL) {
4964            self.parse_create_virtual_table()
4965        } else if self.parse_keyword(Keyword::DATABASE) {
4966            self.parse_create_database()
4967        } else if self.parse_keyword(Keyword::ROLE) {
4968            self.parse_create_role()
4969        } else if self.parse_keyword(Keyword::SEQUENCE) {
4970            self.parse_create_sequence(temporary)
4971        } else if self.parse_keyword(Keyword::TYPE) {
4972            self.parse_create_type()
4973        } else if self.parse_keyword(Keyword::PROCEDURE) {
4974            self.parse_create_procedure(or_alter)
4975        } else if self.parse_keyword(Keyword::CONNECTOR) {
4976            self.parse_create_connector()
4977        } else if self.parse_keyword(Keyword::SERVER) {
4978            self.parse_pg_create_server()
4979        } else {
4980            self.expected("an object type after CREATE", self.peek_token())
4981        }
4982    }
4983
4984    fn parse_create_user(&mut self, or_replace: bool) -> Result<Statement, ParserError> {
4985        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
4986        let name = self.parse_identifier()?;
4987        let options = self.parse_key_value_options(false, &[Keyword::WITH, Keyword::TAG])?;
4988        let with_tags = self.parse_keyword(Keyword::WITH);
4989        let tags = if self.parse_keyword(Keyword::TAG) {
4990            self.parse_key_value_options(true, &[])?
4991        } else {
4992            vec![]
4993        };
4994        Ok(Statement::CreateUser(CreateUser {
4995            or_replace,
4996            if_not_exists,
4997            name,
4998            options: KeyValueOptions {
4999                options,
5000                delimiter: KeyValueOptionsDelimiter::Space,
5001            },
5002            with_tags,
5003            tags: KeyValueOptions {
5004                options: tags,
5005                delimiter: KeyValueOptionsDelimiter::Comma,
5006            },
5007        }))
5008    }
5009
5010    /// See [DuckDB Docs](https://duckdb.org/docs/sql/statements/create_secret.html) for more details.
5011    pub fn parse_create_secret(
5012        &mut self,
5013        or_replace: bool,
5014        temporary: bool,
5015        persistent: bool,
5016    ) -> Result<Statement, ParserError> {
5017        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5018
5019        let mut storage_specifier = None;
5020        let mut name = None;
5021        if self.peek_token() != Token::LParen {
5022            if self.parse_keyword(Keyword::IN) {
5023                storage_specifier = self.parse_identifier().ok()
5024            } else {
5025                name = self.parse_identifier().ok();
5026            }
5027
5028            // Storage specifier may follow the name
5029            if storage_specifier.is_none()
5030                && self.peek_token() != Token::LParen
5031                && self.parse_keyword(Keyword::IN)
5032            {
5033                storage_specifier = self.parse_identifier().ok();
5034            }
5035        }
5036
5037        self.expect_token(&Token::LParen)?;
5038        self.expect_keyword_is(Keyword::TYPE)?;
5039        let secret_type = self.parse_identifier()?;
5040
5041        let mut options = Vec::new();
5042        if self.consume_token(&Token::Comma) {
5043            options.append(&mut self.parse_comma_separated(|p| {
5044                let key = p.parse_identifier()?;
5045                let value = p.parse_identifier()?;
5046                Ok(SecretOption { key, value })
5047            })?);
5048        }
5049        self.expect_token(&Token::RParen)?;
5050
5051        let temp = match (temporary, persistent) {
5052            (true, false) => Some(true),
5053            (false, true) => Some(false),
5054            (false, false) => None,
5055            _ => self.expected("TEMPORARY or PERSISTENT", self.peek_token())?,
5056        };
5057
5058        Ok(Statement::CreateSecret {
5059            or_replace,
5060            temporary: temp,
5061            if_not_exists,
5062            name,
5063            storage_specifier,
5064            secret_type,
5065            options,
5066        })
5067    }
5068
5069    /// Parse a CACHE TABLE statement
5070    pub fn parse_cache_table(&mut self) -> Result<Statement, ParserError> {
5071        let (mut table_flag, mut options, mut has_as, mut query) = (None, vec![], false, None);
5072        if self.parse_keyword(Keyword::TABLE) {
5073            let table_name = self.parse_object_name(false)?;
5074            if self.peek_token().token != Token::EOF {
5075                if let Token::Word(word) = self.peek_token().token {
5076                    if word.keyword == Keyword::OPTIONS {
5077                        options = self.parse_options(Keyword::OPTIONS)?
5078                    }
5079                };
5080
5081                if self.peek_token().token != Token::EOF {
5082                    let (a, q) = self.parse_as_query()?;
5083                    has_as = a;
5084                    query = Some(q);
5085                }
5086
5087                Ok(Statement::Cache {
5088                    table_flag,
5089                    table_name,
5090                    has_as,
5091                    options,
5092                    query,
5093                })
5094            } else {
5095                Ok(Statement::Cache {
5096                    table_flag,
5097                    table_name,
5098                    has_as,
5099                    options,
5100                    query,
5101                })
5102            }
5103        } else {
5104            table_flag = Some(self.parse_object_name(false)?);
5105            if self.parse_keyword(Keyword::TABLE) {
5106                let table_name = self.parse_object_name(false)?;
5107                if self.peek_token() != Token::EOF {
5108                    if let Token::Word(word) = self.peek_token().token {
5109                        if word.keyword == Keyword::OPTIONS {
5110                            options = self.parse_options(Keyword::OPTIONS)?
5111                        }
5112                    };
5113
5114                    if self.peek_token() != Token::EOF {
5115                        let (a, q) = self.parse_as_query()?;
5116                        has_as = a;
5117                        query = Some(q);
5118                    }
5119
5120                    Ok(Statement::Cache {
5121                        table_flag,
5122                        table_name,
5123                        has_as,
5124                        options,
5125                        query,
5126                    })
5127                } else {
5128                    Ok(Statement::Cache {
5129                        table_flag,
5130                        table_name,
5131                        has_as,
5132                        options,
5133                        query,
5134                    })
5135                }
5136            } else {
5137                if self.peek_token() == Token::EOF {
5138                    self.prev_token();
5139                }
5140                self.expected("a `TABLE` keyword", self.peek_token())
5141            }
5142        }
5143    }
5144
5145    /// Parse an optional `AS` before a query, such as `WITH XXX AS SELECT XXX` or `CACHE TABLE AS SELECT XXX`
5146    pub fn parse_as_query(&mut self) -> Result<(bool, Box<Query>), ParserError> {
5147        match self.peek_token().token {
5148            Token::Word(word) => match word.keyword {
5149                Keyword::AS => {
5150                    self.next_token();
5151                    Ok((true, self.parse_query()?))
5152                }
5153                _ => Ok((false, self.parse_query()?)),
5154            },
5155            _ => self.expected("a QUERY statement", self.peek_token()),
5156        }
5157    }
5158
5159    /// Parse an `UNCACHE TABLE` statement
5160    pub fn parse_uncache_table(&mut self) -> Result<Statement, ParserError> {
5161        self.expect_keyword_is(Keyword::TABLE)?;
5162        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5163        let table_name = self.parse_object_name(false)?;
5164        Ok(Statement::UNCache {
5165            table_name,
5166            if_exists,
5167        })
5168    }
5169
5170    /// SQLite-specific `CREATE VIRTUAL TABLE`
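    ///
    /// For example, using the `SQLiteDialect`:
    /// ```rust
    /// # use sqlparser::dialect::SQLiteDialect;
    /// # use sqlparser::parser::Parser;
    /// let sql = "CREATE VIRTUAL TABLE IF NOT EXISTS t USING module_name (arg1, arg2)";
    /// let statements = Parser::parse_sql(&SQLiteDialect {}, sql).unwrap();
    /// assert_eq!(statements.len(), 1);
    /// ```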
5171    pub fn parse_create_virtual_table(&mut self) -> Result<Statement, ParserError> {
5172        self.expect_keyword_is(Keyword::TABLE)?;
5173        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5174        let table_name = self.parse_object_name(false)?;
5175        self.expect_keyword_is(Keyword::USING)?;
5176        let module_name = self.parse_identifier()?;
5177        // SQLite docs note that module "arguments syntax is sufficiently
5178        // general that the arguments can be made to appear as column
5179        // definitions in a traditional CREATE TABLE statement", but
5180        // we don't implement that.
5181        let module_args = self.parse_parenthesized_column_list(Optional, false)?;
5182        Ok(Statement::CreateVirtualTable {
5183            name: table_name,
5184            if_not_exists,
5185            module_name,
5186            module_args,
5187        })
5188    }
5189
5190    pub fn parse_create_schema(&mut self, or_replace: bool) -> Result<Statement, ParserError> {
5191        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5192
5193        let schema_name = self.parse_schema_name()?;
5194
5195        let default_collate_spec = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
5196            Some(self.parse_expr()?)
5197        } else {
5198            None
5199        };
5200
5201        let with = if self.peek_keyword(Keyword::WITH) {
5202            Some(self.parse_options(Keyword::WITH)?)
5203        } else {
5204            None
5205        };
5206
5207        let options = if self.peek_keyword(Keyword::OPTIONS) {
5208            Some(self.parse_options(Keyword::OPTIONS)?)
5209        } else {
5210            None
5211        };
5212
5213        let clone = if self.parse_keyword(Keyword::CLONE) {
5214            Some(self.parse_object_name(false)?)
5215        } else {
5216            None
5217        };
5218
5219        Ok(Statement::CreateSchema {
5220            schema_name,
5221            if_not_exists,
5222            or_replace,
5223            with,
5224            options,
5225            default_collate_spec,
5226            clone,
5227        })
5228    }
5229
5230    pub fn parse_create_external_schema(&mut self) -> Result<Statement, ParserError> {
5231        let schema_name = self.parse_schema_name()?;
5232
5233        let options = if self.peek_keyword(Keyword::OPTIONS) {
5234            Some(self.parse_options(Keyword::OPTIONS)?)
5235        } else {
5236            None
5237        };
5238
5239        Ok(Statement::CreateSchema {
5240            schema_name,
5241            if_not_exists: false,
5242            or_replace: false,
5243            with: None,
5244            options,
5245            default_collate_spec: None,
5246            clone: None,
5247        })
5248    }
5249
5250    fn parse_schema_name(&mut self) -> Result<SchemaName, ParserError> {
5251        if self.parse_keyword(Keyword::AUTHORIZATION) {
5252            Ok(SchemaName::UnnamedAuthorization(self.parse_identifier()?))
5253        } else {
5254            let name = self.parse_object_name(false)?;
5255
5256            if self.parse_keyword(Keyword::AUTHORIZATION) {
5257                Ok(SchemaName::NamedAuthorization(
5258                    name,
5259                    self.parse_identifier()?,
5260                ))
5261            } else {
5262                Ok(SchemaName::Simple(name))
5263            }
5264        }
5265    }
5266
5267    pub fn parse_create_database(&mut self) -> Result<Statement, ParserError> {
5268        let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5269        let db_name = self.parse_object_name(false)?;
5270        let mut location = None;
5271        let mut managed_location = None;
5272        loop {
5273            match self.parse_one_of_keywords(&[Keyword::LOCATION, Keyword::MANAGEDLOCATION]) {
5274                Some(Keyword::LOCATION) => location = Some(self.parse_literal_string()?),
5275                Some(Keyword::MANAGEDLOCATION) => {
5276                    managed_location = Some(self.parse_literal_string()?)
5277                }
5278                _ => break,
5279            }
5280        }
5281        let clone = if self.parse_keyword(Keyword::CLONE) {
5282            Some(self.parse_object_name(false)?)
5283        } else {
5284            None
5285        };
5286
5287        Ok(Statement::CreateDatabase {
5288            db_name,
5289            if_not_exists: ine,
5290            location,
5291            managed_location,
5292            or_replace: false,
5293            transient: false,
5294            clone,
5295            data_retention_time_in_days: None,
5296            max_data_extension_time_in_days: None,
5297            external_volume: None,
5298            catalog: None,
5299            replace_invalid_characters: None,
5300            default_ddl_collation: None,
5301            storage_serialization_policy: None,
5302            comment: None,
5303            catalog_sync: None,
5304            catalog_sync_namespace_mode: None,
5305            catalog_sync_namespace_flatten_delimiter: None,
5306            with_tags: None,
5307            with_contacts: None,
5308        })
5309    }
5310
5311    pub fn parse_optional_create_function_using(
5312        &mut self,
5313    ) -> Result<Option<CreateFunctionUsing>, ParserError> {
5314        if !self.parse_keyword(Keyword::USING) {
5315            return Ok(None);
5316        };
5317        let keyword =
5318            self.expect_one_of_keywords(&[Keyword::JAR, Keyword::FILE, Keyword::ARCHIVE])?;
5319
5320        let uri = self.parse_literal_string()?;
5321
5322        match keyword {
5323            Keyword::JAR => Ok(Some(CreateFunctionUsing::Jar(uri))),
5324            Keyword::FILE => Ok(Some(CreateFunctionUsing::File(uri))),
5325            Keyword::ARCHIVE => Ok(Some(CreateFunctionUsing::Archive(uri))),
5326            _ => self.expected(
5327                "JAR, FILE or ARCHIVE",
5328                TokenWithSpan::wrap(Token::make_keyword(format!("{keyword:?}").as_str())),
5329            ),
5330        }
5331    }
5332
5333    pub fn parse_create_function(
5334        &mut self,
5335        or_alter: bool,
5336        or_replace: bool,
5337        temporary: bool,
5338    ) -> Result<Statement, ParserError> {
5339        self.parse_create_function_with_aggregate(or_alter, or_replace, temporary, false)
5340    }
5341
5342    pub fn parse_create_function_with_aggregate(
5343        &mut self,
5344        or_alter: bool,
5345        or_replace: bool,
5346        temporary: bool,
5347        aggregate: bool,
5348    ) -> Result<Statement, ParserError> {
5349        if dialect_of!(self is HiveDialect) {
5350            self.parse_hive_create_function(or_replace, temporary)
5351        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
5352            self.parse_postgres_create_function(or_replace, temporary)
5353        } else if dialect_of!(self is DuckDbDialect) {
5354            self.parse_create_macro(or_replace, temporary)
5355        } else if dialect_of!(self is BigQueryDialect) {
5356            self.parse_bigquery_create_function(or_replace, temporary, aggregate)
5357        } else if dialect_of!(self is MsSqlDialect) {
5358            self.parse_mssql_create_function(or_alter, or_replace, temporary)
5359        } else {
5360            self.prev_token();
5361            self.expected("an object type after CREATE", self.peek_token())
5362        }
5363    }
5364
5365    /// Parse `CREATE TABLE FUNCTION` for BigQuery
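    ///
    /// A sketch of the overall syntax accepted below (names are placeholders):
    ///
    /// ```sql
    /// CREATE [ OR REPLACE ] TABLE FUNCTION [ IF NOT EXISTS ] name ( arguments )
    ///     [ RETURNS { TABLE < column data_type [, ...] > | data_type } ]
    ///     [ LANGUAGE lang ]
    ///     [ OPTIONS ( ... ) ]
    ///     AS expression
    /// ```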
5366    pub fn parse_create_table_function(
5367        &mut self,
5368        or_replace: bool,
5369    ) -> Result<Statement, ParserError> {
5370        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5371        let (name, args) = self.parse_create_function_name_and_params()?;
5372
5373        let return_type = if self.parse_keyword(Keyword::RETURNS) {
5374            if self.parse_keyword(Keyword::TABLE) {
5375                self.expect_token(&Token::Lt)?;
5376                let mut columns = vec![];
5377                let mut trailing_bracket: MatchedTrailingBracket;
5378                loop {
5379                    let name = self.parse_identifier()?;
5380                    let (data_type, tb) = self.parse_data_type_helper()?;
5381                    trailing_bracket = tb;
5382                    columns.push(ColumnDef {
5383                        name,
5384                        data_type,
5385                        options: vec![],
5386                    });
5387                    if trailing_bracket.0 || !self.consume_token(&Token::Comma) {
5388                        break;
5389                    }
5390                }
5391                self.expect_closing_angle_bracket(trailing_bracket)?;
5392                Some(DataType::Table(Some(columns)))
5393            } else {
5394                Some(self.parse_data_type()?)
5395            }
5396        } else {
5397            None
5398        };
5399
5400        let language = if self.parse_keyword(Keyword::LANGUAGE) {
5401            Some(self.parse_identifier()?)
5402        } else {
5403            None
5404        };
5405
5406        let options = self.maybe_parse_options(Keyword::OPTIONS)?;
5407
5408        self.expect_keyword_is(Keyword::AS)?;
5409        let expr = self.parse_expr()?;
5410        let function_body = Some(CreateFunctionBody::AsBeforeOptions(expr));
5411
5412        Ok(Statement::CreateFunction(CreateFunction {
5413            or_alter: false,
5414            or_replace,
5415            temporary: false,
5416            if_not_exists,
5417            aggregate: false,
5418            table_function: true,
5419            name,
5420            args: Some(args),
5421            return_type,
5422            function_body,
5423            language,
5424            determinism_specifier: None,
5425            options,
5426            remote_connection: None,
5427            sql_security: None,
5428            using: None,
5429            behavior: None,
5430            called_on_null: None,
5431            parallel: None,
5432        }))
5433    }
5434
5435    /// Parse `CREATE FUNCTION` for [PostgreSQL]
5436    ///
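    /// A sketch of the clauses accepted below; after the signature they may
    /// appear in any order (names are placeholders):
    ///
    /// ```sql
    /// CREATE [ OR REPLACE ] FUNCTION name ( [ arguments ] )
    ///     [ RETURNS data_type ]
    ///     { AS 'definition' | RETURN expression }
    ///     [ LANGUAGE lang ]
    ///     [ IMMUTABLE | STABLE | VOLATILE ]
    ///     [ CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT ]
    ///     [ PARALLEL { UNSAFE | RESTRICTED | SAFE } ]
    /// ```
    ///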
5437    /// [PostgreSQL]: https://www.postgresql.org/docs/15/sql-createfunction.html
5438    fn parse_postgres_create_function(
5439        &mut self,
5440        or_replace: bool,
5441        temporary: bool,
5442    ) -> Result<Statement, ParserError> {
5443        let name = self.parse_object_name(false)?;
5444
5445        self.expect_token(&Token::LParen)?;
5446        let args = if Token::RParen != self.peek_token_ref().token {
5447            self.parse_comma_separated(Parser::parse_function_arg)?
5448        } else {
5449            vec![]
5450        };
5451        self.expect_token(&Token::RParen)?;
5452
5453        let return_type = if self.parse_keyword(Keyword::RETURNS) {
5454            Some(self.parse_data_type()?)
5455        } else {
5456            None
5457        };
5458
5459        #[derive(Default)]
5460        struct Body {
5461            language: Option<Ident>,
5462            behavior: Option<FunctionBehavior>,
5463            function_body: Option<CreateFunctionBody>,
5464            called_on_null: Option<FunctionCalledOnNull>,
5465            parallel: Option<FunctionParallel>,
5466        }
5467        let mut body = Body::default();
5468        loop {
5469            fn ensure_not_set<T>(field: &Option<T>, name: &str) -> Result<(), ParserError> {
5470                if field.is_some() {
5471                    return Err(ParserError::ParserError(format!(
5472                        "{name} specified more than once",
5473                    )));
5474                }
5475                Ok(())
5476            }
5477            if self.parse_keyword(Keyword::AS) {
5478                ensure_not_set(&body.function_body, "AS")?;
5479                body.function_body = Some(CreateFunctionBody::AsBeforeOptions(
5480                    self.parse_create_function_body_string()?,
5481                ));
5482            } else if self.parse_keyword(Keyword::LANGUAGE) {
5483                ensure_not_set(&body.language, "LANGUAGE")?;
5484                body.language = Some(self.parse_identifier()?);
5485            } else if self.parse_keyword(Keyword::IMMUTABLE) {
5486                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
5487                body.behavior = Some(FunctionBehavior::Immutable);
5488            } else if self.parse_keyword(Keyword::STABLE) {
5489                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
5490                body.behavior = Some(FunctionBehavior::Stable);
5491            } else if self.parse_keyword(Keyword::VOLATILE) {
5492                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
5493                body.behavior = Some(FunctionBehavior::Volatile);
5494            } else if self.parse_keywords(&[
5495                Keyword::CALLED,
5496                Keyword::ON,
5497                Keyword::NULL,
5498                Keyword::INPUT,
5499            ]) {
5500                ensure_not_set(
5501                    &body.called_on_null,
5502                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
5503                )?;
5504                body.called_on_null = Some(FunctionCalledOnNull::CalledOnNullInput);
5505            } else if self.parse_keywords(&[
5506                Keyword::RETURNS,
5507                Keyword::NULL,
5508                Keyword::ON,
5509                Keyword::NULL,
5510                Keyword::INPUT,
5511            ]) {
5512                ensure_not_set(
5513                    &body.called_on_null,
5514                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
5515                )?;
5516                body.called_on_null = Some(FunctionCalledOnNull::ReturnsNullOnNullInput);
5517            } else if self.parse_keyword(Keyword::STRICT) {
5518                ensure_not_set(
5519                    &body.called_on_null,
5520                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
5521                )?;
5522                body.called_on_null = Some(FunctionCalledOnNull::Strict);
5523            } else if self.parse_keyword(Keyword::PARALLEL) {
5524                ensure_not_set(&body.parallel, "PARALLEL { UNSAFE | RESTRICTED | SAFE }")?;
5525                if self.parse_keyword(Keyword::UNSAFE) {
5526                    body.parallel = Some(FunctionParallel::Unsafe);
5527                } else if self.parse_keyword(Keyword::RESTRICTED) {
5528                    body.parallel = Some(FunctionParallel::Restricted);
5529                } else if self.parse_keyword(Keyword::SAFE) {
5530                    body.parallel = Some(FunctionParallel::Safe);
5531                } else {
5532                    return self.expected("one of UNSAFE | RESTRICTED | SAFE", self.peek_token());
5533                }
5534            } else if self.parse_keyword(Keyword::RETURN) {
5535                ensure_not_set(&body.function_body, "RETURN")?;
5536                body.function_body = Some(CreateFunctionBody::Return(self.parse_expr()?));
5537            } else {
5538                break;
5539            }
5540        }
5541
5542        Ok(Statement::CreateFunction(CreateFunction {
5543            or_alter: false,
5544            or_replace,
5545            temporary,
5546            if_not_exists: false,
5547            aggregate: false,
5548            table_function: false,
5549            name,
5550            args: Some(args),
5551            return_type,
5552            behavior: body.behavior,
5553            called_on_null: body.called_on_null,
5554            parallel: body.parallel,
5555            language: body.language,
5556            function_body: body.function_body,
5557            using: None,
5558            determinism_specifier: None,
5559            options: None,
5560            remote_connection: None,
5561            sql_security: None,
5562        }))
5563    }
5564
5565    /// Parse `CREATE FUNCTION` for [Hive]
5566    ///
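    /// A sketch of the syntax accepted below (`'class.Name'` and `'uri'` are
    /// placeholders):
    ///
    /// ```sql
    /// CREATE [ TEMPORARY ] FUNCTION name AS 'class.Name'
    ///     [ USING { JAR | FILE | ARCHIVE } 'uri' ]
    /// ```
    ///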
5567    /// [Hive]: https://cwiki.apache.org/confluence/display/hive/languagemanual+ddl#LanguageManualDDL-Create/Drop/ReloadFunction
5568    fn parse_hive_create_function(
5569        &mut self,
5570        or_replace: bool,
5571        temporary: bool,
5572    ) -> Result<Statement, ParserError> {
5573        let name = self.parse_object_name(false)?;
5574        self.expect_keyword_is(Keyword::AS)?;
5575
5576        let as_ = self.parse_create_function_body_string()?;
5577        let using = self.parse_optional_create_function_using()?;
5578
5579        Ok(Statement::CreateFunction(CreateFunction {
5580            or_alter: false,
5581            or_replace,
5582            temporary,
5583            if_not_exists: false,
5584            aggregate: false,
5585            table_function: false,
5586            name,
5587            function_body: Some(CreateFunctionBody::AsBeforeOptions(as_)),
5588            using,
5589            args: None,
5590            return_type: None,
5591            behavior: None,
5592            called_on_null: None,
5593            parallel: None,
5594            language: None,
5595            determinism_specifier: None,
5596            options: None,
5597            remote_connection: None,
5598            sql_security: None,
5599        }))
5600    }
5601
5602    /// Parse `CREATE FUNCTION` for [BigQuery]
5603    ///
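    /// A sketch of the clauses accepted below (names are placeholders; the
    /// `OPTIONS` list may also follow the `AS` body):
    ///
    /// ```sql
    /// CREATE [ OR REPLACE ] [ TEMPORARY ] [ AGGREGATE ] FUNCTION
    ///     [ IF NOT EXISTS ] name ( arguments )
    ///     [ RETURNS data_type ]
    ///     [ DETERMINISTIC | NOT DETERMINISTIC ]
    ///     [ LANGUAGE lang ]
    ///     [ REMOTE WITH CONNECTION connection_name ]
    ///     [ SQL SECURITY { DEFINER | INVOKER } ]
    ///     [ OPTIONS ( ... ) ]
    ///     [ AS expression ]   -- required unless REMOTE WITH CONNECTION is given
    /// ```
    ///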
5604    /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_function_statement
5605    fn parse_bigquery_create_function(
5606        &mut self,
5607        or_replace: bool,
5608        temporary: bool,
5609        aggregate: bool,
5610    ) -> Result<Statement, ParserError> {
5611        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5612        let (name, args) = self.parse_create_function_name_and_params()?;
5613
5614        let return_type = if self.parse_keyword(Keyword::RETURNS) {
5615            Some(self.parse_data_type()?)
5616        } else {
5617            None
5618        };
5619
5620        let determinism_specifier = if self.parse_keyword(Keyword::DETERMINISTIC) {
5621            Some(FunctionDeterminismSpecifier::Deterministic)
5622        } else if self.parse_keywords(&[Keyword::NOT, Keyword::DETERMINISTIC]) {
5623            Some(FunctionDeterminismSpecifier::NotDeterministic)
5624        } else {
5625            None
5626        };
5627
5628        let language = if self.parse_keyword(Keyword::LANGUAGE) {
5629            Some(self.parse_identifier()?)
5630        } else {
5631            None
5632        };
5633
5634        let remote_connection =
5635            if self.parse_keywords(&[Keyword::REMOTE, Keyword::WITH, Keyword::CONNECTION]) {
5636                Some(self.parse_object_name(false)?)
5637            } else {
5638                None
5639            };
5640
5641        let sql_security = if self.parse_keywords(&[Keyword::SQL, Keyword::SECURITY]) {
5642            if self.parse_keyword(Keyword::DEFINER) {
5643                Some(SqlSecurity::Definer)
5644            } else if self.parse_keyword(Keyword::INVOKER) {
5645                Some(SqlSecurity::Invoker)
5646            } else {
5647                return self.expected("DEFINER or INVOKER", self.peek_token());
5648            }
5649        } else {
5650            None
5651        };
5652
5653        // `OPTIONS` may come before or after the function body, but
5654        // may be specified at most once.
5655        let mut options = self.maybe_parse_options(Keyword::OPTIONS)?;
5656
5657        let function_body = if remote_connection.is_none() {
5658            self.expect_keyword_is(Keyword::AS)?;
5659            let expr = self.parse_expr()?;
5660            if options.is_none() {
5661                options = self.maybe_parse_options(Keyword::OPTIONS)?;
5662                Some(CreateFunctionBody::AsBeforeOptions(expr))
5663            } else {
5664                Some(CreateFunctionBody::AsAfterOptions(expr))
5665            }
5666        } else {
5667            None
5668        };
5669
5670        Ok(Statement::CreateFunction(CreateFunction {
5671            or_alter: false,
5672            or_replace,
5673            temporary,
5674            if_not_exists,
5675            aggregate,
5676            table_function: false,
5677            name,
5678            args: Some(args),
5679            return_type,
5680            function_body,
5681            language,
5682            determinism_specifier,
5683            options,
5684            remote_connection,
5685            using: None,
5686            behavior: None,
5687            called_on_null: None,
5688            parallel: None,
5689            sql_security,
5690        }))
5691    }
5692
5693    /// Parse `CREATE FUNCTION` for [MsSql]
5694    ///
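    /// A sketch of the forms accepted below (names are placeholders):
    ///
    /// ```sql
    /// CREATE [ OR ALTER ] FUNCTION name ( @param data_type [, ...] )
    ///     RETURNS { data_type | @return_variable TABLE ( column_definitions ) }
    ///     [ AS ]
    ///     { BEGIN statements END
    ///     | RETURN ( subquery )
    ///     | RETURN SELECT ... }
    /// ```
    ///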
5695    /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/statements/create-function-transact-sql
5696    fn parse_mssql_create_function(
5697        &mut self,
5698        or_alter: bool,
5699        or_replace: bool,
5700        temporary: bool,
5701    ) -> Result<Statement, ParserError> {
5702        let (name, args) = self.parse_create_function_name_and_params()?;
5703
5704        self.expect_keyword(Keyword::RETURNS)?;
5705
5706        let return_table = self.maybe_parse(|p| {
5707            let return_table_name = p.parse_identifier()?;
5708
5709            p.expect_keyword_is(Keyword::TABLE)?;
5710            p.prev_token();
5711
5712            let table_column_defs = match p.parse_data_type()? {
5713                DataType::Table(Some(table_column_defs)) if !table_column_defs.is_empty() => {
5714                    table_column_defs
5715                }
5716                _ => parser_err!(
5717                    "Expected table column definitions after TABLE keyword",
5718                    p.peek_token().span.start
5719                )?,
5720            };
5721
5722            Ok(DataType::NamedTable {
5723                name: ObjectName(vec![ObjectNamePart::Identifier(return_table_name)]),
5724                columns: table_column_defs,
5725            })
5726        })?;
5727
5728        let return_type = if return_table.is_some() {
5729            return_table
5730        } else {
5731            Some(self.parse_data_type()?)
5732        };
5733
5734        let _ = self.parse_keyword(Keyword::AS);
5735
5736        let function_body = if self.peek_keyword(Keyword::BEGIN) {
5737            let begin_token = self.expect_keyword(Keyword::BEGIN)?;
5738            let statements = self.parse_statement_list(&[Keyword::END])?;
5739            let end_token = self.expect_keyword(Keyword::END)?;
5740
5741            Some(CreateFunctionBody::AsBeginEnd(BeginEndStatements {
5742                begin_token: AttachedToken(begin_token),
5743                statements,
5744                end_token: AttachedToken(end_token),
5745            }))
5746        } else if self.parse_keyword(Keyword::RETURN) {
5747            if self.peek_token() == Token::LParen {
5748                Some(CreateFunctionBody::AsReturnExpr(self.parse_expr()?))
5749            } else if self.peek_keyword(Keyword::SELECT) {
5750                let select = self.parse_select()?;
5751                Some(CreateFunctionBody::AsReturnSelect(select))
5752            } else {
5753                parser_err!(
5754                    "Expected a subquery (or bare SELECT statement) after RETURN",
5755                    self.peek_token().span.start
5756                )?
5757            }
5758        } else {
5759            parser_err!("Unparsable function body", self.peek_token().span.start)?
5760        };
5761
5762        Ok(Statement::CreateFunction(CreateFunction {
5763            or_alter,
5764            or_replace,
5765            temporary,
5766            if_not_exists: false,
5767            aggregate: false,
5768            table_function: false,
5769            name,
5770            args: Some(args),
5771            return_type,
5772            function_body,
5773            language: None,
5774            determinism_specifier: None,
5775            options: None,
5776            remote_connection: None,
5777            using: None,
5778            behavior: None,
5779            called_on_null: None,
5780            parallel: None,
5781            sql_security: None,
5782        }))
5783    }
5784
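    /// Parse the function name and the parenthesized parameter list shared by
    /// several `CREATE FUNCTION` variants. A sketch of a single parameter (the
    /// surrounding parentheses are required, parameters are optional):
    ///
    /// ```sql
    /// param_name data_type [ { = | DEFAULT } default_expression ]
    /// ```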
5785    fn parse_create_function_name_and_params(
5786        &mut self,
5787    ) -> Result<(ObjectName, Vec<OperateFunctionArg>), ParserError> {
5788        let name = self.parse_object_name(false)?;
5789        let parse_function_param =
5790            |parser: &mut Parser| -> Result<OperateFunctionArg, ParserError> {
5791                let name = parser.parse_identifier()?;
5792                let data_type = parser.parse_data_type()?;
5793                let default_expr = if parser.consume_token(&Token::Eq)
5794                    || parser.parse_keyword(Keyword::DEFAULT)
5795                {
5796                    Some(parser.parse_expr()?)
5797                } else {
5798                    None
5799                };
5800
5801                Ok(OperateFunctionArg {
5802                    mode: None,
5803                    name: Some(name),
5804                    data_type,
5805                    default_expr,
5806                })
5807            };
5808        self.expect_token(&Token::LParen)?;
5809        let args = self.parse_comma_separated0(parse_function_param, Token::RParen)?;
5810        self.expect_token(&Token::RParen)?;
5811        Ok((name, args))
5812    }
5813
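    /// Parse a single argument of a Postgres-style `CREATE FUNCTION`
    /// signature. A sketch of the accepted form:
    ///
    /// ```sql
    /// [ IN | OUT | INOUT ] [ argument_name ] data_type [ { DEFAULT | = } expression ]
    /// ```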
5814    fn parse_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
5815        let mode = if self.parse_keyword(Keyword::IN) {
5816            Some(ArgMode::In)
5817        } else if self.parse_keyword(Keyword::OUT) {
5818            Some(ArgMode::Out)
5819        } else if self.parse_keyword(Keyword::INOUT) {
5820            Some(ArgMode::InOut)
5821        } else {
5822            None
5823        };
5824
5825        // parse: [ argname ] argtype
5826        let mut name = None;
5827        let mut data_type = self.parse_data_type()?;
5828
5829        // To check whether the first token is a name or a type, we try to
5830        // parse a second data type: if that succeeds, the first token was the
5831        // argument name and the second parse gives its actual type.
5832        let data_type_idx = self.get_current_index();
5833        if let Some(next_data_type) = self.maybe_parse(|parser| parser.parse_data_type())? {
5834            let token = self.token_at(data_type_idx);
5835
5836            // We ensure that the token is a `Word` token, and not other special tokens.
5837            if !matches!(token.token, Token::Word(_)) {
5838                return self.expected("a name or type", token.clone());
5839            }
5840
5841            name = Some(Ident::new(token.to_string()));
5842            data_type = next_data_type;
5843        }
5844
5845        let default_expr = if self.parse_keyword(Keyword::DEFAULT) || self.consume_token(&Token::Eq)
5846        {
5847            Some(self.parse_expr()?)
5848        } else {
5849            None
5850        };
5851        Ok(OperateFunctionArg {
5852            mode,
5853            name,
5854            data_type,
5855            default_expr,
5856        })
5857    }
5858
5859    /// Parse statements of the DropTrigger type such as:
5860    ///
5861    /// ```sql
5862    /// DROP TRIGGER [ IF EXISTS ] name ON table_name [ CASCADE | RESTRICT ]
5863    /// ```
5864    pub fn parse_drop_trigger(&mut self) -> Result<Statement, ParserError> {
5865        if !dialect_of!(self is PostgreSqlDialect | GenericDialect | MySqlDialect | MsSqlDialect) {
5866            self.prev_token();
5867            return self.expected("an object type after DROP", self.peek_token());
5868        }
5869        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5870        let trigger_name = self.parse_object_name(false)?;
5871        let table_name = if self.parse_keyword(Keyword::ON) {
5872            Some(self.parse_object_name(false)?)
5873        } else {
5874            None
5875        };
5876        let option = self
5877            .parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT])
5878            .map(|keyword| match keyword {
5879                Keyword::CASCADE => ReferentialAction::Cascade,
5880                Keyword::RESTRICT => ReferentialAction::Restrict,
5881                _ => unreachable!(),
5882            });
5883        Ok(Statement::DropTrigger(DropTrigger {
5884            if_exists,
5885            trigger_name,
5886            table_name,
5887            option,
5888        }))
5889    }
5890
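    /// Parse a `CREATE TRIGGER` statement; the leading `CREATE [ OR ALTER ]
    /// [ OR REPLACE ] [ CONSTRAINT ] TRIGGER` keywords are consumed by the
    /// caller. A rough sketch of the overall syntax accepted below (names are
    /// placeholders; dialects differ in which clauses they actually allow):
    ///
    /// ```sql
    /// CREATE TRIGGER name
    ///     { BEFORE | AFTER | INSTEAD OF | FOR }
    ///     { INSERT | UPDATE [ OF column [, ...] ] | DELETE | TRUNCATE } [ OR ... ]
    ///     ON table_name
    ///     [ FROM referenced_table_name ]
    ///     [ REFERENCING { OLD | NEW } TABLE [ AS ] transition_name [ ... ] ]
    ///     FOR [ EACH ] { ROW | STATEMENT }
    ///     [ WHEN ( condition ) ]
    ///     { EXECUTE { FUNCTION | PROCEDURE } function_name ( arguments )
    ///     | trigger_body_statements }
    /// ```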
5891    pub fn parse_create_trigger(
5892        &mut self,
5893        or_alter: bool,
5894        or_replace: bool,
5895        is_constraint: bool,
5896    ) -> Result<Statement, ParserError> {
5897        if !dialect_of!(self is PostgreSqlDialect | GenericDialect | MySqlDialect | MsSqlDialect) {
5898            self.prev_token();
5899            return self.expected("an object type after CREATE", self.peek_token());
5900        }
5901
5902        let name = self.parse_object_name(false)?;
5903        let period = self.parse_trigger_period()?;
5904
5905        let events = self.parse_keyword_separated(Keyword::OR, Parser::parse_trigger_event)?;
5906        self.expect_keyword_is(Keyword::ON)?;
5907        let table_name = self.parse_object_name(false)?;
5908
5909        let referenced_table_name = if self.parse_keyword(Keyword::FROM) {
5910            self.parse_object_name(true).ok()
5911        } else {
5912            None
5913        };
5914
5915        let characteristics = self.parse_constraint_characteristics()?;
5916
5917        let mut referencing = vec![];
5918        if self.parse_keyword(Keyword::REFERENCING) {
5919            while let Some(refer) = self.parse_trigger_referencing()? {
5920                referencing.push(refer);
5921            }
5922        }
5923
5924        self.expect_keyword_is(Keyword::FOR)?;
5925        let include_each = self.parse_keyword(Keyword::EACH);
5926        let trigger_object =
5927            match self.expect_one_of_keywords(&[Keyword::ROW, Keyword::STATEMENT])? {
5928                Keyword::ROW => TriggerObject::Row,
5929                Keyword::STATEMENT => TriggerObject::Statement,
5930                _ => unreachable!(),
5931            };
5932
5933        let condition = self
5934            .parse_keyword(Keyword::WHEN)
5935            .then(|| self.parse_expr())
5936            .transpose()?;
5937
5938        let mut exec_body = None;
5939        let mut statements = None;
5940        if self.parse_keyword(Keyword::EXECUTE) {
5941            exec_body = Some(self.parse_trigger_exec_body()?);
5942        } else {
5943            statements = Some(self.parse_conditional_statements(&[Keyword::END])?);
5944        }
5945
5946        Ok(Statement::CreateTrigger(CreateTrigger {
5947            or_alter,
5948            or_replace,
5949            is_constraint,
5950            name,
5951            period,
5952            period_before_table: true,
5953            events,
5954            table_name,
5955            referenced_table_name,
5956            referencing,
5957            trigger_object,
5958            include_each,
5959            condition,
5960            exec_body,
5961            statements_as: false,
5962            statements,
5963            characteristics,
5964        }))
5965    }
5966
5967    pub fn parse_trigger_period(&mut self) -> Result<TriggerPeriod, ParserError> {
5968        Ok(
5969            match self.expect_one_of_keywords(&[
5970                Keyword::FOR,
5971                Keyword::BEFORE,
5972                Keyword::AFTER,
5973                Keyword::INSTEAD,
5974            ])? {
5975                Keyword::FOR => TriggerPeriod::For,
5976                Keyword::BEFORE => TriggerPeriod::Before,
5977                Keyword::AFTER => TriggerPeriod::After,
5978                Keyword::INSTEAD => self
5979                    .expect_keyword_is(Keyword::OF)
5980                    .map(|_| TriggerPeriod::InsteadOf)?,
5981                _ => unreachable!(),
5982            },
5983        )
5984    }
5985
5986    pub fn parse_trigger_event(&mut self) -> Result<TriggerEvent, ParserError> {
5987        Ok(
5988            match self.expect_one_of_keywords(&[
5989                Keyword::INSERT,
5990                Keyword::UPDATE,
5991                Keyword::DELETE,
5992                Keyword::TRUNCATE,
5993            ])? {
5994                Keyword::INSERT => TriggerEvent::Insert,
5995                Keyword::UPDATE => {
5996                    if self.parse_keyword(Keyword::OF) {
5997                        let cols = self.parse_comma_separated(Parser::parse_identifier)?;
5998                        TriggerEvent::Update(cols)
5999                    } else {
6000                        TriggerEvent::Update(vec![])
6001                    }
6002                }
6003                Keyword::DELETE => TriggerEvent::Delete,
6004                Keyword::TRUNCATE => TriggerEvent::Truncate,
6005                _ => unreachable!(),
6006            },
6007        )
6008    }
6009
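    /// Parse a single transition-relation item of a trigger `REFERENCING`
    /// clause, returning `None` when no `OLD`/`NEW` item follows:
    ///
    /// ```sql
    /// { OLD | NEW } TABLE [ AS ] transition_relation_name
    /// ```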
6010    pub fn parse_trigger_referencing(&mut self) -> Result<Option<TriggerReferencing>, ParserError> {
6011        let refer_type = match self.parse_one_of_keywords(&[Keyword::OLD, Keyword::NEW]) {
6012            Some(Keyword::OLD) if self.parse_keyword(Keyword::TABLE) => {
6013                TriggerReferencingType::OldTable
6014            }
6015            Some(Keyword::NEW) if self.parse_keyword(Keyword::TABLE) => {
6016                TriggerReferencingType::NewTable
6017            }
6018            _ => {
6019                return Ok(None);
6020            }
6021        };
6022
6023        let is_as = self.parse_keyword(Keyword::AS);
6024        let transition_relation_name = self.parse_object_name(false)?;
6025        Ok(Some(TriggerReferencing {
6026            refer_type,
6027            is_as,
6028            transition_relation_name,
6029        }))
6030    }
6031
6032    pub fn parse_trigger_exec_body(&mut self) -> Result<TriggerExecBody, ParserError> {
6033        Ok(TriggerExecBody {
6034            exec_type: match self
6035                .expect_one_of_keywords(&[Keyword::FUNCTION, Keyword::PROCEDURE])?
6036            {
6037                Keyword::FUNCTION => TriggerExecBodyType::Function,
6038                Keyword::PROCEDURE => TriggerExecBodyType::Procedure,
6039                _ => unreachable!(),
6040            },
6041            func_desc: self.parse_function_desc()?,
6042        })
6043    }
6044
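    /// Parse the body of a DuckDB `CREATE MACRO` / `CREATE FUNCTION`
    /// statement; the leading keywords are consumed by the caller. A sketch of
    /// the overall syntax (names are placeholders):
    ///
    /// ```sql
    /// CREATE [ OR REPLACE ] [ TEMPORARY ] MACRO name ( [ parameter [ := default ] [, ...] ] )
    ///     AS { TABLE query | expression }
    /// ```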
6045    pub fn parse_create_macro(
6046        &mut self,
6047        or_replace: bool,
6048        temporary: bool,
6049    ) -> Result<Statement, ParserError> {
6050        if dialect_of!(self is DuckDbDialect | GenericDialect) {
6051            let name = self.parse_object_name(false)?;
6052            self.expect_token(&Token::LParen)?;
6053            let args = if self.consume_token(&Token::RParen) {
6054                self.prev_token();
6055                None
6056            } else {
6057                Some(self.parse_comma_separated(Parser::parse_macro_arg)?)
6058            };
6059
6060            self.expect_token(&Token::RParen)?;
6061            self.expect_keyword_is(Keyword::AS)?;
6062
6063            Ok(Statement::CreateMacro {
6064                or_replace,
6065                temporary,
6066                name,
6067                args,
6068                definition: if self.parse_keyword(Keyword::TABLE) {
6069                    MacroDefinition::Table(self.parse_query()?)
6070                } else {
6071                    MacroDefinition::Expr(self.parse_expr()?)
6072                },
6073            })
6074        } else {
6075            self.prev_token();
6076            self.expected("an object type after CREATE", self.peek_token())
6077        }
6078    }
6079
6080    fn parse_macro_arg(&mut self) -> Result<MacroArg, ParserError> {
6081        let name = self.parse_identifier()?;
6082
6083        let default_expr =
6084            if self.consume_token(&Token::Assignment) || self.consume_token(&Token::RArrow) {
6085                Some(self.parse_expr()?)
6086            } else {
6087                None
6088            };
6089        Ok(MacroArg { name, default_expr })
6090    }
6091
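    /// Parse a `CREATE EXTERNAL TABLE` statement; `CREATE EXTERNAL` is
    /// consumed by the caller. A rough sketch of the clauses recognized below,
    /// where BigQuery-style `OPTIONS(...)` and Hive-style storage clauses are
    /// alternatives:
    ///
    /// ```sql
    /// CREATE EXTERNAL TABLE [ IF NOT EXISTS ] table_name ( columns_and_constraints )
    ///     [ WITH PARTITION COLUMNS ( column_definitions ) ]
    ///     [ WITH CONNECTION connection_name ]
    ///     { OPTIONS ( ... )
    ///     | [ hive_distribution_and_format_clauses ] [ TBLPROPERTIES ( ... ) ] }
    /// ```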
6092    pub fn parse_create_external_table(
6093        &mut self,
6094        or_replace: bool,
6095    ) -> Result<Statement, ParserError> {
6096        self.expect_keyword_is(Keyword::TABLE)?;
6097        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6098        let table_name = self.parse_object_name(false)?;
6099        let (columns, constraints) = self.parse_columns()?;
6100
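        // The clauses below are parsed so that the syntax is accepted, but
        // (note the leading underscores) their values are not carried through
        // to the returned statement.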
6101        let mut _partition_columns: Vec<ColumnDef> = vec![];
6102        if self.parse_keywords(&[Keyword::WITH, Keyword::PARTITION, Keyword::COLUMNS]) {
6103            if self.consume_token(&Token::LParen) {
6104                _partition_columns = self.parse_comma_separated(|p| p.parse_column_def())?;
6105                self.expect_token(&Token::RParen)?;
6106            }
6107        }
6108
6109        let mut _connection: Option<ObjectName> = None;
6110        if self.parse_keywords(&[Keyword::WITH, Keyword::CONNECTION]) {
6111            _connection = Some(self.parse_object_name(false)?);
6112        }
6113
6114        let bigquery_options = self.parse_options(Keyword::OPTIONS)?;
6115        if !bigquery_options.is_empty() {
6116            return Ok(CreateTableBuilder::new(table_name)
6117                .columns(columns)
6118                .constraints(constraints)
6119                .or_replace(or_replace)
6120                .if_not_exists(if_not_exists)
6121                .external(true)
6122                .table_options(CreateTableOptions::Options(bigquery_options))
6123                .build());
6124        }
6125
6126        let hive_distribution = self.parse_hive_distribution()?;
6127        let hive_formats = self.parse_hive_formats()?;
6128
6129        let file_format = if let Some(ff) = &hive_formats.storage {
6130            match ff {
6131                HiveIOFormat::FileFormat { format } => Some(*format),
6132                _ => None,
6133            }
6134        } else {
6135            None
6136        };
6137        let location = hive_formats.location.clone();
6138        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
6139        let table_options = if !table_properties.is_empty() {
6140            CreateTableOptions::TableProperties(table_properties)
6141        } else {
6142            CreateTableOptions::None
6143        };
6144        Ok(CreateTableBuilder::new(table_name)
6145            .columns(columns)
6146            .constraints(constraints)
6147            .hive_distribution(hive_distribution)
6148            .hive_formats(Some(hive_formats))
6149            .table_options(table_options)
6150            .or_replace(or_replace)
6151            .if_not_exists(if_not_exists)
6152            .external(true)
6153            .file_format(file_format)
6154            .location(location)
6155            .build())
6156    }
6157
6158    pub fn parse_file_format(&mut self) -> Result<FileFormat, ParserError> {
6159        let next_token = self.next_token();
6160        match &next_token.token {
6161            Token::Word(w) => match w.keyword {
6162                Keyword::AVRO => Ok(FileFormat::AVRO),
6163                Keyword::JSONFILE => Ok(FileFormat::JSONFILE),
6164                Keyword::ORC => Ok(FileFormat::ORC),
6165                Keyword::PARQUET => Ok(FileFormat::PARQUET),
6166                Keyword::RCFILE => Ok(FileFormat::RCFILE),
6167                Keyword::SEQUENCEFILE => Ok(FileFormat::SEQUENCEFILE),
6168                Keyword::TEXTFILE => Ok(FileFormat::TEXTFILE),
6169                _ => self.expected("fileformat", next_token),
6170            },
6171            _ => self.expected("fileformat", next_token),
6172        }
6173    }
6174
6175    fn parse_analyze_format_kind(&mut self) -> Result<AnalyzeFormatKind, ParserError> {
6176        if self.consume_token(&Token::Eq) {
6177            Ok(AnalyzeFormatKind::Assignment(self.parse_analyze_format()?))
6178        } else {
6179            Ok(AnalyzeFormatKind::Keyword(self.parse_analyze_format()?))
6180        }
6181    }
6182
6183    pub fn parse_analyze_format(&mut self) -> Result<AnalyzeFormat, ParserError> {
6184        let next_token = self.next_token();
6185        match &next_token.token {
6186            Token::Word(w) => match w.keyword {
6187                Keyword::TEXT => Ok(AnalyzeFormat::TEXT),
6188                Keyword::GRAPHVIZ => Ok(AnalyzeFormat::GRAPHVIZ),
6189                Keyword::JSON => Ok(AnalyzeFormat::JSON),
6190                _ => self.expected("fileformat", next_token),
6191            },
6192            _ => self.expected("fileformat", next_token),
6193        }
6194    }
6195
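    /// Parse a `CREATE VIEW` statement; the leading `CREATE [ OR REPLACE ]
    /// [ TEMPORARY ]` keywords and MySQL view parameters are consumed by the
    /// caller. A rough sketch of the clauses recognized below, most of which
    /// are dialect specific:
    ///
    /// ```sql
    /// CREATE [ SECURE ] [ MATERIALIZED ] VIEW [ IF NOT EXISTS ] name
    ///     [ ( column_list ) ]
    ///     [ WITH ( options ) ]
    ///     [ PARTITION BY expression ]              -- BigQuery
    ///     [ CLUSTER BY ( column_list ) ]
    ///     [ OPTIONS ( ... ) ]                      -- BigQuery
    ///     [ SQL SECURITY { DEFINER | INVOKER } ]   -- BigQuery
    ///     [ TO table_name ]                        -- ClickHouse
    ///     [ COMMENT = 'text' ]                     -- Snowflake
    ///     AS query
    ///     [ WITH NO SCHEMA BINDING ]               -- Redshift
    /// ```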
6196    pub fn parse_create_view(
6197        &mut self,
6198        or_alter: bool,
6199        or_replace: bool,
6200        temporary: bool,
6201        create_view_params: Option<CreateViewParams>,
6202    ) -> Result<Statement, ParserError> {
6203        let secure = self.parse_keyword(Keyword::SECURE);
6204        let materialized = self.parse_keyword(Keyword::MATERIALIZED);
6205        self.expect_keyword_is(Keyword::VIEW)?;
6206
6207        let is_replica = materialized && self.parse_keyword(Keyword::REPLICA);
6208
6209        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
6210        // Try to parse IF NOT EXISTS either before or after the view name.
6211        // Putting the name before IF NOT EXISTS is supported by Snowflake but undocumented.
6212        let if_not_exists_first =
6213            self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6214        let name = self.parse_object_name(allow_unquoted_hyphen)?;
6215
6216        if is_replica {
6217            self.expect_keywords(&[Keyword::AS, Keyword::REPLICA, Keyword::OF])?;
6218            let source = self.parse_object_name(allow_unquoted_hyphen)?;
6219            return Ok(Statement::CreateMaterializedViewReplica {
6220                name,
6221                source,
6222                if_not_exists: if_not_exists_first,
6223            });
6224        }
6225        let name_before_not_exists = !if_not_exists_first
6226            && self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6227        let if_not_exists = if_not_exists_first || name_before_not_exists;
6228        // Many dialects support `OR ALTER` right after `CREATE`, but we don't (yet).
6229        // ANSI SQL and Postgres support RECURSIVE here, but we don't support it either.
6230        let columns = self.parse_view_columns()?;
6231        let mut options = CreateTableOptions::None;
6232        let with_options = self.parse_options(Keyword::WITH)?;
6233        if !with_options.is_empty() {
6234            options = CreateTableOptions::With(with_options);
6235        }
6236
6237        let partition_by = if dialect_of!(self is BigQueryDialect | GenericDialect) && self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
6238            Some(Box::new(self.parse_expr()?))
6239        } else {
6240            None
6241        };
6242
6243        let cluster_by = if self.parse_keyword(Keyword::CLUSTER) {
6244            self.expect_keyword_is(Keyword::BY)?;
6245            if dialect_of!(self is BigQueryDialect) {
6246                self.parse_comma_separated(|p| p.parse_identifier())?
6247            } else {
6248                self.parse_parenthesized_column_list(Optional, false)?
6249            }
6250        } else {
6251            vec![]
6252        };
6253
6254        if dialect_of!(self is BigQueryDialect | GenericDialect) {
6255            if let Some(opts) = self.maybe_parse_options(Keyword::OPTIONS)? {
6256                if !opts.is_empty() {
6257                    options = CreateTableOptions::Options(opts);
6258                }
6259            };
6260        }
6261
6262        let sql_security = if dialect_of!(self is BigQueryDialect | GenericDialect)
6263            && self.parse_keywords(&[Keyword::SQL, Keyword::SECURITY])
6264        {
6265            if self.parse_keyword(Keyword::DEFINER) {
6266                Some(SqlSecurity::Definer)
6267            } else if self.parse_keyword(Keyword::INVOKER) {
6268                Some(SqlSecurity::Invoker)
6269            } else {
6270                return self.expected("DEFINER or INVOKER", self.peek_token());
6271            }
6272        } else {
6273            None
6274        };
6275
6276        let to = if dialect_of!(self is ClickHouseDialect | GenericDialect)
6277            && self.parse_keyword(Keyword::TO)
6278        {
6279            Some(self.parse_object_name(false)?)
6280        } else {
6281            None
6282        };
6283
6284        let comment = if dialect_of!(self is SnowflakeDialect | GenericDialect)
6285            && self.parse_keyword(Keyword::COMMENT)
6286        {
6287            self.expect_token(&Token::Eq)?;
6288            Some(self.parse_comment_value()?)
6289        } else {
6290            None
6291        };
6292
6293        self.expect_keyword_is(Keyword::AS)?;
6294        let query = self.parse_query()?;
6295        // Optional `WITH [ CASCADED | LOCAL ] CHECK OPTION` is widely supported here.
6296
6297        let with_no_schema_binding = dialect_of!(self is RedshiftSqlDialect | GenericDialect)
6298            && self.parse_keywords(&[
6299                Keyword::WITH,
6300                Keyword::NO,
6301                Keyword::SCHEMA,
6302                Keyword::BINDING,
6303            ]);
6304
6305        Ok(Statement::CreateView {
6306            or_alter,
6307            name,
6308            columns,
6309            query,
6310            materialized,
6311            secure,
6312            or_replace,
6313            options,
6314            partition_by,
6315            cluster_by,
6316            comment,
6317            with_no_schema_binding,
6318            if_not_exists,
6319            temporary,
6320            to,
6321            params: create_view_params,
6322            name_before_not_exists,
6323            sql_security,
6324        })
6325    }
6326
6327    /// Parse optional parameters for the `CREATE VIEW` statement supported by [MySQL].
6328    ///
6329    /// [MySQL]: https://dev.mysql.com/doc/refman/9.1/en/create-view.html
6330    fn parse_create_view_params(&mut self) -> Result<Option<CreateViewParams>, ParserError> {
6331        let algorithm = if self.parse_keyword(Keyword::ALGORITHM) {
6332            self.expect_token(&Token::Eq)?;
6333            Some(
6334                match self.expect_one_of_keywords(&[
6335                    Keyword::UNDEFINED,
6336                    Keyword::MERGE,
6337                    Keyword::TEMPTABLE,
6338                ])? {
6339                    Keyword::UNDEFINED => CreateViewAlgorithm::Undefined,
6340                    Keyword::MERGE => CreateViewAlgorithm::Merge,
6341                    Keyword::TEMPTABLE => CreateViewAlgorithm::TempTable,
6342                    _ => {
6343                        self.prev_token();
6344                        let found = self.next_token();
6345                        return self
6346                            .expected("UNDEFINED or MERGE or TEMPTABLE after ALGORITHM =", found);
6347                    }
6348                },
6349            )
6350        } else {
6351            None
6352        };
6353        let definer = if self.parse_keyword(Keyword::DEFINER) {
6354            self.expect_token(&Token::Eq)?;
6355            Some(self.parse_grantee_name()?)
6356        } else {
6357            None
6358        };
6359        let security = if self.parse_keywords(&[Keyword::SQL, Keyword::SECURITY]) {
6360            Some(
6361                match self.expect_one_of_keywords(&[Keyword::DEFINER, Keyword::INVOKER])? {
6362                    Keyword::DEFINER => CreateViewSecurity::Definer,
6363                    Keyword::INVOKER => CreateViewSecurity::Invoker,
6364                    _ => {
6365                        self.prev_token();
6366                        let found = self.next_token();
6367                        return self.expected("DEFINER or INVOKER after SQL SECURITY", found);
6368                    }
6369                },
6370            )
6371        } else {
6372            None
6373        };
6374        if algorithm.is_some() || definer.is_some() || security.is_some() {
6375            Ok(Some(CreateViewParams {
6376                algorithm,
6377                definer,
6378                security,
6379            }))
6380        } else {
6381            Ok(None)
6382        }
6383    }
6384
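    /// Parse a `CREATE ROLE` statement. A rough sketch of the options
    /// recognized below; the accepted set depends on the dialect (MSSQL
    /// accepts only `AUTHORIZATION`, Postgres the rest):
    ///
    /// ```sql
    /// CREATE ROLE [ IF NOT EXISTS ] name [, ...]
    ///     [ WITH ]
    ///     [ AUTHORIZATION owner_name ]
    ///     [ LOGIN | NOLOGIN ] [ INHERIT | NOINHERIT ] [ BYPASSRLS | NOBYPASSRLS ]
    ///     [ CREATEDB | NOCREATEDB ] [ CREATEROLE | NOCREATEROLE ]
    ///     [ SUPERUSER | NOSUPERUSER ] [ REPLICATION | NOREPLICATION ]
    ///     [ PASSWORD { 'password' | NULL } ]
    ///     [ CONNECTION LIMIT n ] [ VALID UNTIL 'timestamp' ]
    ///     [ IN ROLE role [, ...] ] [ IN GROUP group [, ...] ]
    ///     [ ROLE role [, ...] ] [ USER user [, ...] ] [ ADMIN role [, ...] ]
    /// ```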
6385    pub fn parse_create_role(&mut self) -> Result<Statement, ParserError> {
6386        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6387        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
6388
6389        let _ = self.parse_keyword(Keyword::WITH); // [ WITH ]
6390
6391        let optional_keywords = if dialect_of!(self is MsSqlDialect) {
6392            vec![Keyword::AUTHORIZATION]
6393        } else if dialect_of!(self is PostgreSqlDialect) {
6394            vec![
6395                Keyword::LOGIN,
6396                Keyword::NOLOGIN,
6397                Keyword::INHERIT,
6398                Keyword::NOINHERIT,
6399                Keyword::BYPASSRLS,
6400                Keyword::NOBYPASSRLS,
6401                Keyword::PASSWORD,
6402                Keyword::CREATEDB,
6403                Keyword::NOCREATEDB,
6404                Keyword::CREATEROLE,
6405                Keyword::NOCREATEROLE,
6406                Keyword::SUPERUSER,
6407                Keyword::NOSUPERUSER,
6408                Keyword::REPLICATION,
6409                Keyword::NOREPLICATION,
6410                Keyword::CONNECTION,
6411                Keyword::VALID,
6412                Keyword::IN,
6413                Keyword::ROLE,
6414                Keyword::ADMIN,
6415                Keyword::USER,
6416            ]
6417        } else {
6418            vec![]
6419        };
6420
6421        // MSSQL
6422        let mut authorization_owner = None;
6423        // Postgres
6424        let mut login = None;
6425        let mut inherit = None;
6426        let mut bypassrls = None;
6427        let mut password = None;
6428        let mut create_db = None;
6429        let mut create_role = None;
6430        let mut superuser = None;
6431        let mut replication = None;
6432        let mut connection_limit = None;
6433        let mut valid_until = None;
6434        let mut in_role = vec![];
6435        let mut in_group = vec![];
6436        let mut role = vec![];
6437        let mut user = vec![];
6438        let mut admin = vec![];
6439
6440        while let Some(keyword) = self.parse_one_of_keywords(&optional_keywords) {
6441            let loc = self
6442                .tokens
6443                .get(self.index - 1)
6444                .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
6445            match keyword {
6446                Keyword::AUTHORIZATION => {
6447                    if authorization_owner.is_some() {
6448                        parser_err!("Found multiple AUTHORIZATION", loc)
6449                    } else {
6450                        authorization_owner = Some(self.parse_object_name(false)?);
6451                        Ok(())
6452                    }
6453                }
6454                Keyword::LOGIN | Keyword::NOLOGIN => {
6455                    if login.is_some() {
6456                        parser_err!("Found multiple LOGIN or NOLOGIN", loc)
6457                    } else {
6458                        login = Some(keyword == Keyword::LOGIN);
6459                        Ok(())
6460                    }
6461                }
6462                Keyword::INHERIT | Keyword::NOINHERIT => {
6463                    if inherit.is_some() {
6464                        parser_err!("Found multiple INHERIT or NOINHERIT", loc)
6465                    } else {
6466                        inherit = Some(keyword == Keyword::INHERIT);
6467                        Ok(())
6468                    }
6469                }
6470                Keyword::BYPASSRLS | Keyword::NOBYPASSRLS => {
6471                    if bypassrls.is_some() {
6472                        parser_err!("Found multiple BYPASSRLS or NOBYPASSRLS", loc)
6473                    } else {
6474                        bypassrls = Some(keyword == Keyword::BYPASSRLS);
6475                        Ok(())
6476                    }
6477                }
6478                Keyword::CREATEDB | Keyword::NOCREATEDB => {
6479                    if create_db.is_some() {
6480                        parser_err!("Found multiple CREATEDB or NOCREATEDB", loc)
6481                    } else {
6482                        create_db = Some(keyword == Keyword::CREATEDB);
6483                        Ok(())
6484                    }
6485                }
6486                Keyword::CREATEROLE | Keyword::NOCREATEROLE => {
6487                    if create_role.is_some() {
6488                        parser_err!("Found multiple CREATEROLE or NOCREATEROLE", loc)
6489                    } else {
6490                        create_role = Some(keyword == Keyword::CREATEROLE);
6491                        Ok(())
6492                    }
6493                }
6494                Keyword::SUPERUSER | Keyword::NOSUPERUSER => {
6495                    if superuser.is_some() {
6496                        parser_err!("Found multiple SUPERUSER or NOSUPERUSER", loc)
6497                    } else {
6498                        superuser = Some(keyword == Keyword::SUPERUSER);
6499                        Ok(())
6500                    }
6501                }
6502                Keyword::REPLICATION | Keyword::NOREPLICATION => {
6503                    if replication.is_some() {
6504                        parser_err!("Found multiple REPLICATION or NOREPLICATION", loc)
6505                    } else {
6506                        replication = Some(keyword == Keyword::REPLICATION);
6507                        Ok(())
6508                    }
6509                }
6510                Keyword::PASSWORD => {
6511                    if password.is_some() {
6512                        parser_err!("Found multiple PASSWORD", loc)
6513                    } else {
6514                        password = if self.parse_keyword(Keyword::NULL) {
6515                            Some(Password::NullPassword)
6516                        } else {
6517                            Some(Password::Password(Expr::Value(self.parse_value()?)))
6518                        };
6519                        Ok(())
6520                    }
6521                }
6522                Keyword::CONNECTION => {
6523                    self.expect_keyword_is(Keyword::LIMIT)?;
6524                    if connection_limit.is_some() {
6525                        parser_err!("Found multiple CONNECTION LIMIT", loc)
6526                    } else {
6527                        connection_limit = Some(Expr::Value(self.parse_number_value()?));
6528                        Ok(())
6529                    }
6530                }
6531                Keyword::VALID => {
6532                    self.expect_keyword_is(Keyword::UNTIL)?;
6533                    if valid_until.is_some() {
6534                        parser_err!("Found multiple VALID UNTIL", loc)
6535                    } else {
6536                        valid_until = Some(Expr::Value(self.parse_value()?));
6537                        Ok(())
6538                    }
6539                }
6540                Keyword::IN => {
6541                    if self.parse_keyword(Keyword::ROLE) {
6542                        if !in_role.is_empty() {
6543                            parser_err!("Found multiple IN ROLE", loc)
6544                        } else {
6545                            in_role = self.parse_comma_separated(|p| p.parse_identifier())?;
6546                            Ok(())
6547                        }
6548                    } else if self.parse_keyword(Keyword::GROUP) {
6549                        if !in_group.is_empty() {
6550                            parser_err!("Found multiple IN GROUP", loc)
6551                        } else {
6552                            in_group = self.parse_comma_separated(|p| p.parse_identifier())?;
6553                            Ok(())
6554                        }
6555                    } else {
6556                        self.expected("ROLE or GROUP after IN", self.peek_token())
6557                    }
6558                }
6559                Keyword::ROLE => {
6560                    if !role.is_empty() {
6561                        parser_err!("Found multiple ROLE", loc)
6562                    } else {
6563                        role = self.parse_comma_separated(|p| p.parse_identifier())?;
6564                        Ok(())
6565                    }
6566                }
6567                Keyword::USER => {
6568                    if !user.is_empty() {
6569                        parser_err!("Found multiple USER", loc)
6570                    } else {
6571                        user = self.parse_comma_separated(|p| p.parse_identifier())?;
6572                        Ok(())
6573                    }
6574                }
6575                Keyword::ADMIN => {
6576                    if !admin.is_empty() {
6577                        parser_err!("Found multiple ADMIN", loc)
6578                    } else {
6579                        admin = self.parse_comma_separated(|p| p.parse_identifier())?;
6580                        Ok(())
6581                    }
6582                }
6583                _ => break,
6584            }?
6585        }
6586
6587        Ok(Statement::CreateRole {
6588            names,
6589            if_not_exists,
6590            login,
6591            inherit,
6592            bypassrls,
6593            password,
6594            create_db,
6595            create_role,
6596            replication,
6597            superuser,
6598            connection_limit,
6599            valid_until,
6600            in_role,
6601            in_group,
6602            role,
6603            user,
6604            admin,
6605            authorization_owner,
6606        })
6607    }
6608
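    /// Parse a role/owner reference, as used after `OWNER TO` and in the `TO`
    /// list of `CREATE POLICY`:
    ///
    /// ```sql
    /// { identifier | CURRENT_USER | CURRENT_ROLE | SESSION_USER }
    /// ```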
6609    pub fn parse_owner(&mut self) -> Result<Owner, ParserError> {
6610        let owner = match self.parse_one_of_keywords(&[Keyword::CURRENT_USER, Keyword::CURRENT_ROLE, Keyword::SESSION_USER]) {
6611            Some(Keyword::CURRENT_USER) => Owner::CurrentUser,
6612            Some(Keyword::CURRENT_ROLE) => Owner::CurrentRole,
6613            Some(Keyword::SESSION_USER) => Owner::SessionUser,
6614            Some(_) => unreachable!(),
6615            None => {
6616                match self.parse_identifier() {
6617                    Ok(ident) => Owner::Ident(ident),
6618                    Err(e) => {
6619                        return Err(ParserError::ParserError(format!("Expected: CURRENT_USER, CURRENT_ROLE, SESSION_USER or identifier after OWNER TO. {e}")))
6620                    }
6621                }
6622            }
6623        };
6624        Ok(owner)
6625    }
6626
6627    /// Parses a [Statement::CreateDomain] statement.
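    ///
    /// A sketch of the syntax accepted below (names are placeholders):
    ///
    /// ```sql
    /// CREATE DOMAIN name AS data_type
    ///     [ COLLATE collation ]
    ///     [ DEFAULT expression ]
    ///     [ constraint [ ... ] ]
    /// ```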
6628    fn parse_create_domain(&mut self) -> Result<Statement, ParserError> {
6629        let name = self.parse_object_name(false)?;
6630        self.expect_keyword_is(Keyword::AS)?;
6631        let data_type = self.parse_data_type()?;
6632        let collation = if self.parse_keyword(Keyword::COLLATE) {
6633            Some(self.parse_identifier()?)
6634        } else {
6635            None
6636        };
6637        let default = if self.parse_keyword(Keyword::DEFAULT) {
6638            Some(self.parse_expr()?)
6639        } else {
6640            None
6641        };
6642        let mut constraints = Vec::new();
6643        while let Some(constraint) = self.parse_optional_table_constraint()? {
6644            constraints.push(constraint);
6645        }
6646
6647        Ok(Statement::CreateDomain(CreateDomain {
6648            name,
6649            data_type,
6650            collation,
6651            default,
6652            constraints,
6653        }))
6654    }
6655
6656    /// ```sql
6657    ///     CREATE POLICY name ON table_name [ AS { PERMISSIVE | RESTRICTIVE } ]
6658    ///     [ FOR { ALL | SELECT | INSERT | UPDATE | DELETE } ]
6659    ///     [ TO { role_name | PUBLIC | CURRENT_USER | CURRENT_ROLE | SESSION_USER } [, ...] ]
6660    ///     [ USING ( using_expression ) ]
6661    ///     [ WITH CHECK ( with_check_expression ) ]
6662    /// ```
6663    ///
6664    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createpolicy.html)
6665    pub fn parse_create_policy(&mut self) -> Result<Statement, ParserError> {
6666        let name = self.parse_identifier()?;
6667        self.expect_keyword_is(Keyword::ON)?;
6668        let table_name = self.parse_object_name(false)?;
6669
6670        let policy_type = if self.parse_keyword(Keyword::AS) {
6671            let keyword =
6672                self.expect_one_of_keywords(&[Keyword::PERMISSIVE, Keyword::RESTRICTIVE])?;
6673            Some(match keyword {
6674                Keyword::PERMISSIVE => CreatePolicyType::Permissive,
6675                Keyword::RESTRICTIVE => CreatePolicyType::Restrictive,
6676                _ => unreachable!(),
6677            })
6678        } else {
6679            None
6680        };
6681
6682        let command = if self.parse_keyword(Keyword::FOR) {
6683            let keyword = self.expect_one_of_keywords(&[
6684                Keyword::ALL,
6685                Keyword::SELECT,
6686                Keyword::INSERT,
6687                Keyword::UPDATE,
6688                Keyword::DELETE,
6689            ])?;
6690            Some(match keyword {
6691                Keyword::ALL => CreatePolicyCommand::All,
6692                Keyword::SELECT => CreatePolicyCommand::Select,
6693                Keyword::INSERT => CreatePolicyCommand::Insert,
6694                Keyword::UPDATE => CreatePolicyCommand::Update,
6695                Keyword::DELETE => CreatePolicyCommand::Delete,
6696                _ => unreachable!(),
6697            })
6698        } else {
6699            None
6700        };
6701
6702        let to = if self.parse_keyword(Keyword::TO) {
6703            Some(self.parse_comma_separated(|p| p.parse_owner())?)
6704        } else {
6705            None
6706        };
6707
6708        let using = if self.parse_keyword(Keyword::USING) {
6709            self.expect_token(&Token::LParen)?;
6710            let expr = self.parse_expr()?;
6711            self.expect_token(&Token::RParen)?;
6712            Some(expr)
6713        } else {
6714            None
6715        };
6716
6717        let with_check = if self.parse_keywords(&[Keyword::WITH, Keyword::CHECK]) {
6718            self.expect_token(&Token::LParen)?;
6719            let expr = self.parse_expr()?;
6720            self.expect_token(&Token::RParen)?;
6721            Some(expr)
6722        } else {
6723            None
6724        };
6725
6726        Ok(CreatePolicy {
6727            name,
6728            table_name,
6729            policy_type,
6730            command,
6731            to,
6732            using,
6733            with_check,
6734        })
6735    }
6736
6737    /// ```sql
6738    /// CREATE CONNECTOR [IF NOT EXISTS] connector_name
6739    /// [TYPE datasource_type]
6740    /// [URL datasource_url]
6741    /// [COMMENT connector_comment]
6742    /// [WITH DCPROPERTIES(property_name=property_value, ...)]
6743    /// ```
6744    ///
6745    /// [Hive Documentation](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362034#LanguageManualDDL-CreateDataConnectorCreateConnector)
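    ///
    /// A minimal, illustrative sketch: this method expects the tokens *after*
    /// `CREATE CONNECTOR`, so the example feeds only that tail to a parser built
    /// with the crate's `Parser::new`/`try_with_sql` helpers:
    /// ```ignore
    /// use sqlparser::dialect::HiveDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let mut parser = Parser::new(&HiveDialect {})
    ///     .try_with_sql("my_connector TYPE 'mysql' URL 'jdbc:mysql://localhost:3306'")
    ///     .unwrap();
    /// let stmt = parser.parse_create_connector().unwrap();
    /// ```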
6746    pub fn parse_create_connector(&mut self) -> Result<Statement, ParserError> {
6747        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6748        let name = self.parse_identifier()?;
6749
6750        let connector_type = if self.parse_keyword(Keyword::TYPE) {
6751            Some(self.parse_literal_string()?)
6752        } else {
6753            None
6754        };
6755
6756        let url = if self.parse_keyword(Keyword::URL) {
6757            Some(self.parse_literal_string()?)
6758        } else {
6759            None
6760        };
6761
6762        let comment = self.parse_optional_inline_comment()?;
6763
6764        let with_dcproperties =
6765            match self.parse_options_with_keywords(&[Keyword::WITH, Keyword::DCPROPERTIES])? {
6766                properties if !properties.is_empty() => Some(properties),
6767                _ => None,
6768            };
6769
6770        Ok(Statement::CreateConnector(CreateConnector {
6771            name,
6772            if_not_exists,
6773            connector_type,
6774            url,
6775            comment,
6776            with_dcproperties,
6777        }))
6778    }
6779
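    /// Parse a `DROP` statement (e.g. `DROP TABLE`, `DROP VIEW`, `DROP FUNCTION`).
    ///
    /// A minimal, illustrative sketch using the crate's public `Parser::parse_sql`
    /// entry point; the SQL string is only an example input:
    /// ```ignore
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "DROP TABLE IF EXISTS t1, t2 CASCADE";
    /// let stmts = Parser::parse_sql(&GenericDialect {}, sql).unwrap();
    /// ```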
6780    pub fn parse_drop(&mut self) -> Result<Statement, ParserError> {
6781        // MySQL, DuckDB and the generic dialect support `TEMPORARY`
6782        let temporary = dialect_of!(self is MySqlDialect | GenericDialect | DuckDbDialect)
6783            && self.parse_keyword(Keyword::TEMPORARY);
6784        let persistent = dialect_of!(self is DuckDbDialect)
6785            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
6786
6787        let object_type = if self.parse_keywords(&[Keyword::TABLE, Keyword::FUNCTION]) {
6788            return self.parse_drop_table_function();
6789        } else if self.parse_keyword(Keyword::TABLE) {
6790            ObjectType::Table
6791        } else if self.parse_keyword(Keyword::VIEW) {
6792            ObjectType::View
6793        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEW]) {
6794            ObjectType::MaterializedView
6795        } else if self.parse_keyword(Keyword::INDEX) {
6796            ObjectType::Index
6797        } else if self.parse_keyword(Keyword::ROLE) {
6798            ObjectType::Role
6799        } else if self.parse_keyword(Keyword::SCHEMA) {
6800            ObjectType::Schema
6801        } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::SCHEMA]) {
6802            ObjectType::Schema
6803        } else if self.parse_keyword(Keyword::DATABASE) {
6804            ObjectType::Database
6805        } else if self.parse_keyword(Keyword::SEQUENCE) {
6806            ObjectType::Sequence
6807        } else if self.parse_keyword(Keyword::STAGE) {
6808            ObjectType::Stage
6809        } else if self.parse_keyword(Keyword::TYPE) {
6810            ObjectType::Type
6811        } else if self.parse_keyword(Keyword::USER) {
6812            ObjectType::User
6813        } else if self.parse_keyword(Keyword::STREAM) {
6814            ObjectType::Stream
6815        } else if self.parse_keywords(&[Keyword::AGGREGATE, Keyword::FUNCTION]) {
6816            return self.parse_drop_function();
6817        } else if self.parse_keyword(Keyword::FUNCTION) {
6818            return self.parse_drop_function();
6819        } else if self.parse_keyword(Keyword::POLICY) {
6820            return self.parse_drop_policy();
6821        } else if self.parse_keyword(Keyword::CONNECTOR) {
6822            return self.parse_drop_connector();
6823        } else if self.parse_keyword(Keyword::DOMAIN) {
6824            return self.parse_drop_domain();
6825        } else if self.parse_keyword(Keyword::PROCEDURE) {
6826            return self.parse_drop_procedure();
6827        } else if self.parse_keyword(Keyword::SECRET) {
6828            return self.parse_drop_secret(temporary, persistent);
6829        } else if self.parse_keyword(Keyword::TRIGGER) {
6830            return self.parse_drop_trigger();
6831        } else if self.parse_keyword(Keyword::EXTENSION) {
6832            return self.parse_drop_extension();
6833        } else if self.parse_keywords(&[Keyword::SEARCH, Keyword::INDEX]) {
6834            return self.parse_drop_search_index();
6835        } else if self.parse_keywords(&[Keyword::VECTOR, Keyword::INDEX]) {
6836            return self.parse_drop_vector_index();
6837        } else if self.parse_keywords(&[Keyword::ROW, Keyword::ACCESS, Keyword::POLICY]) {
6838            return self.parse_drop_row_access_policy();
6839        } else if self.parse_keywords(&[Keyword::ALL, Keyword::ROW, Keyword::ACCESS, Keyword::POLICIES]) {
6840            return self.parse_drop_all_row_access_policies();
6841        } else {
6842            return self.expected(
6843                "CONNECTOR, DATABASE, DOMAIN, EXTENSION, FUNCTION, INDEX, POLICY, PROCEDURE, ROLE, SCHEMA, SECRET, SEQUENCE, STAGE, STREAM, TABLE, TRIGGER, TYPE, VIEW, MATERIALIZED VIEW, SEARCH INDEX, VECTOR INDEX, ROW ACCESS POLICY or USER after DROP",
6844                self.peek_token(),
6845            );
6846        };
6847        // Many dialects support the non-standard `IF EXISTS` clause and allow
6848        // specifying multiple objects to delete in a single statement
6849        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6850        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
6851
6852        let loc = self.peek_token().span.start;
6853        let cascade = self.parse_keyword(Keyword::CASCADE);
6854        let restrict = self.parse_keyword(Keyword::RESTRICT);
6855        let purge = self.parse_keyword(Keyword::PURGE);
6856        if cascade && restrict {
6857            return parser_err!("Cannot specify both CASCADE and RESTRICT in DROP", loc);
6858        }
6859        if object_type == ObjectType::Role && (cascade || restrict || purge) {
6860            return parser_err!(
6861                "Cannot specify CASCADE, RESTRICT, or PURGE in DROP ROLE",
6862                loc
6863            );
6864        }
6865        let table = if self.parse_keyword(Keyword::ON) {
6866            Some(self.parse_object_name(false)?)
6867        } else {
6868            None
6869        };
6870        Ok(Statement::Drop {
6871            object_type,
6872            if_exists,
6873            names,
6874            cascade,
6875            restrict,
6876            purge,
6877            temporary,
6878            table,
6879        })
6880    }
6881
6882    pub fn parse_undrop(&mut self) -> Result<Statement, ParserError> {
6883        if self.parse_keyword(Keyword::SCHEMA) {
6884            let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6885            let schema_name = self.parse_object_name(false)?;
6886            Ok(Statement::UndropSchema {
6887                if_not_exists,
6888                schema_name,
6889            })
6890        } else {
6891            self.expected("SCHEMA after UNDROP", self.peek_token())
6892        }
6893    }
6894
6895    fn parse_optional_drop_behavior(&mut self) -> Option<DropBehavior> {
6896        match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
6897            Some(Keyword::CASCADE) => Some(DropBehavior::Cascade),
6898            Some(Keyword::RESTRICT) => Some(DropBehavior::Restrict),
6899            _ => None,
6900        }
6901    }
6902
6903    /// ```sql
6904    /// DROP FUNCTION [ IF EXISTS ] name [ ( [ [ argmode ] [ argname ] argtype [, ...] ] ) ] [, ...]
6905    /// [ CASCADE | RESTRICT ]
6906    /// ```
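    ///
    /// A minimal, illustrative sketch; this private helper is reached through the
    /// top-level `DROP` parsing above (example input only):
    /// ```ignore
    /// use sqlparser::dialect::PostgreSqlDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "DROP FUNCTION IF EXISTS add(a INTEGER, b INTEGER) CASCADE";
    /// let stmts = Parser::parse_sql(&PostgreSqlDialect {}, sql).unwrap();
    /// ```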
6907    fn parse_drop_function(&mut self) -> Result<Statement, ParserError> {
6908        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6909        let func_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
6910        let drop_behavior = self.parse_optional_drop_behavior();
6911        Ok(Statement::DropFunction {
6912            if_exists,
6913            func_desc,
6914            drop_behavior,
6915        })
6916    }
6917
6918    fn parse_drop_table_function(&mut self) -> Result<Statement, ParserError> {
6919        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6920        let func_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
6921        Ok(Statement::DropFunction {
6922            if_exists,
6923            func_desc,
6924            drop_behavior: None,
6925        })
6926    }
6927
6928    /// ```sql
6929    /// DROP POLICY [ IF EXISTS ] name ON table_name [ CASCADE | RESTRICT ]
6930    /// ```
6931    ///
6932    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-droppolicy.html)
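    ///
    /// A minimal, illustrative sketch (reached via the top-level `DROP` parsing):
    /// ```ignore
    /// use sqlparser::dialect::PostgreSqlDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "DROP POLICY IF EXISTS p ON t RESTRICT";
    /// let stmts = Parser::parse_sql(&PostgreSqlDialect {}, sql).unwrap();
    /// ```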
6933    fn parse_drop_policy(&mut self) -> Result<Statement, ParserError> {
6934        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6935        let name = self.parse_identifier()?;
6936        self.expect_keyword_is(Keyword::ON)?;
6937        let table_name = self.parse_object_name(false)?;
6938        let drop_behavior = self.parse_optional_drop_behavior();
6939        Ok(Statement::DropPolicy {
6940            if_exists,
6941            name,
6942            table_name,
6943            drop_behavior,
6944        })
6945    }
6946    /// ```sql
6947    /// DROP CONNECTOR [IF EXISTS] name
6948    /// ```
6949    ///
6950    /// See [Hive](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362034#LanguageManualDDL-DropConnector)
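    ///
    /// A minimal, illustrative sketch (reached via the top-level `DROP` parsing):
    /// ```ignore
    /// use sqlparser::dialect::HiveDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let stmts =
    ///     Parser::parse_sql(&HiveDialect {}, "DROP CONNECTOR IF EXISTS my_connector").unwrap();
    /// ```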
6951    fn parse_drop_connector(&mut self) -> Result<Statement, ParserError> {
6952        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6953        let name = self.parse_identifier()?;
6954        Ok(Statement::DropConnector { if_exists, name })
6955    }
6956
6957    /// ```sql
6958    /// DROP DOMAIN [ IF EXISTS ] name [ CASCADE | RESTRICT ]
6959    /// ```
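    ///
    /// A minimal, illustrative sketch (reached via the top-level `DROP` parsing):
    /// ```ignore
    /// use sqlparser::dialect::PostgreSqlDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "DROP DOMAIN IF EXISTS us_postal_code CASCADE";
    /// let stmts = Parser::parse_sql(&PostgreSqlDialect {}, sql).unwrap();
    /// ```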
6960    fn parse_drop_domain(&mut self) -> Result<Statement, ParserError> {
6961        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6962        let name = self.parse_object_name(false)?;
6963        let drop_behavior = self.parse_optional_drop_behavior();
6964        Ok(Statement::DropDomain(DropDomain {
6965            if_exists,
6966            name,
6967            drop_behavior,
6968        }))
6969    }
6970
6971    /// ```sql
6972    /// DROP PROCEDURE [ IF EXISTS ] name [ ( [ [ argmode ] [ argname ] argtype [, ...] ] ) ] [, ...]
6973    /// [ CASCADE | RESTRICT ]
6974    /// ```
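    ///
    /// A minimal, illustrative sketch (reached via the top-level `DROP` parsing):
    /// ```ignore
    /// use sqlparser::dialect::PostgreSqlDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "DROP PROCEDURE IF EXISTS insert_data(a INTEGER, b INTEGER)";
    /// let stmts = Parser::parse_sql(&PostgreSqlDialect {}, sql).unwrap();
    /// ```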
6975    fn parse_drop_procedure(&mut self) -> Result<Statement, ParserError> {
6976        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6977        let proc_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
6978        let drop_behavior = self.parse_optional_drop_behavior();
6979        Ok(Statement::DropProcedure {
6980            if_exists,
6981            proc_desc,
6982            drop_behavior,
6983        })
6984    }
6985
6986    fn parse_function_desc(&mut self) -> Result<FunctionDesc, ParserError> {
6987        let name = self.parse_object_name(false)?;
6988
6989        let args = if self.consume_token(&Token::LParen) {
6990            if self.consume_token(&Token::RParen) {
6991                Some(vec![])
6992            } else {
6993                let args = self.parse_comma_separated(Parser::parse_function_arg)?;
6994                self.expect_token(&Token::RParen)?;
6995                Some(args)
6996            }
6997        } else {
6998            None
6999        };
7000
7001        Ok(FunctionDesc { name, args })
7002    }
7003
7004    /// See [DuckDB Docs](https://duckdb.org/docs/sql/statements/create_secret.html) for more details.
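    ///
    /// A minimal, illustrative sketch of the DuckDB syntax handled here:
    /// ```ignore
    /// use sqlparser::dialect::DuckDbDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "DROP PERSISTENT SECRET IF EXISTS my_secret FROM my_storage";
    /// let stmts = Parser::parse_sql(&DuckDbDialect {}, sql).unwrap();
    /// ```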
7005    fn parse_drop_secret(
7006        &mut self,
7007        temporary: bool,
7008        persistent: bool,
7009    ) -> Result<Statement, ParserError> {
7010        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7011        let name = self.parse_identifier()?;
7012        let storage_specifier = if self.parse_keyword(Keyword::FROM) {
7013            self.parse_identifier().ok()
7014        } else {
7015            None
7016        };
7017        let temp = match (temporary, persistent) {
7018            (true, false) => Some(true),
7019            (false, true) => Some(false),
7020            (false, false) => None,
7021            _ => self.expected("TEMPORARY or PERSISTENT", self.peek_token())?,
7022        };
7023
7024        Ok(Statement::DropSecret {
7025            if_exists,
7026            temporary: temp,
7027            name,
7028            storage_specifier,
7029        })
7030    }
7031
7032    /// Parse a `DECLARE` statement.
7033    ///
7034    /// ```sql
7035    /// DECLARE name [ BINARY ] [ ASENSITIVE | INSENSITIVE ] [ [ NO ] SCROLL ]
7036    ///     CURSOR [ { WITH | WITHOUT } HOLD ] FOR query
7037    /// ```
7038    ///
7039    /// The syntax can vary significantly between dialects. In such cases, see the
7040    /// grammar documented on the dialect-specific parsing function below.
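    ///
    /// A minimal, illustrative sketch of the generic/PostgreSQL cursor form:
    /// ```ignore
    /// use sqlparser::dialect::PostgreSqlDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "DECLARE my_cursor INSENSITIVE SCROLL CURSOR WITH HOLD FOR SELECT * FROM t";
    /// let stmts = Parser::parse_sql(&PostgreSqlDialect {}, sql).unwrap();
    /// ```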
7041    pub fn parse_declare(&mut self) -> Result<Statement, ParserError> {
7042        if dialect_of!(self is BigQueryDialect) {
7043            return self.parse_big_query_declare();
7044        }
7045        if dialect_of!(self is SnowflakeDialect) {
7046            return self.parse_snowflake_declare();
7047        }
7048        if dialect_of!(self is MsSqlDialect) {
7049            return self.parse_mssql_declare();
7050        }
7051
7052        let name = self.parse_identifier()?;
7053
7054        let binary = Some(self.parse_keyword(Keyword::BINARY));
7055        let sensitive = if self.parse_keyword(Keyword::INSENSITIVE) {
7056            Some(true)
7057        } else if self.parse_keyword(Keyword::ASENSITIVE) {
7058            Some(false)
7059        } else {
7060            None
7061        };
7062        let scroll = if self.parse_keyword(Keyword::SCROLL) {
7063            Some(true)
7064        } else if self.parse_keywords(&[Keyword::NO, Keyword::SCROLL]) {
7065            Some(false)
7066        } else {
7067            None
7068        };
7069
7070        self.expect_keyword_is(Keyword::CURSOR)?;
7071        let declare_type = Some(DeclareType::Cursor);
7072
7073        let hold = match self.parse_one_of_keywords(&[Keyword::WITH, Keyword::WITHOUT]) {
7074            Some(keyword) => {
7075                self.expect_keyword_is(Keyword::HOLD)?;
7076
7077                match keyword {
7078                    Keyword::WITH => Some(true),
7079                    Keyword::WITHOUT => Some(false),
7080                    _ => unreachable!(),
7081                }
7082            }
7083            None => None,
7084        };
7085
7086        self.expect_keyword_is(Keyword::FOR)?;
7087
7088        let query = Some(self.parse_query()?);
7089
7090        Ok(Statement::Declare {
7091            stmts: vec![Declare {
7092                names: vec![name],
7093                data_type: None,
7094                assignment: None,
7095                declare_type,
7096                binary,
7097                sensitive,
7098                scroll,
7099                hold,
7100                for_query: query,
7101            }],
7102        })
7103    }
7104
7105    /// Parse a [BigQuery] `DECLARE` statement.
7106    ///
7107    /// Syntax:
7108    /// ```text
7109    /// DECLARE variable_name[, ...] [{ <variable_type> | <DEFAULT expression> }];
7110    /// ```
7111    /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#declare
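    ///
    /// A minimal, illustrative sketch (example input only):
    /// ```ignore
    /// use sqlparser::dialect::BigQueryDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let stmts =
    ///     Parser::parse_sql(&BigQueryDialect {}, "DECLARE x, y INT64 DEFAULT 42").unwrap();
    /// ```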
7112    pub fn parse_big_query_declare(&mut self) -> Result<Statement, ParserError> {
7113        let names = self.parse_comma_separated(Parser::parse_identifier)?;
7114
7115        let data_type = match self.peek_token().token {
7116            Token::Word(w) if w.keyword == Keyword::DEFAULT => None,
7117            _ => Some(self.parse_data_type()?),
7118        };
7119
7120        let expr = if data_type.is_some() {
7121            if self.parse_keyword(Keyword::DEFAULT) {
7122                Some(self.parse_expr()?)
7123            } else {
7124                None
7125            }
7126        } else {
7127            // If no variable type is given, a default expression must be specified
7128            // per BQ docs, i.e. `DECLARE foo;` is invalid.
7129            self.expect_keyword_is(Keyword::DEFAULT)?;
7130            Some(self.parse_expr()?)
7131        };
7132
7133        Ok(Statement::Declare {
7134            stmts: vec![Declare {
7135                names,
7136                data_type,
7137                assignment: expr.map(|expr| DeclareAssignment::Default(Box::new(expr))),
7138                declare_type: None,
7139                binary: None,
7140                sensitive: None,
7141                scroll: None,
7142                hold: None,
7143                for_query: None,
7144            }],
7145        })
7146    }
7147
7148    /// Parse a [Snowflake] `DECLARE` statement.
7149    ///
7150    /// Syntax:
7151    /// ```text
7152    /// DECLARE
7153    ///   [{ <variable_declaration>
7154    ///      | <cursor_declaration>
7155    ///      | <resultset_declaration>
7156    ///      | <exception_declaration> }; ... ]
7157    ///
7158    /// <variable_declaration>
7159    /// <variable_name> [<type>] [ { DEFAULT | := } <expression>]
7160    ///
7161    /// <cursor_declaration>
7162    /// <cursor_name> CURSOR FOR <query>
7163    ///
7164    /// <resultset_declaration>
7165    /// <resultset_name> RESULTSET [ { DEFAULT | := } ( <query> ) ] ;
7166    ///
7167    /// <exception_declaration>
7168    /// <exception_name> EXCEPTION [ ( <exception_number> , '<exception_message>' ) ] ;
7169    /// ```
7170    ///
7171    /// [Snowflake]: https://docs.snowflake.com/en/sql-reference/snowflake-scripting/declare
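    ///
    /// A minimal, illustrative sketch covering a variable and a cursor declaration:
    /// ```ignore
    /// use sqlparser::dialect::SnowflakeDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "DECLARE profit NUMBER(38, 2) DEFAULT 0.0; c1 CURSOR FOR SELECT id FROM t";
    /// let stmts = Parser::parse_sql(&SnowflakeDialect {}, sql).unwrap();
    /// ```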
7172    pub fn parse_snowflake_declare(&mut self) -> Result<Statement, ParserError> {
7173        let mut stmts = vec![];
7174        loop {
7175            let name = self.parse_identifier()?;
7176            let (declare_type, for_query, assigned_expr, data_type) =
7177                if self.parse_keyword(Keyword::CURSOR) {
7178                    self.expect_keyword_is(Keyword::FOR)?;
7179                    match self.peek_token().token {
7180                        Token::Word(w) if w.keyword == Keyword::SELECT => (
7181                            Some(DeclareType::Cursor),
7182                            Some(self.parse_query()?),
7183                            None,
7184                            None,
7185                        ),
7186                        _ => (
7187                            Some(DeclareType::Cursor),
7188                            None,
7189                            Some(DeclareAssignment::For(Box::new(self.parse_expr()?))),
7190                            None,
7191                        ),
7192                    }
7193                } else if self.parse_keyword(Keyword::RESULTSET) {
7194                    let assigned_expr = if self.peek_token().token != Token::SemiColon {
7195                        self.parse_snowflake_variable_declaration_expression()?
7196                    } else {
7197                        // Nothing more to do. The statement has no further parameters.
7198                        None
7199                    };
7200
7201                    (Some(DeclareType::ResultSet), None, assigned_expr, None)
7202                } else if self.parse_keyword(Keyword::EXCEPTION) {
7203                    let assigned_expr = if self.peek_token().token == Token::LParen {
7204                        Some(DeclareAssignment::Expr(Box::new(self.parse_expr()?)))
7205                    } else {
7206                        // Nothing more to do. The statement has no further parameters.
7207                        None
7208                    };
7209
7210                    (Some(DeclareType::Exception), None, assigned_expr, None)
7211                } else {
7212                    // Without an explicit keyword, the only valid option is variable declaration.
7213                    let (assigned_expr, data_type) = if let Some(assigned_expr) =
7214                        self.parse_snowflake_variable_declaration_expression()?
7215                    {
7216                        (Some(assigned_expr), None)
7217                    } else if let Token::Word(_) = self.peek_token().token {
7218                        let data_type = self.parse_data_type()?;
7219                        (
7220                            self.parse_snowflake_variable_declaration_expression()?,
7221                            Some(data_type),
7222                        )
7223                    } else {
7224                        (None, None)
7225                    };
7226                    (None, None, assigned_expr, data_type)
7227                };
7228            let stmt = Declare {
7229                names: vec![name],
7230                data_type,
7231                assignment: assigned_expr,
7232                declare_type,
7233                binary: None,
7234                sensitive: None,
7235                scroll: None,
7236                hold: None,
7237                for_query,
7238            };
7239
7240            stmts.push(stmt);
7241            if self.consume_token(&Token::SemiColon) {
7242                match self.peek_token().token {
7243                    Token::Word(w)
7244                        if ALL_KEYWORDS
7245                            .binary_search(&w.value.to_uppercase().as_str())
7246                            .is_err() =>
7247                    {
7248                        // Not a keyword - start of a new declaration.
7249                        continue;
7250                    }
7251                    _ => {
7252                        // Put back the semicolon, this is the end of the DECLARE statement.
7253                        self.prev_token();
7254                    }
7255                }
7256            }
7257
7258            break;
7259        }
7260
7261        Ok(Statement::Declare { stmts })
7262    }
7263
7264    /// Parse a [MsSql] `DECLARE` statement.
7265    ///
7266    /// Syntax:
7267    /// ```text
7268    /// DECLARE
7269    /// {
7270    ///   { @local_variable [AS] data_type [ = value ] }
7271    ///   | { @cursor_variable_name CURSOR [ FOR ] }
7272    /// } [ ,...n ]
7273    /// ```
7274    /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/language-elements/declare-local-variable-transact-sql?view=sql-server-ver16
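    ///
    /// A minimal, illustrative sketch (example input only):
    /// ```ignore
    /// use sqlparser::dialect::MsSqlDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let stmts =
    ///     Parser::parse_sql(&MsSqlDialect {}, "DECLARE @x INT = 1, @c CURSOR").unwrap();
    /// ```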
7275    pub fn parse_mssql_declare(&mut self) -> Result<Statement, ParserError> {
7276        let stmts = self.parse_comma_separated(Parser::parse_mssql_declare_stmt)?;
7277
7278        Ok(Statement::Declare { stmts })
7279    }
7280
7281    /// Parse the body of a [MsSql] `DECLARE` statement.
7282    ///
7283    /// Syntax:
7284    /// ```text
7285    /// {
7286    ///   { @local_variable [AS] data_type [ = value ] }
7287    ///   | { @cursor_variable_name CURSOR [ FOR ] }
7288    /// } [ ,...n ]
7289    /// ```
7290    /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/language-elements/declare-local-variable-transact-sql?view=sql-server-ver16
7291    pub fn parse_mssql_declare_stmt(&mut self) -> Result<Declare, ParserError> {
7292        let name = {
7293            let ident = self.parse_identifier()?;
7294            if !ident.value.starts_with('@')
7295                && !matches!(
7296                    self.peek_token().token,
7297                    Token::Word(w) if w.keyword == Keyword::CURSOR
7298                )
7299            {
7300                Err(ParserError::TokenizerError(
7301                    "Invalid MsSql variable declaration.".to_string(),
7302                ))
7303            } else {
7304                Ok(ident)
7305            }
7306        }?;
7307
7308        let (declare_type, data_type) = match self.peek_token().token {
7309            Token::Word(w) => match w.keyword {
7310                Keyword::CURSOR => {
7311                    self.next_token();
7312                    (Some(DeclareType::Cursor), None)
7313                }
7314                Keyword::AS => {
7315                    self.next_token();
7316                    (None, Some(self.parse_data_type()?))
7317                }
7318                _ => (None, Some(self.parse_data_type()?)),
7319            },
7320            _ => (None, Some(self.parse_data_type()?)),
7321        };
7322
7323        let (for_query, assignment) = if self.peek_keyword(Keyword::FOR) {
7324            self.next_token();
7325            let query = Some(self.parse_query()?);
7326            (query, None)
7327        } else {
7328            let assignment = self.parse_mssql_variable_declaration_expression()?;
7329            (None, assignment)
7330        };
7331
7332        Ok(Declare {
7333            names: vec![name],
7334            data_type,
7335            assignment,
7336            declare_type,
7337            binary: None,
7338            sensitive: None,
7339            scroll: None,
7340            hold: None,
7341            for_query,
7342        })
7343    }
7344
7345    /// Parses the assigned expression in a variable declaration.
7346    ///
7347    /// Syntax:
7348    /// ```text
7349    /// [ { DEFAULT | := } <expression>]
7350    /// ```
7351    /// <https://docs.snowflake.com/en/sql-reference/snowflake-scripting/declare#variable-declaration-syntax>
7352    pub fn parse_snowflake_variable_declaration_expression(
7353        &mut self,
7354    ) -> Result<Option<DeclareAssignment>, ParserError> {
7355        Ok(match self.peek_token().token {
7356            Token::Word(w) if w.keyword == Keyword::DEFAULT => {
7357                self.next_token(); // Skip `DEFAULT`
7358                Some(DeclareAssignment::Default(Box::new(self.parse_expr()?)))
7359            }
7360            Token::Assignment => {
7361                self.next_token(); // Skip `:=`
7362                Some(DeclareAssignment::DuckAssignment(Box::new(
7363                    self.parse_expr()?,
7364                )))
7365            }
7366            _ => None,
7367        })
7368    }
7369
7370    /// Parses the assigned expression in a variable declaration.
7371    ///
7372    /// Syntax:
7373    /// ```text
7374    /// [ = <expression>]
7375    /// ```
7376    pub fn parse_mssql_variable_declaration_expression(
7377        &mut self,
7378    ) -> Result<Option<DeclareAssignment>, ParserError> {
7379        Ok(match self.peek_token().token {
7380            Token::Eq => {
7381                self.next_token(); // Skip `=`
7382                Some(DeclareAssignment::MsSqlAssignment(Box::new(
7383                    self.parse_expr()?,
7384                )))
7385            }
7386            _ => None,
7387        })
7388    }
7389
7390    // FETCH [ direction { FROM | IN } ] cursor INTO target;
7391    pub fn parse_fetch_statement(&mut self) -> Result<Statement, ParserError> {
7392        let direction = if self.parse_keyword(Keyword::NEXT) {
7393            FetchDirection::Next
7394        } else if self.parse_keyword(Keyword::PRIOR) {
7395            FetchDirection::Prior
7396        } else if self.parse_keyword(Keyword::FIRST) {
7397            FetchDirection::First
7398        } else if self.parse_keyword(Keyword::LAST) {
7399            FetchDirection::Last
7400        } else if self.parse_keyword(Keyword::ABSOLUTE) {
7401            FetchDirection::Absolute {
7402                limit: self.parse_number_value()?.value,
7403            }
7404        } else if self.parse_keyword(Keyword::RELATIVE) {
7405            FetchDirection::Relative {
7406                limit: self.parse_number_value()?.value,
7407            }
7408        } else if self.parse_keyword(Keyword::FORWARD) {
7409            if self.parse_keyword(Keyword::ALL) {
7410                FetchDirection::ForwardAll
7411            } else {
7412                FetchDirection::Forward {
7413                    // TODO: Support an optional limit here
7414                    limit: Some(self.parse_number_value()?.value),
7415                }
7416            }
7417        } else if self.parse_keyword(Keyword::BACKWARD) {
7418            if self.parse_keyword(Keyword::ALL) {
7419                FetchDirection::BackwardAll
7420            } else {
7421                FetchDirection::Backward {
7422                    // TODO: Support an optional limit here
7423                    limit: Some(self.parse_number_value()?.value),
7424                }
7425            }
7426        } else if self.parse_keyword(Keyword::ALL) {
7427            FetchDirection::All
7428        } else {
7429            FetchDirection::Count {
7430                limit: self.parse_number_value()?.value,
7431            }
7432        };
7433
7434        let position = if self.peek_keyword(Keyword::FROM) {
7435            self.expect_keyword(Keyword::FROM)?;
7436            FetchPosition::From
7437        } else if self.peek_keyword(Keyword::IN) {
7438            self.expect_keyword(Keyword::IN)?;
7439            FetchPosition::In
7440        } else {
7441            return parser_err!("Expected FROM or IN", self.peek_token().span.start);
7442        };
7443
7444        let name = self.parse_identifier()?;
7445
7446        let into = if self.parse_keyword(Keyword::INTO) {
7447            Some(self.parse_object_name(false)?)
7448        } else {
7449            None
7450        };
7451
7452        Ok(Statement::Fetch {
7453            name,
7454            direction,
7455            position,
7456            into,
7457        })
7458    }
7459
7460    pub fn parse_discard(&mut self) -> Result<Statement, ParserError> {
7461        let object_type = if self.parse_keyword(Keyword::ALL) {
7462            DiscardObject::ALL
7463        } else if self.parse_keyword(Keyword::PLANS) {
7464            DiscardObject::PLANS
7465        } else if self.parse_keyword(Keyword::SEQUENCES) {
7466            DiscardObject::SEQUENCES
7467        } else if self.parse_keyword(Keyword::TEMP) || self.parse_keyword(Keyword::TEMPORARY) {
7468            DiscardObject::TEMP
7469        } else {
7470            return self.expected(
7471                "ALL, PLANS, SEQUENCES, TEMP or TEMPORARY after DISCARD",
7472                self.peek_token(),
7473            );
7474        };
7475        Ok(Statement::Discard { object_type })
7476    }
7477
7478    pub fn parse_create_index(&mut self, unique: bool) -> Result<Statement, ParserError> {
7479        let concurrently = self.parse_keyword(Keyword::CONCURRENTLY);
7480        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7481        let index_name = if if_not_exists || !self.parse_keyword(Keyword::ON) {
7482            let index_name = self.parse_object_name(false)?;
7483            self.expect_keyword_is(Keyword::ON)?;
7484            Some(index_name)
7485        } else {
7486            None
7487        };
7488        let table_name = self.parse_object_name(false)?;
7489        let using = if self.parse_keyword(Keyword::USING) {
7490            Some(self.parse_index_type()?)
7491        } else {
7492            None
7493        };
7494
7495        let columns = self.parse_parenthesized_index_column_list()?;
7496
7497        let include = if self.parse_keyword(Keyword::INCLUDE) {
7498            self.expect_token(&Token::LParen)?;
7499            let columns = self.parse_comma_separated(|p| p.parse_identifier())?;
7500            self.expect_token(&Token::RParen)?;
7501            columns
7502        } else {
7503            vec![]
7504        };
7505
7506        let nulls_distinct = if self.parse_keyword(Keyword::NULLS) {
7507            let not = self.parse_keyword(Keyword::NOT);
7508            self.expect_keyword_is(Keyword::DISTINCT)?;
7509            Some(!not)
7510        } else {
7511            None
7512        };
7513
7514        let with = if self.dialect.supports_create_index_with_clause()
7515            && self.parse_keyword(Keyword::WITH)
7516        {
7517            self.expect_token(&Token::LParen)?;
7518            let with_params = self.parse_comma_separated(Parser::parse_expr)?;
7519            self.expect_token(&Token::RParen)?;
7520            with_params
7521        } else {
7522            Vec::new()
7523        };
7524
7525        let predicate = if self.parse_keyword(Keyword::WHERE) {
7526            Some(self.parse_expr()?)
7527        } else {
7528            None
7529        };
7530
7531        // MySQL options (including the modern style of `USING` after the column list instead of
7532        // before, which is deprecated) shouldn't conflict with other preceding options (e.g. `WITH
7533        // PARSER` won't be caught by the above `WITH` clause parsing because MySQL doesn't set that
7534        // support flag). This is probably invalid syntax for other dialects, but it is simpler to
7535        // parse it anyway (as we do inside `ALTER TABLE` and `CREATE TABLE` parsing).
7536        let index_options = self.parse_index_options()?;
7537
7538        // MySQL allows `ALGORITHM` and `LOCK` options. Unlike in `ALTER TABLE`, they need not be comma separated.
7539        let mut alter_options = Vec::new();
7540        while self
7541            .peek_one_of_keywords(&[Keyword::ALGORITHM, Keyword::LOCK])
7542            .is_some()
7543        {
7544            alter_options.push(self.parse_alter_table_operation()?)
7545        }
7546
7547        Ok(Statement::CreateIndex(CreateIndex {
7548            name: index_name,
7549            table_name,
7550            using,
7551            columns,
7552            unique,
7553            concurrently,
7554            if_not_exists,
7555            include,
7556            nulls_distinct,
7557            with,
7558            predicate,
7559            index_options,
7560            alter_options,
7561        }))
7562    }
7563
7564    pub fn parse_create_search_index(
7565        &mut self,
7566        or_replace: bool,
7567    ) -> Result<Statement, ParserError> {
7568        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7569        let name = self.parse_object_name(false)?;
7570        self.expect_keyword_is(Keyword::ON)?;
7571        let table_name = self.parse_object_name(false)?;
7572
7573        self.expect_token(&Token::LParen)?;
7574        let (columns, all_columns) = if self.parse_keywords(&[Keyword::ALL, Keyword::COLUMNS]) {
7575            (vec![], true)
7576        } else {
7577            (self.parse_comma_separated(|p| p.parse_identifier())?, false)
7578        };
7579        self.expect_token(&Token::RParen)?;
7580
7581        let options = if let Some(opts) = self.maybe_parse_options(Keyword::OPTIONS)? {
7582            opts
7583        } else {
7584            vec![]
7585        };
7586
7587        Ok(Statement::CreateSearchIndex {
7588            or_replace,
7589            if_not_exists,
7590            name,
7591            table_name,
7592            columns,
7593            all_columns,
7594            options,
7595        })
7596    }
7597
7598    pub fn parse_create_vector_index(
7599        &mut self,
7600        or_replace: bool,
7601    ) -> Result<Statement, ParserError> {
7602        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7603        let name = self.parse_object_name(false)?;
7604        self.expect_keyword_is(Keyword::ON)?;
7605        let table_name = self.parse_object_name(false)?;
7606
7607        self.expect_token(&Token::LParen)?;
7608        let column = self.parse_identifier()?;
7609        self.expect_token(&Token::RParen)?;
7610
7611        let storing = if self.parse_keyword(Keyword::STORING) {
7612            self.expect_token(&Token::LParen)?;
7613            let cols = self.parse_comma_separated(|p| p.parse_identifier())?;
7614            self.expect_token(&Token::RParen)?;
7615            cols
7616        } else {
7617            vec![]
7618        };
7619
7620        let options = if let Some(opts) = self.maybe_parse_options(Keyword::OPTIONS)? {
7621            opts
7622        } else {
7623            vec![]
7624        };
7625
7626        Ok(Statement::CreateVectorIndex {
7627            or_replace,
7628            if_not_exists,
7629            name,
7630            table_name,
7631            column,
7632            storing,
7633            options,
7634        })
7635    }
7636
7637    pub fn parse_create_row_access_policy(
7638        &mut self,
7639        or_replace: bool,
7640    ) -> Result<Statement, ParserError> {
7641        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7642        let name = self.parse_object_name(false)?;
7643        self.expect_keyword_is(Keyword::ON)?;
7644        let table_name = self.parse_object_name(false)?;
7645
7646        self.expect_keywords(&[Keyword::GRANT, Keyword::TO])?;
7647        self.expect_token(&Token::LParen)?;
7648        let grant_to = self.parse_comma_separated(Parser::parse_expr)?;
7649        self.expect_token(&Token::RParen)?;
7650
7651        self.expect_keyword_is(Keyword::FILTER)?;
7652        self.expect_keyword_is(Keyword::USING)?;
7653        self.expect_token(&Token::LParen)?;
7654        let filter_using = self.parse_expr()?;
7655        self.expect_token(&Token::RParen)?;
7656
7657        Ok(Statement::CreateRowAccessPolicy {
7658            or_replace,
7659            if_not_exists,
7660            name,
7661            table_name,
7662            grant_to,
7663            filter_using,
7664        })
7665    }
7666
7667    pub fn parse_drop_search_index(&mut self) -> Result<Statement, ParserError> {
7668        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7669        let name = self.parse_object_name(false)?;
7670        self.expect_keyword_is(Keyword::ON)?;
7671        let table_name = self.parse_object_name(false)?;
7672
7673        Ok(Statement::DropSearchIndex {
7674            if_exists,
7675            name,
7676            table_name,
7677        })
7678    }
7679
7680    pub fn parse_drop_vector_index(&mut self) -> Result<Statement, ParserError> {
7681        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7682        let name = self.parse_object_name(false)?;
7683        self.expect_keyword_is(Keyword::ON)?;
7684        let table_name = self.parse_object_name(false)?;
7685
7686        Ok(Statement::DropVectorIndex {
7687            if_exists,
7688            name,
7689            table_name,
7690        })
7691    }
7692
7693    pub fn parse_drop_row_access_policy(&mut self) -> Result<Statement, ParserError> {
7694        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7695        let name = self.parse_object_name(false)?;
7696        self.expect_keyword_is(Keyword::ON)?;
7697        let table_name = self.parse_object_name(false)?;
7698
7699        Ok(Statement::DropRowAccessPolicy {
7700            if_exists,
7701            name,
7702            table_name,
7703        })
7704    }
7705
7706    pub fn parse_drop_all_row_access_policies(&mut self) -> Result<Statement, ParserError> {
7707        self.expect_keyword_is(Keyword::ON)?;
7708        let table_name = self.parse_object_name(false)?;
7709
7710        Ok(Statement::DropAllRowAccessPolicies { table_name })
7711    }
7712
7713    pub fn parse_create_extension(&mut self) -> Result<Statement, ParserError> {
7714        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7715        let name = self.parse_identifier()?;
7716
7717        let (schema, version, cascade) = if self.parse_keyword(Keyword::WITH) {
7718            let schema = if self.parse_keyword(Keyword::SCHEMA) {
7719                Some(self.parse_identifier()?)
7720            } else {
7721                None
7722            };
7723
7724            let version = if self.parse_keyword(Keyword::VERSION) {
7725                Some(self.parse_identifier()?)
7726            } else {
7727                None
7728            };
7729
7730            let cascade = self.parse_keyword(Keyword::CASCADE);
7731
7732            (schema, version, cascade)
7733        } else {
7734            (None, None, false)
7735        };
7736
7737        Ok(Statement::CreateExtension {
7738            name,
7739            if_not_exists,
7740            schema,
7741            version,
7742            cascade,
7743        })
7744    }
7745
7746    /// Parse a PostgreSQL-specific [Statement::DropExtension] statement.
7747    pub fn parse_drop_extension(&mut self) -> Result<Statement, ParserError> {
7748        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7749        let names = self.parse_comma_separated(|p| p.parse_identifier())?;
7750        let cascade_or_restrict =
7751            self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]);
7752        Ok(Statement::DropExtension {
7753            names,
7754            if_exists,
7755            cascade_or_restrict: cascade_or_restrict
7756                .map(|k| match k {
7757                    Keyword::CASCADE => Ok(ReferentialAction::Cascade),
7758                    Keyword::RESTRICT => Ok(ReferentialAction::Restrict),
7759                    _ => self.expected("CASCADE or RESTRICT", self.peek_token()),
7760                })
7761                .transpose()?,
7762        })
7763    }
7764
7765    // TODO: Implement parsing for Skewed
7766    pub fn parse_hive_distribution(&mut self) -> Result<HiveDistributionStyle, ParserError> {
7767        if self.parse_keywords(&[Keyword::PARTITIONED, Keyword::BY]) {
7768            self.expect_token(&Token::LParen)?;
7769            let columns = self.parse_comma_separated(Parser::parse_column_def)?;
7770            self.expect_token(&Token::RParen)?;
7771            Ok(HiveDistributionStyle::PARTITIONED { columns })
7772        } else {
7773            Ok(HiveDistributionStyle::NONE)
7774        }
7775    }
7776
7777    pub fn parse_hive_formats(&mut self) -> Result<HiveFormat, ParserError> {
7778        let mut hive_format = HiveFormat::default();
7779        loop {
7780            match self.parse_one_of_keywords(&[
7781                Keyword::ROW,
7782                Keyword::STORED,
7783                Keyword::LOCATION,
7784                Keyword::WITH,
7785            ]) {
7786                Some(Keyword::ROW) => {
7787                    hive_format.row_format = Some(self.parse_row_format()?);
7788                }
7789                Some(Keyword::STORED) => {
7790                    self.expect_keyword_is(Keyword::AS)?;
7791                    if self.parse_keyword(Keyword::INPUTFORMAT) {
7792                        let input_format = self.parse_expr()?;
7793                        self.expect_keyword_is(Keyword::OUTPUTFORMAT)?;
7794                        let output_format = self.parse_expr()?;
7795                        hive_format.storage = Some(HiveIOFormat::IOF {
7796                            input_format,
7797                            output_format,
7798                        });
7799                    } else {
7800                        let format = self.parse_file_format()?;
7801                        hive_format.storage = Some(HiveIOFormat::FileFormat { format });
7802                    }
7803                }
7804                Some(Keyword::LOCATION) => {
7805                    hive_format.location = Some(self.parse_literal_string()?);
7806                }
7807                Some(Keyword::WITH) => {
7808                    self.prev_token();
7809                    let properties = self
7810                        .parse_options_with_keywords(&[Keyword::WITH, Keyword::SERDEPROPERTIES])?;
7811                    if !properties.is_empty() {
7812                        hive_format.serde_properties = Some(properties);
7813                    } else {
7814                        break;
7815                    }
7816                }
7817                None => break,
7818                _ => break,
7819            }
7820        }
7821
7822        Ok(hive_format)
7823    }
7824
7825    pub fn parse_row_format(&mut self) -> Result<HiveRowFormat, ParserError> {
7826        self.expect_keyword_is(Keyword::FORMAT)?;
7827        match self.parse_one_of_keywords(&[Keyword::SERDE, Keyword::DELIMITED]) {
7828            Some(Keyword::SERDE) => {
7829                let class = self.parse_literal_string()?;
7830                Ok(HiveRowFormat::SERDE { class })
7831            }
7832            _ => {
7833                let mut row_delimiters = vec![];
7834
7835                loop {
7836                    match self.parse_one_of_keywords(&[
7837                        Keyword::FIELDS,
7838                        Keyword::COLLECTION,
7839                        Keyword::MAP,
7840                        Keyword::LINES,
7841                        Keyword::NULL,
7842                    ]) {
7843                        Some(Keyword::FIELDS) => {
7844                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
7845                                row_delimiters.push(HiveRowDelimiter {
7846                                    delimiter: HiveDelimiter::FieldsTerminatedBy,
7847                                    char: self.parse_identifier()?,
7848                                });
7849
7850                                if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) {
7851                                    row_delimiters.push(HiveRowDelimiter {
7852                                        delimiter: HiveDelimiter::FieldsEscapedBy,
7853                                        char: self.parse_identifier()?,
7854                                    });
7855                                }
7856                            } else {
7857                                break;
7858                            }
7859                        }
7860                        Some(Keyword::COLLECTION) => {
7861                            if self.parse_keywords(&[
7862                                Keyword::ITEMS,
7863                                Keyword::TERMINATED,
7864                                Keyword::BY,
7865                            ]) {
7866                                row_delimiters.push(HiveRowDelimiter {
7867                                    delimiter: HiveDelimiter::CollectionItemsTerminatedBy,
7868                                    char: self.parse_identifier()?,
7869                                });
7870                            } else {
7871                                break;
7872                            }
7873                        }
7874                        Some(Keyword::MAP) => {
7875                            if self.parse_keywords(&[
7876                                Keyword::KEYS,
7877                                Keyword::TERMINATED,
7878                                Keyword::BY,
7879                            ]) {
7880                                row_delimiters.push(HiveRowDelimiter {
7881                                    delimiter: HiveDelimiter::MapKeysTerminatedBy,
7882                                    char: self.parse_identifier()?,
7883                                });
7884                            } else {
7885                                break;
7886                            }
7887                        }
7888                        Some(Keyword::LINES) => {
7889                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
7890                                row_delimiters.push(HiveRowDelimiter {
7891                                    delimiter: HiveDelimiter::LinesTerminatedBy,
7892                                    char: self.parse_identifier()?,
7893                                });
7894                            } else {
7895                                break;
7896                            }
7897                        }
7898                        Some(Keyword::NULL) => {
7899                            if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) {
7900                                row_delimiters.push(HiveRowDelimiter {
7901                                    delimiter: HiveDelimiter::NullDefinedAs,
7902                                    char: self.parse_identifier()?,
7903                                });
7904                            } else {
7905                                break;
7906                            }
7907                        }
7908                        _ => {
7909                            break;
7910                        }
7911                    }
7912                }
7913
7914                Ok(HiveRowFormat::DELIMITED {
7915                    delimiters: row_delimiters,
7916                })
7917            }
7918        }
7919    }
7920
7921    fn parse_optional_on_cluster(&mut self) -> Result<Option<Ident>, ParserError> {
7922        if self.parse_keywords(&[Keyword::ON, Keyword::CLUSTER]) {
7923            Ok(Some(self.parse_identifier()?))
7924        } else {
7925            Ok(None)
7926        }
7927    }
7928
7929    pub fn parse_create_table(
7930        &mut self,
7931        or_replace: bool,
7932        temporary: bool,
7933        global: Option<bool>,
7934        transient: bool,
7935    ) -> Result<Statement, ParserError> {
7936        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
7937        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7938        let table_name = self.parse_object_name(allow_unquoted_hyphen)?;
7939
7940        // ClickHouse has `ON CLUSTER 'cluster'` syntax for DDLs
7941        let on_cluster = self.parse_optional_on_cluster()?;
7942
7943        let like = self.maybe_parse_create_table_like(allow_unquoted_hyphen)?;
7944
7945        let (clone, clone_version) = if self.parse_keyword(Keyword::CLONE) {
7946            let name = self.parse_object_name(allow_unquoted_hyphen).ok();
7947            let version = self.maybe_parse_table_version()?;
7948            (name, version)
7949        } else {
7950            (None, None)
7951        };
7952
7953        let copy = if self.parse_keyword(Keyword::COPY) {
7954            self.parse_object_name(allow_unquoted_hyphen).ok()
7955        } else {
7956            None
7957        };
7958
7959        // parse optional column list (schema)
7960        let (columns, constraints) = self.parse_columns()?;
7961        let comment_after_column_def =
7962            if dialect_of!(self is HiveDialect) && self.parse_keyword(Keyword::COMMENT) {
7963                let next_token = self.next_token();
7964                match next_token.token {
7965                    Token::SingleQuotedString(str) => Some(CommentDef::WithoutEq(str)),
7966                    _ => self.expected("comment", next_token)?,
7967                }
7968            } else {
7969                None
7970            };
7971
7972        // SQLite supports `WITHOUT ROWID` at the end of `CREATE TABLE`
7973        let without_rowid = self.parse_keywords(&[Keyword::WITHOUT, Keyword::ROWID]);
7974
7975        let hive_distribution = self.parse_hive_distribution()?;
7976        let clustered_by = self.parse_optional_clustered_by()?;
7977        let hive_formats = self.parse_hive_formats()?;
7978
7979        let create_table_config = self.parse_optional_create_table_config()?;
7980
7981        // ClickHouse supports `PRIMARY KEY` before `ORDER BY`
7982        // https://clickhouse.com/docs/en/sql-reference/statements/create/table#primary-key
7983        let primary_key = if dialect_of!(self is ClickHouseDialect | GenericDialect)
7984            && self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY])
7985        {
7986            Some(Box::new(self.parse_expr()?))
7987        } else {
7988            None
7989        };
7990
7991        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
7992            if self.consume_token(&Token::LParen) {
7993                let columns = if self.peek_token() != Token::RParen {
7994                    self.parse_comma_separated(|p| p.parse_expr())?
7995                } else {
7996                    vec![]
7997                };
7998                self.expect_token(&Token::RParen)?;
7999                Some(OneOrManyWithParens::Many(columns))
8000            } else {
8001                Some(OneOrManyWithParens::One(self.parse_expr()?))
8002            }
8003        } else {
8004            None
8005        };
8006
8007        let on_commit = if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT]) {
8008            Some(self.parse_create_table_on_commit()?)
8009        } else {
8010            None
8011        };
8012
8013        let strict = self.parse_keyword(Keyword::STRICT);
8014
8015        // Parse optional `AS ( query )`
8016        let query = if self.parse_keyword(Keyword::AS) {
8017            Some(self.parse_query()?)
8018        } else if self.dialect.supports_create_table_select() && self.parse_keyword(Keyword::SELECT)
8019        {
8020            // rewind the SELECT keyword
8021            self.prev_token();
8022            Some(self.parse_query()?)
8023        } else {
8024            None
8025        };
8026
8027        Ok(CreateTableBuilder::new(table_name)
8028            .temporary(temporary)
8029            .columns(columns)
8030            .constraints(constraints)
8031            .or_replace(or_replace)
8032            .if_not_exists(if_not_exists)
8033            .transient(transient)
8034            .hive_distribution(hive_distribution)
8035            .hive_formats(Some(hive_formats))
8036            .global(global)
8037            .query(query)
8038            .without_rowid(without_rowid)
8039            .like(like)
8040            .clone_clause(clone)
8041            .copy_clause(copy)
8042            .version(clone_version)
8043            .comment_after_column_def(comment_after_column_def)
8044            .order_by(order_by)
8045            .on_commit(on_commit)
8046            .on_cluster(on_cluster)
8047            .clustered_by(clustered_by)
8048            .partition_by(create_table_config.partition_by)
8049            .cluster_by(create_table_config.cluster_by)
8050            .inherits(create_table_config.inherits)
8051            .table_options(create_table_config.table_options)
8052            .primary_key(primary_key)
8053            .strict(strict)
8054            .build())
8055    }
8056
8057    fn maybe_parse_create_table_like(
8058        &mut self,
8059        allow_unquoted_hyphen: bool,
8060    ) -> Result<Option<CreateTableLikeKind>, ParserError> {
8061        let like = if self.dialect.supports_create_table_like_parenthesized()
8062            && self.consume_token(&Token::LParen)
8063        {
8064            if self.parse_keyword(Keyword::LIKE) {
8065                let name = self.parse_object_name(allow_unquoted_hyphen)?;
8066                let defaults = if self.parse_keywords(&[Keyword::INCLUDING, Keyword::DEFAULTS]) {
8067                    Some(CreateTableLikeDefaults::Including)
8068                } else if self.parse_keywords(&[Keyword::EXCLUDING, Keyword::DEFAULTS]) {
8069                    Some(CreateTableLikeDefaults::Excluding)
8070                } else {
8071                    None
8072                };
8073                self.expect_token(&Token::RParen)?;
8074                Some(CreateTableLikeKind::Parenthesized(CreateTableLike {
8075                    name,
8076                    defaults,
8077                }))
8078            } else {
8079                // Roll back the '('; it's probably the start of the column list
8080                self.prev_token();
8081                None
8082            }
8083        } else if self.parse_keyword(Keyword::LIKE) || self.parse_keyword(Keyword::ILIKE) {
8084            let name = self.parse_object_name(allow_unquoted_hyphen)?;
8085            Some(CreateTableLikeKind::Plain(CreateTableLike {
8086                name,
8087                defaults: None,
8088            }))
8089        } else {
8090            None
8091        };
8092        Ok(like)
8093    }
8094
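    /// Parses the value of an `ON COMMIT` clause. For example (illustrative;
    /// table and column names are arbitrary, and the clause is typically used
    /// with temporary tables):
    /// ```sql
    /// CREATE TEMPORARY TABLE t (a INT) ON COMMIT DELETE ROWS;
    /// CREATE TEMPORARY TABLE t (a INT) ON COMMIT PRESERVE ROWS;
    /// CREATE TEMPORARY TABLE t (a INT) ON COMMIT DROP;
    /// ```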
8095    pub(crate) fn parse_create_table_on_commit(&mut self) -> Result<OnCommit, ParserError> {
8096        if self.parse_keywords(&[Keyword::DELETE, Keyword::ROWS]) {
8097            Ok(OnCommit::DeleteRows)
8098        } else if self.parse_keywords(&[Keyword::PRESERVE, Keyword::ROWS]) {
8099            Ok(OnCommit::PreserveRows)
8100        } else if self.parse_keywords(&[Keyword::DROP]) {
8101            Ok(OnCommit::Drop)
8102        } else {
8103            parser_err!(
8104                "Expecting DELETE ROWS, PRESERVE ROWS or DROP",
8105                self.peek_token()
8106            )
8107        }
8108    }
8109
8110    /// Parse table configuration such as inheritance, partitioning, and clustering clauses during table creation.
8111    ///
8112    /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#syntax_2)
8113    /// [PostgreSQL](https://www.postgresql.org/docs/current/ddl-partitioning.html)
8114    /// [MySql](https://dev.mysql.com/doc/refman/8.4/en/create-table.html)
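    ///
    /// Illustrative, dialect-dependent examples of clauses handled here
    /// (table, column, and option names are placeholders):
    /// ```sql
    /// CREATE TABLE child (a INT) INHERITS (parent);
    /// CREATE TABLE t (a INT) WITH (fillfactor = 70);
    /// CREATE TABLE t (a INT, b INT) PARTITION BY a CLUSTER BY b;
    /// ```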
8115    fn parse_optional_create_table_config(
8116        &mut self,
8117    ) -> Result<CreateTableConfiguration, ParserError> {
8118        let mut table_options = CreateTableOptions::None;
8119
8120        let inherits = if self.parse_keyword(Keyword::INHERITS) {
8121            Some(self.parse_parenthesized_qualified_column_list(IsOptional::Mandatory, false)?)
8122        } else {
8123            None
8124        };
8125
8126        // PostgreSQL supports `WITH ( options )` before `AS`
8127        let with_options = self.parse_options(Keyword::WITH)?;
8128        if !with_options.is_empty() {
8129            table_options = CreateTableOptions::With(with_options)
8130        }
8131
8132        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
8133        if !table_properties.is_empty() {
8134            table_options = CreateTableOptions::TableProperties(table_properties);
8135        }
8136        let partition_by = if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
8137            && self.parse_keywords(&[Keyword::PARTITION, Keyword::BY])
8138        {
8139            Some(Box::new(self.parse_expr()?))
8140        } else {
8141            None
8142        };
8143
8144        let mut cluster_by = None;
8145        if dialect_of!(self is BigQueryDialect | GenericDialect) {
8146            if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
8147                cluster_by = Some(WrappedCollection::NoWrapping(
8148                    self.parse_comma_separated(|p| p.parse_expr())?,
8149                ));
8150            };
8151
8152            if let Token::Word(word) = self.peek_token().token {
8153                if word.keyword == Keyword::OPTIONS {
8154                    table_options =
8155                        CreateTableOptions::Options(self.parse_options(Keyword::OPTIONS)?)
8156                }
8157            };
8158        }
8159
8160        if !dialect_of!(self is HiveDialect) && table_options == CreateTableOptions::None {
8161            let plain_options = self.parse_plain_options()?;
8162            if !plain_options.is_empty() {
8163                table_options = CreateTableOptions::Plain(plain_options)
8164            }
8165        };
8166
8167        Ok(CreateTableConfiguration {
8168            partition_by,
8169            cluster_by,
8170            inherits,
8171            table_options,
8172        })
8173    }
8174
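    /// Parses a single "plain" (unparenthesized) table option, mostly in the
    /// MySQL style. An illustrative example combining several such options
    /// (all values are placeholders):
    /// ```sql
    /// CREATE TABLE t (a INT) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='demo' AUTO_INCREMENT=100;
    /// ```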
8175    fn parse_plain_option(&mut self) -> Result<Option<SqlOption>, ParserError> {
8176        // Single parameter option
8177        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
8178        if self.parse_keywords(&[Keyword::START, Keyword::TRANSACTION]) {
8179            return Ok(Some(SqlOption::Ident(Ident::new("START TRANSACTION"))));
8180        }
8181
8182        // Custom option
8183        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
8184        if self.parse_keywords(&[Keyword::COMMENT]) {
8185            let has_eq = self.consume_token(&Token::Eq);
8186            let value = self.next_token();
8187
8188            let comment = match (has_eq, value.token) {
8189                (true, Token::SingleQuotedString(s)) => {
8190                    Ok(Some(SqlOption::Comment(CommentDef::WithEq(s))))
8191                }
8192                (false, Token::SingleQuotedString(s)) => {
8193                    Ok(Some(SqlOption::Comment(CommentDef::WithoutEq(s))))
8194                }
8195                (_, token) => {
8196                    self.expected("Token::SingleQuotedString", TokenWithSpan::wrap(token))
8197                }
8198            };
8199            return comment;
8200        }
8201
8202        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
8203        // <https://clickhouse.com/docs/sql-reference/statements/create/table>
8204        if self.parse_keywords(&[Keyword::ENGINE]) {
8205            let _ = self.consume_token(&Token::Eq);
8206            let value = self.next_token();
8207
8208            let engine = match value.token {
8209                Token::Word(w) => {
8210                    let parameters = if self.peek_token() == Token::LParen {
8211                        self.parse_parenthesized_identifiers()?
8212                    } else {
8213                        vec![]
8214                    };
8215
8216                    Ok(Some(SqlOption::NamedParenthesizedList(
8217                        NamedParenthesizedList {
8218                            key: Ident::new("ENGINE"),
8219                            name: Some(Ident::new(w.value)),
8220                            values: parameters,
8221                        },
8222                    )))
8223                }
8224                _ => {
8225                    return self.expected("Token::Word", value)?;
8226                }
8227            };
8228
8229            return engine;
8230        }
8231
8232        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
8233        if self.parse_keywords(&[Keyword::TABLESPACE]) {
8234            let _ = self.consume_token(&Token::Eq);
8235            let value = self.next_token();
8236
8237            let tablespace = match value.token {
8238                Token::Word(Word { value: name, .. }) | Token::SingleQuotedString(name) => {
8239                    let storage = match self.parse_keyword(Keyword::STORAGE) {
8240                        true => {
8241                            let _ = self.consume_token(&Token::Eq);
8242                            let storage_token = self.next_token();
8243                            match &storage_token.token {
8244                                Token::Word(w) => match w.value.to_uppercase().as_str() {
8245                                    "DISK" => Some(StorageType::Disk),
8246                                    "MEMORY" => Some(StorageType::Memory),
8247                                    _ => self
8248                                        .expected("Storage type (DISK or MEMORY)", storage_token)?,
8249                                },
8250                                _ => self.expected("Token::Word", storage_token)?,
8251                            }
8252                        }
8253                        false => None,
8254                    };
8255
8256                    Ok(Some(SqlOption::TableSpace(TablespaceOption {
8257                        name,
8258                        storage,
8259                    })))
8260                }
8261                _ => {
8262                    return self.expected("Token::Word", value)?;
8263                }
8264            };
8265
8266            return tablespace;
8267        }
8268
8269        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
8270        if self.parse_keyword(Keyword::UNION) {
8271            let _ = self.consume_token(&Token::Eq);
8272            let value = self.next_token();
8273
8274            match value.token {
8275                Token::LParen => {
8276                    let tables: Vec<Ident> =
8277                        self.parse_comma_separated0(Parser::parse_identifier, Token::RParen)?;
8278                    self.expect_token(&Token::RParen)?;
8279
8280                    return Ok(Some(SqlOption::NamedParenthesizedList(
8281                        NamedParenthesizedList {
8282                            key: Ident::new("UNION"),
8283                            name: None,
8284                            values: tables,
8285                        },
8286                    )));
8287                }
8288                _ => {
8289                    return self.expected("Token::LParen", value)?;
8290                }
8291            }
8292        }
8293
8294        // Key/Value parameter option
8295        let key = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARSET]) {
8296            Ident::new("DEFAULT CHARSET")
8297        } else if self.parse_keyword(Keyword::CHARSET) {
8298            Ident::new("CHARSET")
8299        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARACTER, Keyword::SET]) {
8300            Ident::new("DEFAULT CHARACTER SET")
8301        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
8302            Ident::new("CHARACTER SET")
8303        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
8304            Ident::new("DEFAULT COLLATE")
8305        } else if self.parse_keyword(Keyword::COLLATE) {
8306            Ident::new("COLLATE")
8307        } else if self.parse_keywords(&[Keyword::DATA, Keyword::DIRECTORY]) {
8308            Ident::new("DATA DIRECTORY")
8309        } else if self.parse_keywords(&[Keyword::INDEX, Keyword::DIRECTORY]) {
8310            Ident::new("INDEX DIRECTORY")
8311        } else if self.parse_keyword(Keyword::KEY_BLOCK_SIZE) {
8312            Ident::new("KEY_BLOCK_SIZE")
8313        } else if self.parse_keyword(Keyword::ROW_FORMAT) {
8314            Ident::new("ROW_FORMAT")
8315        } else if self.parse_keyword(Keyword::PACK_KEYS) {
8316            Ident::new("PACK_KEYS")
8317        } else if self.parse_keyword(Keyword::STATS_AUTO_RECALC) {
8318            Ident::new("STATS_AUTO_RECALC")
8319        } else if self.parse_keyword(Keyword::STATS_PERSISTENT) {
8320            Ident::new("STATS_PERSISTENT")
8321        } else if self.parse_keyword(Keyword::STATS_SAMPLE_PAGES) {
8322            Ident::new("STATS_SAMPLE_PAGES")
8323        } else if self.parse_keyword(Keyword::DELAY_KEY_WRITE) {
8324            Ident::new("DELAY_KEY_WRITE")
8325        } else if self.parse_keyword(Keyword::COMPRESSION) {
8326            Ident::new("COMPRESSION")
8327        } else if self.parse_keyword(Keyword::ENCRYPTION) {
8328            Ident::new("ENCRYPTION")
8329        } else if self.parse_keyword(Keyword::MAX_ROWS) {
8330            Ident::new("MAX_ROWS")
8331        } else if self.parse_keyword(Keyword::MIN_ROWS) {
8332            Ident::new("MIN_ROWS")
8333        } else if self.parse_keyword(Keyword::AUTOEXTEND_SIZE) {
8334            Ident::new("AUTOEXTEND_SIZE")
8335        } else if self.parse_keyword(Keyword::AVG_ROW_LENGTH) {
8336            Ident::new("AVG_ROW_LENGTH")
8337        } else if self.parse_keyword(Keyword::CHECKSUM) {
8338            Ident::new("CHECKSUM")
8339        } else if self.parse_keyword(Keyword::CONNECTION) {
8340            Ident::new("CONNECTION")
8341        } else if self.parse_keyword(Keyword::ENGINE_ATTRIBUTE) {
8342            Ident::new("ENGINE_ATTRIBUTE")
8343        } else if self.parse_keyword(Keyword::PASSWORD) {
8344            Ident::new("PASSWORD")
8345        } else if self.parse_keyword(Keyword::SECONDARY_ENGINE_ATTRIBUTE) {
8346            Ident::new("SECONDARY_ENGINE_ATTRIBUTE")
8347        } else if self.parse_keyword(Keyword::INSERT_METHOD) {
8348            Ident::new("INSERT_METHOD")
8349        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
8350            Ident::new("AUTO_INCREMENT")
8351        } else {
8352            return Ok(None);
8353        };
8354
8355        let _ = self.consume_token(&Token::Eq);
8356
8357        let value = match self
8358            .maybe_parse(|parser| parser.parse_value())?
8359            .map(Expr::Value)
8360        {
8361            Some(expr) => expr,
8362            None => Expr::Identifier(self.parse_identifier()?),
8363        };
8364
8365        Ok(Some(SqlOption::KeyValue { key, value }))
8366    }
8367
8368    pub fn parse_plain_options(&mut self) -> Result<Vec<SqlOption>, ParserError> {
8369        let mut options = Vec::new();
8370
8371        while let Some(option) = self.parse_plain_option()? {
8372            options.push(option);
8373            // Some dialects allow the options to be comma-separated; consuming an
8374            // optional comma here introduces no ambiguity for the other dialects.
8375            let _ = self.consume_token(&Token::Comma);
8376        }
8377
8378        Ok(options)
8379    }
8380
8381    pub fn parse_optional_inline_comment(&mut self) -> Result<Option<CommentDef>, ParserError> {
8382        let comment = if self.parse_keyword(Keyword::COMMENT) {
8383            let has_eq = self.consume_token(&Token::Eq);
8384            let comment = self.parse_comment_value()?;
8385            Some(if has_eq {
8386                CommentDef::WithEq(comment)
8387            } else {
8388                CommentDef::WithoutEq(comment)
8389            })
8390        } else {
8391            None
8392        };
8393        Ok(comment)
8394    }
8395
8396    pub fn parse_comment_value(&mut self) -> Result<String, ParserError> {
8397        let next_token = self.next_token();
8398        let value = match next_token.token {
8399            Token::SingleQuotedString(str) => str,
8400            Token::DollarQuotedString(str) => str.value,
8401            _ => self.expected("string literal", next_token)?,
8402        };
8403        Ok(value)
8404    }
8405
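    /// Parses an optional parenthesized procedure parameter list, e.g. a
    /// fragment such as the following (names and types are illustrative):
    /// ```sql
    /// (IN a INT, OUT b VARCHAR(20), INOUT c DATE)
    /// ```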
8406    pub fn parse_optional_procedure_parameters(
8407        &mut self,
8408    ) -> Result<Option<Vec<ProcedureParam>>, ParserError> {
8409        let mut params = vec![];
8410        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
8411            return Ok(Some(params));
8412        }
8413        loop {
8414            if let Token::Word(_) = self.peek_token().token {
8415                params.push(self.parse_procedure_param()?)
8416            }
8417            let comma = self.consume_token(&Token::Comma);
8418            if self.consume_token(&Token::RParen) {
8419                // allow a trailing comma, even though it's not in the standard
8420                break;
8421            } else if !comma {
8422                return self.expected("',' or ')' after parameter definition", self.peek_token());
8423            }
8424        }
8425        Ok(Some(params))
8426    }
8427
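    /// Parses the parenthesized column/constraint list of a `CREATE TABLE`,
    /// e.g. a fragment like the following (names are illustrative):
    /// ```sql
    /// (id INT PRIMARY KEY, email TEXT NOT NULL, CONSTRAINT uq_email UNIQUE (email))
    /// ```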
8428    pub fn parse_columns(&mut self) -> Result<(Vec<ColumnDef>, Vec<TableConstraint>), ParserError> {
8429        let mut columns = vec![];
8430        let mut constraints = vec![];
8431        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
8432            return Ok((columns, constraints));
8433        }
8434
8435        loop {
8436            if let Some(constraint) = self.parse_optional_table_constraint()? {
8437                constraints.push(constraint);
8438            } else if let Token::Word(_) = self.peek_token().token {
8439                columns.push(self.parse_column_def()?);
8440            } else {
8441                return self.expected("column name or constraint definition", self.peek_token());
8442            }
8443
8444            let comma = self.consume_token(&Token::Comma);
8445            let rparen = self.peek_token().token == Token::RParen;
8446
8447            if !comma && !rparen {
8448                return self.expected("',' or ')' after column definition", self.peek_token());
8449            };
8450
8451            if rparen
8452                && (!comma
8453                    || self.dialect.supports_column_definition_trailing_commas()
8454                    || self.options.trailing_commas)
8455            {
8456                let _ = self.consume_token(&Token::RParen);
8457                break;
8458            }
8459        }
8460
8461        Ok((columns, constraints))
8462    }
8463
8464    pub fn parse_procedure_param(&mut self) -> Result<ProcedureParam, ParserError> {
8465        let mode = if self.parse_keyword(Keyword::IN) {
8466            Some(ArgMode::In)
8467        } else if self.parse_keyword(Keyword::OUT) {
8468            Some(ArgMode::Out)
8469        } else if self.parse_keyword(Keyword::INOUT) {
8470            Some(ArgMode::InOut)
8471        } else {
8472            None
8473        };
8474        let name = self.parse_identifier()?;
8475        let data_type = self.parse_data_type()?;
8476        Ok(ProcedureParam {
8477            name,
8478            data_type,
8479            mode,
8480        })
8481    }
8482
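    /// Parses a single column definition: a name, an optional data type
    /// (SQLite allows omitting it), and any column options, e.g. (illustrative):
    /// ```sql
    /// price DECIMAL(10, 2) NOT NULL DEFAULT 0
    /// ```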
8483    pub fn parse_column_def(&mut self) -> Result<ColumnDef, ParserError> {
8484        let name = self.parse_identifier()?;
8485        let data_type = if self.is_column_type_sqlite_unspecified() {
8486            DataType::Unspecified
8487        } else {
8488            self.parse_data_type()?
8489        };
8490        let mut options = vec![];
8491        loop {
8492            if self.parse_keyword(Keyword::CONSTRAINT) {
8493                let name = Some(self.parse_identifier()?);
8494                if let Some(option) = self.parse_optional_column_option()? {
8495                    options.push(ColumnOptionDef { name, option });
8496                } else {
8497                    return self.expected(
8498                        "constraint details after CONSTRAINT <name>",
8499                        self.peek_token(),
8500                    );
8501                }
8502            } else if let Some(option) = self.parse_optional_column_option()? {
8503                options.push(ColumnOptionDef { name: None, option });
8504            } else {
8505                break;
8506            };
8507        }
8508        Ok(ColumnDef {
8509            name,
8510            data_type,
8511            options,
8512        })
8513    }
8514
8515    fn is_column_type_sqlite_unspecified(&mut self) -> bool {
8516        if dialect_of!(self is SQLiteDialect) {
8517            match self.peek_token().token {
8518                Token::Word(word) => matches!(
8519                    word.keyword,
8520                    Keyword::CONSTRAINT
8521                        | Keyword::PRIMARY
8522                        | Keyword::NOT
8523                        | Keyword::UNIQUE
8524                        | Keyword::CHECK
8525                        | Keyword::DEFAULT
8526                        | Keyword::COLLATE
8527                        | Keyword::REFERENCES
8528                        | Keyword::GENERATED
8529                        | Keyword::AS
8530                ),
8531                _ => true, // e.g. comma immediately after column name
8532            }
8533        } else {
8534            false
8535        }
8536    }
8537
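    /// Parses a single optional column option. A few dialect-dependent,
    /// illustrative examples of option fragments recognized here:
    /// ```sql
    /// NOT NULL
    /// DEFAULT CURRENT_TIMESTAMP
    /// REFERENCES users (id) ON DELETE CASCADE
    /// CHECK (qty > 0)
    /// ```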
8538    pub fn parse_optional_column_option(&mut self) -> Result<Option<ColumnOption>, ParserError> {
8539        if let Some(option) = self.dialect.parse_column_option(self)? {
8540            return option;
8541        }
8542
8543        self.with_state(
8544            ColumnDefinition,
8545            |parser| -> Result<Option<ColumnOption>, ParserError> {
8546                parser.parse_optional_column_option_inner()
8547            },
8548        )
8549    }
8550
8551    fn parse_optional_column_option_inner(&mut self) -> Result<Option<ColumnOption>, ParserError> {
8552        if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
8553            Ok(Some(ColumnOption::CharacterSet(
8554                self.parse_object_name(false)?,
8555            )))
8556        } else if self.parse_keywords(&[Keyword::COLLATE]) {
8557            Ok(Some(ColumnOption::Collation(
8558                self.parse_object_name(false)?,
8559            )))
8560        } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
8561            Ok(Some(ColumnOption::NotNull))
8562        } else if self.parse_keywords(&[Keyword::COMMENT]) {
8563            Ok(Some(ColumnOption::Comment(self.parse_comment_value()?)))
8564        } else if self.parse_keyword(Keyword::NULL) {
8565            Ok(Some(ColumnOption::Null))
8566        } else if self.parse_keyword(Keyword::DEFAULT) {
8567            Ok(Some(ColumnOption::Default(
8568                self.parse_column_option_expr()?,
8569            )))
8570        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
8571            && self.parse_keyword(Keyword::MATERIALIZED)
8572        {
8573            Ok(Some(ColumnOption::Materialized(
8574                self.parse_column_option_expr()?,
8575            )))
8576        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
8577            && self.parse_keyword(Keyword::ALIAS)
8578        {
8579            Ok(Some(ColumnOption::Alias(self.parse_column_option_expr()?)))
8580        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
8581            && self.parse_keyword(Keyword::EPHEMERAL)
8582        {
8583            // The expression is optional for the EPHEMERAL syntax, so we need to check
8584            // if the column definition has remaining tokens before parsing the expression.
8585            if matches!(self.peek_token().token, Token::Comma | Token::RParen) {
8586                Ok(Some(ColumnOption::Ephemeral(None)))
8587            } else {
8588                Ok(Some(ColumnOption::Ephemeral(Some(
8589                    self.parse_column_option_expr()?,
8590                ))))
8591            }
8592        } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
8593            let characteristics = self.parse_constraint_characteristics()?;
8594            Ok(Some(ColumnOption::Unique {
8595                is_primary: true,
8596                characteristics,
8597            }))
8598        } else if self.parse_keyword(Keyword::UNIQUE) {
8599            let characteristics = self.parse_constraint_characteristics()?;
8600            Ok(Some(ColumnOption::Unique {
8601                is_primary: false,
8602                characteristics,
8603            }))
8604        } else if self.parse_keyword(Keyword::REFERENCES) {
8605            let foreign_table = self.parse_object_name(false)?;
8606            // PostgreSQL allows omitting the column list and
8607            // uses the primary key column of the foreign table by default
8608            let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
8609            let mut on_delete = None;
8610            let mut on_update = None;
8611            loop {
8612                if on_delete.is_none() && self.parse_keywords(&[Keyword::ON, Keyword::DELETE]) {
8613                    on_delete = Some(self.parse_referential_action()?);
8614                } else if on_update.is_none()
8615                    && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
8616                {
8617                    on_update = Some(self.parse_referential_action()?);
8618                } else {
8619                    break;
8620                }
8621            }
8622            let characteristics = self.parse_constraint_characteristics()?;
8623
8624            Ok(Some(ColumnOption::ForeignKey {
8625                foreign_table,
8626                referred_columns,
8627                on_delete,
8628                on_update,
8629                characteristics,
8630            }))
8631        } else if self.parse_keyword(Keyword::CHECK) {
8632            self.expect_token(&Token::LParen)?;
8633            // since `CHECK` requires parentheses, we can parse the inner expression in ParserState::Normal
8634            let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
8635            self.expect_token(&Token::RParen)?;
8636            Ok(Some(ColumnOption::Check(expr)))
8637        } else if self.parse_keyword(Keyword::AUTO_INCREMENT)
8638            && dialect_of!(self is MySqlDialect | GenericDialect)
8639        {
8640            // Support AUTO_INCREMENT for MySQL
8641            Ok(Some(ColumnOption::DialectSpecific(vec![
8642                Token::make_keyword("AUTO_INCREMENT"),
8643            ])))
8644        } else if self.parse_keyword(Keyword::AUTOINCREMENT)
8645            && dialect_of!(self is SQLiteDialect |  GenericDialect)
8646        {
8647            // Support AUTOINCREMENT for SQLite
8648            Ok(Some(ColumnOption::DialectSpecific(vec![
8649                Token::make_keyword("AUTOINCREMENT"),
8650            ])))
8651        } else if self.parse_keyword(Keyword::ASC)
8652            && self.dialect.supports_asc_desc_in_column_definition()
8653        {
8654            // Support ASC for SQLite
8655            Ok(Some(ColumnOption::DialectSpecific(vec![
8656                Token::make_keyword("ASC"),
8657            ])))
8658        } else if self.parse_keyword(Keyword::DESC)
8659            && self.dialect.supports_asc_desc_in_column_definition()
8660        {
8661            // Support DESC for SQLite
8662            Ok(Some(ColumnOption::DialectSpecific(vec![
8663                Token::make_keyword("DESC"),
8664            ])))
8665        } else if self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
8666            && dialect_of!(self is MySqlDialect | GenericDialect)
8667        {
8668            let expr = self.parse_column_option_expr()?;
8669            Ok(Some(ColumnOption::OnUpdate(expr)))
8670        } else if self.parse_keyword(Keyword::GENERATED) {
8671            self.parse_optional_column_option_generated()
8672        } else if dialect_of!(self is BigQueryDialect | GenericDialect)
8673            && self.parse_keyword(Keyword::OPTIONS)
8674        {
8675            self.prev_token();
8676            Ok(Some(ColumnOption::Options(
8677                self.parse_options(Keyword::OPTIONS)?,
8678            )))
8679        } else if self.parse_keyword(Keyword::AS)
8680            && dialect_of!(self is MySqlDialect | SQLiteDialect | DuckDbDialect | GenericDialect)
8681        {
8682            self.parse_optional_column_option_as()
8683        } else if self.parse_keyword(Keyword::SRID)
8684            && dialect_of!(self is MySqlDialect | GenericDialect)
8685        {
8686            Ok(Some(ColumnOption::Srid(Box::new(
8687                self.parse_column_option_expr()?,
8688            ))))
8689        } else if self.parse_keyword(Keyword::IDENTITY)
8690            && dialect_of!(self is MsSqlDialect | GenericDialect)
8691        {
8692            let parameters = if self.consume_token(&Token::LParen) {
8693                let seed = self.parse_number()?;
8694                self.expect_token(&Token::Comma)?;
8695                let increment = self.parse_number()?;
8696                self.expect_token(&Token::RParen)?;
8697
8698                Some(IdentityPropertyFormatKind::FunctionCall(
8699                    IdentityParameters { seed, increment },
8700                ))
8701            } else {
8702                None
8703            };
8704            Ok(Some(ColumnOption::Identity(
8705                IdentityPropertyKind::Identity(IdentityProperty {
8706                    parameters,
8707                    order: None,
8708                }),
8709            )))
8710        } else if dialect_of!(self is SQLiteDialect | GenericDialect)
8711            && self.parse_keywords(&[Keyword::ON, Keyword::CONFLICT])
8712        {
8713            // Support ON CONFLICT for SQLite
8714            Ok(Some(ColumnOption::OnConflict(
8715                self.expect_one_of_keywords(&[
8716                    Keyword::ROLLBACK,
8717                    Keyword::ABORT,
8718                    Keyword::FAIL,
8719                    Keyword::IGNORE,
8720                    Keyword::REPLACE,
8721                ])?,
8722            )))
8723        } else {
8724            Ok(None)
8725        }
8726    }
8727
8728    /// When parsing some column option expressions we need to revert to [ParserState::Normal] since
8729    /// `NOT NULL` is allowed as an alias for `IS NOT NULL`.
8730    /// In those cases we use this helper instead of calling [Parser::parse_expr] directly.
8731    ///
8732    /// For example, consider these `CREATE TABLE` statements:
8733    /// ```sql
8734    /// CREATE TABLE foo (abc BOOL DEFAULT (42 NOT NULL) NOT NULL);
8735    /// ```
8736    /// vs
8737    /// ```sql
8738    /// CREATE TABLE foo (abc BOOL NOT NULL);
8739    /// ```
8740    ///
8741    /// In the first we should parse the inner portion of `(42 NOT NULL)` as [Expr::IsNotNull],
8742    /// whereas in both statements the trailing `NOT NULL` should only be parsed as a
8743    /// [ColumnOption::NotNull].
8744    fn parse_column_option_expr(&mut self) -> Result<Expr, ParserError> {
8745        if self.peek_token_ref().token == Token::LParen {
8746            let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_prefix())?;
8747            Ok(expr)
8748        } else {
8749            Ok(self.parse_expr()?)
8750        }
8751    }
8752
8753    pub(crate) fn parse_tag(&mut self) -> Result<Tag, ParserError> {
8754        let name = self.parse_object_name(false)?;
8755        self.expect_token(&Token::Eq)?;
8756        let value = self.parse_literal_string()?;
8757
8758        Ok(Tag::new(name, value))
8759    }
8760
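    /// Parses the remainder of a `GENERATED ...` column option (the `GENERATED`
    /// keyword has already been consumed). Illustrative fragments:
    /// ```sql
    /// id INT GENERATED ALWAYS AS IDENTITY
    /// id INT GENERATED BY DEFAULT AS IDENTITY
    /// total INT GENERATED ALWAYS AS (price * qty) STORED
    /// ```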
8761    fn parse_optional_column_option_generated(
8762        &mut self,
8763    ) -> Result<Option<ColumnOption>, ParserError> {
8764        if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS, Keyword::IDENTITY]) {
8765            let mut sequence_options = vec![];
8766            if self.expect_token(&Token::LParen).is_ok() {
8767                sequence_options = self.parse_create_sequence_options()?;
8768                self.expect_token(&Token::RParen)?;
8769            }
8770            Ok(Some(ColumnOption::Generated {
8771                generated_as: GeneratedAs::Always,
8772                sequence_options: Some(sequence_options),
8773                generation_expr: None,
8774                generation_expr_mode: None,
8775                generated_keyword: true,
8776            }))
8777        } else if self.parse_keywords(&[
8778            Keyword::BY,
8779            Keyword::DEFAULT,
8780            Keyword::AS,
8781            Keyword::IDENTITY,
8782        ]) {
8783            let mut sequence_options = vec![];
8784            if self.expect_token(&Token::LParen).is_ok() {
8785                sequence_options = self.parse_create_sequence_options()?;
8786                self.expect_token(&Token::RParen)?;
8787            }
8788            Ok(Some(ColumnOption::Generated {
8789                generated_as: GeneratedAs::ByDefault,
8790                sequence_options: Some(sequence_options),
8791                generation_expr: None,
8792                generation_expr_mode: None,
8793                generated_keyword: true,
8794            }))
8795        } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS]) {
8796            if self.expect_token(&Token::LParen).is_ok() {
8797                let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
8798                self.expect_token(&Token::RParen)?;
8799                let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
8800                    Ok((
8801                        GeneratedAs::ExpStored,
8802                        Some(GeneratedExpressionMode::Stored),
8803                    ))
8804                } else if dialect_of!(self is PostgreSqlDialect) {
8805                    // Postgres' AS IDENTITY branches are above, this one needs STORED
8806                    self.expected("STORED", self.peek_token())
8807                } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
8808                    Ok((GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual)))
8809                } else {
8810                    Ok((GeneratedAs::Always, None))
8811                }?;
8812
8813                Ok(Some(ColumnOption::Generated {
8814                    generated_as: gen_as,
8815                    sequence_options: None,
8816                    generation_expr: Some(expr),
8817                    generation_expr_mode: expr_mode,
8818                    generated_keyword: true,
8819                }))
8820            } else {
8821                Ok(None)
8822            }
8823        } else {
8824            Ok(None)
8825        }
8826    }
8827
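    /// Parses the shorthand `AS (expr) [STORED | VIRTUAL]` form of a generated
    /// column (the `AS` keyword has already been consumed), e.g. (illustrative):
    /// ```sql
    /// total INT AS (price * qty) VIRTUAL
    /// ```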
8828    fn parse_optional_column_option_as(&mut self) -> Result<Option<ColumnOption>, ParserError> {
8829        // Some DBs allow 'AS (expr)', shorthand for GENERATED ALWAYS AS
8830        self.expect_token(&Token::LParen)?;
8831        let expr = self.parse_expr()?;
8832        self.expect_token(&Token::RParen)?;
8833
8834        let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
8835            (
8836                GeneratedAs::ExpStored,
8837                Some(GeneratedExpressionMode::Stored),
8838            )
8839        } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
8840            (GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual))
8841        } else {
8842            (GeneratedAs::Always, None)
8843        };
8844
8845        Ok(Some(ColumnOption::Generated {
8846            generated_as: gen_as,
8847            sequence_options: None,
8848            generation_expr: Some(expr),
8849            generation_expr_mode: expr_mode,
8850            generated_keyword: false,
8851        }))
8852    }
8853
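    /// Parses an optional Hive-style `CLUSTERED BY` clause, e.g. a fragment
    /// such as (column names and bucket count are illustrative):
    /// ```sql
    /// CLUSTERED BY (user_id) SORTED BY (ts ASC) INTO 4 BUCKETS
    /// ```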
8854    pub fn parse_optional_clustered_by(&mut self) -> Result<Option<ClusteredBy>, ParserError> {
8855        let clustered_by = if dialect_of!(self is HiveDialect|GenericDialect)
8856            && self.parse_keywords(&[Keyword::CLUSTERED, Keyword::BY])
8857        {
8858            let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
8859
8860            let sorted_by = if self.parse_keywords(&[Keyword::SORTED, Keyword::BY]) {
8861                self.expect_token(&Token::LParen)?;
8862                let sorted_by_columns = self.parse_comma_separated(|p| p.parse_order_by_expr())?;
8863                self.expect_token(&Token::RParen)?;
8864                Some(sorted_by_columns)
8865            } else {
8866                None
8867            };
8868
8869            self.expect_keyword_is(Keyword::INTO)?;
8870            let num_buckets = self.parse_number_value()?.value;
8871            self.expect_keyword_is(Keyword::BUCKETS)?;
8872            Some(ClusteredBy {
8873                columns,
8874                sorted_by,
8875                num_buckets,
8876            })
8877        } else {
8878            None
8879        };
8880        Ok(clustered_by)
8881    }
8882
8883    pub fn parse_referential_action(&mut self) -> Result<ReferentialAction, ParserError> {
8884        if self.parse_keyword(Keyword::RESTRICT) {
8885            Ok(ReferentialAction::Restrict)
8886        } else if self.parse_keyword(Keyword::CASCADE) {
8887            Ok(ReferentialAction::Cascade)
8888        } else if self.parse_keywords(&[Keyword::SET, Keyword::NULL]) {
8889            Ok(ReferentialAction::SetNull)
8890        } else if self.parse_keywords(&[Keyword::NO, Keyword::ACTION]) {
8891            Ok(ReferentialAction::NoAction)
8892        } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
8893            Ok(ReferentialAction::SetDefault)
8894        } else {
8895            self.expected(
8896                "one of RESTRICT, CASCADE, SET NULL, NO ACTION or SET DEFAULT",
8897                self.peek_token(),
8898            )
8899        }
8900    }
8901
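    /// Parses optional constraint characteristics, e.g. trailing fragments such
    /// as the following (illustrative; they may appear in any order, each at most once):
    /// ```sql
    /// DEFERRABLE INITIALLY DEFERRED
    /// NOT DEFERRABLE
    /// NOT ENFORCED
    /// ```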
8902    pub fn parse_constraint_characteristics(
8903        &mut self,
8904    ) -> Result<Option<ConstraintCharacteristics>, ParserError> {
8905        let mut cc = ConstraintCharacteristics::default();
8906
8907        loop {
8908            if cc.deferrable.is_none() && self.parse_keywords(&[Keyword::NOT, Keyword::DEFERRABLE])
8909            {
8910                cc.deferrable = Some(false);
8911            } else if cc.deferrable.is_none() && self.parse_keyword(Keyword::DEFERRABLE) {
8912                cc.deferrable = Some(true);
8913            } else if cc.initially.is_none() && self.parse_keyword(Keyword::INITIALLY) {
8914                if self.parse_keyword(Keyword::DEFERRED) {
8915                    cc.initially = Some(DeferrableInitial::Deferred);
8916                } else if self.parse_keyword(Keyword::IMMEDIATE) {
8917                    cc.initially = Some(DeferrableInitial::Immediate);
8918                } else {
8919                    self.expected("one of DEFERRED or IMMEDIATE", self.peek_token())?;
8920                }
8921            } else if cc.enforced.is_none() && self.parse_keyword(Keyword::ENFORCED) {
8922                cc.enforced = Some(true);
8923            } else if cc.enforced.is_none()
8924                && self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED])
8925            {
8926                cc.enforced = Some(false);
8927            } else {
8928                break;
8929            }
8930        }
8931
8932        if cc.deferrable.is_some() || cc.initially.is_some() || cc.enforced.is_some() {
8933            Ok(Some(cc))
8934        } else {
8935            Ok(None)
8936        }
8937    }
8938
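    /// Parses an optional table-level constraint, e.g. fragments such as
    /// (names are placeholders; some forms are dialect-specific):
    /// ```sql
    /// CONSTRAINT uq_email UNIQUE (email)
    /// PRIMARY KEY (id)
    /// FOREIGN KEY (owner_id) REFERENCES users (id) ON DELETE SET NULL
    /// CHECK (qty > 0)
    /// ```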
8939    pub fn parse_optional_table_constraint(
8940        &mut self,
8941    ) -> Result<Option<TableConstraint>, ParserError> {
8942        let name = if self.parse_keyword(Keyword::CONSTRAINT) {
8943            Some(self.parse_identifier()?)
8944        } else {
8945            None
8946        };
8947
8948        let next_token = self.next_token();
8949        match next_token.token {
8950            Token::Word(w) if w.keyword == Keyword::UNIQUE => {
8951                let index_type_display = self.parse_index_type_display();
8952                if !dialect_of!(self is GenericDialect | MySqlDialect)
8953                    && !index_type_display.is_none()
8954                {
8955                    return self
8956                        .expected("`index_name` or `(column_name [, ...])`", self.peek_token());
8957                }
8958
8959                let nulls_distinct = self.parse_optional_nulls_distinct()?;
8960
8961                // optional index name
8962                let index_name = self.parse_optional_ident()?;
8963                let index_type = self.parse_optional_using_then_index_type()?;
8964
8965                let columns = self.parse_parenthesized_index_column_list()?;
8966                let index_options = self.parse_index_options()?;
8967                let characteristics = self.parse_constraint_characteristics()?;
8968                Ok(Some(TableConstraint::Unique {
8969                    name,
8970                    index_name,
8971                    index_type_display,
8972                    index_type,
8973                    columns,
8974                    index_options,
8975                    characteristics,
8976                    nulls_distinct,
8977                }))
8978            }
8979            Token::Word(w) if w.keyword == Keyword::PRIMARY => {
8980                // `PRIMARY` must always be followed by `KEY`
8981                self.expect_keyword_is(Keyword::KEY)?;
8982
8983                // optional index name
8984                let index_name = self.parse_optional_ident()?;
8985                let index_type = self.parse_optional_using_then_index_type()?;
8986
8987                let columns = self.parse_parenthesized_index_column_list()?;
8988                let index_options = self.parse_index_options()?;
8989                let characteristics = self.parse_constraint_characteristics()?;
8990                Ok(Some(TableConstraint::PrimaryKey {
8991                    name,
8992                    index_name,
8993                    index_type,
8994                    columns,
8995                    index_options,
8996                    characteristics,
8997                }))
8998            }
8999            Token::Word(w) if w.keyword == Keyword::FOREIGN => {
9000                self.expect_keyword_is(Keyword::KEY)?;
9001                let index_name = self.parse_optional_ident()?;
9002                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
9003                self.expect_keyword_is(Keyword::REFERENCES)?;
9004                let foreign_table = self.parse_object_name(false)?;
9005                let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
9006                let mut on_delete = None;
9007                let mut on_update = None;
9008                loop {
9009                    if on_delete.is_none() && self.parse_keywords(&[Keyword::ON, Keyword::DELETE]) {
9010                        on_delete = Some(self.parse_referential_action()?);
9011                    } else if on_update.is_none()
9012                        && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
9013                    {
9014                        on_update = Some(self.parse_referential_action()?);
9015                    } else {
9016                        break;
9017                    }
9018                }
9019
9020                let characteristics = self.parse_constraint_characteristics()?;
9021
9022                Ok(Some(TableConstraint::ForeignKey {
9023                    name,
9024                    index_name,
9025                    columns,
9026                    foreign_table,
9027                    referred_columns,
9028                    on_delete,
9029                    on_update,
9030                    characteristics,
9031                }))
9032            }
9033            Token::Word(w) if w.keyword == Keyword::CHECK => {
9034                self.expect_token(&Token::LParen)?;
9035                let expr = Box::new(self.parse_expr()?);
9036                self.expect_token(&Token::RParen)?;
9037
9038                let enforced = if self.parse_keyword(Keyword::ENFORCED) {
9039                    Some(true)
9040                } else if self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED]) {
9041                    Some(false)
9042                } else {
9043                    None
9044                };
9045
9046                Ok(Some(TableConstraint::Check {
9047                    name,
9048                    expr,
9049                    enforced,
9050                }))
9051            }
9052            Token::Word(w)
9053                if (w.keyword == Keyword::INDEX || w.keyword == Keyword::KEY)
9054                    && dialect_of!(self is GenericDialect | MySqlDialect)
9055                    && name.is_none() =>
9056            {
9057                let display_as_key = w.keyword == Keyword::KEY;
9058
9059                let name = match self.peek_token().token {
9060                    Token::Word(word) if word.keyword == Keyword::USING => None,
9061                    _ => self.parse_optional_ident()?,
9062                };
9063
9064                let index_type = self.parse_optional_using_then_index_type()?;
9065                let columns = self.parse_parenthesized_index_column_list()?;
9066                let index_options = self.parse_index_options()?;
9067
9068                Ok(Some(TableConstraint::Index {
9069                    display_as_key,
9070                    name,
9071                    index_type,
9072                    columns,
9073                    index_options,
9074                }))
9075            }
9076            Token::Word(w)
9077                if (w.keyword == Keyword::FULLTEXT || w.keyword == Keyword::SPATIAL)
9078                    && dialect_of!(self is GenericDialect | MySqlDialect) =>
9079            {
9080                if let Some(name) = name {
9081                    return self.expected(
9082                        "FULLTEXT or SPATIAL option without constraint name",
9083                        TokenWithSpan {
9084                            token: Token::make_keyword(&name.to_string()),
9085                            span: next_token.span,
9086                        },
9087                    );
9088                }
9089
9090                let fulltext = w.keyword == Keyword::FULLTEXT;
9091
9092                let index_type_display = self.parse_index_type_display();
9093
9094                let opt_index_name = self.parse_optional_ident()?;
9095
9096                let columns = self.parse_parenthesized_index_column_list()?;
9097
9098                Ok(Some(TableConstraint::FulltextOrSpatial {
9099                    fulltext,
9100                    index_type_display,
9101                    opt_index_name,
9102                    columns,
9103                }))
9104            }
9105            _ => {
9106                if name.is_some() {
9107                    self.expected("PRIMARY, UNIQUE, FOREIGN, or CHECK", next_token)
9108                } else {
9109                    self.prev_token();
9110                    Ok(None)
9111                }
9112            }
9113        }
9114    }
9115
9116    fn parse_optional_nulls_distinct(&mut self) -> Result<NullsDistinctOption, ParserError> {
9117        Ok(if self.parse_keyword(Keyword::NULLS) {
9118            let not = self.parse_keyword(Keyword::NOT);
9119            self.expect_keyword_is(Keyword::DISTINCT)?;
9120            if not {
9121                NullsDistinctOption::NotDistinct
9122            } else {
9123                NullsDistinctOption::Distinct
9124            }
9125        } else {
9126            NullsDistinctOption::None
9127        })
9128    }
9129
9130    pub fn maybe_parse_options(
9131        &mut self,
9132        keyword: Keyword,
9133    ) -> Result<Option<Vec<SqlOption>>, ParserError> {
9134        if let Token::Word(word) = self.peek_token().token {
9135            if word.keyword == keyword {
9136                return Ok(Some(self.parse_options(keyword)?));
9137            }
9138        };
9139        Ok(None)
9140    }
9141
9142    pub fn parse_options(&mut self, keyword: Keyword) -> Result<Vec<SqlOption>, ParserError> {
9143        if self.parse_keyword(keyword) {
9144            self.expect_token(&Token::LParen)?;
9145            let options = self.parse_comma_separated0(Parser::parse_sql_option, Token::RParen)?;
9146            self.expect_token(&Token::RParen)?;
9147            Ok(options)
9148        } else {
9149            Ok(vec![])
9150        }
9151    }
9152
9153    pub fn parse_options_with_keywords(
9154        &mut self,
9155        keywords: &[Keyword],
9156    ) -> Result<Vec<SqlOption>, ParserError> {
9157        if self.parse_keywords(keywords) {
9158            self.expect_token(&Token::LParen)?;
9159            let options = self.parse_comma_separated(Parser::parse_sql_option)?;
9160            self.expect_token(&Token::RParen)?;
9161            Ok(options)
9162        } else {
9163            Ok(vec![])
9164        }
9165    }
9166
9167    pub fn parse_index_type(&mut self) -> Result<IndexType, ParserError> {
9168        Ok(if self.parse_keyword(Keyword::BTREE) {
9169            IndexType::BTree
9170        } else if self.parse_keyword(Keyword::HASH) {
9171            IndexType::Hash
9172        } else if self.parse_keyword(Keyword::GIN) {
9173            IndexType::GIN
9174        } else if self.parse_keyword(Keyword::GIST) {
9175            IndexType::GiST
9176        } else if self.parse_keyword(Keyword::SPGIST) {
9177            IndexType::SPGiST
9178        } else if self.parse_keyword(Keyword::BRIN) {
9179            IndexType::BRIN
9180        } else if self.parse_keyword(Keyword::BLOOM) {
9181            IndexType::Bloom
9182        } else {
9183            IndexType::Custom(self.parse_identifier()?)
9184        })
9185    }
9186
9187    /// Optionally parse the `USING` keyword, followed by an [IndexType]
9188    /// Example:
9189    /// ```sql
9190    /// USING BTREE (name, age DESC)
9191    /// ```
9192    pub fn parse_optional_using_then_index_type(
9193        &mut self,
9194    ) -> Result<Option<IndexType>, ParserError> {
9195        if self.parse_keyword(Keyword::USING) {
9196            Ok(Some(self.parse_index_type()?))
9197        } else {
9198            Ok(None)
9199        }
9200    }
9201
9202    /// Parse an optional `ident`; typically this is a name such as
9203    /// `window_name`, `index_name`, ...
9204    pub fn parse_optional_ident(&mut self) -> Result<Option<Ident>, ParserError> {
9205        self.maybe_parse(|parser| parser.parse_identifier())
9206    }
9207
9208    #[must_use]
9209    pub fn parse_index_type_display(&mut self) -> KeyOrIndexDisplay {
9210        if self.parse_keyword(Keyword::KEY) {
9211            KeyOrIndexDisplay::Key
9212        } else if self.parse_keyword(Keyword::INDEX) {
9213            KeyOrIndexDisplay::Index
9214        } else {
9215            KeyOrIndexDisplay::None
9216        }
9217    }
9218
9219    pub fn parse_optional_index_option(&mut self) -> Result<Option<IndexOption>, ParserError> {
9220        if let Some(index_type) = self.parse_optional_using_then_index_type()? {
9221            Ok(Some(IndexOption::Using(index_type)))
9222        } else if self.parse_keyword(Keyword::COMMENT) {
9223            let s = self.parse_literal_string()?;
9224            Ok(Some(IndexOption::Comment(s)))
9225        } else {
9226            Ok(None)
9227        }
9228    }
9229
9230    pub fn parse_index_options(&mut self) -> Result<Vec<IndexOption>, ParserError> {
9231        let mut options = Vec::new();
9232
9233        loop {
9234            match self.parse_optional_index_option()? {
9235                Some(index_option) => options.push(index_option),
9236                None => return Ok(options),
9237            }
9238        }
9239    }
9240
9241    pub fn parse_sql_option(&mut self) -> Result<SqlOption, ParserError> {
9242        let is_mssql = dialect_of!(self is MsSqlDialect|GenericDialect);
9243
9244        match self.peek_token().token {
9245            Token::Word(w) if w.keyword == Keyword::HEAP && is_mssql => {
9246                Ok(SqlOption::Ident(self.parse_identifier()?))
9247            }
9248            Token::Word(w) if w.keyword == Keyword::PARTITION && is_mssql => {
9249                self.parse_option_partition()
9250            }
9251            Token::Word(w) if w.keyword == Keyword::CLUSTERED && is_mssql => {
9252                self.parse_option_clustered()
9253            }
9254            _ => {
9255                let name = self.parse_identifier()?;
9256                self.expect_token(&Token::Eq)?;
9257                let value = self.parse_expr()?;
9258
9259                Ok(SqlOption::KeyValue { key: name, value })
9260            }
9261        }
9262    }
9263
9264    pub fn parse_option_clustered(&mut self) -> Result<SqlOption, ParserError> {
9265        if self.parse_keywords(&[
9266            Keyword::CLUSTERED,
9267            Keyword::COLUMNSTORE,
9268            Keyword::INDEX,
9269            Keyword::ORDER,
9270        ]) {
9271            Ok(SqlOption::Clustered(
9272                TableOptionsClustered::ColumnstoreIndexOrder(
9273                    self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
9274                ),
9275            ))
9276        } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::COLUMNSTORE, Keyword::INDEX]) {
9277            Ok(SqlOption::Clustered(
9278                TableOptionsClustered::ColumnstoreIndex,
9279            ))
9280        } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::INDEX]) {
9281            self.expect_token(&Token::LParen)?;
9282
9283            let columns = self.parse_comma_separated(|p| {
9284                let name = p.parse_identifier()?;
9285                let asc = p.parse_asc_desc();
9286
9287                Ok(ClusteredIndex { name, asc })
9288            })?;
9289
9290            self.expect_token(&Token::RParen)?;
9291
9292            Ok(SqlOption::Clustered(TableOptionsClustered::Index(columns)))
9293        } else {
9294            Err(ParserError::ParserError(
9295                "invalid CLUSTERED sequence".to_string(),
9296            ))
9297        }
9298    }
9299
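    /// Parses an MsSql-style `PARTITION` table option, e.g. a fragment such as
    /// (column name and boundary values are illustrative):
    /// ```sql
    /// PARTITION (order_date RANGE RIGHT FOR VALUES ('2020-01-01', '2021-01-01'))
    /// ```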
9300    pub fn parse_option_partition(&mut self) -> Result<SqlOption, ParserError> {
9301        self.expect_keyword_is(Keyword::PARTITION)?;
9302        self.expect_token(&Token::LParen)?;
9303        let column_name = self.parse_identifier()?;
9304
9305        self.expect_keyword_is(Keyword::RANGE)?;
9306        let range_direction = if self.parse_keyword(Keyword::LEFT) {
9307            Some(PartitionRangeDirection::Left)
9308        } else if self.parse_keyword(Keyword::RIGHT) {
9309            Some(PartitionRangeDirection::Right)
9310        } else {
9311            None
9312        };
9313
9314        self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;
9315        self.expect_token(&Token::LParen)?;
9316
9317        let for_values = self.parse_comma_separated(Parser::parse_expr)?;
9318
9319        self.expect_token(&Token::RParen)?;
9320        self.expect_token(&Token::RParen)?;
9321
9322        Ok(SqlOption::Partition {
9323            column_name,
9324            range_direction,
9325            for_values,
9326        })
9327    }
9328
9329    pub fn parse_partition(&mut self) -> Result<Partition, ParserError> {
9330        self.expect_token(&Token::LParen)?;
9331        let partitions = self.parse_comma_separated(Parser::parse_expr)?;
9332        self.expect_token(&Token::RParen)?;
9333        Ok(Partition::Partitions(partitions))
9334    }
9335
9336    pub fn parse_projection_select(&mut self) -> Result<ProjectionSelect, ParserError> {
9337        self.expect_token(&Token::LParen)?;
9338        self.expect_keyword_is(Keyword::SELECT)?;
9339        let projection = self.parse_projection()?;
9340        let group_by = self.parse_optional_group_by()?;
9341        let order_by = self.parse_optional_order_by()?;
9342        self.expect_token(&Token::RParen)?;
9343        Ok(ProjectionSelect {
9344            projection,
9345            group_by,
9346            order_by,
9347        })
9348    }

9349    pub fn parse_alter_table_add_projection(&mut self) -> Result<AlterTableOperation, ParserError> {
9350        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
9351        let name = self.parse_identifier()?;
9352        let query = self.parse_projection_select()?;
9353        Ok(AlterTableOperation::AddProjection {
9354            if_not_exists,
9355            name,
9356            select: query,
9357        })
9358    }
9359
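    /// Parses a single `ALTER TABLE` operation. A few illustrative,
    /// dialect-dependent statements whose operation portion is handled here:
    /// ```sql
    /// ALTER TABLE t ADD COLUMN IF NOT EXISTS c INT;
    /// ALTER TABLE t RENAME COLUMN a TO b;
    /// ALTER TABLE t DISABLE TRIGGER trg;
    /// ```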
9360    pub fn parse_alter_table_operation(&mut self) -> Result<AlterTableOperation, ParserError> {
9361        let operation = if self.parse_keyword(Keyword::ADD) {
9362            if let Some(constraint) = self.parse_optional_table_constraint()? {
9363                let not_valid = self.parse_keywords(&[Keyword::NOT, Keyword::VALID]);
9364                AlterTableOperation::AddConstraint {
9365                    constraint,
9366                    not_valid,
9367                }
9368            } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
9369                && self.parse_keyword(Keyword::PROJECTION)
9370            {
9371                return self.parse_alter_table_add_projection();
9372            } else {
9373                let if_not_exists =
9374                    self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
9375                let mut new_partitions = vec![];
9376                loop {
9377                    if self.parse_keyword(Keyword::PARTITION) {
9378                        new_partitions.push(self.parse_partition()?);
9379                    } else {
9380                        break;
9381                    }
9382                }
9383                if !new_partitions.is_empty() {
9384                    AlterTableOperation::AddPartitions {
9385                        if_not_exists,
9386                        new_partitions,
9387                    }
9388                } else {
9389                    let column_keyword = self.parse_keyword(Keyword::COLUMN);
9390
9391                    let if_not_exists = if dialect_of!(self is PostgreSqlDialect | BigQueryDialect | DuckDbDialect | GenericDialect)
9392                    {
9393                        self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS])
9394                            || if_not_exists
9395                    } else {
9396                        false
9397                    };
9398
9399                    let column_def = self.parse_column_def()?;
9400
9401                    let column_position = self.parse_column_position()?;
9402
9403                    AlterTableOperation::AddColumn {
9404                        column_keyword,
9405                        if_not_exists,
9406                        column_def,
9407                        column_position,
9408                    }
9409                }
9410            }
9411        } else if self.parse_keyword(Keyword::RENAME) {
9412            if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::CONSTRAINT) {
9413                let old_name = self.parse_identifier()?;
9414                self.expect_keyword_is(Keyword::TO)?;
9415                let new_name = self.parse_identifier()?;
9416                AlterTableOperation::RenameConstraint { old_name, new_name }
9417            } else if self.parse_keyword(Keyword::TO) {
9418                let table_name = self.parse_object_name(false)?;
9419                AlterTableOperation::RenameTable {
9420                    table_name: RenameTableNameKind::To(table_name),
9421                }
9422            } else if self.parse_keyword(Keyword::AS) {
9423                let table_name = self.parse_object_name(false)?;
9424                AlterTableOperation::RenameTable {
9425                    table_name: RenameTableNameKind::As(table_name),
9426                }
9427            } else {
9428                let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
9429                let old_column_name = self.parse_identifier()?;
9430                self.expect_keyword_is(Keyword::TO)?;
9431                let new_column_name = self.parse_identifier()?;
9432                AlterTableOperation::RenameColumn {
9433                    old_column_name,
9434                    new_column_name,
9435                }
9436            }
9437        } else if self.parse_keyword(Keyword::DISABLE) {
9438            if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
9439                AlterTableOperation::DisableRowLevelSecurity {}
9440            } else if self.parse_keyword(Keyword::RULE) {
9441                let name = self.parse_identifier()?;
9442                AlterTableOperation::DisableRule { name }
9443            } else if self.parse_keyword(Keyword::TRIGGER) {
9444                let name = self.parse_identifier()?;
9445                AlterTableOperation::DisableTrigger { name }
9446            } else {
9447                return self.expected(
9448                    "ROW LEVEL SECURITY, RULE, or TRIGGER after DISABLE",
9449                    self.peek_token(),
9450                );
9451            }
9452        } else if self.parse_keyword(Keyword::ENABLE) {
9453            if self.parse_keywords(&[Keyword::ALWAYS, Keyword::RULE]) {
9454                let name = self.parse_identifier()?;
9455                AlterTableOperation::EnableAlwaysRule { name }
9456            } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::TRIGGER]) {
9457                let name = self.parse_identifier()?;
9458                AlterTableOperation::EnableAlwaysTrigger { name }
9459            } else if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
9460                AlterTableOperation::EnableRowLevelSecurity {}
9461            } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::RULE]) {
9462                let name = self.parse_identifier()?;
9463                AlterTableOperation::EnableReplicaRule { name }
9464            } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::TRIGGER]) {
9465                let name = self.parse_identifier()?;
9466                AlterTableOperation::EnableReplicaTrigger { name }
9467            } else if self.parse_keyword(Keyword::RULE) {
9468                let name = self.parse_identifier()?;
9469                AlterTableOperation::EnableRule { name }
9470            } else if self.parse_keyword(Keyword::TRIGGER) {
9471                let name = self.parse_identifier()?;
9472                AlterTableOperation::EnableTrigger { name }
9473            } else {
9474                return self.expected(
9475                    "ALWAYS, REPLICA, ROW LEVEL SECURITY, RULE, or TRIGGER after ENABLE",
9476                    self.peek_token(),
9477                );
9478            }
9479        } else if self.parse_keywords(&[Keyword::CLEAR, Keyword::PROJECTION])
9480            && dialect_of!(self is ClickHouseDialect|GenericDialect)
9481        {
9482            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9483            let name = self.parse_identifier()?;
9484            let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
9485                Some(self.parse_identifier()?)
9486            } else {
9487                None
9488            };
9489            AlterTableOperation::ClearProjection {
9490                if_exists,
9491                name,
9492                partition,
9493            }
9494        } else if self.parse_keywords(&[Keyword::MATERIALIZE, Keyword::PROJECTION])
9495            && dialect_of!(self is ClickHouseDialect|GenericDialect)
9496        {
9497            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9498            let name = self.parse_identifier()?;
9499            let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
9500                Some(self.parse_identifier()?)
9501            } else {
9502                None
9503            };
9504            AlterTableOperation::MaterializeProjection {
9505                if_exists,
9506                name,
9507                partition,
9508            }
9509        } else if self.parse_keyword(Keyword::DROP) {
9510            if self.parse_keywords(&[Keyword::IF, Keyword::EXISTS, Keyword::PARTITION]) {
9511                self.expect_token(&Token::LParen)?;
9512                let partitions = self.parse_comma_separated(Parser::parse_expr)?;
9513                self.expect_token(&Token::RParen)?;
9514                AlterTableOperation::DropPartitions {
9515                    partitions,
9516                    if_exists: true,
9517                }
9518            } else if self.parse_keyword(Keyword::PARTITION) {
9519                self.expect_token(&Token::LParen)?;
9520                let partitions = self.parse_comma_separated(Parser::parse_expr)?;
9521                self.expect_token(&Token::RParen)?;
9522                AlterTableOperation::DropPartitions {
9523                    partitions,
9524                    if_exists: false,
9525                }
9526            } else if self.parse_keyword(Keyword::CONSTRAINT) {
9527                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9528                let name = self.parse_identifier()?;
9529                let drop_behavior = self.parse_optional_drop_behavior();
9530                AlterTableOperation::DropConstraint {
9531                    if_exists,
9532                    name,
9533                    drop_behavior,
9534                }
9535            } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
9536                let drop_behavior = self.parse_optional_drop_behavior();
9537                AlterTableOperation::DropPrimaryKey { drop_behavior }
9538            } else if self.parse_keywords(&[Keyword::FOREIGN, Keyword::KEY]) {
9539                let name = self.parse_identifier()?;
9540                let drop_behavior = self.parse_optional_drop_behavior();
9541                AlterTableOperation::DropForeignKey {
9542                    name,
9543                    drop_behavior,
9544                }
9545            } else if self.parse_keyword(Keyword::INDEX) {
9546                let name = self.parse_identifier()?;
9547                AlterTableOperation::DropIndex { name }
9548            } else if self.parse_keyword(Keyword::PROJECTION)
9549                && dialect_of!(self is ClickHouseDialect|GenericDialect)
9550            {
9551                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9552                let name = self.parse_identifier()?;
9553                AlterTableOperation::DropProjection { if_exists, name }
9554            } else if self.parse_keywords(&[Keyword::CLUSTERING, Keyword::KEY]) {
9555                AlterTableOperation::DropClusteringKey
9556            } else {
9557                let has_column_keyword = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
9558                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9559                let column_names = if self.dialect.supports_comma_separated_drop_column_list() {
9560                    self.parse_comma_separated(Parser::parse_identifier)?
9561                } else {
9562                    vec![self.parse_identifier()?]
9563                };
9564                let drop_behavior = self.parse_optional_drop_behavior();
9565                AlterTableOperation::DropColumn {
9566                    has_column_keyword,
9567                    column_names,
9568                    if_exists,
9569                    drop_behavior,
9570                }
9571            }
9572        } else if self.parse_keyword(Keyword::PARTITION) {
9573            self.expect_token(&Token::LParen)?;
9574            let before = self.parse_comma_separated(Parser::parse_expr)?;
9575            self.expect_token(&Token::RParen)?;
9576            self.expect_keyword_is(Keyword::RENAME)?;
9577            self.expect_keywords(&[Keyword::TO, Keyword::PARTITION])?;
9578            self.expect_token(&Token::LParen)?;
9579            let renames = self.parse_comma_separated(Parser::parse_expr)?;
9580            self.expect_token(&Token::RParen)?;
9581            AlterTableOperation::RenamePartitions {
9582                old_partitions: before,
9583                new_partitions: renames,
9584            }
9585        } else if self.parse_keyword(Keyword::CHANGE) {
9586            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
9587            let old_name = self.parse_identifier()?;
9588            let new_name = self.parse_identifier()?;
9589            let data_type = self.parse_data_type()?;
9590            let mut options = vec![];
9591            while let Some(option) = self.parse_optional_column_option()? {
9592                options.push(option);
9593            }
9594
9595            let column_position = self.parse_column_position()?;
9596
9597            AlterTableOperation::ChangeColumn {
9598                old_name,
9599                new_name,
9600                data_type,
9601                options,
9602                column_position,
9603            }
9604        } else if self.parse_keyword(Keyword::MODIFY) {
9605            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
9606            let col_name = self.parse_identifier()?;
9607            let data_type = self.parse_data_type()?;
9608            let mut options = vec![];
9609            while let Some(option) = self.parse_optional_column_option()? {
9610                options.push(option);
9611            }
9612
9613            let column_position = self.parse_column_position()?;
9614
9615            AlterTableOperation::ModifyColumn {
9616                col_name,
9617                data_type,
9618                options,
9619                column_position,
9620            }
9621        } else if self.parse_keyword(Keyword::ALTER) {
9622            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
9623            let column_name = self.parse_identifier()?;
9624            let is_postgresql = dialect_of!(self is PostgreSqlDialect);
9625
9626            let op: AlterColumnOperation = if self.parse_keywords(&[
9627                Keyword::SET,
9628                Keyword::NOT,
9629                Keyword::NULL,
9630            ]) {
9631                AlterColumnOperation::SetNotNull {}
9632            } else if self.parse_keywords(&[Keyword::DROP, Keyword::NOT, Keyword::NULL]) {
9633                AlterColumnOperation::DropNotNull {}
9634            } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
9635                AlterColumnOperation::SetDefault {
9636                    value: self.parse_expr()?,
9637                }
9638            } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
9639                AlterColumnOperation::DropDefault {}
9640            } else if self.parse_keywords(&[Keyword::SET, Keyword::DATA, Keyword::TYPE]) {
9641                self.parse_set_data_type(true)?
9642            } else if self.parse_keyword(Keyword::TYPE) {
9643                self.parse_set_data_type(false)?
9644            } else if self.parse_keywords(&[Keyword::SET, Keyword::OPTIONS]) {
9645                self.expect_token(&Token::LParen)?;
9646                let options = self.parse_comma_separated(Parser::parse_sql_option)?;
9647                self.expect_token(&Token::RParen)?;
9648                AlterColumnOperation::SetOptions { options }
9649            } else if self.parse_keywords(&[Keyword::ADD, Keyword::GENERATED]) {
9650                let generated_as = if self.parse_keyword(Keyword::ALWAYS) {
9651                    Some(GeneratedAs::Always)
9652                } else if self.parse_keywords(&[Keyword::BY, Keyword::DEFAULT]) {
9653                    Some(GeneratedAs::ByDefault)
9654                } else {
9655                    None
9656                };
9657
9658                self.expect_keywords(&[Keyword::AS, Keyword::IDENTITY])?;
9659
9660                let mut sequence_options: Option<Vec<SequenceOptions>> = None;
9661
9662                if self.peek_token().token == Token::LParen {
9663                    self.expect_token(&Token::LParen)?;
9664                    sequence_options = Some(self.parse_create_sequence_options()?);
9665                    self.expect_token(&Token::RParen)?;
9666                }
9667
9668                AlterColumnOperation::AddGenerated {
9669                    generated_as,
9670                    sequence_options,
9671                }
9672            } else {
9673                let message = if is_postgresql {
9674                    "SET/DROP NOT NULL, SET DEFAULT, SET DATA TYPE, or ADD GENERATED after ALTER COLUMN"
9675                } else {
9676                    "SET/DROP NOT NULL, SET DEFAULT, or SET DATA TYPE after ALTER COLUMN"
9677                };
9678
9679                return self.expected(message, self.peek_token());
9680            };
9681            AlterTableOperation::AlterColumn { column_name, op }
9682        } else if self.parse_keyword(Keyword::SWAP) {
9683            self.expect_keyword_is(Keyword::WITH)?;
9684            let table_name = self.parse_object_name(false)?;
9685            AlterTableOperation::SwapWith { table_name }
9686        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect)
9687            && self.parse_keywords(&[Keyword::OWNER, Keyword::TO])
9688        {
9689            let new_owner = self.parse_owner()?;
9690            AlterTableOperation::OwnerTo { new_owner }
9691        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
9692            && self.parse_keyword(Keyword::ATTACH)
9693        {
9694            AlterTableOperation::AttachPartition {
9695                partition: self.parse_part_or_partition()?,
9696            }
9697        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
9698            && self.parse_keyword(Keyword::DETACH)
9699        {
9700            AlterTableOperation::DetachPartition {
9701                partition: self.parse_part_or_partition()?,
9702            }
9703        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
9704            && self.parse_keyword(Keyword::FREEZE)
9705        {
9706            let partition = self.parse_part_or_partition()?;
9707            let with_name = if self.parse_keyword(Keyword::WITH) {
9708                self.expect_keyword_is(Keyword::NAME)?;
9709                Some(self.parse_identifier()?)
9710            } else {
9711                None
9712            };
9713            AlterTableOperation::FreezePartition {
9714                partition,
9715                with_name,
9716            }
9717        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
9718            && self.parse_keyword(Keyword::UNFREEZE)
9719        {
9720            let partition = self.parse_part_or_partition()?;
9721            let with_name = if self.parse_keyword(Keyword::WITH) {
9722                self.expect_keyword_is(Keyword::NAME)?;
9723                Some(self.parse_identifier()?)
9724            } else {
9725                None
9726            };
9727            AlterTableOperation::UnfreezePartition {
9728                partition,
9729                with_name,
9730            }
9731        } else if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
9732            self.expect_token(&Token::LParen)?;
9733            let exprs = self.parse_comma_separated(|parser| parser.parse_expr())?;
9734            self.expect_token(&Token::RParen)?;
9735            AlterTableOperation::ClusterBy { exprs }
9736        } else if self.parse_keywords(&[Keyword::SUSPEND, Keyword::RECLUSTER]) {
9737            AlterTableOperation::SuspendRecluster
9738        } else if self.parse_keywords(&[Keyword::RESUME, Keyword::RECLUSTER]) {
9739            AlterTableOperation::ResumeRecluster
9740        } else if self.parse_keyword(Keyword::LOCK) {
9741            let equals = self.consume_token(&Token::Eq);
9742            let lock = match self.parse_one_of_keywords(&[
9743                Keyword::DEFAULT,
9744                Keyword::EXCLUSIVE,
9745                Keyword::NONE,
9746                Keyword::SHARED,
9747            ]) {
9748                Some(Keyword::DEFAULT) => AlterTableLock::Default,
9749                Some(Keyword::EXCLUSIVE) => AlterTableLock::Exclusive,
9750                Some(Keyword::NONE) => AlterTableLock::None,
9751                Some(Keyword::SHARED) => AlterTableLock::Shared,
9752                _ => self.expected(
9753                    "DEFAULT, EXCLUSIVE, NONE or SHARED after LOCK [=]",
9754                    self.peek_token(),
9755                )?,
9756            };
9757            AlterTableOperation::Lock { equals, lock }
9758        } else if self.parse_keyword(Keyword::ALGORITHM) {
9759            let equals = self.consume_token(&Token::Eq);
9760            let algorithm = match self.parse_one_of_keywords(&[
9761                Keyword::DEFAULT,
9762                Keyword::INSTANT,
9763                Keyword::INPLACE,
9764                Keyword::COPY,
9765            ]) {
9766                Some(Keyword::DEFAULT) => AlterTableAlgorithm::Default,
9767                Some(Keyword::INSTANT) => AlterTableAlgorithm::Instant,
9768                Some(Keyword::INPLACE) => AlterTableAlgorithm::Inplace,
9769                Some(Keyword::COPY) => AlterTableAlgorithm::Copy,
9770                _ => self.expected(
9771                    "DEFAULT, INSTANT, INPLACE, or COPY after ALGORITHM [=]",
9772                    self.peek_token(),
9773                )?,
9774            };
9775            AlterTableOperation::Algorithm { equals, algorithm }
9776        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
9777            let equals = self.consume_token(&Token::Eq);
9778            let value = self.parse_number_value()?;
9779            AlterTableOperation::AutoIncrement { equals, value }
9780        } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::IDENTITY]) {
9781            let identity = if self.parse_keyword(Keyword::NONE) {
9782                ReplicaIdentity::None
9783            } else if self.parse_keyword(Keyword::FULL) {
9784                ReplicaIdentity::Full
9785            } else if self.parse_keyword(Keyword::DEFAULT) {
9786                ReplicaIdentity::Default
9787            } else if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
9788                ReplicaIdentity::Index(self.parse_identifier()?)
9789            } else {
9790                return self.expected(
9791                    "NONE, FULL, DEFAULT, or USING INDEX index_name after REPLICA IDENTITY",
9792                    self.peek_token(),
9793                );
9794            };
9795
9796            AlterTableOperation::ReplicaIdentity { identity }
9797        } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT, Keyword::COLLATE]) {
9798            let collate = self.parse_expr()?;
9799            AlterTableOperation::SetDefaultCollate { collate }
9800        } else if self.parse_keywords(&[Keyword::VALIDATE, Keyword::CONSTRAINT]) {
9801            let name = self.parse_identifier()?;
9802            AlterTableOperation::ValidateConstraint { name }
9803        } else {
9804            let mut options =
9805                self.parse_options_with_keywords(&[Keyword::SET, Keyword::TBLPROPERTIES])?;
9806            if !options.is_empty() {
9807                AlterTableOperation::SetTblProperties {
9808                    table_properties: options,
9809                }
9810            } else {
9811                options =
9812                    self.parse_options_with_keywords(&[Keyword::SET, Keyword::OPTIONS])?;
9813                if !options.is_empty() {
9814                    AlterTableOperation::SetTblProperties {
9815                        table_properties: options,
9816                    }
9817                } else {
9818                    options = self.parse_options(Keyword::SET)?;
9819                    if !options.is_empty() {
9820                        AlterTableOperation::SetOptionsParens { options }
9821                    } else {
9822                        return self.expected(
9823                            "ADD, RENAME, PARTITION, SWAP, DROP, REPLICA IDENTITY, SET, SET OPTIONS, or SET TBLPROPERTIES after ALTER TABLE",
9824                            self.peek_token(),
9825                        );
9826                    }
9827                }
9828            }
9829        };
9830        Ok(operation)
9831    }
9832
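    /// Parse the data type (and optional `USING <expr>`) that follows
    /// `ALTER COLUMN <name> [SET DATA] TYPE`; `had_set` records whether the
    /// `SET DATA` form was used.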
9833    fn parse_set_data_type(&mut self, had_set: bool) -> Result<AlterColumnOperation, ParserError> {
9834        let data_type = self.parse_data_type()?;
9835        let using = if self.dialect.supports_alter_column_type_using()
9836            && self.parse_keyword(Keyword::USING)
9837        {
9838            Some(self.parse_expr()?)
9839        } else {
9840            None
9841        };
9842        Ok(AlterColumnOperation::SetDataType {
9843            data_type,
9844            using,
9845            had_set,
9846        })
9847    }
9848
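    /// Parse the ClickHouse `PART <expr>` or `PARTITION <expr>` argument used by
    /// operations such as `ATTACH`, `DETACH`, `FREEZE`, and `UNFREEZE`.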
9849    fn parse_part_or_partition(&mut self) -> Result<Partition, ParserError> {
9850        let keyword = self.expect_one_of_keywords(&[Keyword::PART, Keyword::PARTITION])?;
9851        match keyword {
9852            Keyword::PART => Ok(Partition::Part(self.parse_expr()?)),
9853            Keyword::PARTITION => Ok(Partition::Expr(self.parse_expr()?)),
9854            // unreachable because expect_one_of_keywords used above
9855            _ => unreachable!(),
9856        }
9857    }
9858
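    /// Parse the statement that follows an already-consumed `ALTER` keyword,
    /// dispatching on the object type (`TABLE`, `VIEW`, `SCHEMA`, `ROLE`, ...).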
9859    pub fn parse_alter(&mut self) -> Result<Statement, ParserError> {
9860        let object_type = self.expect_one_of_keywords(&[
9861            Keyword::VIEW,
9862            Keyword::TYPE,
9863            Keyword::TABLE,
9864            Keyword::INDEX,
9865            Keyword::ROLE,
9866            Keyword::POLICY,
9867            Keyword::CONNECTOR,
9868            Keyword::ICEBERG,
9869            Keyword::SCHEMA,
9870            Keyword::MATERIALIZED,
9871            Keyword::FUNCTION,
9872            Keyword::PROCEDURE,
9873        ])?;
9874        match object_type {
9875            Keyword::SCHEMA => {
9876                self.prev_token();
9877                self.prev_token();
9878                self.parse_alter_schema()
9879            }
9880            Keyword::VIEW => self.parse_alter_view(),
9881            Keyword::TYPE => self.parse_alter_type(),
9882            Keyword::TABLE => self.parse_alter_table(false),
9883            Keyword::ICEBERG => {
9884                self.expect_keyword(Keyword::TABLE)?;
9885                self.parse_alter_table(true)
9886            }
9887            Keyword::MATERIALIZED => {
9888                self.expect_keyword(Keyword::VIEW)?;
9889                self.parse_alter_materialized_view()
9890            }
9891            Keyword::INDEX => {
9892                let index_name = self.parse_object_name(false)?;
9893                let operation = if self.parse_keyword(Keyword::RENAME) {
9894                    if self.parse_keyword(Keyword::TO) {
9895                        let index_name = self.parse_object_name(false)?;
9896                        AlterIndexOperation::RenameIndex { index_name }
9897                    } else {
9898                        return self.expected("TO after RENAME", self.peek_token());
9899                    }
9900                } else {
9901                    return self.expected("RENAME after ALTER INDEX", self.peek_token());
9902                };
9903
9904                Ok(Statement::AlterIndex {
9905                    name: index_name,
9906                    operation,
9907                })
9908            }
9909            Keyword::ROLE => self.parse_alter_role(),
9910            Keyword::POLICY => self.parse_alter_policy(),
9911            Keyword::CONNECTOR => self.parse_alter_connector(),
9912            Keyword::FUNCTION => self.parse_alter_function(),
9913            Keyword::PROCEDURE => self.parse_alter_procedure(),
9914            // unreachable because expect_one_of_keywords used above
9915            _ => unreachable!(),
9916        }
9917    }
9918
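    /// Parse the remainder of an `ALTER FUNCTION` statement; only the
    /// `ALTER FUNCTION <name> SET OPTIONS (<option>, ...)` form is handled here.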
9919    fn parse_alter_function(&mut self) -> Result<Statement, ParserError> {
9920        let name = self.parse_object_name(false)?;
9921        self.expect_keywords(&[Keyword::SET, Keyword::OPTIONS])?;
9922        self.expect_token(&Token::LParen)?;
9923        let options = self.parse_comma_separated(Parser::parse_sql_option)?;
9924        self.expect_token(&Token::RParen)?;
9925        Ok(Statement::AlterFunction {
9926            name,
9927            options,
9928        })
9929    }
9930
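    /// Parse the remainder of an `ALTER PROCEDURE` statement; only the
    /// `ALTER PROCEDURE <name> SET OPTIONS (<option>, ...)` form is handled here.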
9931    fn parse_alter_procedure(&mut self) -> Result<Statement, ParserError> {
9932        let name = self.parse_object_name(false)?;
9933        self.expect_keywords(&[Keyword::SET, Keyword::OPTIONS])?;
9934        self.expect_token(&Token::LParen)?;
9935        let options = self.parse_comma_separated(Parser::parse_sql_option)?;
9936        self.expect_token(&Token::RParen)?;
9937        Ok(Statement::AlterProcedure {
9938            name,
9939            options,
9940        })
9941    }
9942
9943    /// Parse a [Statement::AlterTable]
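    ///
    /// A minimal usage sketch via the public entry point, assuming the usual
    /// `Parser::parse_sql` API and the `GenericDialect`:
    ///
    /// ```
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// // `ALTER TABLE ...` statements are returned as `Statement::AlterTable`.
    /// let statements =
    ///     Parser::parse_sql(&GenericDialect {}, "ALTER TABLE t ADD COLUMN c INT").unwrap();
    /// assert_eq!(statements.len(), 1);
    /// ```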
9944    pub fn parse_alter_table(&mut self, iceberg: bool) -> Result<Statement, ParserError> {
9945        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9946        let only = self.parse_keyword(Keyword::ONLY); // [ ONLY ]
9947        let table_name = self.parse_object_name(false)?;
9948        let on_cluster = self.parse_optional_on_cluster()?;
9949        let operations = self.parse_comma_separated(Parser::parse_alter_table_operation)?;
9950
9951        let mut location = None;
9952        if self.parse_keyword(Keyword::LOCATION) {
9953            location = Some(HiveSetLocation {
9954                has_set: false,
9955                location: self.parse_identifier()?,
9956            });
9957        } else if self.parse_keywords(&[Keyword::SET, Keyword::LOCATION]) {
9958            location = Some(HiveSetLocation {
9959                has_set: true,
9960                location: self.parse_identifier()?,
9961            });
9962        }
9963
9964        let end_token = if self.peek_token_ref().token == Token::SemiColon {
9965            self.peek_token_ref().clone()
9966        } else {
9967            self.get_current_token().clone()
9968        };
9969
9970        Ok(Statement::AlterTable {
9971            name: table_name,
9972            if_exists,
9973            only,
9974            operations,
9975            location,
9976            on_cluster,
9977            iceberg,
9978            end_token: AttachedToken(end_token),
9979        })
9980    }
9981
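    /// Parse the remainder of an `ALTER VIEW` statement: either
    /// `SET OPTIONS (...)`, `ALTER COLUMN <name> SET OPTIONS (...)`, or the
    /// `[(<columns>)] [WITH (<options>)] AS <query>` redefinition form.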
9982    pub fn parse_alter_view(&mut self) -> Result<Statement, ParserError> {
9983        let name = self.parse_object_name(false)?;
9984
9985        if self.parse_keywords(&[Keyword::SET, Keyword::OPTIONS]) {
9986            self.expect_token(&Token::LParen)?;
9987            let options = self.parse_comma_separated(Parser::parse_sql_option)?;
9988            self.expect_token(&Token::RParen)?;
9989            return Ok(Statement::AlterViewWithOperations {
9990                name,
9991                operations: vec![AlterViewOperation::SetOptions { options }],
9992            });
9993        }
9994
9995        if self.parse_keywords(&[Keyword::ALTER, Keyword::COLUMN]) {
9996            let column_name = self.parse_identifier()?;
9997            self.expect_keywords(&[Keyword::SET, Keyword::OPTIONS])?;
9998            self.expect_token(&Token::LParen)?;
9999            let options = self.parse_comma_separated(Parser::parse_sql_option)?;
10000            self.expect_token(&Token::RParen)?;
10001            return Ok(Statement::AlterViewWithOperations {
10002                name,
10003                operations: vec![AlterViewOperation::AlterColumn {
10004                    column_name,
10005                    operation: AlterColumnOperation::SetOptions { options },
10006                }],
10007            });
10008        }
10009
10010        let columns = self.parse_parenthesized_column_list(Optional, false)?;
10011
10012        let with_options = self.parse_options(Keyword::WITH)?;
10013
10014        self.expect_keyword_is(Keyword::AS)?;
10015        let query = self.parse_query()?;
10016
10017        Ok(Statement::AlterView {
10018            name,
10019            columns,
10020            query,
10021            with_options,
10022        })
10023    }
10024
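    /// Parse the remainder of an `ALTER MATERIALIZED VIEW` statement, accepting
    /// any number of `SET OPTIONS (...)` and `ALTER COLUMN <name> SET OPTIONS (...)`
    /// clauses.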
10025    pub fn parse_alter_materialized_view(&mut self) -> Result<Statement, ParserError> {
10026        let name = self.parse_object_name(false)?;
10027        let mut operations = vec![];
10028        let mut options = vec![];
10029
10030        loop {
10031            if self.parse_keywords(&[Keyword::SET, Keyword::OPTIONS]) {
10032                self.expect_token(&Token::LParen)?;
10033                options = self.parse_comma_separated(Parser::parse_sql_option)?;
10034                self.expect_token(&Token::RParen)?;
10035            } else if self.parse_keywords(&[Keyword::ALTER, Keyword::COLUMN]) {
10036                let column_name = self.parse_identifier()?;
10037                self.expect_keywords(&[Keyword::SET, Keyword::OPTIONS])?;
10038                self.expect_token(&Token::LParen)?;
10039                let col_options = self.parse_comma_separated(Parser::parse_sql_option)?;
10040                self.expect_token(&Token::RParen)?;
10041                operations.push(AlterViewOperation::AlterColumn {
10042                    column_name,
10043                    operation: AlterColumnOperation::SetOptions { options: col_options },
10044                });
10045            } else {
10046                break;
10047            }
10048        }
10049
10050        Ok(Statement::AlterMaterializedView { name, options, operations })
10051    }
10052
10053    /// Parse a [Statement::AlterType]
10054    pub fn parse_alter_type(&mut self) -> Result<Statement, ParserError> {
10055        let name = self.parse_object_name(false)?;
10056
10057        if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
10058            let new_name = self.parse_identifier()?;
10059            Ok(Statement::AlterType(AlterType {
10060                name,
10061                operation: AlterTypeOperation::Rename(AlterTypeRename { new_name }),
10062            }))
10063        } else if self.parse_keywords(&[Keyword::ADD, Keyword::VALUE]) {
10064            let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
10065            let new_enum_value = self.parse_identifier()?;
10066            let position = if self.parse_keyword(Keyword::BEFORE) {
10067                Some(AlterTypeAddValuePosition::Before(self.parse_identifier()?))
10068            } else if self.parse_keyword(Keyword::AFTER) {
10069                Some(AlterTypeAddValuePosition::After(self.parse_identifier()?))
10070            } else {
10071                None
10072            };
10073
10074            Ok(Statement::AlterType(AlterType {
10075                name,
10076                operation: AlterTypeOperation::AddValue(AlterTypeAddValue {
10077                    if_not_exists,
10078                    value: new_enum_value,
10079                    position,
10080                }),
10081            }))
10082        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::VALUE]) {
10083            let existing_enum_value = self.parse_identifier()?;
10084            self.expect_keyword(Keyword::TO)?;
10085            let new_enum_value = self.parse_identifier()?;
10086
10087            Ok(Statement::AlterType(AlterType {
10088                name,
10089                operation: AlterTypeOperation::RenameValue(AlterTypeRenameValue {
10090                    from: existing_enum_value,
10091                    to: new_enum_value,
10092                }),
10093            }))
10094        } else {
10095            self.expected_ref(
10096                "{RENAME TO | { RENAME | ADD } VALUE}",
10097                self.peek_token_ref(),
10098            )
10099        }
10100    }
10101
10102    /// Parse a [Statement::AlterSchema]
10103    /// `ALTER SCHEMA [ IF EXISTS ] schema_name <operation>`
10104    pub fn parse_alter_schema(&mut self) -> Result<Statement, ParserError> {
10105        self.expect_keywords(&[Keyword::ALTER, Keyword::SCHEMA])?;
10106        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10107        let name = self.parse_object_name(false)?;
10108        let operation = if self.parse_keywords(&[Keyword::SET, Keyword::OPTIONS]) {
10109            self.prev_token();
10110            let options = self.parse_options(Keyword::OPTIONS)?;
10111            AlterSchemaOperation::SetOptionsParens { options }
10112        } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT, Keyword::COLLATE]) {
10113            let collate = self.parse_expr()?;
10114            AlterSchemaOperation::SetDefaultCollate { collate }
10115        } else if self.parse_keywords(&[Keyword::ADD, Keyword::REPLICA]) {
10116            let replica = self.parse_identifier()?;
10117            let options = if self.peek_keyword(Keyword::OPTIONS) {
10118                Some(self.parse_options(Keyword::OPTIONS)?)
10119            } else {
10120                None
10121            };
10122            AlterSchemaOperation::AddReplica { replica, options }
10123        } else if self.parse_keywords(&[Keyword::DROP, Keyword::REPLICA]) {
10124            let replica = self.parse_identifier()?;
10125            AlterSchemaOperation::DropReplica { replica }
10126        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
10127            let new_name = self.parse_object_name(false)?;
10128            AlterSchemaOperation::Rename { name: new_name }
10129        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
10130            let owner = self.parse_owner()?;
10131            AlterSchemaOperation::OwnerTo { owner }
10132        } else {
10133            return self.expected_ref("ALTER SCHEMA operation", self.peek_token_ref());
10134        };
10135        Ok(Statement::AlterSchema(AlterSchema {
10136            name,
10137            if_exists,
10138            operations: vec![operation],
10139        }))
10140    }
10141
10142    /// Parse a `CALL procedure_name(arg1, arg2, ...)`
10143    /// or `CALL procedure_name` statement
10144    pub fn parse_call(&mut self) -> Result<Statement, ParserError> {
10145        let object_name = self.parse_object_name(false)?;
10146        if self.peek_token().token == Token::LParen {
10147            match self.parse_function(object_name)? {
10148                Expr::Function(f) => Ok(Statement::Call(f)),
10149                other => parser_err!(
10150                    format!("Expected a simple procedure call but found: {other}"),
10151                    self.peek_token().span.start
10152                ),
10153            }
10154        } else {
10155            Ok(Statement::Call(Function {
10156                name: object_name,
10157                uses_odbc_syntax: false,
10158                parameters: FunctionArguments::None,
10159                args: FunctionArguments::None,
10160                over: None,
10161                filter: None,
10162                null_treatment: None,
10163                within_group: vec![],
10164            }))
10165        }
10166    }
10167
10168    /// Parse a copy statement
10169    pub fn parse_copy(&mut self) -> Result<Statement, ParserError> {
10170        let source;
10171        if self.consume_token(&Token::LParen) {
10172            source = CopySource::Query(self.parse_query()?);
10173            self.expect_token(&Token::RParen)?;
10174        } else {
10175            let table_name = self.parse_object_name(false)?;
10176            let columns = self.parse_parenthesized_column_list(Optional, false)?;
10177            source = CopySource::Table {
10178                table_name,
10179                columns,
10180            };
10181        }
10182        let to = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::TO]) {
10183            Some(Keyword::FROM) => false,
10184            Some(Keyword::TO) => true,
10185            _ => self.expected("FROM or TO", self.peek_token())?,
10186        };
10187        if !to {
10188            // Use a separate if statement to prevent Rust compiler from complaining about
10189            // "if statement in this position is unstable: https://github.com/rust-lang/rust/issues/53667"
10190            if let CopySource::Query(_) = source {
10191                return Err(ParserError::ParserError(
10192                    "COPY ... FROM does not support query as a source".to_string(),
10193                ));
10194            }
10195        }
10196        let target = if self.parse_keyword(Keyword::STDIN) {
10197            CopyTarget::Stdin
10198        } else if self.parse_keyword(Keyword::STDOUT) {
10199            CopyTarget::Stdout
10200        } else if self.parse_keyword(Keyword::PROGRAM) {
10201            CopyTarget::Program {
10202                command: self.parse_literal_string()?,
10203            }
10204        } else {
10205            CopyTarget::File {
10206                filename: self.parse_literal_string()?,
10207            }
10208        };
10209        let _ = self.parse_keyword(Keyword::WITH); // [ WITH ]
10210        let mut options = vec![];
10211        if self.consume_token(&Token::LParen) {
10212            options = self.parse_comma_separated(Parser::parse_copy_option)?;
10213            self.expect_token(&Token::RParen)?;
10214        }
10215        let mut legacy_options = vec![];
10216        while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
10217            legacy_options.push(opt);
10218        }
10219        let values = if let CopyTarget::Stdin = target {
10220            self.expect_token(&Token::SemiColon)?;
10221            self.parse_tsv()
10222        } else {
10223            vec![]
10224        };
10225        Ok(Statement::Copy {
10226            source,
10227            to,
10228            target,
10229            options,
10230            legacy_options,
10231            values,
10232        })
10233    }
10234
10235    /// Parse [Statement::Open]
10236    fn parse_open(&mut self) -> Result<Statement, ParserError> {
10237        self.expect_keyword(Keyword::OPEN)?;
10238        Ok(Statement::Open(OpenStatement {
10239            cursor_name: self.parse_identifier()?,
10240        }))
10241    }
10242
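    /// Parse a `CLOSE { <cursor_name> | ALL }` statement.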
10243    pub fn parse_close(&mut self) -> Result<Statement, ParserError> {
10244        let cursor = if self.parse_keyword(Keyword::ALL) {
10245            CloseCursor::All
10246        } else {
10247            let name = self.parse_identifier()?;
10248
10249            CloseCursor::Specific { name }
10250        };
10251
10252        Ok(Statement::Close { cursor })
10253    }
10254
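    /// Parse a single option from the parenthesized `WITH (...)` option list of a
    /// `COPY` statement, e.g. `FORMAT csv` or `DELIMITER ','`.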
10255    fn parse_copy_option(&mut self) -> Result<CopyOption, ParserError> {
10256        let ret = match self.parse_one_of_keywords(&[
10257            Keyword::FORMAT,
10258            Keyword::FREEZE,
10259            Keyword::DELIMITER,
10260            Keyword::NULL,
10261            Keyword::HEADER,
10262            Keyword::QUOTE,
10263            Keyword::ESCAPE,
10264            Keyword::FORCE_QUOTE,
10265            Keyword::FORCE_NOT_NULL,
10266            Keyword::FORCE_NULL,
10267            Keyword::ENCODING,
10268        ]) {
10269            Some(Keyword::FORMAT) => CopyOption::Format(self.parse_identifier()?),
10270            Some(Keyword::FREEZE) => CopyOption::Freeze(!matches!(
10271                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
10272                Some(Keyword::FALSE)
10273            )),
10274            Some(Keyword::DELIMITER) => CopyOption::Delimiter(self.parse_literal_char()?),
10275            Some(Keyword::NULL) => CopyOption::Null(self.parse_literal_string()?),
10276            Some(Keyword::HEADER) => CopyOption::Header(!matches!(
10277                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
10278                Some(Keyword::FALSE)
10279            )),
10280            Some(Keyword::QUOTE) => CopyOption::Quote(self.parse_literal_char()?),
10281            Some(Keyword::ESCAPE) => CopyOption::Escape(self.parse_literal_char()?),
10282            Some(Keyword::FORCE_QUOTE) => {
10283                CopyOption::ForceQuote(self.parse_parenthesized_column_list(Mandatory, false)?)
10284            }
10285            Some(Keyword::FORCE_NOT_NULL) => {
10286                CopyOption::ForceNotNull(self.parse_parenthesized_column_list(Mandatory, false)?)
10287            }
10288            Some(Keyword::FORCE_NULL) => {
10289                CopyOption::ForceNull(self.parse_parenthesized_column_list(Mandatory, false)?)
10290            }
10291            Some(Keyword::ENCODING) => CopyOption::Encoding(self.parse_literal_string()?),
10292            _ => self.expected("option", self.peek_token())?,
10293        };
10294        Ok(ret)
10295    }
10296
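    /// Parse a single legacy (non-parenthesized) `COPY` option, e.g. `CSV`,
    /// `DELIMITER AS ','`, or Redshift `UNLOAD`-style options such as `IAM_ROLE`.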
10297    fn parse_copy_legacy_option(&mut self) -> Result<CopyLegacyOption, ParserError> {
10298        // FORMAT [ AS ] is optional
10299        if self.parse_keyword(Keyword::FORMAT) {
10300            let _ = self.parse_keyword(Keyword::AS);
10301        }
10302
10303        let ret = match self.parse_one_of_keywords(&[
10304            Keyword::ACCEPTANYDATE,
10305            Keyword::ACCEPTINVCHARS,
10306            Keyword::ADDQUOTES,
10307            Keyword::ALLOWOVERWRITE,
10308            Keyword::BINARY,
10309            Keyword::BLANKSASNULL,
10310            Keyword::BZIP2,
10311            Keyword::CLEANPATH,
10312            Keyword::CSV,
10313            Keyword::DATEFORMAT,
10314            Keyword::DELIMITER,
10315            Keyword::EMPTYASNULL,
10316            Keyword::ENCRYPTED,
10317            Keyword::ESCAPE,
10318            Keyword::EXTENSION,
10319            Keyword::FIXEDWIDTH,
10320            Keyword::GZIP,
10321            Keyword::HEADER,
10322            Keyword::IAM_ROLE,
10323            Keyword::IGNOREHEADER,
10324            Keyword::JSON,
10325            Keyword::MANIFEST,
10326            Keyword::MAXFILESIZE,
10327            Keyword::NULL,
10328            Keyword::PARALLEL,
10329            Keyword::PARQUET,
10330            Keyword::PARTITION,
10331            Keyword::REGION,
10332            Keyword::ROWGROUPSIZE,
10333            Keyword::TIMEFORMAT,
10334            Keyword::TRUNCATECOLUMNS,
10335            Keyword::ZSTD,
10336        ]) {
10337            Some(Keyword::ACCEPTANYDATE) => CopyLegacyOption::AcceptAnyDate,
10338            Some(Keyword::ACCEPTINVCHARS) => {
10339                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
10340                let ch = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) {
10341                    Some(self.parse_literal_string()?)
10342                } else {
10343                    None
10344                };
10345                CopyLegacyOption::AcceptInvChars(ch)
10346            }
10347            Some(Keyword::ADDQUOTES) => CopyLegacyOption::AddQuotes,
10348            Some(Keyword::ALLOWOVERWRITE) => CopyLegacyOption::AllowOverwrite,
10349            Some(Keyword::BINARY) => CopyLegacyOption::Binary,
10350            Some(Keyword::BLANKSASNULL) => CopyLegacyOption::BlankAsNull,
10351            Some(Keyword::BZIP2) => CopyLegacyOption::Bzip2,
10352            Some(Keyword::CLEANPATH) => CopyLegacyOption::CleanPath,
10353            Some(Keyword::CSV) => CopyLegacyOption::Csv({
10354                let mut opts = vec![];
10355                while let Some(opt) =
10356                    self.maybe_parse(|parser| parser.parse_copy_legacy_csv_option())?
10357                {
10358                    opts.push(opt);
10359                }
10360                opts
10361            }),
10362            Some(Keyword::DATEFORMAT) => {
10363                let _ = self.parse_keyword(Keyword::AS);
10364                let fmt = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) {
10365                    Some(self.parse_literal_string()?)
10366                } else {
10367                    None
10368                };
10369                CopyLegacyOption::DateFormat(fmt)
10370            }
10371            Some(Keyword::DELIMITER) => {
10372                let _ = self.parse_keyword(Keyword::AS);
10373                CopyLegacyOption::Delimiter(self.parse_literal_char()?)
10374            }
10375            Some(Keyword::EMPTYASNULL) => CopyLegacyOption::EmptyAsNull,
10376            Some(Keyword::ENCRYPTED) => {
10377                let auto = self.parse_keyword(Keyword::AUTO);
10378                CopyLegacyOption::Encrypted { auto }
10379            }
10380            Some(Keyword::ESCAPE) => CopyLegacyOption::Escape,
10381            Some(Keyword::EXTENSION) => {
10382                let ext = self.parse_literal_string()?;
10383                CopyLegacyOption::Extension(ext)
10384            }
10385            Some(Keyword::FIXEDWIDTH) => {
10386                let spec = self.parse_literal_string()?;
10387                CopyLegacyOption::FixedWidth(spec)
10388            }
10389            Some(Keyword::GZIP) => CopyLegacyOption::Gzip,
10390            Some(Keyword::HEADER) => CopyLegacyOption::Header,
10391            Some(Keyword::IAM_ROLE) => CopyLegacyOption::IamRole(self.parse_iam_role_kind()?),
10392            Some(Keyword::IGNOREHEADER) => {
10393                let _ = self.parse_keyword(Keyword::AS);
10394                let num_rows = self.parse_literal_uint()?;
10395                CopyLegacyOption::IgnoreHeader(num_rows)
10396            }
10397            Some(Keyword::JSON) => CopyLegacyOption::Json,
10398            Some(Keyword::MANIFEST) => {
10399                let verbose = self.parse_keyword(Keyword::VERBOSE);
10400                CopyLegacyOption::Manifest { verbose }
10401            }
10402            Some(Keyword::MAXFILESIZE) => {
10403                let _ = self.parse_keyword(Keyword::AS);
10404                let size = self.parse_number_value()?.value;
10405                let unit = match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
10406                    Some(Keyword::MB) => Some(FileSizeUnit::MB),
10407                    Some(Keyword::GB) => Some(FileSizeUnit::GB),
10408                    _ => None,
10409                };
10410                CopyLegacyOption::MaxFileSize(FileSize { size, unit })
10411            }
10412            Some(Keyword::NULL) => {
10413                let _ = self.parse_keyword(Keyword::AS);
10414                CopyLegacyOption::Null(self.parse_literal_string()?)
10415            }
10416            Some(Keyword::PARALLEL) => {
10417                let enabled = match self.parse_one_of_keywords(&[
10418                    Keyword::TRUE,
10419                    Keyword::FALSE,
10420                    Keyword::ON,
10421                    Keyword::OFF,
10422                ]) {
10423                    Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
10424                    Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
10425                    _ => None,
10426                };
10427                CopyLegacyOption::Parallel(enabled)
10428            }
10429            Some(Keyword::PARQUET) => CopyLegacyOption::Parquet,
10430            Some(Keyword::PARTITION) => {
10431                self.expect_keyword(Keyword::BY)?;
10432                let columns = self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?;
10433                let include = self.parse_keyword(Keyword::INCLUDE);
10434                CopyLegacyOption::PartitionBy(UnloadPartitionBy { columns, include })
10435            }
10436            Some(Keyword::REGION) => {
10437                let _ = self.parse_keyword(Keyword::AS);
10438                let region = self.parse_literal_string()?;
10439                CopyLegacyOption::Region(region)
10440            }
10441            Some(Keyword::ROWGROUPSIZE) => {
10442                let _ = self.parse_keyword(Keyword::AS);
10443                let file_size = self.parse_file_size()?;
10444                CopyLegacyOption::RowGroupSize(file_size)
10445            }
10446            Some(Keyword::TIMEFORMAT) => {
10447                let _ = self.parse_keyword(Keyword::AS);
10448                let fmt = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) {
10449                    Some(self.parse_literal_string()?)
10450                } else {
10451                    None
10452                };
10453                CopyLegacyOption::TimeFormat(fmt)
10454            }
10455            Some(Keyword::TRUNCATECOLUMNS) => CopyLegacyOption::TruncateColumns,
10456            Some(Keyword::ZSTD) => CopyLegacyOption::Zstd,
10457            _ => self.expected("option", self.peek_token())?,
10458        };
10459        Ok(ret)
10460    }
10461
10462    fn parse_file_size(&mut self) -> Result<FileSize, ParserError> {
10463        let size = self.parse_number_value()?.value;
10464        let unit = self.maybe_parse_file_size_unit();
10465        Ok(FileSize { size, unit })
10466    }
10467
10468    fn maybe_parse_file_size_unit(&mut self) -> Option<FileSizeUnit> {
10469        match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
10470            Some(Keyword::MB) => Some(FileSizeUnit::MB),
10471            Some(Keyword::GB) => Some(FileSizeUnit::GB),
10472            _ => None,
10473        }
10474    }
10475
10476    fn parse_iam_role_kind(&mut self) -> Result<IamRoleKind, ParserError> {
10477        if self.parse_keyword(Keyword::DEFAULT) {
10478            Ok(IamRoleKind::Default)
10479        } else {
10480            let arn = self.parse_literal_string()?;
10481            Ok(IamRoleKind::Arn(arn))
10482        }
10483    }
10484
10485    fn parse_copy_legacy_csv_option(&mut self) -> Result<CopyLegacyCsvOption, ParserError> {
10486        let ret = match self.parse_one_of_keywords(&[
10487            Keyword::HEADER,
10488            Keyword::QUOTE,
10489            Keyword::ESCAPE,
10490            Keyword::FORCE,
10491        ]) {
10492            Some(Keyword::HEADER) => CopyLegacyCsvOption::Header,
10493            Some(Keyword::QUOTE) => {
10494                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
10495                CopyLegacyCsvOption::Quote(self.parse_literal_char()?)
10496            }
10497            Some(Keyword::ESCAPE) => {
10498                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
10499                CopyLegacyCsvOption::Escape(self.parse_literal_char()?)
10500            }
10501            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) => {
10502                CopyLegacyCsvOption::ForceNotNull(
10503                    self.parse_comma_separated(|p| p.parse_identifier())?,
10504                )
10505            }
10506            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::QUOTE]) => {
10507                CopyLegacyCsvOption::ForceQuote(
10508                    self.parse_comma_separated(|p| p.parse_identifier())?,
10509                )
10510            }
10511            _ => self.expected("csv option", self.peek_token())?,
10512        };
10513        Ok(ret)
10514    }
10515
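    /// Parse a string literal that must contain exactly one character, as
    /// required by options such as `DELIMITER`, `QUOTE`, and `ESCAPE`.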
10516    fn parse_literal_char(&mut self) -> Result<char, ParserError> {
10517        let s = self.parse_literal_string()?;
10518        if s.len() != 1 {
10519            let loc = self
10520                .tokens
10521                .get(self.index - 1)
10522                .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
10523            return parser_err!(format!("Expected a char, found {s:?}"), loc);
10524        }
10525        Ok(s.chars().next().unwrap())
10526    }
10527
10528    /// Parse the tab-separated values in the payload of a
10529    /// `COPY ... FROM STDIN` statement
10530    pub fn parse_tsv(&mut self) -> Vec<Option<String>> {
10531        self.parse_tab_value()
10532    }
10533
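    /// Consume raw tokens up to the `\.` end-of-data marker, splitting values on
    /// tabs and newlines; `\N` is read as a NULL value.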
10534    pub fn parse_tab_value(&mut self) -> Vec<Option<String>> {
10535        let mut values = vec![];
10536        let mut content = String::from("");
10537        while let Some(t) = self.next_token_no_skip().map(|t| &t.token) {
10538            match t {
10539                Token::Whitespace(Whitespace::Tab) => {
10540                    values.push(Some(content.to_string()));
10541                    content.clear();
10542                }
10543                Token::Whitespace(Whitespace::Newline) => {
10544                    values.push(Some(content.to_string()));
10545                    content.clear();
10546                }
10547                Token::Backslash => {
10548                    if self.consume_token(&Token::Period) {
10549                        return values;
10550                    }
10551                    if let Token::Word(w) = self.next_token().token {
10552                        if w.value == "N" {
10553                            values.push(None);
10554                        }
10555                    }
10556                }
10557                _ => {
10558                    content.push_str(&t.to_string());
10559                }
10560            }
10561        }
10562        values
10563    }
10564
10565    /// Parse a literal value (numbers, strings, date/time, booleans)
10566    pub fn parse_value(&mut self) -> Result<ValueWithSpan, ParserError> {
10567        let next_token = self.next_token();
10568        let span = next_token.span;
10569        let ok_value = |value: Value| Ok(value.with_span(span));
10570        match next_token.token {
10571            Token::Word(w) => match w.keyword {
10572                Keyword::TRUE if self.dialect.supports_boolean_literals() => {
10573                    ok_value(Value::Boolean(true))
10574                }
10575                Keyword::FALSE if self.dialect.supports_boolean_literals() => {
10576                    ok_value(Value::Boolean(false))
10577                }
10578                Keyword::NULL => ok_value(Value::Null),
10579                Keyword::NoKeyword if w.quote_style.is_some() => match w.quote_style {
10580                    Some('"') => ok_value(Value::DoubleQuotedString(w.value)),
10581                    Some('\'') => ok_value(Value::SingleQuotedString(w.value)),
10582                    _ => self.expected(
10583                        "a value",
10584                        TokenWithSpan {
10585                            token: Token::Word(w),
10586                            span,
10587                        },
10588                    )?,
10589                },
10590                _ => self.expected(
10591                    "a concrete value",
10592                    TokenWithSpan {
10593                        token: Token::Word(w),
10594                        span,
10595                    },
10596                ),
10597            },
10598            // The call to n.parse() returns a bigdecimal when the
10599            // bigdecimal feature is enabled, and is otherwise a no-op
10600            // (i.e., it returns the input string).
10601            Token::Number(n, l) => ok_value(Value::Number(Self::parse(n, span.start)?, l)),
10602            Token::SingleQuotedString(ref s) => ok_value(Value::SingleQuotedString(
10603                self.maybe_concat_string_literal(s.to_string()),
10604            )),
10605            Token::DoubleQuotedString(ref s) => ok_value(Value::DoubleQuotedString(
10606                self.maybe_concat_string_literal(s.to_string()),
10607            )),
10608            Token::TripleSingleQuotedString(ref s) => {
10609                ok_value(Value::TripleSingleQuotedString(s.to_string()))
10610            }
10611            Token::TripleDoubleQuotedString(ref s) => {
10612                ok_value(Value::TripleDoubleQuotedString(s.to_string()))
10613            }
10614            Token::DollarQuotedString(ref s) => ok_value(Value::DollarQuotedString(s.clone())),
10615            Token::SingleQuotedByteStringLiteral(ref s) => {
10616                ok_value(Value::SingleQuotedByteStringLiteral(s.clone()))
10617            }
10618            Token::DoubleQuotedByteStringLiteral(ref s) => {
10619                ok_value(Value::DoubleQuotedByteStringLiteral(s.clone()))
10620            }
10621            Token::TripleSingleQuotedByteStringLiteral(ref s) => {
10622                ok_value(Value::TripleSingleQuotedByteStringLiteral(s.clone()))
10623            }
10624            Token::TripleDoubleQuotedByteStringLiteral(ref s) => {
10625                ok_value(Value::TripleDoubleQuotedByteStringLiteral(s.clone()))
10626            }
10627            Token::SingleQuotedRawStringLiteral(ref s) => {
10628                ok_value(Value::SingleQuotedRawStringLiteral(s.clone()))
10629            }
10630            Token::DoubleQuotedRawStringLiteral(ref s) => {
10631                ok_value(Value::DoubleQuotedRawStringLiteral(s.clone()))
10632            }
10633            Token::TripleSingleQuotedRawStringLiteral(ref s) => {
10634                ok_value(Value::TripleSingleQuotedRawStringLiteral(s.clone()))
10635            }
10636            Token::TripleDoubleQuotedRawStringLiteral(ref s) => {
10637                ok_value(Value::TripleDoubleQuotedRawStringLiteral(s.clone()))
10638            }
10639            Token::NationalStringLiteral(ref s) => {
10640                ok_value(Value::NationalStringLiteral(s.to_string()))
10641            }
10642            Token::EscapedStringLiteral(ref s) => {
10643                ok_value(Value::EscapedStringLiteral(s.to_string()))
10644            }
10645            Token::UnicodeStringLiteral(ref s) => {
10646                ok_value(Value::UnicodeStringLiteral(s.to_string()))
10647            }
10648            Token::HexStringLiteral(ref s) => ok_value(Value::HexStringLiteral(s.to_string())),
10649            Token::Placeholder(ref s) => ok_value(Value::Placeholder(s.to_string())),
10650            tok @ Token::Colon | tok @ Token::AtSign => {
10651                // 1. Not calling self.parse_identifier(false)?
10652                //    because only for placeholders do we want to accept
10653                //    numbers as identifiers. This is because Snowflake
10654                //    allows numbers as placeholders.
10655                // 2. Not calling self.next_token() to enforce that `tok`
10656                //    is followed immediately by a word/number, i.e.
10657                //    without any whitespace in between.
10658                let next_token = self.next_token_no_skip().unwrap_or(&EOF_TOKEN).clone();
10659                let ident = match next_token.token {
10660                    Token::Word(w) => Ok(w.into_ident(next_token.span)),
10661                    Token::Number(w, false) => Ok(Ident::with_span(next_token.span, w)),
10662                    _ => self.expected("placeholder", next_token),
10663                }?;
10664                Ok(Value::Placeholder(tok.to_string() + &ident.value)
10665                    .with_span(Span::new(span.start, ident.span.end)))
10666            }
10667            unexpected => self.expected(
10668                "a value",
10669                TokenWithSpan {
10670                    token: unexpected,
10671                    span,
10672                },
10673            ),
10674        }
10675    }
10676
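    /// If the dialect supports implicit string literal concatenation
    /// (e.g. `SELECT 'foo' 'bar'`), appends any immediately following
    /// quoted-string tokens to `str`, so `'foo' 'bar'` is read back as the
    /// single literal `'foobar'`.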
10677    fn maybe_concat_string_literal(&mut self, mut str: String) -> String {
10678        if self.dialect.supports_string_literal_concatenation() {
10679            while let Token::SingleQuotedString(ref s) | Token::DoubleQuotedString(ref s) =
10680                self.peek_token_ref().token
10681            {
10682                str.push_str(s.clone().as_str());
10683                self.advance_token();
10684            }
10685        }
10686        str
10687    }
10688
10689    /// Parse an unsigned numeric literal (a placeholder such as `?` is also accepted)
10690    pub fn parse_number_value(&mut self) -> Result<ValueWithSpan, ParserError> {
10691        let value_wrapper = self.parse_value()?;
10692        match &value_wrapper.value {
10693            Value::Number(_, _) => Ok(value_wrapper),
10694            Value::Placeholder(_) => Ok(value_wrapper),
10695            _ => {
10696                self.prev_token();
10697                self.expected("literal number", self.peek_token())
10698            }
10699        }
10700    }
10701
10702    /// Parse a numeric literal as an expression. Returns an [`Expr::UnaryOp`] if the number is signed,
10703    /// otherwise returns an [`Expr::Value`].
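    ///
    /// For example, `-5` is returned as a [`UnaryOperator::Minus`] wrapping the
    /// unsigned literal `5`, rather than as a single negative number literal.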
10704    pub fn parse_number(&mut self) -> Result<Expr, ParserError> {
10705        let next_token = self.next_token();
10706        match next_token.token {
10707            Token::Plus => Ok(Expr::UnaryOp {
10708                op: UnaryOperator::Plus,
10709                expr: Box::new(Expr::Value(self.parse_number_value()?)),
10710            }),
10711            Token::Minus => Ok(Expr::UnaryOp {
10712                op: UnaryOperator::Minus,
10713                expr: Box::new(Expr::Value(self.parse_number_value()?)),
10714            }),
10715            _ => {
10716                self.prev_token();
10717                Ok(Expr::Value(self.parse_number_value()?))
10718            }
10719        }
10720    }
10721
10722    fn parse_introduced_string_expr(&mut self) -> Result<Expr, ParserError> {
10723        let next_token = self.next_token();
10724        let span = next_token.span;
10725        match next_token.token {
10726            Token::SingleQuotedString(ref s) => Ok(Expr::Value(
10727                Value::SingleQuotedString(s.to_string()).with_span(span),
10728            )),
10729            Token::DoubleQuotedString(ref s) => Ok(Expr::Value(
10730                Value::DoubleQuotedString(s.to_string()).with_span(span),
10731            )),
10732            Token::HexStringLiteral(ref s) => Ok(Expr::Value(
10733                Value::HexStringLiteral(s.to_string()).with_span(span),
10734            )),
10735            unexpected => self.expected(
10736                "a string value",
10737                TokenWithSpan {
10738                    token: unexpected,
10739                    span,
10740                },
10741            ),
10742        }
10743    }
10744
10745    /// Parse an unsigned literal integer/long
10746    pub fn parse_literal_uint(&mut self) -> Result<u64, ParserError> {
10747        let next_token = self.next_token();
10748        match next_token.token {
10749            Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start),
10750            _ => self.expected("literal int", next_token),
10751        }
10752    }
10753
10754    /// Parse the body of a `CREATE FUNCTION` specified as a string.
10755    /// e.g. `CREATE FUNCTION ... AS $$ body $$`.
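    ///
    /// On PostgreSQL (and the generic dialect) a dollar-quoted body such as
    /// `$$ SELECT 1 $$` is returned as a [`Value::DollarQuotedString`];
    /// otherwise the body is parsed as an ordinary string literal.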
10756    fn parse_create_function_body_string(&mut self) -> Result<Expr, ParserError> {
10757        let peek_token = self.peek_token();
10758        let span = peek_token.span;
10759        match peek_token.token {
10760            Token::DollarQuotedString(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
10761            {
10762                self.next_token();
10763                Ok(Expr::Value(Value::DollarQuotedString(s).with_span(span)))
10764            }
10765            _ => Ok(Expr::Value(
10766                Value::SingleQuotedString(self.parse_literal_string()?).with_span(span),
10767            )),
10768        }
10769    }
10770
10771    /// Parse a literal string
10772    pub fn parse_literal_string(&mut self) -> Result<String, ParserError> {
10773        let next_token = self.next_token();
10774        match next_token.token {
10775            Token::Word(Word {
10776                value,
10777                keyword: Keyword::NoKeyword,
10778                ..
10779            }) => Ok(value),
10780            Token::SingleQuotedString(s) => Ok(s),
10781            Token::DoubleQuotedString(s) => Ok(s),
10782            Token::EscapedStringLiteral(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
10783                Ok(s)
10784            }
10785            Token::UnicodeStringLiteral(s) => Ok(s),
10786            _ => self.expected("literal string", next_token),
10787        }
10788    }
10789
10790    /// Parse a boolean string
10791    pub(crate) fn parse_boolean_string(&mut self) -> Result<bool, ParserError> {
10792        match self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]) {
10793            Some(Keyword::TRUE) => Ok(true),
10794            Some(Keyword::FALSE) => Ok(false),
10795            _ => self.expected("TRUE or FALSE", self.peek_token()),
10796        }
10797    }
10798
10799    /// Parse a Unicode normalization clause,
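    /// such as `expr IS NFC NORMALIZED` or `expr IS NOT NORMALIZED`, where the
    /// optional form keyword is one of `NFC`, `NFD`, `NFKC`, or `NFKD`.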
10800    pub fn parse_unicode_is_normalized(&mut self, expr: Expr) -> Result<Expr, ParserError> {
10801        let neg = self.parse_keyword(Keyword::NOT);
10802        let normalized_form = self.maybe_parse(|parser| {
10803            match parser.parse_one_of_keywords(&[
10804                Keyword::NFC,
10805                Keyword::NFD,
10806                Keyword::NFKC,
10807                Keyword::NFKD,
10808            ]) {
10809                Some(Keyword::NFC) => Ok(NormalizationForm::NFC),
10810                Some(Keyword::NFD) => Ok(NormalizationForm::NFD),
10811                Some(Keyword::NFKC) => Ok(NormalizationForm::NFKC),
10812                Some(Keyword::NFKD) => Ok(NormalizationForm::NFKD),
10813                _ => parser.expected("unicode normalization form", parser.peek_token()),
10814            }
10815        })?;
10816        if self.parse_keyword(Keyword::NORMALIZED) {
10817            return Ok(Expr::IsNormalized {
10818                expr: Box::new(expr),
10819                form: normalized_form,
10820                negated: neg,
10821            });
10822        }
10823        self.expected("unicode normalization form", self.peek_token())
10824    }
10825
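    /// Parses the parenthesized member list of an `ENUM(...)` type, where each
    /// member is a string literal optionally followed by `= <number>`
    /// (as in ClickHouse's `ENUM('a' = 1, 'b' = 2)`).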
10826    pub fn parse_enum_values(&mut self) -> Result<Vec<EnumMember>, ParserError> {
10827        self.expect_token(&Token::LParen)?;
10828        let values = self.parse_comma_separated(|parser| {
10829            let name = parser.parse_literal_string()?;
10830            let e = if parser.consume_token(&Token::Eq) {
10831                let value = parser.parse_number()?;
10832                EnumMember::NamedValue(name, value)
10833            } else {
10834                EnumMember::Name(name)
10835            };
10836            Ok(e)
10837        })?;
10838        self.expect_token(&Token::RParen)?;
10839
10840        Ok(values)
10841    }
10842
10843    /// Parse a SQL datatype (in the context of a CREATE TABLE statement for example)
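    ///
    /// An illustrative sketch of standalone usage (marked `ignore`, so it is
    /// not compiled or run as a doctest):
    ///
    /// ```rust,ignore
    /// use sqlparser::ast::{ArrayElemTypeDef, DataType};
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("ARRAY<BIGINT>").unwrap();
    /// let ty = parser.parse_data_type().unwrap();
    /// assert_eq!(
    ///     ty,
    ///     DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(DataType::BigInt(None))))
    /// );
    /// ```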
10844    pub fn parse_data_type(&mut self) -> Result<DataType, ParserError> {
10845        let (ty, trailing_bracket) = self.parse_data_type_helper()?;
10846        if trailing_bracket.0 {
10847            return parser_err!(
10848                format!("unmatched > after parsing data type {ty}"),
10849                self.peek_token()
10850            );
10851        }
10852
10853        Ok(ty)
10854    }
10855
10856    fn parse_data_type_helper(
10857        &mut self,
10858    ) -> Result<(DataType, MatchedTrailingBracket), ParserError> {
10859        let dialect = self.dialect;
10860        self.advance_token();
10861        let next_token = self.get_current_token();
10862        let next_token_index = self.get_current_index();
10863
10864        let mut trailing_bracket: MatchedTrailingBracket = false.into();
10865        let mut data = match &next_token.token {
10866            Token::Word(w) => match w.keyword {
10867                Keyword::BOOLEAN => Ok(DataType::Boolean),
10868                Keyword::BOOL => Ok(DataType::Bool),
10869                Keyword::FLOAT => {
10870                    let precision = self.parse_exact_number_optional_precision_scale()?;
10871
10872                    if self.parse_keyword(Keyword::UNSIGNED) {
10873                        Ok(DataType::FloatUnsigned(precision))
10874                    } else {
10875                        Ok(DataType::Float(precision))
10876                    }
10877                }
10878                Keyword::REAL => {
10879                    if self.parse_keyword(Keyword::UNSIGNED) {
10880                        Ok(DataType::RealUnsigned)
10881                    } else {
10882                        Ok(DataType::Real)
10883                    }
10884                }
10885                Keyword::FLOAT4 => Ok(DataType::Float4),
10886                Keyword::FLOAT32 => Ok(DataType::Float32),
10887                Keyword::FLOAT64 => Ok(DataType::Float64),
10888                Keyword::FLOAT8 => Ok(DataType::Float8),
10889                Keyword::DOUBLE => {
10890                    if self.parse_keyword(Keyword::PRECISION) {
10891                        if self.parse_keyword(Keyword::UNSIGNED) {
10892                            Ok(DataType::DoublePrecisionUnsigned)
10893                        } else {
10894                            Ok(DataType::DoublePrecision)
10895                        }
10896                    } else {
10897                        let precision = self.parse_exact_number_optional_precision_scale()?;
10898
10899                        if self.parse_keyword(Keyword::UNSIGNED) {
10900                            Ok(DataType::DoubleUnsigned(precision))
10901                        } else {
10902                            Ok(DataType::Double(precision))
10903                        }
10904                    }
10905                }
10906                Keyword::TINYINT => {
10907                    let optional_precision = self.parse_optional_precision();
10908                    if self.parse_keyword(Keyword::UNSIGNED) {
10909                        Ok(DataType::TinyIntUnsigned(optional_precision?))
10910                    } else {
10911                        if dialect.supports_data_type_signed_suffix() {
10912                            let _ = self.parse_keyword(Keyword::SIGNED);
10913                        }
10914                        Ok(DataType::TinyInt(optional_precision?))
10915                    }
10916                }
10917                Keyword::INT2 => {
10918                    let optional_precision = self.parse_optional_precision();
10919                    if self.parse_keyword(Keyword::UNSIGNED) {
10920                        Ok(DataType::Int2Unsigned(optional_precision?))
10921                    } else {
10922                        Ok(DataType::Int2(optional_precision?))
10923                    }
10924                }
10925                Keyword::SMALLINT => {
10926                    let optional_precision = self.parse_optional_precision();
10927                    if self.parse_keyword(Keyword::UNSIGNED) {
10928                        Ok(DataType::SmallIntUnsigned(optional_precision?))
10929                    } else {
10930                        if dialect.supports_data_type_signed_suffix() {
10931                            let _ = self.parse_keyword(Keyword::SIGNED);
10932                        }
10933                        Ok(DataType::SmallInt(optional_precision?))
10934                    }
10935                }
10936                Keyword::MEDIUMINT => {
10937                    let optional_precision = self.parse_optional_precision();
10938                    if self.parse_keyword(Keyword::UNSIGNED) {
10939                        Ok(DataType::MediumIntUnsigned(optional_precision?))
10940                    } else {
10941                        if dialect.supports_data_type_signed_suffix() {
10942                            let _ = self.parse_keyword(Keyword::SIGNED);
10943                        }
10944                        Ok(DataType::MediumInt(optional_precision?))
10945                    }
10946                }
10947                Keyword::INT => {
10948                    let optional_precision = self.parse_optional_precision();
10949                    if self.parse_keyword(Keyword::UNSIGNED) {
10950                        Ok(DataType::IntUnsigned(optional_precision?))
10951                    } else {
10952                        if dialect.supports_data_type_signed_suffix() {
10953                            let _ = self.parse_keyword(Keyword::SIGNED);
10954                        }
10955                        Ok(DataType::Int(optional_precision?))
10956                    }
10957                }
10958                Keyword::INT4 => {
10959                    let optional_precision = self.parse_optional_precision();
10960                    if self.parse_keyword(Keyword::UNSIGNED) {
10961                        Ok(DataType::Int4Unsigned(optional_precision?))
10962                    } else {
10963                        Ok(DataType::Int4(optional_precision?))
10964                    }
10965                }
10966                Keyword::INT8 => {
10967                    let optional_precision = self.parse_optional_precision();
10968                    if self.parse_keyword(Keyword::UNSIGNED) {
10969                        Ok(DataType::Int8Unsigned(optional_precision?))
10970                    } else {
10971                        Ok(DataType::Int8(optional_precision?))
10972                    }
10973                }
10974                Keyword::INT16 => Ok(DataType::Int16),
10975                Keyword::INT32 => Ok(DataType::Int32),
10976                Keyword::INT64 => Ok(DataType::Int64),
10977                Keyword::INT128 => Ok(DataType::Int128),
10978                Keyword::INT256 => Ok(DataType::Int256),
10979                Keyword::INTEGER => {
10980                    let optional_precision = self.parse_optional_precision();
10981                    if self.parse_keyword(Keyword::UNSIGNED) {
10982                        Ok(DataType::IntegerUnsigned(optional_precision?))
10983                    } else {
10984                        if dialect.supports_data_type_signed_suffix() {
10985                            let _ = self.parse_keyword(Keyword::SIGNED);
10986                        }
10987                        Ok(DataType::Integer(optional_precision?))
10988                    }
10989                }
10990                Keyword::BIGINT => {
10991                    let optional_precision = self.parse_optional_precision();
10992                    if self.parse_keyword(Keyword::UNSIGNED) {
10993                        Ok(DataType::BigIntUnsigned(optional_precision?))
10994                    } else {
10995                        if dialect.supports_data_type_signed_suffix() {
10996                            let _ = self.parse_keyword(Keyword::SIGNED);
10997                        }
10998                        Ok(DataType::BigInt(optional_precision?))
10999                    }
11000                }
11001                Keyword::HUGEINT => Ok(DataType::HugeInt),
11002                Keyword::UBIGINT => Ok(DataType::UBigInt),
11003                Keyword::UHUGEINT => Ok(DataType::UHugeInt),
11004                Keyword::USMALLINT => Ok(DataType::USmallInt),
11005                Keyword::UTINYINT => Ok(DataType::UTinyInt),
11006                Keyword::UINT8 => Ok(DataType::UInt8),
11007                Keyword::UINT16 => Ok(DataType::UInt16),
11008                Keyword::UINT32 => Ok(DataType::UInt32),
11009                Keyword::UINT64 => Ok(DataType::UInt64),
11010                Keyword::UINT128 => Ok(DataType::UInt128),
11011                Keyword::UINT256 => Ok(DataType::UInt256),
11012                Keyword::VARCHAR => Ok(DataType::Varchar(self.parse_optional_character_length()?)),
11013                Keyword::NVARCHAR => {
11014                    Ok(DataType::Nvarchar(self.parse_optional_character_length()?))
11015                }
11016                Keyword::CHARACTER => {
11017                    if self.parse_keyword(Keyword::VARYING) {
11018                        Ok(DataType::CharacterVarying(
11019                            self.parse_optional_character_length()?,
11020                        ))
11021                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
11022                        Ok(DataType::CharacterLargeObject(
11023                            self.parse_optional_precision()?,
11024                        ))
11025                    } else {
11026                        Ok(DataType::Character(self.parse_optional_character_length()?))
11027                    }
11028                }
11029                Keyword::CHAR => {
11030                    if self.parse_keyword(Keyword::VARYING) {
11031                        Ok(DataType::CharVarying(
11032                            self.parse_optional_character_length()?,
11033                        ))
11034                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
11035                        Ok(DataType::CharLargeObject(self.parse_optional_precision()?))
11036                    } else {
11037                        Ok(DataType::Char(self.parse_optional_character_length()?))
11038                    }
11039                }
11040                Keyword::CLOB => Ok(DataType::Clob(self.parse_optional_precision()?)),
11041                Keyword::BINARY => Ok(DataType::Binary(self.parse_optional_precision()?)),
11042                Keyword::VARBINARY => Ok(DataType::Varbinary(self.parse_optional_binary_length()?)),
11043                Keyword::BLOB => Ok(DataType::Blob(self.parse_optional_precision()?)),
11044                Keyword::TINYBLOB => Ok(DataType::TinyBlob),
11045                Keyword::MEDIUMBLOB => Ok(DataType::MediumBlob),
11046                Keyword::LONGBLOB => Ok(DataType::LongBlob),
11047                Keyword::BYTES => Ok(DataType::Bytes(self.parse_optional_precision()?)),
11048                Keyword::BIT => {
11049                    if self.parse_keyword(Keyword::VARYING) {
11050                        Ok(DataType::BitVarying(self.parse_optional_precision()?))
11051                    } else {
11052                        Ok(DataType::Bit(self.parse_optional_precision()?))
11053                    }
11054                }
11055                Keyword::VARBIT => Ok(DataType::VarBit(self.parse_optional_precision()?)),
11056                Keyword::UUID => Ok(DataType::Uuid),
11057                Keyword::DATE => Ok(DataType::Date),
11058                Keyword::DATE32 => Ok(DataType::Date32),
11059                Keyword::RANGE_DATE => Ok(DataType::Range(Box::new(DataType::Date))),
11060                Keyword::RANGE_DATETIME => {
11061                    Ok(DataType::Range(Box::new(DataType::Datetime(None))))
11062                }
11063                Keyword::RANGE_TIMESTAMP => {
11064                    Ok(DataType::Range(Box::new(DataType::Timestamp(None, TimezoneInfo::None))))
11065                }
11066                Keyword::RANGE => {
11067                    self.expect_token(&Token::Lt)?;
11068                    let (inside_type, _trailing_bracket) = self.parse_data_type_helper()?;
11069                    trailing_bracket = self.expect_closing_angle_bracket(_trailing_bracket)?;
11070                    Ok(DataType::Range(Box::new(inside_type)))
11071                }
11072                Keyword::DATETIME => Ok(DataType::Datetime(self.parse_optional_precision()?)),
11073                Keyword::DATETIME64 => {
11074                    self.prev_token();
11075                    let (precision, time_zone) = self.parse_datetime_64()?;
11076                    Ok(DataType::Datetime64(precision, time_zone))
11077                }
11078                Keyword::TIMESTAMP => {
11079                    let precision = self.parse_optional_precision()?;
11080                    let tz = if self.parse_keyword(Keyword::WITH) {
11081                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
11082                        TimezoneInfo::WithTimeZone
11083                    } else if self.parse_keyword(Keyword::WITHOUT) {
11084                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
11085                        TimezoneInfo::WithoutTimeZone
11086                    } else {
11087                        TimezoneInfo::None
11088                    };
11089                    Ok(DataType::Timestamp(precision, tz))
11090                }
11091                Keyword::TIMESTAMPTZ => Ok(DataType::Timestamp(
11092                    self.parse_optional_precision()?,
11093                    TimezoneInfo::Tz,
11094                )),
11095                Keyword::TIMESTAMP_NTZ => Ok(DataType::TimestampNtz),
11096                Keyword::TIME => {
11097                    let precision = self.parse_optional_precision()?;
11098                    let tz = if self.parse_keyword(Keyword::WITH) {
11099                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
11100                        TimezoneInfo::WithTimeZone
11101                    } else if self.parse_keyword(Keyword::WITHOUT) {
11102                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
11103                        TimezoneInfo::WithoutTimeZone
11104                    } else {
11105                        TimezoneInfo::None
11106                    };
11107                    Ok(DataType::Time(precision, tz))
11108                }
11109                Keyword::TIMETZ => Ok(DataType::Time(
11110                    self.parse_optional_precision()?,
11111                    TimezoneInfo::Tz,
11112                )),
11113                Keyword::INTERVAL => {
11114                    if self.dialect.supports_interval_options() {
11115                        let fields = self.maybe_parse_optional_interval_fields()?;
11116                        let precision = self.parse_optional_precision()?;
11117                        Ok(DataType::Interval { fields, precision })
11118                    } else {
11119                        Ok(DataType::Interval {
11120                            fields: None,
11121                            precision: None,
11122                        })
11123                    }
11124                }
11125                Keyword::JSON => Ok(DataType::JSON),
11126                Keyword::JSONB => Ok(DataType::JSONB),
11127                Keyword::REGCLASS => Ok(DataType::Regclass),
11128                Keyword::STRING => Ok(DataType::String(self.parse_optional_precision()?)),
11129                Keyword::FIXEDSTRING => {
11130                    self.expect_token(&Token::LParen)?;
11131                    let character_length = self.parse_literal_uint()?;
11132                    self.expect_token(&Token::RParen)?;
11133                    Ok(DataType::FixedString(character_length))
11134                }
11135                Keyword::TEXT => Ok(DataType::Text),
11136                Keyword::TINYTEXT => Ok(DataType::TinyText),
11137                Keyword::MEDIUMTEXT => Ok(DataType::MediumText),
11138                Keyword::LONGTEXT => Ok(DataType::LongText),
11139                Keyword::BYTEA => Ok(DataType::Bytea),
11140                Keyword::NUMERIC => Ok(DataType::Numeric(
11141                    self.parse_exact_number_optional_precision_scale()?,
11142                )),
11143                Keyword::DECIMAL => {
11144                    let precision = self.parse_exact_number_optional_precision_scale()?;
11145
11146                    if self.parse_keyword(Keyword::UNSIGNED) {
11147                        Ok(DataType::DecimalUnsigned(precision))
11148                    } else {
11149                        Ok(DataType::Decimal(precision))
11150                    }
11151                }
11152                Keyword::DEC => {
11153                    let precision = self.parse_exact_number_optional_precision_scale()?;
11154
11155                    if self.parse_keyword(Keyword::UNSIGNED) {
11156                        Ok(DataType::DecUnsigned(precision))
11157                    } else {
11158                        Ok(DataType::Dec(precision))
11159                    }
11160                }
11161                Keyword::BIGNUMERIC => Ok(DataType::BigNumeric(
11162                    self.parse_exact_number_optional_precision_scale()?,
11163                )),
11164                Keyword::BIGDECIMAL => Ok(DataType::BigDecimal(
11165                    self.parse_exact_number_optional_precision_scale()?,
11166                )),
11167                Keyword::ENUM => Ok(DataType::Enum(self.parse_enum_values()?, None)),
11168                Keyword::ENUM8 => Ok(DataType::Enum(self.parse_enum_values()?, Some(8))),
11169                Keyword::ENUM16 => Ok(DataType::Enum(self.parse_enum_values()?, Some(16))),
11170                Keyword::SET => Ok(DataType::Set(self.parse_string_values()?)),
11171                Keyword::ARRAY => {
11172                    if dialect_of!(self is SnowflakeDialect) {
11173                        Ok(DataType::Array(ArrayElemTypeDef::None))
11174                    } else if dialect_of!(self is ClickHouseDialect) {
11175                        Ok(self.parse_sub_type(|internal_type| {
11176                            DataType::Array(ArrayElemTypeDef::Parenthesis(internal_type))
11177                        })?)
11178                    } else {
11179                        self.expect_token(&Token::Lt)?;
11180                        let (inside_type, _trailing_bracket) = self.parse_data_type_helper()?;
11181                        trailing_bracket = self.expect_closing_angle_bracket(_trailing_bracket)?;
11182                        Ok(DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
11183                            inside_type,
11184                        ))))
11185                    }
11186                }
11187                Keyword::STRUCT if dialect_is!(dialect is DuckDbDialect) => {
11188                    self.prev_token();
11189                    let field_defs = self.parse_duckdb_struct_type_def()?;
11190                    Ok(DataType::Struct(field_defs, StructBracketKind::Parentheses))
11191                }
11192                Keyword::STRUCT if dialect_is!(dialect is BigQueryDialect | GenericDialect) => {
11193                    self.prev_token();
11194                    let (field_defs, _trailing_bracket) =
11195                        self.parse_struct_type_def(Self::parse_struct_field_def)?;
11196                    trailing_bracket = _trailing_bracket;
11197                    Ok(DataType::Struct(
11198                        field_defs,
11199                        StructBracketKind::AngleBrackets,
11200                    ))
11201                }
11202                Keyword::UNION if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
11203                    self.prev_token();
11204                    let fields = self.parse_union_type_def()?;
11205                    Ok(DataType::Union(fields))
11206                }
11207                Keyword::NULLABLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11208                    Ok(self.parse_sub_type(DataType::Nullable)?)
11209                }
11210                Keyword::LOWCARDINALITY if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11211                    Ok(self.parse_sub_type(DataType::LowCardinality)?)
11212                }
11213                Keyword::MAP if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11214                    self.prev_token();
11215                    let (key_data_type, value_data_type) = self.parse_click_house_map_def()?;
11216                    Ok(DataType::Map(
11217                        Box::new(key_data_type),
11218                        Box::new(value_data_type),
11219                    ))
11220                }
11221                Keyword::NESTED if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11222                    self.expect_token(&Token::LParen)?;
11223                    let field_defs = self.parse_comma_separated(Parser::parse_column_def)?;
11224                    self.expect_token(&Token::RParen)?;
11225                    Ok(DataType::Nested(field_defs))
11226                }
11227                Keyword::TUPLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11228                    self.prev_token();
11229                    let field_defs = self.parse_click_house_tuple_def()?;
11230                    Ok(DataType::Tuple(field_defs))
11231                }
11232                Keyword::TRIGGER => Ok(DataType::Trigger),
11233                Keyword::ANY if self.peek_keyword(Keyword::TYPE) => {
11234                    let _ = self.parse_keyword(Keyword::TYPE);
11235                    Ok(DataType::AnyType)
11236                }
11237                Keyword::TABLE => {
11238                    if self.peek_token() == Token::LParen {
11239                        let columns = self.parse_returns_table_columns()?;
11240                        Ok(DataType::Table(Some(columns)))
11241                    } else if self.peek_token() == Token::Lt {
11242                        let columns = self.parse_table_columns_angle_bracket()?;
11243                        Ok(DataType::Table(Some(columns)))
11244                    } else {
11245                        Ok(DataType::Table(None))
11246                    }
11247                }
11248                Keyword::SIGNED => {
11249                    if self.parse_keyword(Keyword::INTEGER) {
11250                        Ok(DataType::SignedInteger)
11251                    } else {
11252                        Ok(DataType::Signed)
11253                    }
11254                }
11255                Keyword::UNSIGNED => {
11256                    if self.parse_keyword(Keyword::INTEGER) {
11257                        Ok(DataType::UnsignedInteger)
11258                    } else {
11259                        Ok(DataType::Unsigned)
11260                    }
11261                }
11262                Keyword::TSVECTOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
11263                    Ok(DataType::TsVector)
11264                }
11265                Keyword::TSQUERY if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
11266                    Ok(DataType::TsQuery)
11267                }
11268                _ => {
11269                    self.prev_token();
11270                    let type_name = self.parse_object_name(false)?;
11271                    if let Some(modifiers) = self.parse_optional_type_modifiers()? {
11272                        Ok(DataType::Custom(type_name, modifiers))
11273                    } else {
11274                        Ok(DataType::Custom(type_name, vec![]))
11275                    }
11276                }
11277            },
11278            _ => self.expected_at("a data type name", next_token_index),
11279        }?;
11280
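        // Optional Postgres-style array suffixes after the base type, e.g.
        // `INT[]`, `INT[3]`, or nested forms such as `INT[3][]`.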
11281        if self.dialect.supports_array_typedef_with_brackets() {
11282            while self.consume_token(&Token::LBracket) {
11283                // Parse optional array data type size
11284                let size = self.maybe_parse(|p| p.parse_literal_uint())?;
11285                self.expect_token(&Token::RBracket)?;
11286                data = DataType::Array(ArrayElemTypeDef::SquareBracket(Box::new(data), size))
11287            }
11288        }
11289        Ok((data, trailing_bracket))
11290    }
11291
11292    fn parse_returns_table_column(&mut self) -> Result<ColumnDef, ParserError> {
11293        self.parse_column_def()
11294    }
11295
11296    fn parse_returns_table_columns(&mut self) -> Result<Vec<ColumnDef>, ParserError> {
11297        self.expect_token(&Token::LParen)?;
11298        let columns = self.parse_comma_separated(Parser::parse_returns_table_column)?;
11299        self.expect_token(&Token::RParen)?;
11300        Ok(columns)
11301    }
11302
11303    fn parse_table_columns_angle_bracket(&mut self) -> Result<Vec<ColumnDef>, ParserError> {
11304        self.expect_token(&Token::Lt)?;
11305        let mut columns = vec![];
11306        loop {
11307            let name = self.parse_identifier()?;
11308            let (data_type, trailing_bracket) = self.parse_data_type_helper()?;
11309            columns.push(ColumnDef {
11310                name,
11311                data_type,
11312                options: vec![],
11313            });
11314            if trailing_bracket.0 {
11315                return Ok(columns);
11316            }
11317            if !self.consume_token(&Token::Comma) {
11318                self.expect_closing_angle_bracket(false.into())?;
11319                return Ok(columns);
11320            }
11321        }
11322    }
11323
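    /// Parses a parenthesized, comma-separated list of single-quoted strings,
    /// as used for the members of a MySQL `SET('a', 'b')` column type.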
11324    pub fn parse_string_values(&mut self) -> Result<Vec<String>, ParserError> {
11325        self.expect_token(&Token::LParen)?;
11326        let mut values = Vec::new();
11327        loop {
11328            let next_token = self.next_token();
11329            match next_token.token {
11330                Token::SingleQuotedString(value) => values.push(value),
11331                _ => self.expected("a string", next_token)?,
11332            }
11333            let next_token = self.next_token();
11334            match next_token.token {
11335                Token::Comma => (),
11336                Token::RParen => break,
11337                _ => self.expected(", or )", next_token)?,
11338            }
11339        }
11340        Ok(values)
11341    }
11342
11343    /// Strictly parse `identifier AS identifier`
11344    pub fn parse_identifier_with_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
11345        let ident = self.parse_identifier()?;
11346        self.expect_keyword_is(Keyword::AS)?;
11347        let alias = self.parse_identifier()?;
11348        Ok(IdentWithAlias { ident, alias })
11349    }
11350
11351    /// Parse `identifier [AS] identifier` where the AS keyword is optional
11352    fn parse_identifier_with_optional_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
11353        let ident = self.parse_identifier()?;
11354        let _after_as = self.parse_keyword(Keyword::AS);
11355        let alias = self.parse_identifier()?;
11356        Ok(IdentWithAlias { ident, alias })
11357    }
11358
11359    /// Parse comma-separated list of parenthesized queries for pipe operators
11360    fn parse_pipe_operator_queries(&mut self) -> Result<Vec<Query>, ParserError> {
11361        self.parse_comma_separated(|parser| {
11362            parser.expect_token(&Token::LParen)?;
11363            let query = parser.parse_query()?;
11364            parser.expect_token(&Token::RParen)?;
11365            Ok(*query)
11366        })
11367    }
11368
11369    /// Parse set quantifier for pipe operators that require DISTINCT. E.g. INTERSECT and EXCEPT
11370    fn parse_distinct_required_set_quantifier(
11371        &mut self,
11372        operator_name: &str,
11373    ) -> Result<SetQuantifier, ParserError> {
11374        let quantifier = self.parse_set_quantifier(&Some(SetOperator::Intersect));
11375        match quantifier {
11376            SetQuantifier::Distinct | SetQuantifier::DistinctByName => Ok(quantifier),
11377            _ => Err(ParserError::ParserError(format!(
11378                "{operator_name} pipe operator requires DISTINCT modifier",
11379            ))),
11380        }
11381    }
11382
11383    /// Parse optional identifier alias (with or without AS keyword)
11384    fn parse_identifier_optional_alias(&mut self) -> Result<Option<Ident>, ParserError> {
11385        if self.parse_keyword(Keyword::AS) {
11386            Ok(Some(self.parse_identifier()?))
11387        } else {
11388            // Check if the next token is an identifier (implicit alias)
11389            self.maybe_parse(|parser| parser.parse_identifier())
11390        }
11391    }
11392
11393    /// Optionally parses an alias for a select list item
11394    fn maybe_parse_select_item_alias(&mut self) -> Result<Option<Ident>, ParserError> {
11395        fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
11396            parser.dialect.is_select_item_alias(explicit, kw, parser)
11397        }
11398        self.parse_optional_alias_inner(None, validator)
11399    }
11400
11401    /// Optionally parses an alias for a table like in `... FROM generate_series(1, 10) AS t (col)`.
11402    /// In this case, the alias is allowed to optionally name the columns in the table, in
11403    /// addition to the table itself.
11404    pub fn maybe_parse_table_alias(&mut self) -> Result<Option<TableAlias>, ParserError> {
11405        fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
11406            parser.dialect.is_table_factor_alias(explicit, kw, parser)
11407        }
11408        match self.parse_optional_alias_inner(None, validator)? {
11409            Some(name) => {
11410                let columns = self.parse_table_alias_column_defs()?;
11411                Ok(Some(TableAlias { name, columns }))
11412            }
11413            None => Ok(None),
11414        }
11415    }
11416
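    /// Parses MySQL-style table-level index hints such as
    /// `USE INDEX (i1), IGNORE INDEX FOR ORDER BY (i2)`, where the optional
    /// `FOR` clause is one of `JOIN`, `ORDER BY`, or `GROUP BY`.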
11417    fn parse_table_index_hints(&mut self) -> Result<Vec<TableIndexHints>, ParserError> {
11418        let mut hints = vec![];
11419        while let Some(hint_type) =
11420            self.parse_one_of_keywords(&[Keyword::USE, Keyword::IGNORE, Keyword::FORCE])
11421        {
11422            let hint_type = match hint_type {
11423                Keyword::USE => TableIndexHintType::Use,
11424                Keyword::IGNORE => TableIndexHintType::Ignore,
11425                Keyword::FORCE => TableIndexHintType::Force,
11426                _ => {
11427                    return self.expected(
11428                        "expected to match USE/IGNORE/FORCE keyword",
11429                        self.peek_token(),
11430                    )
11431                }
11432            };
11433            let index_type = match self.parse_one_of_keywords(&[Keyword::INDEX, Keyword::KEY]) {
11434                Some(Keyword::INDEX) => TableIndexType::Index,
11435                Some(Keyword::KEY) => TableIndexType::Key,
11436                _ => {
11437                    return self.expected("expected to match INDEX/KEY keyword", self.peek_token())
11438                }
11439            };
11440            let for_clause = if self.parse_keyword(Keyword::FOR) {
11441                let clause = if self.parse_keyword(Keyword::JOIN) {
11442                    TableIndexHintForClause::Join
11443                } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
11444                    TableIndexHintForClause::OrderBy
11445                } else if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
11446                    TableIndexHintForClause::GroupBy
11447                } else {
11448                    return self.expected(
11449                        "expected to match JOIN/ORDER BY/GROUP BY table hint in FOR clause",
11450                        self.peek_token(),
11451                    );
11452                };
11453                Some(clause)
11454            } else {
11455                None
11456            };
11457
11458            self.expect_token(&Token::LParen)?;
11459            let index_names = if self.peek_token().token != Token::RParen {
11460                self.parse_comma_separated(Parser::parse_identifier)?
11461            } else {
11462                vec![]
11463            };
11464            self.expect_token(&Token::RParen)?;
11465            hints.push(TableIndexHints {
11466                hint_type,
11467                index_type,
11468                for_clause,
11469                index_names,
11470            });
11471        }
11472        Ok(hints)
11473    }
11474
11475    /// Wrapper for `parse_optional_alias_inner`, kept for backwards compatibility;
11476    /// new flows should use the context-specific methods such as `maybe_parse_select_item_alias`
11477    /// and `maybe_parse_table_alias` instead.
11478    pub fn parse_optional_alias(
11479        &mut self,
11480        reserved_kwds: &[Keyword],
11481    ) -> Result<Option<Ident>, ParserError> {
11482        fn validator(_explicit: bool, _kw: &Keyword, _parser: &mut Parser) -> bool {
11483            false
11484        }
11485        self.parse_optional_alias_inner(Some(reserved_kwds), validator)
11486    }
11487
11488    /// Parses an optional alias after a SQL element such as a select list item
11489    /// or a table name.
11490    ///
11491    /// This method accepts an optional list of reserved keywords or a function
11492    /// to call to validate if a keyword should be parsed as an alias, to allow
11493    /// callers to customize the parsing logic based on their context.
11494    fn parse_optional_alias_inner<F>(
11495        &mut self,
11496        reserved_kwds: Option<&[Keyword]>,
11497        validator: F,
11498    ) -> Result<Option<Ident>, ParserError>
11499    where
11500        F: Fn(bool, &Keyword, &mut Parser) -> bool,
11501    {
11502        let after_as = self.parse_keyword(Keyword::AS);
11503
11504        let next_token = self.next_token();
11505        match next_token.token {
11506            // By default, if a word is located after the `AS` keyword we consider it an alias
11507            // as long as it's not reserved.
11508            Token::Word(w)
11509                if after_as || reserved_kwds.is_some_and(|x| !x.contains(&w.keyword)) =>
11510            {
11511                Ok(Some(w.into_ident(next_token.span)))
11512            }
11513            // This pattern allows for customizing the acceptance of words as aliases based on the caller's
11514            // context, such as to what SQL element this word is a potential alias of (select item alias, table name
11515            // alias, etc.) or dialect-specific logic that goes beyond a simple list of reserved keywords.
11516            Token::Word(w) if validator(after_as, &w.keyword, self) => {
11517                Ok(Some(w.into_ident(next_token.span)))
11518            }
11519            // For backwards-compatibility, we accept quoted strings as aliases regardless of the context.
11520            Token::SingleQuotedString(s) => Ok(Some(Ident::with_quote('\'', s))),
11521            Token::DoubleQuotedString(s) => Ok(Some(Ident::with_quote('\"', s))),
11522            _ => {
11523                if after_as {
11524                    return self.expected("an identifier after AS", next_token);
11525                }
11526                self.prev_token();
11527                Ok(None) // no alias found
11528            }
11529        }
11530    }
11531
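    /// Parses an optional `GROUP BY` clause, including `GROUP BY ALL`, the
    /// dialect-specific `WITH ROLLUP | CUBE | TOTALS` modifiers, and
    /// `GROUPING SETS (...)`, e.g. `GROUP BY a, b WITH TOTALS`.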
11532    pub fn parse_optional_group_by(&mut self) -> Result<Option<GroupByExpr>, ParserError> {
11533        if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
11534            let expressions = if self.parse_keyword(Keyword::ALL) {
11535                None
11536            } else {
11537                Some(self.parse_comma_separated(Parser::parse_group_by_expr)?)
11538            };
11539
11540            let mut modifiers = vec![];
11541            if self.dialect.supports_group_by_with_modifier() {
11542                loop {
11543                    if !self.parse_keyword(Keyword::WITH) {
11544                        break;
11545                    }
11546                    let keyword = self.expect_one_of_keywords(&[
11547                        Keyword::ROLLUP,
11548                        Keyword::CUBE,
11549                        Keyword::TOTALS,
11550                    ])?;
11551                    modifiers.push(match keyword {
11552                        Keyword::ROLLUP => GroupByWithModifier::Rollup,
11553                        Keyword::CUBE => GroupByWithModifier::Cube,
11554                        Keyword::TOTALS => GroupByWithModifier::Totals,
11555                        _ => {
11556                            return parser_err!(
11557                                "BUG: expected to match GroupBy modifier keyword",
11558                                self.peek_token().span.start
11559                            )
11560                        }
11561                    });
11562                }
11563            }
11564            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
11565                self.expect_token(&Token::LParen)?;
11566                let result = self.parse_comma_separated(|p| {
11567                    if p.parse_keyword(Keyword::ROLLUP) {
11568                        p.expect_token(&Token::LParen)?;
11569                        let rollup_exprs = p.parse_comma_separated(|p2| p2.parse_tuple(true, true))?;
11570                        p.expect_token(&Token::RParen)?;
11571                        Ok(vec![Expr::Rollup(rollup_exprs)])
11572                    } else if p.parse_keyword(Keyword::CUBE) {
11573                        p.expect_token(&Token::LParen)?;
11574                        let cube_exprs = p.parse_comma_separated(|p2| p2.parse_tuple(true, true))?;
11575                        p.expect_token(&Token::RParen)?;
11576                        Ok(vec![Expr::Cube(cube_exprs)])
11577                    } else if p.peek_token_ref().token == Token::LParen {
11578                        p.parse_tuple(true, true)
11579                    } else {
11580                        Ok(vec![p.parse_expr()?])
11581                    }
11582                })?;
11583                self.expect_token(&Token::RParen)?;
11584                modifiers.push(GroupByWithModifier::GroupingSets(Expr::GroupingSets(
11585                    result,
11586                )));
11587            };
11588            let group_by = match expressions {
11589                None => GroupByExpr::All(modifiers),
11590                Some(exprs) => GroupByExpr::Expressions(exprs, modifiers),
11591            };
11592            Ok(Some(group_by))
11593        } else {
11594            Ok(None)
11595        }
11596    }
11597
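    /// Parses an optional `ORDER BY` clause, including `ORDER BY ALL` for
    /// dialects that support it and ClickHouse-style `INTERPOLATE` options,
    /// e.g. `ORDER BY a DESC, b NULLS LAST`.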
11598    pub fn parse_optional_order_by(&mut self) -> Result<Option<OrderBy>, ParserError> {
11599        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
11600            let order_by =
11601                if self.dialect.supports_order_by_all() && self.parse_keyword(Keyword::ALL) {
11602                    let order_by_options = self.parse_order_by_options()?;
11603                    OrderBy {
11604                        kind: OrderByKind::All(order_by_options),
11605                        interpolate: None,
11606                    }
11607                } else {
11608                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
11609                    let interpolate = if dialect_of!(self is ClickHouseDialect | GenericDialect) {
11610                        self.parse_interpolations()?
11611                    } else {
11612                        None
11613                    };
11614                    OrderBy {
11615                        kind: OrderByKind::Expressions(exprs),
11616                        interpolate,
11617                    }
11618                };
11619            Ok(Some(order_by))
11620        } else {
11621            Ok(None)
11622        }
11623    }
11624
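    /// Parses an optional `LIMIT`/`OFFSET` clause, including the MySQL-style
    /// `LIMIT <offset>, <limit>` form and the ClickHouse-style
    /// `LIMIT <n> BY <exprs>` form, e.g. `LIMIT 10 OFFSET 5` or `LIMIT 2, 10`.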
11625    fn parse_optional_limit_clause(&mut self) -> Result<Option<LimitClause>, ParserError> {
11626        let mut offset = if self.parse_keyword(Keyword::OFFSET) {
11627            Some(self.parse_offset()?)
11628        } else {
11629            None
11630        };
11631
11632        let (limit, limit_by) = if self.parse_keyword(Keyword::LIMIT) {
11633            let expr = self.parse_limit()?;
11634
11635            if self.dialect.supports_limit_comma()
11636                && offset.is_none()
11637                && expr.is_some() // ALL not supported with comma
11638                && self.consume_token(&Token::Comma)
11639            {
11640                let offset = expr.ok_or_else(|| {
11641                    ParserError::ParserError(
11642                        "Missing offset for LIMIT <offset>, <limit>".to_string(),
11643                    )
11644                })?;
11645                return Ok(Some(LimitClause::OffsetCommaLimit {
11646                    offset,
11647                    limit: self.parse_expr()?,
11648                }));
11649            }
11650
11651            let limit_by = if dialect_of!(self is ClickHouseDialect | GenericDialect)
11652                && self.parse_keyword(Keyword::BY)
11653            {
11654                Some(self.parse_comma_separated(Parser::parse_expr)?)
11655            } else {
11656                None
11657            };
11658
11659            (Some(expr), limit_by)
11660        } else {
11661            (None, None)
11662        };
11663
11664        if offset.is_none() && limit.is_some() && self.parse_keyword(Keyword::OFFSET) {
11665            offset = Some(self.parse_offset()?);
11666        }
11667
11668        if offset.is_some() || (limit.is_some() && limit != Some(None)) || limit_by.is_some() {
11669            Ok(Some(LimitClause::LimitOffset {
11670                limit: limit.unwrap_or_default(),
11671                offset,
11672                limit_by: limit_by.unwrap_or_default(),
11673            }))
11674        } else {
11675            Ok(None)
11676        }
11677    }
11678
11679    /// Parse a table object for insertion
11680    /// e.g. `some_database.some_table` or `FUNCTION some_table_func(...)`
11681    pub fn parse_table_object(&mut self) -> Result<TableObject, ParserError> {
11682        if self.dialect.supports_insert_table_function() && self.parse_keyword(Keyword::FUNCTION) {
11683            let fn_name = self.parse_object_name(false)?;
11684            self.parse_function_call(fn_name)
11685                .map(TableObject::TableFunction)
11686        } else {
11687            self.parse_object_name(false).map(TableObject::TableName)
11688        }
11689    }
11690
11691    /// Parse a possibly qualified, possibly quoted identifier, e.g.
11692    /// `foo` or `myschema."table"`
11693    ///
11694    /// The `in_table_clause` parameter indicates whether the object name is a table in a FROM, JOIN,
11695    /// or similar table clause. Currently, this is used only to support unquoted hyphenated identifiers
11696    /// in this context on BigQuery.
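    ///
    /// For example `db.schema."table"` is parsed into an [`ObjectName`] with
    /// three parts, preserving the quoting style of each part.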
11697    pub fn parse_object_name(&mut self, in_table_clause: bool) -> Result<ObjectName, ParserError> {
11698        self.parse_object_name_inner(in_table_clause, false)
11699    }
11700
11701    /// Parse a possibly qualified, possibly quoted identifier, e.g.
11702    /// `foo` or `myschema."table"`
11703    ///
11704    /// The `in_table_clause` parameter indicates whether the object name is a table in a FROM, JOIN,
11705    /// or similar table clause. Currently, this is used only to support unquoted hyphenated identifiers
11706    /// in this context on BigQuery.
11707    ///
11708    /// The `allow_wildcards` parameter indicates whether to allow for wildcards in the object name
11709    /// e.g. *, *.*, `foo`.*, or "foo"."bar"
11710    fn parse_object_name_inner(
11711        &mut self,
11712        in_table_clause: bool,
11713        allow_wildcards: bool,
11714    ) -> Result<ObjectName, ParserError> {
11715        let mut parts = vec![];
11716        if dialect_of!(self is BigQueryDialect) && in_table_clause {
11717            loop {
11718                let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
11719                parts.push(ObjectNamePart::Identifier(ident));
11720                if !self.consume_token(&Token::Period) && !end_with_period {
11721                    break;
11722                }
11723            }
11724        } else {
11725            loop {
11726                if allow_wildcards && self.peek_token().token == Token::Mul {
11727                    let span = self.next_token().span;
11728                    parts.push(ObjectNamePart::Identifier(Ident {
11729                        value: Token::Mul.to_string(),
11730                        quote_style: None,
11731                        span,
11732                    }));
11733                } else if dialect_of!(self is BigQueryDialect) && in_table_clause {
11734                    let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
11735                    parts.push(ObjectNamePart::Identifier(ident));
11736                    if !self.consume_token(&Token::Period) && !end_with_period {
11737                        break;
11738                    }
11739                } else if self.dialect.supports_object_name_double_dot_notation()
11740                    && parts.len() == 1
11741                    && matches!(self.peek_token().token, Token::Period)
11742                {
11743                    // Empty string here means default schema
11744                    parts.push(ObjectNamePart::Identifier(Ident::new("")));
11745                } else {
11746                    let ident = self.parse_identifier()?;
11747                    let part = if self
11748                        .dialect
11749                        .is_identifier_generating_function_name(&ident, &parts)
11750                    {
11751                        self.expect_token(&Token::LParen)?;
11752                        let args: Vec<FunctionArg> =
11753                            self.parse_comma_separated0(Self::parse_function_args, Token::RParen)?;
11754                        self.expect_token(&Token::RParen)?;
11755                        ObjectNamePart::Function(ObjectNamePartFunction { name: ident, args })
11756                    } else {
11757                        ObjectNamePart::Identifier(ident)
11758                    };
11759                    parts.push(part);
11760                }
11761
11762                if !self.consume_token(&Token::Period) {
11763                    break;
11764                }
11765            }
11766        }
11767
11768        // BigQuery accepts any number of quoted identifiers of a table name.
11769        // https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#quoted_identifiers
11770        if dialect_of!(self is BigQueryDialect)
11771            && parts.iter().any(|part| {
11772                part.as_ident()
11773                    .is_some_and(|ident| ident.value.contains('.'))
11774            })
11775        {
11776            parts = parts
11777                .into_iter()
11778                .flat_map(|part| match part.as_ident() {
11779                    Some(ident) => ident
11780                        .value
11781                        .split('.')
11782                        .map(|value| {
11783                            ObjectNamePart::Identifier(Ident {
11784                                value: value.into(),
11785                                quote_style: ident.quote_style,
11786                                span: ident.span,
11787                            })
11788                        })
11789                        .collect::<Vec<_>>(),
11790                    None => vec![part],
11791                })
11792                .collect()
11793        }
11794
11795        Ok(ObjectName(parts))
11796    }
11797
11798    /// Parse identifiers
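    ///
    /// This is a loose parse: it consumes tokens until EOF or an `=` token is
    /// reached, collecting every word token as an [`Ident`] and skipping any
    /// other tokens encountered along the way.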
11799    pub fn parse_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
11800        let mut idents = vec![];
11801        loop {
11802            match &self.peek_token_ref().token {
11803                Token::Word(w) => {
11804                    idents.push(w.clone().into_ident(self.peek_token_ref().span));
11805                }
11806                Token::EOF | Token::Eq => break,
11807                _ => {}
11808            }
11809            self.advance_token();
11810        }
11811        Ok(idents)
11812    }
11813
11814    /// Parse identifiers of form ident1[.identN]*
11815    ///
11816    /// Similar in functionality to [parse_identifiers], with the difference that this
11817    /// function is much stricter about parsing a valid multipart identifier: it does not
11818    /// allow extraneous tokens and fails if any are encountered.
11819    ///
11820    /// For example:
11821    ///
11822    /// ```rust
11823    /// use sqlparser::ast::Ident;
11824    /// use sqlparser::dialect::GenericDialect;
11825    /// use sqlparser::parser::Parser;
11826    ///
11827    /// let dialect = GenericDialect {};
11828    /// let expected = vec![Ident::new("one"), Ident::new("two")];
11829    ///
11830    /// // expected usage
11831    /// let sql = "one.two";
11832    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
11833    /// let actual = parser.parse_multipart_identifier().unwrap();
11834    /// assert_eq!(&actual, &expected);
11835    ///
11836    /// // parse_identifiers is more loose on what it allows, parsing successfully
11837    /// let sql = "one + two";
11838    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
11839    /// let actual = parser.parse_identifiers().unwrap();
11840    /// assert_eq!(&actual, &expected);
11841    ///
11842    /// // expected to strictly fail due to + separator
11843    /// let sql = "one + two";
11844    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
11845    /// let actual = parser.parse_multipart_identifier().unwrap_err();
11846    /// assert_eq!(
11847    ///     actual.to_string(),
11848    ///     "sql parser error: Unexpected token in identifier: +"
11849    /// );
11850    /// ```
11851    ///
11852    /// [parse_identifiers]: Parser::parse_identifiers
11853    pub fn parse_multipart_identifier(&mut self) -> Result<Vec<Ident>, ParserError> {
11854        let mut idents = vec![];
11855
11856        // expecting at least one word for identifier
11857        let next_token = self.next_token();
11858        match next_token.token {
11859            Token::Word(w) => idents.push(w.into_ident(next_token.span)),
11860            Token::EOF => {
11861                return Err(ParserError::ParserError(
11862                    "Empty input when parsing identifier".to_string(),
11863                ))?
11864            }
11865            token => {
11866                return Err(ParserError::ParserError(format!(
11867                    "Unexpected token in identifier: {token}"
11868                )))?
11869            }
11870        };
11871
11872        // parse optional subsequent parts, if any exist
11873        loop {
11874            match self.next_token().token {
11875                // ensure that optional period is succeeded by another identifier
11876                Token::Period => {
11877                    let next_token = self.next_token();
11878                    match next_token.token {
11879                        Token::Word(w) => idents.push(w.into_ident(next_token.span)),
11880                        Token::EOF => {
11881                            return Err(ParserError::ParserError(
11882                                "Trailing period in identifier".to_string(),
11883                            ))?
11884                        }
11885                        token => {
11886                            return Err(ParserError::ParserError(format!(
11887                                "Unexpected token following period in identifier: {token}"
11888                            )))?
11889                        }
11890                    }
11891                }
11892                Token::EOF => break,
11893                token => {
11894                    return Err(ParserError::ParserError(format!(
11895                        "Unexpected token in identifier: {token}"
11896                    )))?
11897                }
11898            }
11899        }
11900
11901        Ok(idents)
11902    }
11903
11904    /// Parse a simple one-word identifier (possibly quoted, possibly a keyword)
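    ///
    /// A minimal usage sketch (illustrative only, using the `GenericDialect`):
    ///
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("foo").unwrap();
    /// let ident = parser.parse_identifier().unwrap();
    /// assert_eq!(ident.value, "foo");
    /// assert_eq!(ident.quote_style, None);
    /// ```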
11905    pub fn parse_identifier(&mut self) -> Result<Ident, ParserError> {
11906        let next_token = self.next_token();
11907        match next_token.token {
11908            Token::Word(w) => Ok(w.into_ident(next_token.span)),
11909            Token::SingleQuotedString(s) => Ok(Ident::with_quote('\'', s)),
11910            Token::DoubleQuotedString(s) => Ok(Ident::with_quote('\"', s)),
11911            _ => self.expected("identifier", next_token),
11912        }
11913    }
11914
11915    /// On BigQuery, hyphens are permitted in unquoted identifiers inside of a FROM or
11916    /// TABLE clause.
11917    ///
11918    /// The first segment must be an ordinary unquoted identifier, e.g. it must not start
11919    /// with a digit. Subsequent segments must be either valid identifiers or
11920    /// integers, e.g. foo-123 is allowed, but foo-123a is not.
11921    ///
11922    /// [BigQuery-lexical](https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical)
11923    ///
11924    /// Returns a tuple of the identifier and a boolean indicating whether it ends with a period.
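    ///
    /// For example (illustrative), BigQuery accepts an unquoted hyphenated table
    /// reference such as `my-project.my_dataset.my_table` in a FROM clause.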
11925    fn parse_unquoted_hyphenated_identifier(&mut self) -> Result<(Ident, bool), ParserError> {
11926        match self.peek_token().token {
11927            Token::Word(w) => {
11928                let quote_style_is_none = w.quote_style.is_none();
11929                let mut requires_whitespace = false;
11930                let mut ident = w.into_ident(self.next_token().span);
11931                if quote_style_is_none {
11932                    while matches!(self.peek_token_no_skip().token, Token::Minus) {
11933                        self.next_token();
11934                        ident.value.push('-');
11935
11936                        let token = self
11937                            .next_token_no_skip()
11938                            .cloned()
11939                            .unwrap_or(TokenWithSpan::wrap(Token::EOF));
11940                        requires_whitespace = match token.token {
11941                            Token::Word(next_word) if next_word.quote_style.is_none() => {
11942                                ident.value.push_str(&next_word.value);
11943                                false
11944                            }
11945                            Token::Number(s, false) => {
11946                                // A number token can represent a decimal value ending with a period, e.g., `Number('123.')`.
11947                                // However, for an [ObjectName], it is part of a hyphenated identifier, e.g., `foo-123.bar`.
11948                                //
11949                                // If a number token is followed by a period, it is part of an [ObjectName].
11950                                // Return the identifier with `true` if the number token is followed by a period, indicating that
11951                                // parsing should continue for the next part of the hyphenated identifier.
11952                                if s.ends_with('.') {
11953                                    let Some(s) = s.split('.').next().filter(|s| {
11954                                        !s.is_empty() && s.chars().all(|c| c.is_ascii_digit())
11955                                    }) else {
11956                                        return self.expected(
11957                                            "continuation of hyphenated identifier",
11958                                            TokenWithSpan::new(Token::Number(s, false), token.span),
11959                                        );
11960                                    };
11961                                    ident.value.push_str(s);
11962                                    return Ok((ident, true));
11963                                } else {
11964                                    ident.value.push_str(&s);
11965                                }
11966                                // If the next token is a period, then it is part of an ObjectName and we don't expect whitespace
11967                                // after the number.
11968                                !matches!(self.peek_token().token, Token::Period)
11969                            }
11970                            _ => {
11971                                return self
11972                                    .expected("continuation of hyphenated identifier", token);
11973                            }
11974                        }
11975                    }
11976
11977                    // If the last segment was a number, we must check that it's followed by whitespace,
11978                    // otherwise foo-123a will be parsed as `foo-123` with the alias `a`.
11979                    if requires_whitespace {
11980                        let token = self.next_token();
11981                        if !matches!(token.token, Token::EOF | Token::Whitespace(_)) {
11982                            return self
11983                                .expected("whitespace following hyphenated identifier", token);
11984                        }
11985                    }
11986                }
11987                Ok((ident, false))
11988            }
11989            _ => Ok((self.parse_identifier()?, false)),
11990        }
11991    }
11992
11993    /// Parses a parenthesized, comma-separated list of column definitions within a view.
11994    fn parse_view_columns(&mut self) -> Result<Vec<ViewColumnDef>, ParserError> {
11995        if self.consume_token(&Token::LParen) {
11996            if self.peek_token().token == Token::RParen {
11997                self.next_token();
11998                Ok(vec![])
11999            } else {
12000                let cols = self.parse_comma_separated_with_trailing_commas(
12001                    Parser::parse_view_column,
12002                    self.dialect.supports_column_definition_trailing_commas(),
12003                    Self::is_reserved_for_column_alias,
12004                )?;
12005                self.expect_token(&Token::RParen)?;
12006                Ok(cols)
12007            }
12008        } else {
12009            Ok(vec![])
12010        }
12011    }
12012
12013    /// Parses a column definition within a view.
12014    fn parse_view_column(&mut self) -> Result<ViewColumnDef, ParserError> {
12015        let name = self.parse_identifier()?;
12016        let options = self.parse_view_column_options()?;
12017        let data_type = if dialect_of!(self is ClickHouseDialect) {
12018            Some(self.parse_data_type()?)
12019        } else {
12020            None
12021        };
12022        Ok(ViewColumnDef {
12023            name,
12024            data_type,
12025            options,
12026        })
12027    }
12028
12029    fn parse_view_column_options(&mut self) -> Result<Option<ColumnOptions>, ParserError> {
12030        let mut options = Vec::new();
12031        loop {
12032            let option = self.parse_optional_column_option()?;
12033            if let Some(option) = option {
12034                options.push(option);
12035            } else {
12036                break;
12037            }
12038        }
12039        if options.is_empty() {
12040            Ok(None)
12041        } else if self.dialect.supports_space_separated_column_options() {
12042            Ok(Some(ColumnOptions::SpaceSeparated(options)))
12043        } else {
12044            Ok(Some(ColumnOptions::CommaSeparated(options)))
12045        }
12046    }
12047
12048    /// Parses a parenthesized comma-separated list of unqualified, possibly quoted identifiers.
12049    /// For example: `(col1, "col 2", ...)`
12050    pub fn parse_parenthesized_column_list(
12051        &mut self,
12052        optional: IsOptional,
12053        allow_empty: bool,
12054    ) -> Result<Vec<Ident>, ParserError> {
12055        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| p.parse_identifier())
12056    }
12057
12058    pub fn parse_parenthesized_compound_identifier_list(
12059        &mut self,
12060        optional: IsOptional,
12061        allow_empty: bool,
12062    ) -> Result<Vec<Expr>, ParserError> {
12063        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
12064            Ok(Expr::CompoundIdentifier(
12065                p.parse_period_separated(|p| p.parse_identifier())?,
12066            ))
12067        })
12068    }
12069
12070    /// Parses a parenthesized comma-separated list of index columns, which can be arbitrary
12071    /// expressions with ordering information (and an opclass in some dialects).
12072    fn parse_parenthesized_index_column_list(&mut self) -> Result<Vec<IndexColumn>, ParserError> {
12073        self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
12074            p.parse_create_index_expr()
12075        })
12076    }
12077
12078    /// Parses a parenthesized comma-separated list of qualified, possibly quoted identifiers.
12079    /// For example: `(db1.sc1.tbl1.col1, db1.sc1.tbl1."col 2", ...)`
12080    pub fn parse_parenthesized_qualified_column_list(
12081        &mut self,
12082        optional: IsOptional,
12083        allow_empty: bool,
12084    ) -> Result<Vec<ObjectName>, ParserError> {
12085        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
12086            p.parse_object_name(true)
12087        })
12088    }
12089
12090    /// Parses a parenthesized comma-separated list of columns using
12091    /// the provided function to parse each element.
12092    fn parse_parenthesized_column_list_inner<F, T>(
12093        &mut self,
12094        optional: IsOptional,
12095        allow_empty: bool,
12096        mut f: F,
12097    ) -> Result<Vec<T>, ParserError>
12098    where
12099        F: FnMut(&mut Parser) -> Result<T, ParserError>,
12100    {
12101        if self.consume_token(&Token::LParen) {
12102            if allow_empty && self.peek_token().token == Token::RParen {
12103                self.next_token();
12104                Ok(vec![])
12105            } else {
12106                let cols = self.parse_comma_separated(|p| f(p))?;
12107                self.expect_token(&Token::RParen)?;
12108                Ok(cols)
12109            }
12110        } else if optional == Optional {
12111            Ok(vec![])
12112        } else {
12113            self.expected("a list of columns in parentheses", self.peek_token())
12114        }
12115    }
12116
12117    /// Parses a parenthesized comma-separated list of table alias column definitions.
12118    fn parse_table_alias_column_defs(&mut self) -> Result<Vec<TableAliasColumnDef>, ParserError> {
12119        if self.consume_token(&Token::LParen) {
12120            let cols = self.parse_comma_separated(|p| {
12121                let name = p.parse_identifier()?;
12122                let data_type = p.maybe_parse(|p| p.parse_data_type())?;
12123                Ok(TableAliasColumnDef { name, data_type })
12124            })?;
12125            self.expect_token(&Token::RParen)?;
12126            Ok(cols)
12127        } else {
12128            Ok(vec![])
12129        }
12130    }
12131
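    /// Parses a required parenthesized unsigned integer, e.g. `(6)`, as used for
    /// data type precision (illustrative example).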
12132    pub fn parse_precision(&mut self) -> Result<u64, ParserError> {
12133        self.expect_token(&Token::LParen)?;
12134        let n = self.parse_literal_uint()?;
12135        self.expect_token(&Token::RParen)?;
12136        Ok(n)
12137    }
12138
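    /// Parses an optional parenthesized precision, returning `None` when the next
    /// token is not `(`, so that e.g. both `TIMESTAMP` and `TIMESTAMP(3)` are
    /// accepted (illustrative example).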
12139    pub fn parse_optional_precision(&mut self) -> Result<Option<u64>, ParserError> {
12140        if self.consume_token(&Token::LParen) {
12141            let n = self.parse_literal_uint()?;
12142            self.expect_token(&Token::RParen)?;
12143            Ok(Some(n))
12144        } else {
12145            Ok(None)
12146        }
12147    }
12148
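    /// Parses an optional interval field qualifier such as `YEAR`, `MONTH`,
    /// `DAY TO SECOND`, or `HOUR TO MINUTE`, returning `None` if no such keyword
    /// follows.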
12149    fn maybe_parse_optional_interval_fields(
12150        &mut self,
12151    ) -> Result<Option<IntervalFields>, ParserError> {
12152        match self.parse_one_of_keywords(&[
12153            // Can be followed by `TO` option
12154            Keyword::YEAR,
12155            Keyword::DAY,
12156            Keyword::HOUR,
12157            Keyword::MINUTE,
12158            // No `TO` option
12159            Keyword::MONTH,
12160            Keyword::SECOND,
12161        ]) {
12162            Some(Keyword::YEAR) => {
12163                if self.peek_keyword(Keyword::TO) {
12164                    self.expect_keyword(Keyword::TO)?;
12165                    self.expect_keyword(Keyword::MONTH)?;
12166                    Ok(Some(IntervalFields::YearToMonth))
12167                } else {
12168                    Ok(Some(IntervalFields::Year))
12169                }
12170            }
12171            Some(Keyword::DAY) => {
12172                if self.peek_keyword(Keyword::TO) {
12173                    self.expect_keyword(Keyword::TO)?;
12174                    match self.expect_one_of_keywords(&[
12175                        Keyword::HOUR,
12176                        Keyword::MINUTE,
12177                        Keyword::SECOND,
12178                    ])? {
12179                        Keyword::HOUR => Ok(Some(IntervalFields::DayToHour)),
12180                        Keyword::MINUTE => Ok(Some(IntervalFields::DayToMinute)),
12181                        Keyword::SECOND => Ok(Some(IntervalFields::DayToSecond)),
12182                        _ => {
12183                            self.prev_token();
12184                            self.expected("HOUR, MINUTE, or SECOND", self.peek_token())
12185                        }
12186                    }
12187                } else {
12188                    Ok(Some(IntervalFields::Day))
12189                }
12190            }
12191            Some(Keyword::HOUR) => {
12192                if self.peek_keyword(Keyword::TO) {
12193                    self.expect_keyword(Keyword::TO)?;
12194                    match self.expect_one_of_keywords(&[Keyword::MINUTE, Keyword::SECOND])? {
12195                        Keyword::MINUTE => Ok(Some(IntervalFields::HourToMinute)),
12196                        Keyword::SECOND => Ok(Some(IntervalFields::HourToSecond)),
12197                        _ => {
12198                            self.prev_token();
12199                            self.expected("MINUTE or SECOND", self.peek_token())
12200                        }
12201                    }
12202                } else {
12203                    Ok(Some(IntervalFields::Hour))
12204                }
12205            }
12206            Some(Keyword::MINUTE) => {
12207                if self.peek_keyword(Keyword::TO) {
12208                    self.expect_keyword(Keyword::TO)?;
12209                    self.expect_keyword(Keyword::SECOND)?;
12210                    Ok(Some(IntervalFields::MinuteToSecond))
12211                } else {
12212                    Ok(Some(IntervalFields::Minute))
12213                }
12214            }
12215            Some(Keyword::MONTH) => Ok(Some(IntervalFields::Month)),
12216            Some(Keyword::SECOND) => Ok(Some(IntervalFields::Second)),
12217            Some(_) => {
12218                self.prev_token();
12219                self.expected(
12220                    "YEAR, MONTH, DAY, HOUR, MINUTE, or SECOND",
12221                    self.peek_token(),
12222                )
12223            }
12224            None => Ok(None),
12225        }
12226    }
12227
12228    /// Parse datetime64 [1]
12229    /// Syntax
12230    /// ```sql
12231    /// DateTime64(precision[, timezone])
12232    /// ```
12233    ///
12234    /// [1]: https://clickhouse.com/docs/en/sql-reference/data-types/datetime64
12235    pub fn parse_datetime_64(&mut self) -> Result<(u64, Option<String>), ParserError> {
12236        self.expect_keyword_is(Keyword::DATETIME64)?;
12237        self.expect_token(&Token::LParen)?;
12238        let precision = self.parse_literal_uint()?;
12239        let time_zone = if self.consume_token(&Token::Comma) {
12240            Some(self.parse_literal_string()?)
12241        } else {
12242            None
12243        };
12244        self.expect_token(&Token::RParen)?;
12245        Ok((precision, time_zone))
12246    }
12247
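    /// Parses an optional parenthesized character length, e.g. the `(10)` in
    /// `VARCHAR(10)` or the `(10 CHARACTERS)` form, returning `None` when no `(`
    /// follows (illustrative examples).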
12248    pub fn parse_optional_character_length(
12249        &mut self,
12250    ) -> Result<Option<CharacterLength>, ParserError> {
12251        if self.consume_token(&Token::LParen) {
12252            let character_length = self.parse_character_length()?;
12253            self.expect_token(&Token::RParen)?;
12254            Ok(Some(character_length))
12255        } else {
12256            Ok(None)
12257        }
12258    }
12259
12260    pub fn parse_optional_binary_length(&mut self) -> Result<Option<BinaryLength>, ParserError> {
12261        if self.consume_token(&Token::LParen) {
12262            let binary_length = self.parse_binary_length()?;
12263            self.expect_token(&Token::RParen)?;
12264            Ok(Some(binary_length))
12265        } else {
12266            Ok(None)
12267        }
12268    }
12269
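    /// Parses a character length: either the keyword `MAX` or an unsigned integer
    /// optionally followed by a `CHARACTERS` or `OCTETS` unit.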
12270    pub fn parse_character_length(&mut self) -> Result<CharacterLength, ParserError> {
12271        if self.parse_keyword(Keyword::MAX) {
12272            return Ok(CharacterLength::Max);
12273        }
12274        let length = self.parse_literal_uint()?;
12275        let unit = if self.parse_keyword(Keyword::CHARACTERS) {
12276            Some(CharLengthUnits::Characters)
12277        } else if self.parse_keyword(Keyword::OCTETS) {
12278            Some(CharLengthUnits::Octets)
12279        } else {
12280            None
12281        };
12282        Ok(CharacterLength::IntegerLength { length, unit })
12283    }
12284
12285    pub fn parse_binary_length(&mut self) -> Result<BinaryLength, ParserError> {
12286        if self.parse_keyword(Keyword::MAX) {
12287            return Ok(BinaryLength::Max);
12288        }
12289        let length = self.parse_literal_uint()?;
12290        Ok(BinaryLength::IntegerLength { length })
12291    }
12292
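    /// Parses an optional parenthesized `(precision [, scale])`, e.g. the
    /// `(10, 2)` in a type such as `FLOAT(10, 2)` (illustrative example).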
12293    pub fn parse_optional_precision_scale(
12294        &mut self,
12295    ) -> Result<(Option<u64>, Option<u64>), ParserError> {
12296        if self.consume_token(&Token::LParen) {
12297            let n = self.parse_literal_uint()?;
12298            let scale = if self.consume_token(&Token::Comma) {
12299                Some(self.parse_literal_uint()?)
12300            } else {
12301                None
12302            };
12303            self.expect_token(&Token::RParen)?;
12304            Ok((Some(n), scale))
12305        } else {
12306            Ok((None, None))
12307        }
12308    }
12309
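    /// Parses the optional `(precision [, scale])` suffix of an exact numeric
    /// type; the scale is parsed as a signed integer, so a negative scale is
    /// accepted where a dialect allows it (illustrative: `NUMERIC(10, -2)`).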
12310    pub fn parse_exact_number_optional_precision_scale(
12311        &mut self,
12312    ) -> Result<ExactNumberInfo, ParserError> {
12313        if self.consume_token(&Token::LParen) {
12314            let precision = self.parse_literal_uint()?;
12315            let scale = if self.consume_token(&Token::Comma) {
12316                Some(self.parse_signed_integer()?)
12317            } else {
12318                None
12319            };
12320
12321            self.expect_token(&Token::RParen)?;
12322
12323            match scale {
12324                None => Ok(ExactNumberInfo::Precision(precision)),
12325                Some(scale) => Ok(ExactNumberInfo::PrecisionAndScale(precision, scale)),
12326            }
12327        } else {
12328            Ok(ExactNumberInfo::None)
12329        }
12330    }
12331
12332    /// Parse an optionally signed integer literal.
12333    fn parse_signed_integer(&mut self) -> Result<i64, ParserError> {
12334        let is_negative = self.consume_token(&Token::Minus);
12335
12336        if !is_negative {
12337            let _ = self.consume_token(&Token::Plus);
12338        }
12339
12340        let current_token = self.peek_token_ref();
12341        match &current_token.token {
12342            Token::Number(s, _) => {
12343                let s = s.clone();
12344                let span_start = current_token.span.start;
12345                self.advance_token();
12346                let value = Self::parse::<i64>(s, span_start)?;
12347                Ok(if is_negative { -value } else { value })
12348            }
12349            _ => self.expected_ref("number", current_token),
12350        }
12351    }
12352
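    /// Parses an optional parenthesized, comma-separated list of type modifiers
    /// (words, numbers, or single-quoted strings), returning them as strings.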
12353    pub fn parse_optional_type_modifiers(&mut self) -> Result<Option<Vec<String>>, ParserError> {
12354        if self.consume_token(&Token::LParen) {
12355            let mut modifiers = Vec::new();
12356            loop {
12357                let next_token = self.next_token();
12358                match next_token.token {
12359                    Token::Word(w) => modifiers.push(w.to_string()),
12360                    Token::Number(n, _) => modifiers.push(n),
12361                    Token::SingleQuotedString(s) => modifiers.push(s),
12362
12363                    Token::Comma => {
12364                        continue;
12365                    }
12366                    Token::RParen => {
12367                        break;
12368                    }
12369                    _ => self.expected("type modifiers", next_token)?,
12370                }
12371            }
12372
12373            Ok(Some(modifiers))
12374        } else {
12375            Ok(None)
12376        }
12377    }
12378
12379    /// Parse a parenthesized sub data type
12380    fn parse_sub_type<F>(&mut self, parent_type: F) -> Result<DataType, ParserError>
12381    where
12382        F: FnOnce(Box<DataType>) -> DataType,
12383    {
12384        self.expect_token(&Token::LParen)?;
12385        let inside_type = self.parse_data_type()?;
12386        self.expect_token(&Token::RParen)?;
12387        Ok(parent_type(inside_type.into()))
12388    }
12389
12390    /// Parse a DELETE statement, returning a `Box`ed SetExpr
12391    ///
12392    /// This is used to reduce the size of the stack frames in debug builds
12393    fn parse_delete_setexpr_boxed(&mut self) -> Result<Box<SetExpr>, ParserError> {
12394        Ok(Box::new(SetExpr::Delete(self.parse_delete()?)))
12395    }
12396
12397    /// Parse a MERGE statement, returning a `Box`ed SetExpr
12398    ///
12399    /// This is used to reduce the size of the stack frames in debug builds
12400    fn parse_merge_setexpr_boxed(&mut self) -> Result<Box<SetExpr>, ParserError> {
12401        Ok(Box::new(SetExpr::Merge(self.parse_merge()?)))
12402    }
12403
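    /// Parses a `DELETE` statement. The leading `DELETE` keyword is expected to
    /// have already been consumed by the caller, so an illustrative remaining
    /// input at this point would be `FROM t USING u WHERE t.id = u.id`.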
12404    pub fn parse_delete(&mut self) -> Result<Statement, ParserError> {
12405        let (tables, with_from_keyword) = if !self.parse_keyword(Keyword::FROM) {
12406            // `FROM` keyword is optional in BigQuery SQL.
12407            // https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax#delete_statement
12408            if dialect_of!(self is BigQueryDialect | GenericDialect) {
12409                (vec![], false)
12410            } else {
12411                let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
12412                self.expect_keyword_is(Keyword::FROM)?;
12413                (tables, true)
12414            }
12415        } else {
12416            (vec![], true)
12417        };
12418
12419        let from = self.parse_comma_separated(Parser::parse_table_and_joins)?;
12420        let using = if self.parse_keyword(Keyword::USING) {
12421            Some(self.parse_comma_separated(Parser::parse_table_and_joins)?)
12422        } else {
12423            None
12424        };
12425        let selection = if self.parse_keyword(Keyword::WHERE) {
12426            Some(self.parse_expr()?)
12427        } else {
12428            None
12429        };
12430        let returning = if self.parse_keyword(Keyword::RETURNING) {
12431            Some(self.parse_comma_separated(Parser::parse_select_item)?)
12432        } else {
12433            None
12434        };
12435        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
12436            self.parse_comma_separated(Parser::parse_order_by_expr)?
12437        } else {
12438            vec![]
12439        };
12440        let limit = if self.parse_keyword(Keyword::LIMIT) {
12441            self.parse_limit()?
12442        } else {
12443            None
12444        };
12445
12446        Ok(Statement::Delete(Delete {
12447            tables,
12448            from: if with_from_keyword {
12449                FromTable::WithFromKeyword(from)
12450            } else {
12451                FromTable::WithoutKeyword(from)
12452            },
12453            using,
12454            selection,
12455            returning,
12456            order_by,
12457            limit,
12458        }))
12459    }
12460
12461    // KILL [CONNECTION | QUERY | MUTATION] processlist_id
12462    pub fn parse_kill(&mut self) -> Result<Statement, ParserError> {
12463        let modifier_keyword =
12464            self.parse_one_of_keywords(&[Keyword::CONNECTION, Keyword::QUERY, Keyword::MUTATION]);
12465
12466        let id = self.parse_literal_uint()?;
12467
12468        let modifier = match modifier_keyword {
12469            Some(Keyword::CONNECTION) => Some(KillType::Connection),
12470            Some(Keyword::QUERY) => Some(KillType::Query),
12471            Some(Keyword::MUTATION) => {
12472                if dialect_of!(self is ClickHouseDialect | GenericDialect) {
12473                    Some(KillType::Mutation)
12474                } else {
12475                    self.expected(
12476                        "Unsupported type for KILL, allowed: CONNECTION | QUERY",
12477                        self.peek_token(),
12478                    )?
12479                }
12480            }
12481            _ => None,
12482        };
12483
12484        Ok(Statement::Kill { modifier, id })
12485    }
12486
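    /// Parses the body of an `EXPLAIN` / `DESCRIBE` / `DESC` statement; the
    /// describe keyword itself has already been consumed and is passed in as
    /// `describe_alias`.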
12487    pub fn parse_explain(
12488        &mut self,
12489        describe_alias: DescribeAlias,
12490    ) -> Result<Statement, ParserError> {
12491        let mut analyze = false;
12492        let mut verbose = false;
12493        let mut query_plan = false;
12494        let mut estimate = false;
12495        let mut format = None;
12496        let mut options = None;
12497
12498        // Note: DuckDB is compatible with PostgreSQL syntax for this statement,
12499        // although not all features may be implemented.
12500        if describe_alias == DescribeAlias::Explain
12501            && self.dialect.supports_explain_with_utility_options()
12502            && self.peek_token().token == Token::LParen
12503        {
12504            options = Some(self.parse_utility_options()?)
12505        } else if self.parse_keywords(&[Keyword::QUERY, Keyword::PLAN]) {
12506            query_plan = true;
12507        } else if self.parse_keyword(Keyword::ESTIMATE) {
12508            estimate = true;
12509        } else {
12510            analyze = self.parse_keyword(Keyword::ANALYZE);
12511            verbose = self.parse_keyword(Keyword::VERBOSE);
12512            if self.parse_keyword(Keyword::FORMAT) {
12513                format = Some(self.parse_analyze_format_kind()?);
12514            }
12515        }
12516
12517        match self.maybe_parse(|parser| parser.parse_statement())? {
12518            Some(Statement::Explain { .. }) | Some(Statement::ExplainTable { .. }) => Err(
12519                ParserError::ParserError("Explain must be root of the plan".to_string()),
12520            ),
12521            Some(statement) => Ok(Statement::Explain {
12522                describe_alias,
12523                analyze,
12524                verbose,
12525                query_plan,
12526                estimate,
12527                statement: Box::new(statement),
12528                format,
12529                options,
12530            }),
12531            _ => {
12532                let hive_format =
12533                    match self.parse_one_of_keywords(&[Keyword::EXTENDED, Keyword::FORMATTED]) {
12534                        Some(Keyword::EXTENDED) => Some(HiveDescribeFormat::Extended),
12535                        Some(Keyword::FORMATTED) => Some(HiveDescribeFormat::Formatted),
12536                        _ => None,
12537                    };
12538
12539                let has_table_keyword = if self.dialect.describe_requires_table_keyword() {
12540                    // only allow the TABLE keyword to be used with DESC|DESCRIBE statements
12541                    self.parse_keyword(Keyword::TABLE)
12542                } else {
12543                    false
12544                };
12545
12546                let table_name = self.parse_object_name(false)?;
12547                Ok(Statement::ExplainTable {
12548                    describe_alias,
12549                    hive_format,
12550                    has_table_keyword,
12551                    table_name,
12552                })
12553            }
12554        }
12555    }
12556
12557    /// Parse a query expression, i.e. a `SELECT` statement optionally
12558    /// preceded by some `WITH` CTE declarations and optionally followed
12559    /// by `ORDER BY`. Unlike some other parse_... methods, this one doesn't
12560    /// expect the initial keyword to be already consumed
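    ///
    /// A minimal round-trip sketch (illustrative only, using the `GenericDialect`):
    ///
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let sql = "SELECT a FROM t WHERE a > 1 ORDER BY a LIMIT 10";
    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
    /// let query = parser.parse_query().unwrap();
    /// assert_eq!(query.to_string(), sql);
    /// ```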
12561    pub fn parse_query(&mut self) -> Result<Box<Query>, ParserError> {
12562        let _guard = self.recursion_counter.try_decrease()?;
12563        let with = if self.parse_keyword(Keyword::WITH) {
12564            let with_token = self.get_current_token();
12565            Some(With {
12566                with_token: with_token.clone().into(),
12567                recursive: self.parse_keyword(Keyword::RECURSIVE),
12568                cte_tables: self.parse_comma_separated(Parser::parse_cte)?,
12569            })
12570        } else {
12571            None
12572        };
12573        if self.parse_keyword(Keyword::INSERT) {
12574            Ok(Query {
12575                with,
12576                body: self.parse_insert_setexpr_boxed()?,
12577                order_by: None,
12578                limit_clause: None,
12579                fetch: None,
12580                locks: vec![],
12581                for_clause: None,
12582                settings: None,
12583                format_clause: None,
12584                pipe_operators: vec![],
12585            }
12586            .into())
12587        } else if self.parse_keyword(Keyword::UPDATE) {
12588            Ok(Query {
12589                with,
12590                body: self.parse_update_setexpr_boxed()?,
12591                order_by: None,
12592                limit_clause: None,
12593                fetch: None,
12594                locks: vec![],
12595                for_clause: None,
12596                settings: None,
12597                format_clause: None,
12598                pipe_operators: vec![],
12599            }
12600            .into())
12601        } else if self.parse_keyword(Keyword::DELETE) {
12602            Ok(Query {
12603                with,
12604                body: self.parse_delete_setexpr_boxed()?,
12605                limit_clause: None,
12606                order_by: None,
12607                fetch: None,
12608                locks: vec![],
12609                for_clause: None,
12610                settings: None,
12611                format_clause: None,
12612                pipe_operators: vec![],
12613            }
12614            .into())
12615        } else if self.parse_keyword(Keyword::MERGE) {
12616            Ok(Query {
12617                with,
12618                body: self.parse_merge_setexpr_boxed()?,
12619                limit_clause: None,
12620                order_by: None,
12621                fetch: None,
12622                locks: vec![],
12623                for_clause: None,
12624                settings: None,
12625                format_clause: None,
12626                pipe_operators: vec![],
12627            }
12628            .into())
12629        } else {
12630            let body = self.parse_query_body(self.dialect.prec_unknown())?;
12631
12632            let order_by = self.parse_optional_order_by()?;
12633
12634            let limit_clause = self.parse_optional_limit_clause()?;
12635
12636            let settings = self.parse_settings()?;
12637
12638            let fetch = if self.parse_keyword(Keyword::FETCH) {
12639                Some(self.parse_fetch()?)
12640            } else {
12641                None
12642            };
12643
12644            let mut for_clause = None;
12645            let mut locks = Vec::new();
12646            while self.parse_keyword(Keyword::FOR) {
12647                if let Some(parsed_for_clause) = self.parse_for_clause()? {
12648                    for_clause = Some(parsed_for_clause);
12649                    break;
12650                } else {
12651                    locks.push(self.parse_lock()?);
12652                }
12653            }
12654            let format_clause = if dialect_of!(self is ClickHouseDialect | GenericDialect)
12655                && self.parse_keyword(Keyword::FORMAT)
12656            {
12657                if self.parse_keyword(Keyword::NULL) {
12658                    Some(FormatClause::Null)
12659                } else {
12660                    let ident = self.parse_identifier()?;
12661                    Some(FormatClause::Identifier(ident))
12662                }
12663            } else {
12664                None
12665            };
12666
12667            let pipe_operators = if self.dialect.supports_pipe_operator() {
12668                self.parse_pipe_operators()?
12669            } else {
12670                Vec::new()
12671            };
12672
12673            Ok(Query {
12674                with,
12675                body,
12676                order_by,
12677                limit_clause,
12678                fetch,
12679                locks,
12680                for_clause,
12681                settings,
12682                format_clause,
12683                pipe_operators,
12684            }
12685            .into())
12686        }
12687    }
12688
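    /// Parses a sequence of pipe operators, each introduced by the `|>` token,
    /// for dialects that support the pipe syntax, e.g.
    /// `|> WHERE x > 1 |> SELECT x` (illustrative example).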
12689    fn parse_pipe_operators(&mut self) -> Result<Vec<PipeOperator>, ParserError> {
12690        let mut pipe_operators = Vec::new();
12691
12692        while self.consume_token(&Token::VerticalBarRightAngleBracket) {
12693            let kw = self.expect_one_of_keywords(&[
12694                Keyword::SELECT,
12695                Keyword::EXTEND,
12696                Keyword::SET,
12697                Keyword::DROP,
12698                Keyword::AS,
12699                Keyword::WHERE,
12700                Keyword::LIMIT,
12701                Keyword::AGGREGATE,
12702                Keyword::ORDER,
12703                Keyword::TABLESAMPLE,
12704                Keyword::RENAME,
12705                Keyword::UNION,
12706                Keyword::INTERSECT,
12707                Keyword::EXCEPT,
12708                Keyword::CALL,
12709                Keyword::PIVOT,
12710                Keyword::UNPIVOT,
12711                Keyword::JOIN,
12712                Keyword::INNER,
12713                Keyword::LEFT,
12714                Keyword::RIGHT,
12715                Keyword::FULL,
12716                Keyword::CROSS,
12717            ])?;
12718            match kw {
12719                Keyword::SELECT => {
12720                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
12721                    pipe_operators.push(PipeOperator::Select { exprs })
12722                }
12723                Keyword::EXTEND => {
12724                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
12725                    pipe_operators.push(PipeOperator::Extend { exprs })
12726                }
12727                Keyword::SET => {
12728                    let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
12729                    pipe_operators.push(PipeOperator::Set { assignments })
12730                }
12731                Keyword::DROP => {
12732                    let columns = self.parse_identifiers()?;
12733                    pipe_operators.push(PipeOperator::Drop { columns })
12734                }
12735                Keyword::AS => {
12736                    let alias = self.parse_identifier()?;
12737                    pipe_operators.push(PipeOperator::As { alias })
12738                }
12739                Keyword::WHERE => {
12740                    let expr = self.parse_expr()?;
12741                    pipe_operators.push(PipeOperator::Where { expr })
12742                }
12743                Keyword::LIMIT => {
12744                    let expr = self.parse_expr()?;
12745                    let offset = if self.parse_keyword(Keyword::OFFSET) {
12746                        Some(self.parse_expr()?)
12747                    } else {
12748                        None
12749                    };
12750                    pipe_operators.push(PipeOperator::Limit { expr, offset })
12751                }
12752                Keyword::AGGREGATE => {
12753                    let full_table_exprs = if self.peek_keyword(Keyword::GROUP) {
12754                        vec![]
12755                    } else {
12756                        self.parse_comma_separated(|parser| {
12757                            parser.parse_expr_with_alias_and_order_by()
12758                        })?
12759                    };
12760
12761                    let group_by_expr = if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
12762                        self.parse_comma_separated(|parser| {
12763                            parser.parse_expr_with_alias_and_order_by()
12764                        })?
12765                    } else {
12766                        vec![]
12767                    };
12768
12769                    pipe_operators.push(PipeOperator::Aggregate {
12770                        full_table_exprs,
12771                        group_by_expr,
12772                    })
12773                }
12774                Keyword::ORDER => {
12775                    self.expect_one_of_keywords(&[Keyword::BY])?;
12776                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
12777                    pipe_operators.push(PipeOperator::OrderBy { exprs })
12778                }
12779                Keyword::TABLESAMPLE => {
12780                    let sample = self.parse_table_sample(TableSampleModifier::TableSample)?;
12781                    pipe_operators.push(PipeOperator::TableSample { sample });
12782                }
12783                Keyword::RENAME => {
12784                    let mappings =
12785                        self.parse_comma_separated(Parser::parse_identifier_with_optional_alias)?;
12786                    pipe_operators.push(PipeOperator::Rename { mappings });
12787                }
12788                Keyword::UNION => {
12789                    let set_quantifier = self.parse_set_quantifier(&Some(SetOperator::Union));
12790                    let queries = self.parse_pipe_operator_queries()?;
12791                    pipe_operators.push(PipeOperator::Union {
12792                        set_quantifier,
12793                        queries,
12794                    });
12795                }
12796                Keyword::INTERSECT => {
12797                    let set_quantifier =
12798                        self.parse_distinct_required_set_quantifier("INTERSECT")?;
12799                    let queries = self.parse_pipe_operator_queries()?;
12800                    pipe_operators.push(PipeOperator::Intersect {
12801                        set_quantifier,
12802                        queries,
12803                    });
12804                }
12805                Keyword::EXCEPT => {
12806                    let set_quantifier = self.parse_distinct_required_set_quantifier("EXCEPT")?;
12807                    let queries = self.parse_pipe_operator_queries()?;
12808                    pipe_operators.push(PipeOperator::Except {
12809                        set_quantifier,
12810                        queries,
12811                    });
12812                }
12813                Keyword::CALL => {
12814                    let function_name = self.parse_object_name(false)?;
12815                    let function_expr = self.parse_function(function_name)?;
12816                    if let Expr::Function(function) = function_expr {
12817                        let alias = self.parse_identifier_optional_alias()?;
12818                        pipe_operators.push(PipeOperator::Call { function, alias });
12819                    } else {
12820                        return Err(ParserError::ParserError(
12821                            "Expected function call after CALL".to_string(),
12822                        ));
12823                    }
12824                }
12825                Keyword::PIVOT => {
12826                    self.expect_token(&Token::LParen)?;
12827                    let aggregate_functions =
12828                        self.parse_comma_separated(Self::parse_aliased_function_call)?;
12829                    self.expect_keyword_is(Keyword::FOR)?;
12830                    let value_column = self.parse_period_separated(|p| p.parse_identifier())?;
12831                    self.expect_keyword_is(Keyword::IN)?;
12832
12833                    self.expect_token(&Token::LParen)?;
12834                    let value_source = if self.parse_keyword(Keyword::ANY) {
12835                        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
12836                            self.parse_comma_separated(Parser::parse_order_by_expr)?
12837                        } else {
12838                            vec![]
12839                        };
12840                        PivotValueSource::Any(order_by)
12841                    } else if self.peek_sub_query() {
12842                        PivotValueSource::Subquery(self.parse_query()?)
12843                    } else {
12844                        PivotValueSource::List(
12845                            self.parse_comma_separated(Self::parse_expr_with_alias)?,
12846                        )
12847                    };
12848                    self.expect_token(&Token::RParen)?;
12849                    self.expect_token(&Token::RParen)?;
12850
12851                    let alias = self.parse_identifier_optional_alias()?;
12852
12853                    pipe_operators.push(PipeOperator::Pivot {
12854                        aggregate_functions,
12855                        value_column,
12856                        value_source,
12857                        alias,
12858                    });
12859                }
12860                Keyword::UNPIVOT => {
12861                    self.expect_token(&Token::LParen)?;
12862                    let value_column = self.parse_identifier()?;
12863                    self.expect_keyword(Keyword::FOR)?;
12864                    let name_column = self.parse_identifier()?;
12865                    self.expect_keyword(Keyword::IN)?;
12866
12867                    self.expect_token(&Token::LParen)?;
12868                    let unpivot_columns = self.parse_comma_separated(Parser::parse_identifier)?;
12869                    self.expect_token(&Token::RParen)?;
12870
12871                    self.expect_token(&Token::RParen)?;
12872
12873                    let alias = self.parse_identifier_optional_alias()?;
12874
12875                    pipe_operators.push(PipeOperator::Unpivot {
12876                        value_column,
12877                        name_column,
12878                        unpivot_columns,
12879                        alias,
12880                    });
12881                }
12882                Keyword::JOIN
12883                | Keyword::INNER
12884                | Keyword::LEFT
12885                | Keyword::RIGHT
12886                | Keyword::FULL
12887                | Keyword::CROSS => {
12888                    self.prev_token();
12889                    let mut joins = self.parse_joins()?;
12890                    if joins.len() != 1 {
12891                        return Err(ParserError::ParserError(
12892                            "Join pipe operator must have a single join".to_string(),
12893                        ));
12894                    }
12895                    let join = joins.swap_remove(0);
12896                    pipe_operators.push(PipeOperator::Join(join))
12897                }
12898                unhandled => {
12899                    return Err(ParserError::ParserError(format!(
12900                    "`expect_one_of_keywords` further up allowed unhandled keyword: {unhandled:?}"
12901                )))
12902                }
12903            }
12904        }
12905        Ok(pipe_operators)
12906    }
12907
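    /// Parses an optional `SETTINGS key = value [, ...]` clause
    /// (ClickHouse and the generic dialect).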
12908    fn parse_settings(&mut self) -> Result<Option<Vec<Setting>>, ParserError> {
12909        let settings = if dialect_of!(self is ClickHouseDialect|GenericDialect)
12910            && self.parse_keyword(Keyword::SETTINGS)
12911        {
12912            let key_values = self.parse_comma_separated(|p| {
12913                let key = p.parse_identifier()?;
12914                p.expect_token(&Token::Eq)?;
12915                let value = p.parse_expr()?;
12916                Ok(Setting { key, value })
12917            })?;
12918            Some(key_values)
12919        } else {
12920            None
12921        };
12922        Ok(settings)
12923    }
12924
12925    /// Parse a mssql `FOR [XML | JSON | BROWSE]` clause
12926    pub fn parse_for_clause(&mut self) -> Result<Option<ForClause>, ParserError> {
12927        if self.parse_keyword(Keyword::XML) {
12928            Ok(Some(self.parse_for_xml()?))
12929        } else if self.parse_keyword(Keyword::JSON) {
12930            Ok(Some(self.parse_for_json()?))
12931        } else if self.parse_keyword(Keyword::BROWSE) {
12932            Ok(Some(ForClause::Browse))
12933        } else {
12934            Ok(None)
12935        }
12936    }
12937
12938    /// Parse a mssql `FOR XML` clause
12939    pub fn parse_for_xml(&mut self) -> Result<ForClause, ParserError> {
12940        let for_xml = if self.parse_keyword(Keyword::RAW) {
12941            let mut element_name = None;
12942            if self.peek_token().token == Token::LParen {
12943                self.expect_token(&Token::LParen)?;
12944                element_name = Some(self.parse_literal_string()?);
12945                self.expect_token(&Token::RParen)?;
12946            }
12947            ForXml::Raw(element_name)
12948        } else if self.parse_keyword(Keyword::AUTO) {
12949            ForXml::Auto
12950        } else if self.parse_keyword(Keyword::EXPLICIT) {
12951            ForXml::Explicit
12952        } else if self.parse_keyword(Keyword::PATH) {
12953            let mut element_name = None;
12954            if self.peek_token().token == Token::LParen {
12955                self.expect_token(&Token::LParen)?;
12956                element_name = Some(self.parse_literal_string()?);
12957                self.expect_token(&Token::RParen)?;
12958            }
12959            ForXml::Path(element_name)
12960        } else {
12961            return Err(ParserError::ParserError(
12962                "Expected FOR XML [RAW | AUTO | EXPLICIT | PATH ]".to_string(),
12963            ));
12964        };
12965        let mut elements = false;
12966        let mut binary_base64 = false;
12967        let mut root = None;
12968        let mut r#type = false;
12969        while self.peek_token().token == Token::Comma {
12970            self.next_token();
12971            if self.parse_keyword(Keyword::ELEMENTS) {
12972                elements = true;
12973            } else if self.parse_keyword(Keyword::BINARY) {
12974                self.expect_keyword_is(Keyword::BASE64)?;
12975                binary_base64 = true;
12976            } else if self.parse_keyword(Keyword::ROOT) {
12977                self.expect_token(&Token::LParen)?;
12978                root = Some(self.parse_literal_string()?);
12979                self.expect_token(&Token::RParen)?;
12980            } else if self.parse_keyword(Keyword::TYPE) {
12981                r#type = true;
12982            }
12983        }
12984        Ok(ForClause::Xml {
12985            for_xml,
12986            elements,
12987            binary_base64,
12988            root,
12989            r#type,
12990        })
12991    }
12992
12993    /// Parse a mssql `FOR JSON` clause
12994    pub fn parse_for_json(&mut self) -> Result<ForClause, ParserError> {
12995        let for_json = if self.parse_keyword(Keyword::AUTO) {
12996            ForJson::Auto
12997        } else if self.parse_keyword(Keyword::PATH) {
12998            ForJson::Path
12999        } else {
13000            return Err(ParserError::ParserError(
13001                "Expected FOR JSON [AUTO | PATH ]".to_string(),
13002            ));
13003        };
13004        let mut root = None;
13005        let mut include_null_values = false;
13006        let mut without_array_wrapper = false;
13007        while self.peek_token().token == Token::Comma {
13008            self.next_token();
13009            if self.parse_keyword(Keyword::ROOT) {
13010                self.expect_token(&Token::LParen)?;
13011                root = Some(self.parse_literal_string()?);
13012                self.expect_token(&Token::RParen)?;
13013            } else if self.parse_keyword(Keyword::INCLUDE_NULL_VALUES) {
13014                include_null_values = true;
13015            } else if self.parse_keyword(Keyword::WITHOUT_ARRAY_WRAPPER) {
13016                without_array_wrapper = true;
13017            }
13018        }
13019        Ok(ForClause::Json {
13020            for_json,
13021            root,
13022            include_null_values,
13023            without_array_wrapper,
13024        })
13025    }
13026
13027    /// Parse a CTE (`alias [( col1, col2, ... )] AS (subquery)`)
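    ///
    /// Illustrative CTE definitions (the surrounding `WITH ... SELECT ...`
    /// is handled by the caller; identifiers are placeholders):
    ///
    /// ```sql
    /// cte_name AS (SELECT 1)
    /// cte_name (col1, col2) AS (SELECT 1, 2)
    /// cte_name AS MATERIALIZED (SELECT 1) -- PostgreSQL dialect only
    /// ```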
13028    pub fn parse_cte(&mut self) -> Result<Cte, ParserError> {
13029        let name = self.parse_identifier()?;
13030
13031        let mut cte = if self.parse_keyword(Keyword::AS) {
13032            let mut is_materialized = None;
13033            if dialect_of!(self is PostgreSqlDialect) {
13034                if self.parse_keyword(Keyword::MATERIALIZED) {
13035                    is_materialized = Some(CteAsMaterialized::Materialized);
13036                } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
13037                    is_materialized = Some(CteAsMaterialized::NotMaterialized);
13038                }
13039            }
13040            self.expect_token(&Token::LParen)?;
13041
13042            let query = self.parse_query()?;
13043            let closing_paren_token = self.expect_token(&Token::RParen)?;
13044
13045            let alias = TableAlias {
13046                name,
13047                columns: vec![],
13048            };
13049            Cte {
13050                alias,
13051                query,
13052                from: None,
13053                materialized: is_materialized,
13054                closing_paren_token: closing_paren_token.into(),
13055            }
13056        } else {
13057            let columns = self.parse_table_alias_column_defs()?;
13058            self.expect_keyword_is(Keyword::AS)?;
13059            let mut is_materialized = None;
13060            if dialect_of!(self is PostgreSqlDialect) {
13061                if self.parse_keyword(Keyword::MATERIALIZED) {
13062                    is_materialized = Some(CteAsMaterialized::Materialized);
13063                } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
13064                    is_materialized = Some(CteAsMaterialized::NotMaterialized);
13065                }
13066            }
13067            self.expect_token(&Token::LParen)?;
13068
13069            let query = self.parse_query()?;
13070            let closing_paren_token = self.expect_token(&Token::RParen)?;
13071
13072            let alias = TableAlias { name, columns };
13073            Cte {
13074                alias,
13075                query,
13076                from: None,
13077                materialized: is_materialized,
13078                closing_paren_token: closing_paren_token.into(),
13079            }
13080        };
13081        if self.parse_keyword(Keyword::FROM) {
13082            cte.from = Some(self.parse_identifier()?);
13083        }
13084        Ok(cte)
13085    }
13086
13087    /// Parse a "query body", which is an expression with roughly the
13088    /// following grammar:
13089    /// ```sql
13090    ///   query_body ::= restricted_select | '(' subquery ')' | set_operation
13091    ///   restricted_select ::= 'SELECT' [expr_list] [ from ] [ where ] [ groupby_having ]
13092    ///   subquery ::= query_body [ order_by_limit ]
13093    ///   set_operation ::= query_body { 'UNION' | 'EXCEPT' | 'INTERSECT' } [ 'ALL' ] query_body
13094    /// ```
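    ///
    /// As a rough illustration of the precedence handling below, `INTERSECT`
    /// binds tighter than `UNION`/`EXCEPT`, so a body such as
    ///
    /// ```sql
    /// SELECT 1 UNION SELECT 2 INTERSECT SELECT 3
    /// ```
    ///
    /// groups as `SELECT 1 UNION (SELECT 2 INTERSECT SELECT 3)`.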
13095    pub fn parse_query_body(&mut self, precedence: u8) -> Result<Box<SetExpr>, ParserError> {
13096        // We parse the expression using a Pratt parser, as in `parse_expr()`.
13097        // Start by parsing a restricted SELECT or a `(subquery)`:
13098        let expr = if self.peek_keyword(Keyword::SELECT)
13099            || (self.peek_keyword(Keyword::FROM) && self.dialect.supports_from_first_select())
13100        {
13101            SetExpr::Select(self.parse_select().map(Box::new)?)
13102        } else if self.consume_token(&Token::LParen) {
13103            // CTEs are not allowed here, but the parser currently accepts them
13104            let subquery = self.parse_query()?;
13105            self.expect_token(&Token::RParen)?;
13106            SetExpr::Query(subquery)
13107        } else if self.parse_keyword(Keyword::VALUES) {
13108            let is_mysql = dialect_of!(self is MySqlDialect);
13109            SetExpr::Values(self.parse_values(is_mysql)?)
13110        } else if self.parse_keyword(Keyword::TABLE) {
13111            SetExpr::Table(Box::new(self.parse_as_table()?))
13112        } else {
13113            return self.expected(
13114                "SELECT, VALUES, or a subquery in the query body",
13115                self.peek_token(),
13116            );
13117        };
13118
13119        self.parse_remaining_set_exprs(expr, precedence)
13120    }
13121
13122    /// Parse any extra set expressions that may be present in a query body
13123    ///
13124    /// (this is its own function to reduce required stack size in debug builds)
13125    fn parse_remaining_set_exprs(
13126        &mut self,
13127        mut expr: SetExpr,
13128        precedence: u8,
13129    ) -> Result<Box<SetExpr>, ParserError> {
13130        loop {
13131            // The query can be optionally followed by a set operator:
13132            let op = self.parse_set_operator(&self.peek_token().token);
13133            let next_precedence = match op {
13134                // UNION and EXCEPT have the same binding power and evaluate left-to-right
13135                Some(SetOperator::Union) | Some(SetOperator::Except) | Some(SetOperator::Minus) => {
13136                    10
13137                }
13138                // INTERSECT has higher precedence than UNION/EXCEPT
13139                Some(SetOperator::Intersect) => 20,
13140                // Unexpected token or EOF => stop parsing the query body
13141                None => break,
13142            };
13143            if precedence >= next_precedence {
13144                break;
13145            }
13146            self.next_token(); // skip past the set operator
13147            let set_quantifier = self.parse_set_quantifier(&op);
13148            expr = SetExpr::SetOperation {
13149                left: Box::new(expr),
13150                op: op.unwrap(),
13151                set_quantifier,
13152                right: self.parse_query_body(next_precedence)?,
13153            };
13154        }
13155
13156        Ok(expr.into())
13157    }
13158
13159    pub fn parse_set_operator(&mut self, token: &Token) -> Option<SetOperator> {
13160        match token {
13161            Token::Word(w) if w.keyword == Keyword::UNION => Some(SetOperator::Union),
13162            Token::Word(w) if w.keyword == Keyword::EXCEPT => Some(SetOperator::Except),
13163            Token::Word(w) if w.keyword == Keyword::INTERSECT => Some(SetOperator::Intersect),
13164            Token::Word(w) if w.keyword == Keyword::MINUS => Some(SetOperator::Minus),
13165            _ => None,
13166        }
13167    }
13168
13169    pub fn parse_set_quantifier(&mut self, op: &Option<SetOperator>) -> SetQuantifier {
13170        match op {
13171            Some(
13172                SetOperator::Except
13173                | SetOperator::Intersect
13174                | SetOperator::Union
13175                | SetOperator::Minus,
13176            ) => {
13177                if self.parse_keywords(&[Keyword::DISTINCT, Keyword::BY, Keyword::NAME]) {
13178                    SetQuantifier::DistinctByName
13179                } else if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
13180                    SetQuantifier::ByName
13181                } else if self.parse_keyword(Keyword::ALL) {
13182                    if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
13183                        SetQuantifier::AllByName
13184                    } else {
13185                        SetQuantifier::All
13186                    }
13187                } else if self.parse_keyword(Keyword::DISTINCT) {
13188                    SetQuantifier::Distinct
13189                } else {
13190                    SetQuantifier::None
13191                }
13192            }
13193            _ => SetQuantifier::None,
13194        }
13195    }
13196
13197    /// Parse a restricted `SELECT` statement (no CTEs / `UNION` / `ORDER BY`)
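    ///
    /// Two illustrative shapes handled below; the second only when the
    /// dialect's `supports_from_first_select()` returns true:
    ///
    /// ```sql
    /// SELECT a, b FROM t WHERE a > 1
    /// FROM t SELECT a, b
    /// ```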
13198    pub fn parse_select(&mut self) -> Result<Select, ParserError> {
13199        let mut from_first = None;
13200
13201        if self.dialect.supports_from_first_select() && self.peek_keyword(Keyword::FROM) {
13202            let from_token = self.expect_keyword(Keyword::FROM)?;
13203            let from = self.parse_table_with_joins()?;
13204            if !self.peek_keyword(Keyword::SELECT) {
13205                return Ok(Select {
13206                    select_token: AttachedToken(from_token),
13207                    distinct: None,
13208                    top: None,
13209                    top_before_distinct: false,
13210                    projection: vec![],
13211                    exclude: None,
13212                    into: None,
13213                    from,
13214                    lateral_views: vec![],
13215                    prewhere: None,
13216                    selection: None,
13217                    group_by: GroupByExpr::Expressions(vec![], vec![]),
13218                    cluster_by: vec![],
13219                    distribute_by: vec![],
13220                    sort_by: vec![],
13221                    having: None,
13222                    named_window: vec![],
13223                    window_before_qualify: false,
13224                    qualify: None,
13225                    value_table_mode: None,
13226                    connect_by: None,
13227                    flavor: SelectFlavor::FromFirstNoSelect,
13228                });
13229            }
13230            from_first = Some(from);
13231        }
13232
13233        let select_token = self.expect_keyword(Keyword::SELECT)?;
13234        let value_table_mode = self.parse_value_table_mode()?;
13235
13236        let mut top_before_distinct = false;
13237        let mut top = None;
13238        if self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
13239            top = Some(self.parse_top()?);
13240            top_before_distinct = true;
13241        }
13242        let distinct = self.parse_all_or_distinct()?;
13243        if !self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
13244            top = Some(self.parse_top()?);
13245        }
13246
13247        let projection =
13248            if self.dialect.supports_empty_projections() && self.peek_keyword(Keyword::FROM) {
13249                vec![]
13250            } else {
13251                self.parse_projection()?
13252            };
13253
13254        let exclude = if self.dialect.supports_select_exclude() {
13255            self.parse_optional_select_item_exclude()?
13256        } else {
13257            None
13258        };
13259
13260        let into = if self.parse_keyword(Keyword::INTO) {
13261            Some(self.parse_select_into()?)
13262        } else {
13263            None
13264        };
13265
13266        // Note that for keywords to be properly handled here, they need to be
13267        // added to `RESERVED_FOR_COLUMN_ALIAS` / `RESERVED_FOR_TABLE_ALIAS`,
13268        // otherwise they may be parsed as an alias as part of the `projection`
13269        // or `from`.
13270
13271        let (from, from_first) = if let Some(from) = from_first.take() {
13272            (from, true)
13273        } else if self.parse_keyword(Keyword::FROM) {
13274            (self.parse_table_with_joins()?, false)
13275        } else {
13276            (vec![], false)
13277        };
13278
13279        let mut lateral_views = vec![];
13280        loop {
13281            if self.parse_keywords(&[Keyword::LATERAL, Keyword::VIEW]) {
13282                let outer = self.parse_keyword(Keyword::OUTER);
13283                let lateral_view = self.parse_expr()?;
13284                let lateral_view_name = self.parse_object_name(false)?;
13285                let lateral_col_alias = self
13286                    .parse_comma_separated(|parser| {
13287                        parser.parse_optional_alias(&[
13288                            Keyword::WHERE,
13289                            Keyword::GROUP,
13290                            Keyword::CLUSTER,
13291                            Keyword::HAVING,
13292                            Keyword::LATERAL,
13293                        ]) // Keywords that terminate the alias list rather than being parsed as an alias
13294                    })?
13295                    .into_iter()
13296                    .flatten()
13297                    .collect();
13298
13299                lateral_views.push(LateralView {
13300                    lateral_view,
13301                    lateral_view_name,
13302                    lateral_col_alias,
13303                    outer,
13304                });
13305            } else {
13306                break;
13307            }
13308        }
13309
13310        let prewhere = if dialect_of!(self is ClickHouseDialect|GenericDialect)
13311            && self.parse_keyword(Keyword::PREWHERE)
13312        {
13313            Some(self.parse_expr()?)
13314        } else {
13315            None
13316        };
13317
13318        let selection = if self.parse_keyword(Keyword::WHERE) {
13319            Some(self.parse_expr()?)
13320        } else {
13321            None
13322        };
13323
13324        let group_by = self
13325            .parse_optional_group_by()?
13326            .unwrap_or_else(|| GroupByExpr::Expressions(vec![], vec![]));
13327
13328        let cluster_by = if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
13329            self.parse_comma_separated(Parser::parse_expr)?
13330        } else {
13331            vec![]
13332        };
13333
13334        let distribute_by = if self.parse_keywords(&[Keyword::DISTRIBUTE, Keyword::BY]) {
13335            self.parse_comma_separated(Parser::parse_expr)?
13336        } else {
13337            vec![]
13338        };
13339
13340        let sort_by = if self.parse_keywords(&[Keyword::SORT, Keyword::BY]) {
13341            self.parse_comma_separated(Parser::parse_order_by_expr)?
13342        } else {
13343            vec![]
13344        };
13345
13346        let having = if self.parse_keyword(Keyword::HAVING) {
13347            Some(self.parse_expr()?)
13348        } else {
13349            None
13350        };
13351
13352        // Accept QUALIFY and WINDOW in any order and flag accordingly.
13353        let (named_windows, qualify, window_before_qualify) = if self.parse_keyword(Keyword::WINDOW)
13354        {
13355            let named_windows = self.parse_comma_separated(Parser::parse_named_window)?;
13356            if self.parse_keyword(Keyword::QUALIFY) {
13357                (named_windows, Some(self.parse_expr()?), true)
13358            } else {
13359                (named_windows, None, true)
13360            }
13361        } else if self.parse_keyword(Keyword::QUALIFY) {
13362            let qualify = Some(self.parse_expr()?);
13363            if self.parse_keyword(Keyword::WINDOW) {
13364                (
13365                    self.parse_comma_separated(Parser::parse_named_window)?,
13366                    qualify,
13367                    false,
13368                )
13369            } else {
13370                (Default::default(), qualify, false)
13371            }
13372        } else {
13373            Default::default()
13374        };
13375
13376        let connect_by = if self.dialect.supports_connect_by()
13377            && self
13378                .parse_one_of_keywords(&[Keyword::START, Keyword::CONNECT])
13379                .is_some()
13380        {
13381            self.prev_token();
13382            Some(self.parse_connect_by()?)
13383        } else {
13384            None
13385        };
13386
13387        Ok(Select {
13388            select_token: AttachedToken(select_token),
13389            distinct,
13390            top,
13391            top_before_distinct,
13392            projection,
13393            exclude,
13394            into,
13395            from,
13396            lateral_views,
13397            prewhere,
13398            selection,
13399            group_by,
13400            cluster_by,
13401            distribute_by,
13402            sort_by,
13403            having,
13404            named_window: named_windows,
13405            window_before_qualify,
13406            qualify,
13407            value_table_mode,
13408            connect_by,
13409            flavor: if from_first {
13410                SelectFlavor::FromFirst
13411            } else {
13412                SelectFlavor::Standard
13413            },
13414        })
13415    }
13416
13417    fn parse_value_table_mode(&mut self) -> Result<Option<ValueTableMode>, ParserError> {
13418        if !dialect_of!(self is BigQueryDialect) {
13419            return Ok(None);
13420        }
13421
13422        let mode = if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::VALUE]) {
13423            Some(ValueTableMode::DistinctAsValue)
13424        } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::STRUCT]) {
13425            Some(ValueTableMode::DistinctAsStruct)
13426        } else if self.parse_keywords(&[Keyword::AS, Keyword::VALUE])
13427            || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::VALUE])
13428        {
13429            Some(ValueTableMode::AsValue)
13430        } else if self.parse_keywords(&[Keyword::AS, Keyword::STRUCT])
13431            || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::STRUCT])
13432        {
13433            Some(ValueTableMode::AsStruct)
13434        } else if self.parse_keyword(Keyword::AS) {
13435            self.expected("VALUE or STRUCT", self.peek_token())?
13436        } else {
13437            None
13438        };
13439
13440        Ok(mode)
13441    }
13442
13443    /// Invoke `f` after first setting the parser's `ParserState` to `state`.
13444    ///
13445    /// Upon return, restores the parser's state to what it started at.
13446    fn with_state<T, F>(&mut self, state: ParserState, mut f: F) -> Result<T, ParserError>
13447    where
13448        F: FnMut(&mut Parser) -> Result<T, ParserError>,
13449    {
13450        let current_state = self.state;
13451        self.state = state;
13452        let res = f(self);
13453        self.state = current_state;
13454        res
13455    }
13456
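    /// Parse the `START WITH ... CONNECT BY ...` portion of a hierarchical
    /// query, in either order. An illustrative Oracle/Snowflake-style query
    /// using it (identifiers are placeholders):
    ///
    /// ```sql
    /// SELECT employee_id FROM employees
    /// START WITH manager_id IS NULL
    /// CONNECT BY PRIOR employee_id = manager_id
    /// ```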
13457    pub fn parse_connect_by(&mut self) -> Result<ConnectBy, ParserError> {
13458        let (condition, relationships) = if self.parse_keywords(&[Keyword::CONNECT, Keyword::BY]) {
13459            let relationships = self.with_state(ParserState::ConnectBy, |parser| {
13460                parser.parse_comma_separated(Parser::parse_expr)
13461            })?;
13462            self.expect_keywords(&[Keyword::START, Keyword::WITH])?;
13463            let condition = self.parse_expr()?;
13464            (condition, relationships)
13465        } else {
13466            self.expect_keywords(&[Keyword::START, Keyword::WITH])?;
13467            let condition = self.parse_expr()?;
13468            self.expect_keywords(&[Keyword::CONNECT, Keyword::BY])?;
13469            let relationships = self.with_state(ParserState::ConnectBy, |parser| {
13470                parser.parse_comma_separated(Parser::parse_expr)
13471            })?;
13472            (condition, relationships)
13473        };
13474        Ok(ConnectBy {
13475            condition,
13476            relationships,
13477        })
13478    }
13479
13480    /// Parse the `y` part of `CREATE TABLE x AS TABLE y` (the `TABLE` keyword has already been consumed)
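    ///
    /// For example, this consumes the optionally schema-qualified
    /// `schema1.table1` part of (identifiers are placeholders):
    ///
    /// ```sql
    /// CREATE TABLE t2 AS TABLE schema1.table1
    /// ```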
13481    pub fn parse_as_table(&mut self) -> Result<Table, ParserError> {
13482        let token1 = self.next_token();
13483        let token2 = self.next_token();
13484        let token3 = self.next_token();
13485
13486        let table_name;
13487        let schema_name;
13488        if token2 == Token::Period {
13489            match token1.token {
13490                Token::Word(w) => {
13491                    schema_name = w.value;
13492                }
13493                _ => {
13494                    return self.expected("Schema name", token1);
13495                }
13496            }
13497            match token3.token {
13498                Token::Word(w) => {
13499                    table_name = w.value;
13500                }
13501                _ => {
13502                    return self.expected("Table name", token3);
13503                }
13504            }
13505            Ok(Table {
13506                table_name: Some(table_name),
13507                schema_name: Some(schema_name),
13508            })
13509        } else {
13510            match token1.token {
13511                Token::Word(w) => {
13512                    table_name = w.value;
13513                }
13514                _ => {
13515                    return self.expected("Table name", token1);
13516                }
13517            }
13518            Ok(Table {
13519                table_name: Some(table_name),
13520                schema_name: None,
13521            })
13522        }
13523    }
13524
13525    /// Parse a `SET ROLE` statement. Expects SET to be consumed already.
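    ///
    /// Illustrative full statements (the caller consumes `SET` and any scope
    /// modifier; the role name is a placeholder):
    ///
    /// ```sql
    /// SET ROLE my_role
    /// SET LOCAL ROLE NONE
    /// ```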
13526    fn parse_set_role(
13527        &mut self,
13528        modifier: Option<ContextModifier>,
13529    ) -> Result<Statement, ParserError> {
13530        self.expect_keyword_is(Keyword::ROLE)?;
13531
13532        let role_name = if self.parse_keyword(Keyword::NONE) {
13533            None
13534        } else {
13535            Some(self.parse_identifier()?)
13536        };
13537        Ok(Statement::Set(Set::SetRole {
13538            context_modifier: modifier,
13539            role_name,
13540        }))
13541    }
13542
13543    fn parse_set_values(
13544        &mut self,
13545        parenthesized_assignment: bool,
13546    ) -> Result<Vec<Expr>, ParserError> {
13547        let mut values = vec![];
13548
13549        if parenthesized_assignment {
13550            self.expect_token(&Token::LParen)?;
13551        }
13552
13553        loop {
13554            let value = if let Some(expr) = self.try_parse_expr_sub_query()? {
13555                expr
13556            } else if let Ok(expr) = self.parse_expr() {
13557                expr
13558            } else {
13559                self.expected("variable value", self.peek_token())?
13560            };
13561
13562            values.push(value);
13563            if self.consume_token(&Token::Comma) {
13564                continue;
13565            }
13566
13567            if parenthesized_assignment {
13568                self.expect_token(&Token::RParen)?;
13569            }
13570            return Ok(values);
13571        }
13572    }
13573
13574    fn parse_context_modifier(&mut self) -> Option<ContextModifier> {
13575        let modifier =
13576            self.parse_one_of_keywords(&[Keyword::SESSION, Keyword::LOCAL, Keyword::GLOBAL])?;
13577
13578        Self::keyword_to_modifier(modifier)
13579    }
13580
13581    /// Parse a single SET statement assignment `var = expr`.
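    ///
    /// The `SET` keyword is consumed by the caller; a single assignment looks
    /// like one of the following (identifiers and values are placeholders):
    ///
    /// ```sql
    /// search_path TO my_schema
    /// GLOBAL max_connections = 100
    /// ```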
13582    fn parse_set_assignment(&mut self) -> Result<SetAssignment, ParserError> {
13583        let scope = self.parse_context_modifier();
13584
13585        let name = if self.dialect.supports_parenthesized_set_variables()
13586            && self.consume_token(&Token::LParen)
13587        {
13588            // Parenthesized assignments are handled in `parse_set` after trying
13589            // to parse a list of assignments using this function.
13590            // If a dialect supports both and we find a `LParen`, we exit early from this function.
13591            self.expected("Unparenthesized assignment", self.peek_token())?
13592        } else {
13593            self.parse_object_name(false)?
13594        };
13595
13596        if !(self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO)) {
13597            return self.expected("assignment operator", self.peek_token());
13598        }
13599
13600        let value = self.parse_expr()?;
13601
13602        Ok(SetAssignment { scope, name, value })
13603    }
13604
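    /// Parse the body of a `SET` statement (the `SET` keyword itself is
    /// consumed by the caller). A few illustrative variants handled below:
    ///
    /// ```sql
    /// SET TIME ZONE 'UTC'
    /// SET NAMES utf8mb4 COLLATE 'utf8mb4_general_ci'
    /// SET SESSION CHARACTERISTICS AS TRANSACTION READ ONLY
    /// SET search_path = public
    /// ```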
13605    fn parse_set(&mut self) -> Result<Statement, ParserError> {
13606        let hivevar = self.parse_keyword(Keyword::HIVEVAR);
13607
13608        // The modifier is either `HIVEVAR:` or a `ContextModifier` (LOCAL, SESSION, etc.), never both
13609        let scope = if !hivevar {
13610            self.parse_context_modifier()
13611        } else {
13612            None
13613        };
13614
13615        if hivevar {
13616            self.expect_token(&Token::Colon)?;
13617        }
13618
13619        if let Some(set_role_stmt) = self.maybe_parse(|parser| parser.parse_set_role(scope))? {
13620            return Ok(set_role_stmt);
13621        }
13622
13623        // Handle special cases first
13624        if self.parse_keywords(&[Keyword::TIME, Keyword::ZONE])
13625            || self.parse_keyword(Keyword::TIMEZONE)
13626        {
13627            if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
13628                return Ok(Set::SingleAssignment {
13629                    scope,
13630                    hivevar,
13631                    variable: ObjectName::from(vec!["TIMEZONE".into()]),
13632                    values: self.parse_set_values(false)?,
13633                }
13634                .into());
13635            } else {
13636                // A shorthand alias for SET TIME ZONE that doesn't require the
13637                // assignment operator (e.g. `SET TIME ZONE 'UTC'`). It's originally
13638                // PostgreSQL-specific, but we allow it for all dialects
13639                return Ok(Set::SetTimeZone {
13640                    local: scope == Some(ContextModifier::Local),
13641                    value: self.parse_expr()?,
13642                }
13643                .into());
13644            }
13645        } else if self.dialect.supports_set_names() && self.parse_keyword(Keyword::NAMES) {
13646            if self.parse_keyword(Keyword::DEFAULT) {
13647                return Ok(Set::SetNamesDefault {}.into());
13648            }
13649            let charset_name = self.parse_identifier()?;
13650            let collation_name = if self.parse_one_of_keywords(&[Keyword::COLLATE]).is_some() {
13651                Some(self.parse_literal_string()?)
13652            } else {
13653                None
13654            };
13655
13656            return Ok(Set::SetNames {
13657                charset_name,
13658                collation_name,
13659            }
13660            .into());
13661        } else if self.parse_keyword(Keyword::CHARACTERISTICS) {
13662            self.expect_keywords(&[Keyword::AS, Keyword::TRANSACTION])?;
13663            return Ok(Set::SetTransaction {
13664                modes: self.parse_transaction_modes()?,
13665                snapshot: None,
13666                session: true,
13667            }
13668            .into());
13669        } else if self.parse_keyword(Keyword::TRANSACTION) {
13670            if self.parse_keyword(Keyword::SNAPSHOT) {
13671                let snapshot_id = self.parse_value()?.value;
13672                return Ok(Set::SetTransaction {
13673                    modes: vec![],
13674                    snapshot: Some(snapshot_id),
13675                    session: false,
13676                }
13677                .into());
13678            }
13679            return Ok(Set::SetTransaction {
13680                modes: self.parse_transaction_modes()?,
13681                snapshot: None,
13682                session: false,
13683            }
13684            .into());
13685        }
13686
13687        if self.dialect.supports_comma_separated_set_assignments() {
13688            if scope.is_some() {
13689                self.prev_token();
13690            }
13691
13692            if let Some(assignments) = self
13693                .maybe_parse(|parser| parser.parse_comma_separated(Parser::parse_set_assignment))?
13694            {
13695                return if assignments.len() > 1 {
13696                    Ok(Set::MultipleAssignments { assignments }.into())
13697                } else {
13698                    let SetAssignment { scope, name, value } =
13699                        assignments.into_iter().next().ok_or_else(|| {
13700                            ParserError::ParserError("Expected at least one assignment".to_string())
13701                        })?;
13702
13703                    Ok(Set::SingleAssignment {
13704                        scope,
13705                        hivevar,
13706                        variable: name,
13707                        values: vec![value],
13708                    }
13709                    .into())
13710                };
13711            }
13712        }
13713
13714        let variables = if self.dialect.supports_parenthesized_set_variables()
13715            && self.consume_token(&Token::LParen)
13716        {
13717            let vars = OneOrManyWithParens::Many(
13718                self.parse_comma_separated(|parser: &mut Parser<'a>| parser.parse_identifier())?
13719                    .into_iter()
13720                    .map(|ident| ObjectName::from(vec![ident]))
13721                    .collect(),
13722            );
13723            self.expect_token(&Token::RParen)?;
13724            vars
13725        } else {
13726            OneOrManyWithParens::One(self.parse_object_name(false)?)
13727        };
13728
13729        if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
13730            let stmt = match variables {
13731                OneOrManyWithParens::One(var) => Set::SingleAssignment {
13732                    scope,
13733                    hivevar,
13734                    variable: var,
13735                    values: self.parse_set_values(false)?,
13736                },
13737                OneOrManyWithParens::Many(vars) => Set::ParenthesizedAssignments {
13738                    variables: vars,
13739                    values: self.parse_set_values(true)?,
13740                },
13741            };
13742
13743            return Ok(stmt.into());
13744        }
13745
13746        if self.dialect.supports_set_stmt_without_operator() {
13747            self.prev_token();
13748            return self.parse_set_session_params();
13749        };
13750
13751        self.expected("equals sign or TO", self.peek_token())
13752    }
13753
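    /// Parse a `SET` statement that has no assignment operator (MSSQL-style
    /// session parameters). Illustrative examples (the table name is a
    /// placeholder):
    ///
    /// ```sql
    /// SET STATISTICS IO ON
    /// SET IDENTITY_INSERT my_table ON
    /// ```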
13754    pub fn parse_set_session_params(&mut self) -> Result<Statement, ParserError> {
13755        if self.parse_keyword(Keyword::STATISTICS) {
13756            let topic = match self.parse_one_of_keywords(&[
13757                Keyword::IO,
13758                Keyword::PROFILE,
13759                Keyword::TIME,
13760                Keyword::XML,
13761            ]) {
13762                Some(Keyword::IO) => SessionParamStatsTopic::IO,
13763                Some(Keyword::PROFILE) => SessionParamStatsTopic::Profile,
13764                Some(Keyword::TIME) => SessionParamStatsTopic::Time,
13765                Some(Keyword::XML) => SessionParamStatsTopic::Xml,
13766                _ => return self.expected("IO, PROFILE, TIME or XML", self.peek_token()),
13767            };
13768            let value = self.parse_session_param_value()?;
13769            Ok(
13770                Set::SetSessionParam(SetSessionParamKind::Statistics(SetSessionParamStatistics {
13771                    topic,
13772                    value,
13773                }))
13774                .into(),
13775            )
13776        } else if self.parse_keyword(Keyword::IDENTITY_INSERT) {
13777            let obj = self.parse_object_name(false)?;
13778            let value = self.parse_session_param_value()?;
13779            Ok(Set::SetSessionParam(SetSessionParamKind::IdentityInsert(
13780                SetSessionParamIdentityInsert { obj, value },
13781            ))
13782            .into())
13783        } else if self.parse_keyword(Keyword::OFFSETS) {
13784            let keywords = self.parse_comma_separated(|parser| {
13785                let next_token = parser.next_token();
13786                match &next_token.token {
13787                    Token::Word(w) => Ok(w.to_string()),
13788                    _ => parser.expected("SQL keyword", next_token),
13789                }
13790            })?;
13791            let value = self.parse_session_param_value()?;
13792            Ok(
13793                Set::SetSessionParam(SetSessionParamKind::Offsets(SetSessionParamOffsets {
13794                    keywords,
13795                    value,
13796                }))
13797                .into(),
13798            )
13799        } else {
13800            let names = self.parse_comma_separated(|parser| {
13801                let next_token = parser.next_token();
13802                match next_token.token {
13803                    Token::Word(w) => Ok(w.to_string()),
13804                    _ => parser.expected("Session param name", next_token),
13805                }
13806            })?;
13807            let value = self.parse_expr()?.to_string();
13808            Ok(
13809                Set::SetSessionParam(SetSessionParamKind::Generic(SetSessionParamGeneric {
13810                    names,
13811                    value,
13812                }))
13813                .into(),
13814            )
13815        }
13816    }
13817
13818    fn parse_session_param_value(&mut self) -> Result<SessionParamValue, ParserError> {
13819        if self.parse_keyword(Keyword::ON) {
13820            Ok(SessionParamValue::On)
13821        } else if self.parse_keyword(Keyword::OFF) {
13822            Ok(SessionParamValue::Off)
13823        } else {
13824            self.expected("ON or OFF", self.peek_token())
13825        }
13826    }
13827
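    /// Parse a `SHOW` statement (the `SHOW` keyword has already been
    /// consumed). Illustrative examples (identifiers are placeholders):
    ///
    /// ```sql
    /// SHOW TABLES LIKE '%user%'
    /// SHOW COLUMNS FROM my_table
    /// SHOW CREATE TABLE my_table
    /// ```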
13828    pub fn parse_show(&mut self) -> Result<Statement, ParserError> {
13829        let terse = self.parse_keyword(Keyword::TERSE);
13830        let extended = self.parse_keyword(Keyword::EXTENDED);
13831        let full = self.parse_keyword(Keyword::FULL);
13832        let session = self.parse_keyword(Keyword::SESSION);
13833        let global = self.parse_keyword(Keyword::GLOBAL);
13834        let external = self.parse_keyword(Keyword::EXTERNAL);
13835        if self
13836            .parse_one_of_keywords(&[Keyword::COLUMNS, Keyword::FIELDS])
13837            .is_some()
13838        {
13839            Ok(self.parse_show_columns(extended, full)?)
13840        } else if self.parse_keyword(Keyword::TABLES) {
13841            Ok(self.parse_show_tables(terse, extended, full, external)?)
13842        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEWS]) {
13843            Ok(self.parse_show_views(terse, true)?)
13844        } else if self.parse_keyword(Keyword::VIEWS) {
13845            Ok(self.parse_show_views(terse, false)?)
13846        } else if self.parse_keyword(Keyword::FUNCTIONS) {
13847            Ok(self.parse_show_functions()?)
13848        } else if extended || full {
13849            Err(ParserError::ParserError(
13850                "EXTENDED/FULL are not supported with this type of SHOW query".to_string(),
13851            ))
13852        } else if self.parse_one_of_keywords(&[Keyword::CREATE]).is_some() {
13853            Ok(self.parse_show_create()?)
13854        } else if self.parse_keyword(Keyword::COLLATION) {
13855            Ok(self.parse_show_collation()?)
13856        } else if self.parse_keyword(Keyword::VARIABLES)
13857            && dialect_of!(self is MySqlDialect | GenericDialect)
13858        {
13859            Ok(Statement::ShowVariables {
13860                filter: self.parse_show_statement_filter()?,
13861                session,
13862                global,
13863            })
13864        } else if self.parse_keyword(Keyword::STATUS)
13865            && dialect_of!(self is MySqlDialect | GenericDialect)
13866        {
13867            Ok(Statement::ShowStatus {
13868                filter: self.parse_show_statement_filter()?,
13869                session,
13870                global,
13871            })
13872        } else if self.parse_keyword(Keyword::DATABASES) {
13873            self.parse_show_databases(terse)
13874        } else if self.parse_keyword(Keyword::SCHEMAS) {
13875            self.parse_show_schemas(terse)
13876        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
13877            self.parse_show_charset(false)
13878        } else if self.parse_keyword(Keyword::CHARSET) {
13879            self.parse_show_charset(true)
13880        } else {
13881            Ok(Statement::ShowVariable {
13882                variable: self.parse_identifiers()?,
13883            })
13884        }
13885    }
13886
13887    fn parse_show_charset(&mut self, is_shorthand: bool) -> Result<Statement, ParserError> {
13888        // `CHARACTER SET` (or the `CHARSET` shorthand) has already been consumed by the caller
13889        Ok(Statement::ShowCharset(ShowCharset {
13890            is_shorthand,
13891            filter: self.parse_show_statement_filter()?,
13892        }))
13893    }
13894
13895    fn parse_show_databases(&mut self, terse: bool) -> Result<Statement, ParserError> {
13896        let history = self.parse_keyword(Keyword::HISTORY);
13897        let show_options = self.parse_show_stmt_options()?;
13898        Ok(Statement::ShowDatabases {
13899            terse,
13900            history,
13901            show_options,
13902        })
13903    }
13904
13905    fn parse_show_schemas(&mut self, terse: bool) -> Result<Statement, ParserError> {
13906        let history = self.parse_keyword(Keyword::HISTORY);
13907        let show_options = self.parse_show_stmt_options()?;
13908        Ok(Statement::ShowSchemas {
13909            terse,
13910            history,
13911            show_options,
13912        })
13913    }
13914
13915    pub fn parse_show_create(&mut self) -> Result<Statement, ParserError> {
13916        let obj_type = match self.expect_one_of_keywords(&[
13917            Keyword::TABLE,
13918            Keyword::TRIGGER,
13919            Keyword::FUNCTION,
13920            Keyword::PROCEDURE,
13921            Keyword::EVENT,
13922            Keyword::VIEW,
13923        ])? {
13924            Keyword::TABLE => Ok(ShowCreateObject::Table),
13925            Keyword::TRIGGER => Ok(ShowCreateObject::Trigger),
13926            Keyword::FUNCTION => Ok(ShowCreateObject::Function),
13927            Keyword::PROCEDURE => Ok(ShowCreateObject::Procedure),
13928            Keyword::EVENT => Ok(ShowCreateObject::Event),
13929            Keyword::VIEW => Ok(ShowCreateObject::View),
13930            keyword => Err(ParserError::ParserError(format!(
13931                "Unable to map keyword to ShowCreateObject: {keyword:?}"
13932            ))),
13933        }?;
13934
13935        let obj_name = self.parse_object_name(false)?;
13936
13937        Ok(Statement::ShowCreate { obj_type, obj_name })
13938    }
13939
13940    pub fn parse_show_columns(
13941        &mut self,
13942        extended: bool,
13943        full: bool,
13944    ) -> Result<Statement, ParserError> {
13945        let show_options = self.parse_show_stmt_options()?;
13946        Ok(Statement::ShowColumns {
13947            extended,
13948            full,
13949            show_options,
13950        })
13951    }
13952
13953    fn parse_show_tables(
13954        &mut self,
13955        terse: bool,
13956        extended: bool,
13957        full: bool,
13958        external: bool,
13959    ) -> Result<Statement, ParserError> {
13960        let history = !external && self.parse_keyword(Keyword::HISTORY);
13961        let show_options = self.parse_show_stmt_options()?;
13962        Ok(Statement::ShowTables {
13963            terse,
13964            history,
13965            extended,
13966            full,
13967            external,
13968            show_options,
13969        })
13970    }
13971
13972    fn parse_show_views(
13973        &mut self,
13974        terse: bool,
13975        materialized: bool,
13976    ) -> Result<Statement, ParserError> {
13977        let show_options = self.parse_show_stmt_options()?;
13978        Ok(Statement::ShowViews {
13979            materialized,
13980            terse,
13981            show_options,
13982        })
13983    }
13984
13985    pub fn parse_show_functions(&mut self) -> Result<Statement, ParserError> {
13986        let filter = self.parse_show_statement_filter()?;
13987        Ok(Statement::ShowFunctions { filter })
13988    }
13989
13990    pub fn parse_show_collation(&mut self) -> Result<Statement, ParserError> {
13991        let filter = self.parse_show_statement_filter()?;
13992        Ok(Statement::ShowCollation { filter })
13993    }
13994
13995    pub fn parse_show_statement_filter(
13996        &mut self,
13997    ) -> Result<Option<ShowStatementFilter>, ParserError> {
13998        if self.parse_keyword(Keyword::LIKE) {
13999            Ok(Some(ShowStatementFilter::Like(
14000                self.parse_literal_string()?,
14001            )))
14002        } else if self.parse_keyword(Keyword::ILIKE) {
14003            Ok(Some(ShowStatementFilter::ILike(
14004                self.parse_literal_string()?,
14005            )))
14006        } else if self.parse_keyword(Keyword::WHERE) {
14007            Ok(Some(ShowStatementFilter::Where(self.parse_expr()?)))
14008        } else {
14009            self.maybe_parse(|parser| -> Result<String, ParserError> {
14010                parser.parse_literal_string()
14011            })?
14012            .map_or(Ok(None), |filter| {
14013                Ok(Some(ShowStatementFilter::NoKeyword(filter)))
14014            })
14015        }
14016    }
14017
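    /// Parse a `USE` statement (the `USE` keyword has already been consumed).
    /// Which object keywords are accepted depends on the dialect; some
    /// illustrative examples (identifiers are placeholders):
    ///
    /// ```sql
    /// USE my_db
    /// USE SCHEMA my_schema        -- e.g. Snowflake, Databricks
    /// USE SECONDARY ROLES ALL     -- Snowflake
    /// ```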
14018    pub fn parse_use(&mut self) -> Result<Statement, ParserError> {
14019        // Determine which keywords are recognized by the current dialect
14020        let parsed_keyword = if dialect_of!(self is HiveDialect) {
14021            // HiveDialect accepts a `USE DEFAULT;` statement without any database specified
14022            if self.parse_keyword(Keyword::DEFAULT) {
14023                return Ok(Statement::Use(Use::Default));
14024            }
14025            None // HiveDialect doesn't expect any other specific keyword after `USE`
14026        } else if dialect_of!(self is DatabricksDialect) {
14027            self.parse_one_of_keywords(&[Keyword::CATALOG, Keyword::DATABASE, Keyword::SCHEMA])
14028        } else if dialect_of!(self is SnowflakeDialect) {
14029            self.parse_one_of_keywords(&[
14030                Keyword::DATABASE,
14031                Keyword::SCHEMA,
14032                Keyword::WAREHOUSE,
14033                Keyword::ROLE,
14034                Keyword::SECONDARY,
14035            ])
14036        } else {
14037            None // No specific keywords for other dialects, including GenericDialect
14038        };
14039
14040        let result = if matches!(parsed_keyword, Some(Keyword::SECONDARY)) {
14041            self.parse_secondary_roles()?
14042        } else {
14043            let obj_name = self.parse_object_name(false)?;
14044            match parsed_keyword {
14045                Some(Keyword::CATALOG) => Use::Catalog(obj_name),
14046                Some(Keyword::DATABASE) => Use::Database(obj_name),
14047                Some(Keyword::SCHEMA) => Use::Schema(obj_name),
14048                Some(Keyword::WAREHOUSE) => Use::Warehouse(obj_name),
14049                Some(Keyword::ROLE) => Use::Role(obj_name),
14050                _ => Use::Object(obj_name),
14051            }
14052        };
14053
14054        Ok(Statement::Use(result))
14055    }
14056
14057    fn parse_secondary_roles(&mut self) -> Result<Use, ParserError> {
14058        self.expect_one_of_keywords(&[Keyword::ROLES, Keyword::ROLE])?;
14059        if self.parse_keyword(Keyword::NONE) {
14060            Ok(Use::SecondaryRoles(SecondaryRoles::None))
14061        } else if self.parse_keyword(Keyword::ALL) {
14062            Ok(Use::SecondaryRoles(SecondaryRoles::All))
14063        } else {
14064            let roles = self.parse_comma_separated(|parser| parser.parse_identifier())?;
14065            Ok(Use::SecondaryRoles(SecondaryRoles::List(roles)))
14066        }
14067    }
14068
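    /// Parse a table factor followed by zero or more joins, e.g. one
    /// comma-separated item of a `FROM` clause (identifiers are placeholders):
    ///
    /// ```sql
    /// t1 NATURAL JOIN t2 LEFT OUTER JOIN t3 ON t1.a = t3.a
    /// ```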
14069    pub fn parse_table_and_joins(&mut self) -> Result<TableWithJoins, ParserError> {
14070        let relation = self.parse_table_factor()?;
14071        // Note that for keywords to be properly handled here, they need to be
14072        // added to `RESERVED_FOR_TABLE_ALIAS`, otherwise they may be parsed as
14073        // a table alias.
14074        let joins = self.parse_joins()?;
14075        Ok(TableWithJoins { relation, joins })
14076    }
14077
14078    fn parse_joins(&mut self) -> Result<Vec<Join>, ParserError> {
14079        let mut joins = vec![];
14080        loop {
14081            let global = self.parse_keyword(Keyword::GLOBAL);
14082            let join = if self.parse_keyword(Keyword::CROSS) {
14083                let join_operator = if self.parse_keyword(Keyword::JOIN) {
14084                    JoinOperator::CrossJoin(JoinConstraint::None)
14085                } else if self.parse_keyword(Keyword::APPLY) {
14086                    // MSSQL extension, similar to CROSS JOIN LATERAL
14087                    JoinOperator::CrossApply
14088                } else {
14089                    return self.expected("JOIN or APPLY after CROSS", self.peek_token());
14090                };
14091                let relation = self.parse_table_factor()?;
14092                let join_operator = if matches!(join_operator, JoinOperator::CrossJoin(_))
14093                    && self.dialect.supports_cross_join_constraint()
14094                {
14095                    let constraint = self.parse_join_constraint(false)?;
14096                    JoinOperator::CrossJoin(constraint)
14097                } else {
14098                    join_operator
14099                };
14100                Join {
14101                    relation,
14102                    global,
14103                    join_operator,
14104                }
14105            } else if self.parse_keyword(Keyword::OUTER) {
14106                // MSSQL extension, similar to LEFT JOIN LATERAL .. ON 1=1
14107                self.expect_keyword_is(Keyword::APPLY)?;
14108                Join {
14109                    relation: self.parse_table_factor()?,
14110                    global,
14111                    join_operator: JoinOperator::OuterApply,
14112                }
14113            } else if self.parse_keyword(Keyword::ASOF) {
14114                self.expect_keyword_is(Keyword::JOIN)?;
14115                let relation = self.parse_table_factor()?;
14116                self.expect_keyword_is(Keyword::MATCH_CONDITION)?;
14117                let match_condition = self.parse_parenthesized(Self::parse_expr)?;
14118                Join {
14119                    relation,
14120                    global,
14121                    join_operator: JoinOperator::AsOf {
14122                        match_condition,
14123                        constraint: self.parse_join_constraint(false)?,
14124                    },
14125                }
14126            } else {
14127                let natural = self.parse_keyword(Keyword::NATURAL);
14128                let peek_keyword = if let Token::Word(w) = self.peek_token().token {
14129                    w.keyword
14130                } else {
14131                    Keyword::NoKeyword
14132                };
14133
14134                let join_operator_type = match peek_keyword {
14135                    Keyword::INNER | Keyword::JOIN => {
14136                        let inner = self.parse_keyword(Keyword::INNER); // [ INNER ]
14137                        self.expect_keyword_is(Keyword::JOIN)?;
14138                        if inner {
14139                            JoinOperator::Inner
14140                        } else {
14141                            JoinOperator::Join
14142                        }
14143                    }
14144                    kw @ Keyword::LEFT | kw @ Keyword::RIGHT => {
14145                        let _ = self.next_token(); // consume LEFT/RIGHT
14146                        let is_left = kw == Keyword::LEFT;
14147                        let join_type = self.parse_one_of_keywords(&[
14148                            Keyword::OUTER,
14149                            Keyword::SEMI,
14150                            Keyword::ANTI,
14151                            Keyword::JOIN,
14152                        ]);
14153                        match join_type {
14154                            Some(Keyword::OUTER) => {
14155                                self.expect_keyword_is(Keyword::JOIN)?;
14156                                if is_left {
14157                                    JoinOperator::LeftOuter
14158                                } else {
14159                                    JoinOperator::RightOuter
14160                                }
14161                            }
14162                            Some(Keyword::SEMI) => {
14163                                self.expect_keyword_is(Keyword::JOIN)?;
14164                                if is_left {
14165                                    JoinOperator::LeftSemi
14166                                } else {
14167                                    JoinOperator::RightSemi
14168                                }
14169                            }
14170                            Some(Keyword::ANTI) => {
14171                                self.expect_keyword_is(Keyword::JOIN)?;
14172                                if is_left {
14173                                    JoinOperator::LeftAnti
14174                                } else {
14175                                    JoinOperator::RightAnti
14176                                }
14177                            }
14178                            Some(Keyword::JOIN) => {
14179                                if is_left {
14180                                    JoinOperator::Left
14181                                } else {
14182                                    JoinOperator::Right
14183                                }
14184                            }
14185                            _ => {
14186                                return Err(ParserError::ParserError(format!(
14187                                    "expected OUTER, SEMI, ANTI or JOIN after {kw:?}"
14188                                )))
14189                            }
14190                        }
14191                    }
14192                    Keyword::ANTI => {
14193                        let _ = self.next_token(); // consume ANTI
14194                        self.expect_keyword_is(Keyword::JOIN)?;
14195                        JoinOperator::Anti
14196                    }
14197                    Keyword::SEMI => {
14198                        let _ = self.next_token(); // consume SEMI
14199                        self.expect_keyword_is(Keyword::JOIN)?;
14200                        JoinOperator::Semi
14201                    }
14202                    Keyword::FULL => {
14203                        let _ = self.next_token(); // consume FULL
14204                        let _ = self.parse_keyword(Keyword::OUTER); // [ OUTER ]
14205                        self.expect_keyword_is(Keyword::JOIN)?;
14206                        JoinOperator::FullOuter
14207                    }
14208                    Keyword::OUTER => {
14209                        return self.expected("LEFT, RIGHT, or FULL", self.peek_token());
14210                    }
14211                    Keyword::STRAIGHT_JOIN => {
14212                        let _ = self.next_token(); // consume STRAIGHT_JOIN
14213                        JoinOperator::StraightJoin
14214                    }
14215                    _ if natural => {
14216                        return self.expected("a join type after NATURAL", self.peek_token());
14217                    }
14218                    _ => break,
14219                };
14220                let mut relation = self.parse_table_factor()?;
14221
14222                if !self
14223                    .dialect
14224                    .supports_left_associative_joins_without_parens()
14225                    && self.peek_parens_less_nested_join()
14226                {
14227                    let joins = self.parse_joins()?;
14228                    relation = TableFactor::NestedJoin {
14229                        table_with_joins: Box::new(TableWithJoins { relation, joins }),
14230                        alias: None,
14231                    };
14232                }
14233
14234                let join_constraint = self.parse_join_constraint(natural)?;
14235                Join {
14236                    relation,
14237                    global,
14238                    join_operator: join_operator_type(join_constraint),
14239                }
14240            };
14241            joins.push(join);
14242        }
14243        Ok(joins)
14244    }
14245
14246    fn peek_parens_less_nested_join(&self) -> bool {
14247        matches!(
14248            self.peek_token_ref().token,
14249            Token::Word(Word {
14250                keyword: Keyword::JOIN
14251                    | Keyword::INNER
14252                    | Keyword::LEFT
14253                    | Keyword::RIGHT
14254                    | Keyword::FULL,
14255                ..
14256            })
14257        )
14258    }
14259
14260    /// A table name or a parenthesized subquery, followed by optional `[AS] alias`
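    ///
    /// Some illustrative table factors (aliases and identifiers are
    /// placeholders):
    ///
    /// ```sql
    /// my_table AS t
    /// (SELECT 1) AS sub
    /// LATERAL my_table_function(42) AS f
    /// TABLE(my_table_function(42)) AS tf
    /// ```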
14261    pub fn parse_table_factor(&mut self) -> Result<TableFactor, ParserError> {
14262        if self.parse_keyword(Keyword::LATERAL) {
14263            // LATERAL must always be followed by a subquery or table function.
14264            if self.consume_token(&Token::LParen) {
14265                self.parse_derived_table_factor(Lateral)
14266            } else {
14267                let name = self.parse_object_name(false)?;
14268                self.expect_token(&Token::LParen)?;
14269                let args = self.parse_optional_args()?;
14270                let alias = self.maybe_parse_table_alias()?;
14271                Ok(TableFactor::Function {
14272                    lateral: true,
14273                    name,
14274                    args,
14275                    alias,
14276                })
14277            }
14278        } else if self.parse_keyword(Keyword::TABLE) {
14279            // parse table function (SELECT * FROM TABLE (<expr>) [ AS <alias> ])
14280            self.expect_token(&Token::LParen)?;
14281            let expr = self.parse_expr()?;
14282            self.expect_token(&Token::RParen)?;
14283            let alias = self.maybe_parse_table_alias()?;
14284            Ok(TableFactor::TableFunction { expr, alias })
14285        } else if self.consume_token(&Token::LParen) {
14286            // A left paren introduces either a derived table (i.e., a subquery)
14287            // or a nested join. It's nearly impossible to determine ahead of
14288            // time which it is... so we just try to parse both.
14289            //
14290            // Here's an example that demonstrates the complexity:
14291            //                     /-------------------------------------------------------\
14292            //                     | /-----------------------------------\                 |
14293            //     SELECT * FROM ( ( ( (SELECT 1) UNION (SELECT 2) ) AS t1 NATURAL JOIN t2 ) )
14294            //                   ^ ^ ^ ^
14295            //                   | | | |
14296            //                   | | | |
14297            //                   | | | (4) belongs to a SetExpr::Query inside the subquery
14298            //                   | | (3) starts a derived table (subquery)
14299            //                   | (2) starts a nested join
14300            //                   (1) an additional set of parens around a nested join
14301            //
14302
14303            // If the recently consumed '(' starts a derived table, the call to
14304            // `parse_derived_table_factor` below will return success after parsing the
14305            // subquery, followed by the closing ')', and the alias of the derived table.
14306            // In the example above this is case (3).
14307            if let Some(mut table) =
14308                self.maybe_parse(|parser| parser.parse_derived_table_factor(NotLateral))?
14309            {
14310                while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT])
14311                {
14312                    table = match kw {
14313                        Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
14314                        Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
14315                        _ => unreachable!(),
14316                    }
14317                }
14318                return Ok(table);
14319            }
14320
14321            // A parsing error from `parse_derived_table_factor` indicates that the '(' we've
14322            // recently consumed does not start a derived table (cases 1, 2, or 4).
14323            // `maybe_parse` will ignore such an error and rewind to be after the opening '('.
14324
14325            // Inside the parentheses we expect to find an (A) table factor
14326            // followed by some joins or (B) another level of nesting.
14327            let mut table_and_joins = self.parse_table_and_joins()?;
14328
14329            #[allow(clippy::if_same_then_else)]
14330            if !table_and_joins.joins.is_empty() {
14331                self.expect_token(&Token::RParen)?;
14332                let alias = self.maybe_parse_table_alias()?;
14333                Ok(TableFactor::NestedJoin {
14334                    table_with_joins: Box::new(table_and_joins),
14335                    alias,
14336                }) // (A)
14337            } else if let TableFactor::NestedJoin {
14338                table_with_joins: _,
14339                alias: _,
14340            } = &table_and_joins.relation
14341            {
14342                // (B): `table_and_joins` (what we found inside the parentheses)
14343                // is a nested join `(foo JOIN bar)`, not followed by other joins.
14344                self.expect_token(&Token::RParen)?;
14345                let alias = self.maybe_parse_table_alias()?;
14346                Ok(TableFactor::NestedJoin {
14347                    table_with_joins: Box::new(table_and_joins),
14348                    alias,
14349                })
14350            } else if dialect_of!(self is SnowflakeDialect | GenericDialect) {
14351                // Dialect-specific behavior: Snowflake diverges from the
14352                // standard and from most of the other implementations by
14353                // allowing extra parentheses not only around a join (B), but
14354                // around lone table names (e.g. `FROM (mytable [AS alias])`)
14355                // and around derived tables (e.g. `FROM ((SELECT ...)
14356                // [AS alias])`) as well.
14357                self.expect_token(&Token::RParen)?;
14358
14359                if let Some(outer_alias) = self.maybe_parse_table_alias()? {
14360                    // Snowflake also allows specifying an alias *after* parens
14361                    // e.g. `FROM (mytable) AS alias`
14362                    match &mut table_and_joins.relation {
14363                        TableFactor::Derived { alias, .. }
14364                        | TableFactor::Table { alias, .. }
14365                        | TableFactor::Function { alias, .. }
14366                        | TableFactor::UNNEST { alias, .. }
14367                        | TableFactor::JsonTable { alias, .. }
14368                        | TableFactor::XmlTable { alias, .. }
14369                        | TableFactor::OpenJsonTable { alias, .. }
14370                        | TableFactor::TableFunction { alias, .. }
14371                        | TableFactor::Pivot { alias, .. }
14372                        | TableFactor::Unpivot { alias, .. }
14373                        | TableFactor::MatchRecognize { alias, .. }
14374                        | TableFactor::SemanticView { alias, .. }
14375                        | TableFactor::NestedJoin { alias, .. } => {
14376                            // but not `FROM (mytable AS alias1) AS alias2`.
14377                            if let Some(inner_alias) = alias {
14378                                return Err(ParserError::ParserError(format!(
14379                                    "duplicate alias {inner_alias}"
14380                                )));
14381                            }
14382                            // Act as if the alias was specified normally next
14383                            // to the table name: `(mytable) AS alias` ->
14384                            // `(mytable AS alias)`
14385                            alias.replace(outer_alias);
14386                        }
14387                    };
14388                }
14389                // Do not store the extra set of parens in the AST
14390                Ok(table_and_joins.relation)
14391            } else {
14392                // The SQL spec prohibits derived tables and bare tables from
14393                // appearing alone in parentheses (e.g. `FROM (mytable)`)
14394                self.expected("joined table", self.peek_token())
14395            }
14396        } else if dialect_of!(self is SnowflakeDialect | DatabricksDialect | GenericDialect)
14397            && matches!(
14398                self.peek_tokens(),
14399                [
14400                    Token::Word(Word {
14401                        keyword: Keyword::VALUES,
14402                        ..
14403                    }),
14404                    Token::LParen
14405                ]
14406            )
14407        {
14408            self.expect_keyword_is(Keyword::VALUES)?;
14409
14410            // Snowflake and Databricks allow syntax like below:
14411            // SELECT * FROM VALUES (1, 'a'), (2, 'b') AS t (col1, col2)
14412            // where there are no parentheses around the VALUES clause.
14413            let values = SetExpr::Values(self.parse_values(false)?);
14414            let alias = self.maybe_parse_table_alias()?;
14415            Ok(TableFactor::Derived {
14416                lateral: false,
14417                subquery: Box::new(Query {
14418                    with: None,
14419                    body: Box::new(values),
14420                    order_by: None,
14421                    limit_clause: None,
14422                    fetch: None,
14423                    locks: vec![],
14424                    for_clause: None,
14425                    settings: None,
14426                    format_clause: None,
14427                    pipe_operators: vec![],
14428                }),
14429                alias,
14430            })
14431        } else if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
14432            && self.parse_keyword(Keyword::UNNEST)
14433        {
14434            self.expect_token(&Token::LParen)?;
14435            let array_exprs = self.parse_comma_separated(Parser::parse_expr)?;
14436            self.expect_token(&Token::RParen)?;
14437
14438            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
14439            let alias = self.maybe_parse_table_alias()?;
14444
14445            let with_offset = self.expect_keywords(&[Keyword::WITH, Keyword::OFFSET]).is_ok();
14449
14450            let with_offset_alias = if with_offset {
14451                self.parse_optional_alias(keywords::RESERVED_FOR_COLUMN_ALIAS)?
14452            } else {
14453                None
14454            };
14459
14460            Ok(TableFactor::UNNEST {
14461                alias,
14462                array_exprs,
14463                with_offset,
14464                with_offset_alias,
14465                with_ordinality,
14466            })
14467        } else if self.parse_keyword_with_tokens(Keyword::JSON_TABLE, &[Token::LParen]) {
14468            let json_expr = self.parse_expr()?;
14469            self.expect_token(&Token::Comma)?;
14470            let json_path = self.parse_value()?.value;
14471            self.expect_keyword_is(Keyword::COLUMNS)?;
14472            self.expect_token(&Token::LParen)?;
14473            let columns = self.parse_comma_separated(Parser::parse_json_table_column_def)?;
14474            self.expect_token(&Token::RParen)?;
14475            self.expect_token(&Token::RParen)?;
14476            let alias = self.maybe_parse_table_alias()?;
14477            Ok(TableFactor::JsonTable {
14478                json_expr,
14479                json_path,
14480                columns,
14481                alias,
14482            })
14483        } else if self.parse_keyword_with_tokens(Keyword::OPENJSON, &[Token::LParen]) {
14484            self.prev_token();
14485            self.parse_open_json_table_factor()
14486        } else if self.parse_keyword_with_tokens(Keyword::XMLTABLE, &[Token::LParen]) {
14487            self.prev_token();
14488            self.parse_xml_table_factor()
14489        } else if self.dialect.supports_semantic_view_table_factor()
14490            && self.peek_keyword_with_tokens(Keyword::SEMANTIC_VIEW, &[Token::LParen])
14491        {
14492            self.parse_semantic_view_table_factor()
14493        } else {
14494            let name = self.parse_object_name(true)?;
14495
14496            let json_path = match self.peek_token().token {
14497                Token::LBracket if self.dialect.supports_partiql() => Some(self.parse_json_path()?),
14498                _ => None,
14499            };
14500
14501            let partitions: Vec<Ident> = if dialect_of!(self is MySqlDialect | GenericDialect)
14502                && self.parse_keyword(Keyword::PARTITION)
14503            {
14504                self.parse_parenthesized_identifiers()?
14505            } else {
14506                vec![]
14507            };
14508
14509            // Parse potential version qualifier
14510            let version = self.maybe_parse_table_version()?;
14511
14512            // Postgres, MSSQL, ClickHouse: table-valued functions:
14513            let args = if self.consume_token(&Token::LParen) {
14514                Some(self.parse_table_function_args()?)
14515            } else {
14516                None
14517            };
14518
14519            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
14520
14521            let mut sample = None;
14522            if self.dialect.supports_table_sample_before_alias() {
14523                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
14524                    sample = Some(TableSampleKind::BeforeTableAlias(parsed_sample));
14525                }
14526            }
14527
14528            let alias = self.maybe_parse_table_alias()?;
14529
14530            // MySQL-specific table hints:
14531            let index_hints = if self.dialect.supports_table_hints() {
14532                self.maybe_parse(|p| p.parse_table_index_hints())?
14533                    .unwrap_or(vec![])
14534            } else {
14535                vec![]
14536            };
14537
14538            // MSSQL-specific table hints:
14539            let mut with_hints = vec![];
14540            if self.parse_keyword(Keyword::WITH) {
14541                if self.consume_token(&Token::LParen) {
14542                    with_hints = self.parse_comma_separated(Parser::parse_expr)?;
14543                    self.expect_token(&Token::RParen)?;
14544                } else {
14545                    // rewind, as WITH may belong to the next statement's CTE
14546                    self.prev_token();
14547                }
14548            };
14549
14550            if !self.dialect.supports_table_sample_before_alias() {
14551                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
14552                    sample = Some(TableSampleKind::AfterTableAlias(parsed_sample));
14553                }
14554            }
14555
14556            let mut table = TableFactor::Table {
14557                name,
14558                alias,
14559                args,
14560                with_hints,
14561                version,
14562                partitions,
14563                with_ordinality,
14564                json_path,
14565                sample,
14566                index_hints,
14567            };
14568
14569            while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT]) {
14570                table = match kw {
14571                    Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
14572                    Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
14573                    _ => unreachable!(),
14574                }
14575            }
14576
14577            if self.dialect.supports_match_recognize()
14578                && self.parse_keyword(Keyword::MATCH_RECOGNIZE)
14579            {
14580                table = self.parse_match_recognize(table)?;
14581            }
14582
14583            Ok(table)
14584        }
14585    }
14586
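    /// Parses an optional `TABLESAMPLE`/`SAMPLE` clause if one follows, e.g.
    /// (illustrative) `TABLESAMPLE BERNOULLI (10 PERCENT)`; returns `None` if
    /// neither keyword is present.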
14587    fn maybe_parse_table_sample(&mut self) -> Result<Option<Box<TableSample>>, ParserError> {
14588        let modifier = if self.parse_keyword(Keyword::TABLESAMPLE) {
14589            TableSampleModifier::TableSample
14590        } else if self.parse_keyword(Keyword::SAMPLE) {
14591            TableSampleModifier::Sample
14592        } else {
14593            return Ok(None);
14594        };
14595        self.parse_table_sample(modifier).map(Some)
14596    }
14597
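    /// Parses the body of a table sample clause, after the `TABLESAMPLE` or
    /// `SAMPLE` keyword has been consumed. Illustrative forms accepted here:
    /// a quantity such as `SYSTEM (10 ROWS)` or `(50 PERCENT)`, a bucket such
    /// as `(BUCKET 4 OUT OF 16 ON id)`, an optional `REPEATABLE (123)` or
    /// `SEED (123)` seed, and an optional trailing `OFFSET <expr>`.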
14598    fn parse_table_sample(
14599        &mut self,
14600        modifier: TableSampleModifier,
14601    ) -> Result<Box<TableSample>, ParserError> {
14602        let name = match self.parse_one_of_keywords(&[
14603            Keyword::BERNOULLI,
14604            Keyword::ROW,
14605            Keyword::SYSTEM,
14606            Keyword::BLOCK,
14607        ]) {
14608            Some(Keyword::BERNOULLI) => Some(TableSampleMethod::Bernoulli),
14609            Some(Keyword::ROW) => Some(TableSampleMethod::Row),
14610            Some(Keyword::SYSTEM) => Some(TableSampleMethod::System),
14611            Some(Keyword::BLOCK) => Some(TableSampleMethod::Block),
14612            _ => None,
14613        };
14614
14615        let parenthesized = self.consume_token(&Token::LParen);
14616
14617        let (quantity, bucket) = if parenthesized && self.parse_keyword(Keyword::BUCKET) {
14618            let selected_bucket = self.parse_number_value()?.value;
14619            self.expect_keywords(&[Keyword::OUT, Keyword::OF])?;
14620            let total = self.parse_number_value()?.value;
14621            let on = if self.parse_keyword(Keyword::ON) {
14622                Some(self.parse_expr()?)
14623            } else {
14624                None
14625            };
14626            (
14627                None,
14628                Some(TableSampleBucket {
14629                    bucket: selected_bucket,
14630                    total,
14631                    on,
14632                }),
14633            )
14634        } else {
14635            let value = match self.maybe_parse(|p| p.parse_expr())? {
14636                Some(num) => num,
14637                None => {
14638                    let next_token = self.next_token();
14639                    if let Token::Word(w) = next_token.token {
14640                        Expr::Value(Value::Placeholder(w.value).with_span(next_token.span))
14641                    } else {
14642                        return parser_err!(
14643                            "Expecting number or byte length e.g. 100M",
14644                            self.peek_token().span.start
14645                        );
14646                    }
14647                }
14648            };
14649            let unit = if self.parse_keyword(Keyword::ROWS) {
14650                Some(TableSampleUnit::Rows)
14651            } else if self.parse_keyword(Keyword::PERCENT) {
14652                Some(TableSampleUnit::Percent)
14653            } else {
14654                None
14655            };
14656            (
14657                Some(TableSampleQuantity {
14658                    parenthesized,
14659                    value,
14660                    unit,
14661                }),
14662                None,
14663            )
14664        };
14665        if parenthesized {
14666            self.expect_token(&Token::RParen)?;
14667        }
14668
14669        let seed = if self.parse_keyword(Keyword::REPEATABLE) {
14670            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Repeatable)?)
14671        } else if self.parse_keyword(Keyword::SEED) {
14672            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Seed)?)
14673        } else {
14674            None
14675        };
14676
14677        let offset = if self.parse_keyword(Keyword::OFFSET) {
14678            Some(self.parse_expr()?)
14679        } else {
14680            None
14681        };
14682
14683        Ok(Box::new(TableSample {
14684            modifier,
14685            name,
14686            quantity,
14687            seed,
14688            bucket,
14689            offset,
14690        }))
14691    }
14692
14693    fn parse_table_sample_seed(
14694        &mut self,
14695        modifier: TableSampleSeedModifier,
14696    ) -> Result<TableSampleSeed, ParserError> {
14697        self.expect_token(&Token::LParen)?;
14698        let value = self.parse_number_value()?.value;
14699        self.expect_token(&Token::RParen)?;
14700        Ok(TableSampleSeed { modifier, value })
14701    }
14702
14703    /// Parses `OPENJSON( jsonExpression [ , path ] )  [ <with_clause> ]` clause,
14704    /// assuming the `OPENJSON` keyword was already consumed.
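    ///
    /// For example (illustrative):
    ///
    /// ```sql
    /// OPENJSON(json_col, '$.items') WITH (id INT '$.id', name VARCHAR(50) '$.name') AS j
    /// ```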
14705    fn parse_open_json_table_factor(&mut self) -> Result<TableFactor, ParserError> {
14706        self.expect_token(&Token::LParen)?;
14707        let json_expr = self.parse_expr()?;
14708        let json_path = if self.consume_token(&Token::Comma) {
14709            Some(self.parse_value()?.value)
14710        } else {
14711            None
14712        };
14713        self.expect_token(&Token::RParen)?;
14714        let columns = if self.parse_keyword(Keyword::WITH) {
14715            self.expect_token(&Token::LParen)?;
14716            let columns = self.parse_comma_separated(Parser::parse_openjson_table_column_def)?;
14717            self.expect_token(&Token::RParen)?;
14718            columns
14719        } else {
14720            Vec::new()
14721        };
14722        let alias = self.maybe_parse_table_alias()?;
14723        Ok(TableFactor::OpenJsonTable {
14724            json_expr,
14725            json_path,
14726            columns,
14727            alias,
14728        })
14729    }
14730
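    /// Parses an `XMLTABLE( <row_expr> PASSING ... COLUMNS ... )` table factor,
    /// assuming the `XMLTABLE` keyword was already consumed. For example
    /// (illustrative):
    ///
    /// ```sql
    /// XMLTABLE('/root/row' PASSING xml_col COLUMNS id INT PATH '@id', name TEXT PATH 'name') AS t
    /// ```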
14731    fn parse_xml_table_factor(&mut self) -> Result<TableFactor, ParserError> {
14732        self.expect_token(&Token::LParen)?;
14733        let namespaces = if self.parse_keyword(Keyword::XMLNAMESPACES) {
14734            self.expect_token(&Token::LParen)?;
14735            let namespaces = self.parse_comma_separated(Parser::parse_xml_namespace_definition)?;
14736            self.expect_token(&Token::RParen)?;
14737            self.expect_token(&Token::Comma)?;
14738            namespaces
14739        } else {
14740            vec![]
14741        };
14742        let row_expression = self.parse_expr()?;
14743        let passing = self.parse_xml_passing_clause()?;
14744        self.expect_keyword_is(Keyword::COLUMNS)?;
14745        let columns = self.parse_comma_separated(Parser::parse_xml_table_column)?;
14746        self.expect_token(&Token::RParen)?;
14747        let alias = self.maybe_parse_table_alias()?;
14748        Ok(TableFactor::XmlTable {
14749            namespaces,
14750            row_expression,
14751            passing,
14752            columns,
14753            alias,
14754        })
14755    }
14756
14757    fn parse_xml_namespace_definition(&mut self) -> Result<XmlNamespaceDefinition, ParserError> {
14758        let uri = self.parse_expr()?;
14759        self.expect_keyword_is(Keyword::AS)?;
14760        let name = self.parse_identifier()?;
14761        Ok(XmlNamespaceDefinition { uri, name })
14762    }
14763
14764    fn parse_xml_table_column(&mut self) -> Result<XmlTableColumn, ParserError> {
14765        let name = self.parse_identifier()?;
14766
14767        let option = if self.parse_keyword(Keyword::FOR) {
14768            self.expect_keyword(Keyword::ORDINALITY)?;
14769            XmlTableColumnOption::ForOrdinality
14770        } else {
14771            let r#type = self.parse_data_type()?;
14772            let mut path = None;
14773            let mut default = None;
14774
14775            if self.parse_keyword(Keyword::PATH) {
14776                path = Some(self.parse_expr()?);
14777            }
14778
14779            if self.parse_keyword(Keyword::DEFAULT) {
14780                default = Some(self.parse_expr()?);
14781            }
14782
14783            let not_null = self.parse_keywords(&[Keyword::NOT, Keyword::NULL]);
14784            if !not_null {
14785                // NULL is the default but can be specified explicitly
14786                let _ = self.parse_keyword(Keyword::NULL);
14787            }
14788
14789            XmlTableColumnOption::NamedInfo {
14790                r#type,
14791                path,
14792                default,
14793                nullable: !not_null,
14794            }
14795        };
14796        Ok(XmlTableColumn { name, option })
14797    }
14798
14799    fn parse_xml_passing_clause(&mut self) -> Result<XmlPassingClause, ParserError> {
14800        let mut arguments = vec![];
14801        if self.parse_keyword(Keyword::PASSING) {
14802            loop {
14803                let by_value =
14804                    self.parse_keyword(Keyword::BY) && self.expect_keyword(Keyword::VALUE).is_ok();
14805                let expr = self.parse_expr()?;
14806                let alias = if self.parse_keyword(Keyword::AS) {
14807                    Some(self.parse_identifier()?)
14808                } else {
14809                    None
14810                };
14811                arguments.push(XmlPassingArgument {
14812                    expr,
14813                    alias,
14814                    by_value,
14815                });
14816                if !self.consume_token(&Token::Comma) {
14817                    break;
14818                }
14819            }
14820        }
14821        Ok(XmlPassingClause { arguments })
14822    }
14823
14824    /// Parse a [TableFactor::SemanticView]
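    ///
    /// For example (illustrative):
    ///
    /// ```sql
    /// SEMANTIC_VIEW(my_model DIMENSIONS d.region METRICS m.total_sales WHERE d.region <> 'EU') AS sv
    /// ```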
14825    fn parse_semantic_view_table_factor(&mut self) -> Result<TableFactor, ParserError> {
14826        self.expect_keyword(Keyword::SEMANTIC_VIEW)?;
14827        self.expect_token(&Token::LParen)?;
14828
14829        let name = self.parse_object_name(true)?;
14830
14831        // Parse DIMENSIONS, METRICS, FACTS and WHERE clauses in flexible order
14832        let mut dimensions = Vec::new();
14833        let mut metrics = Vec::new();
14834        let mut facts = Vec::new();
14835        let mut where_clause = None;
14836
14837        while self.peek_token().token != Token::RParen {
14838            if self.parse_keyword(Keyword::DIMENSIONS) {
14839                if !dimensions.is_empty() {
14840                    return Err(ParserError::ParserError(
14841                        "DIMENSIONS clause can only be specified once".to_string(),
14842                    ));
14843                }
14844                dimensions = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
14845            } else if self.parse_keyword(Keyword::METRICS) {
14846                if !metrics.is_empty() {
14847                    return Err(ParserError::ParserError(
14848                        "METRICS clause can only be specified once".to_string(),
14849                    ));
14850                }
14851                metrics = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
14852            } else if self.parse_keyword(Keyword::FACTS) {
14853                if !facts.is_empty() {
14854                    return Err(ParserError::ParserError(
14855                        "FACTS clause can only be specified once".to_string(),
14856                    ));
14857                }
14858                facts = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
14859            } else if self.parse_keyword(Keyword::WHERE) {
14860                if where_clause.is_some() {
14861                    return Err(ParserError::ParserError(
14862                        "WHERE clause can only be specified once".to_string(),
14863                    ));
14864                }
14865                where_clause = Some(self.parse_expr()?);
14866            } else {
14867                return parser_err!(
14868                    format!(
14869                        "Expected one of DIMENSIONS, METRICS, FACTS or WHERE, got {}",
14870                        self.peek_token().token
14871                    ),
14872                    self.peek_token().span.start
14873                );
14874            }
14875        }
14876
14877        self.expect_token(&Token::RParen)?;
14878
14879        let alias = self.maybe_parse_table_alias()?;
14880
14881        Ok(TableFactor::SemanticView {
14882            name,
14883            dimensions,
14884            metrics,
14885            facts,
14886            where_clause,
14887            alias,
14888        })
14889    }
14890
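    /// Parses a `MATCH_RECOGNIZE (...)` clause applied to `table`, assuming the
    /// `MATCH_RECOGNIZE` keyword was already consumed. Only `PATTERN` and
    /// `DEFINE` are required; for example (illustrative):
    ///
    /// ```sql
    /// MATCH_RECOGNIZE(
    ///     PARTITION BY sym
    ///     ORDER BY ts
    ///     MEASURES FIRST(price) AS start_price
    ///     ONE ROW PER MATCH
    ///     AFTER MATCH SKIP TO NEXT ROW
    ///     PATTERN (A B+)
    ///     DEFINE B AS price > PREV(price)
    /// ) AS mr
    /// ```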
14891    fn parse_match_recognize(&mut self, table: TableFactor) -> Result<TableFactor, ParserError> {
14892        self.expect_token(&Token::LParen)?;
14893
14894        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
14895            self.parse_comma_separated(Parser::parse_expr)?
14896        } else {
14897            vec![]
14898        };
14899
14900        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
14901            self.parse_comma_separated(Parser::parse_order_by_expr)?
14902        } else {
14903            vec![]
14904        };
14905
14906        let measures = if self.parse_keyword(Keyword::MEASURES) {
14907            self.parse_comma_separated(|p| {
14908                let expr = p.parse_expr()?;
14909                let _ = p.parse_keyword(Keyword::AS);
14910                let alias = p.parse_identifier()?;
14911                Ok(Measure { expr, alias })
14912            })?
14913        } else {
14914            vec![]
14915        };
14916
14917        let rows_per_match =
14918            if self.parse_keywords(&[Keyword::ONE, Keyword::ROW, Keyword::PER, Keyword::MATCH]) {
14919                Some(RowsPerMatch::OneRow)
14920            } else if self.parse_keywords(&[
14921                Keyword::ALL,
14922                Keyword::ROWS,
14923                Keyword::PER,
14924                Keyword::MATCH,
14925            ]) {
14926                Some(RowsPerMatch::AllRows(
14927                    if self.parse_keywords(&[Keyword::SHOW, Keyword::EMPTY, Keyword::MATCHES]) {
14928                        Some(EmptyMatchesMode::Show)
14929                    } else if self.parse_keywords(&[
14930                        Keyword::OMIT,
14931                        Keyword::EMPTY,
14932                        Keyword::MATCHES,
14933                    ]) {
14934                        Some(EmptyMatchesMode::Omit)
14935                    } else if self.parse_keywords(&[
14936                        Keyword::WITH,
14937                        Keyword::UNMATCHED,
14938                        Keyword::ROWS,
14939                    ]) {
14940                        Some(EmptyMatchesMode::WithUnmatched)
14941                    } else {
14942                        None
14943                    },
14944                ))
14945            } else {
14946                None
14947            };
14948
14949        let after_match_skip =
14950            if self.parse_keywords(&[Keyword::AFTER, Keyword::MATCH, Keyword::SKIP]) {
14951                if self.parse_keywords(&[Keyword::PAST, Keyword::LAST, Keyword::ROW]) {
14952                    Some(AfterMatchSkip::PastLastRow)
14953                } else if self.parse_keywords(&[Keyword::TO, Keyword::NEXT, Keyword::ROW]) {
14954                    Some(AfterMatchSkip::ToNextRow)
14955                } else if self.parse_keywords(&[Keyword::TO, Keyword::FIRST]) {
14956                    Some(AfterMatchSkip::ToFirst(self.parse_identifier()?))
14957                } else if self.parse_keywords(&[Keyword::TO, Keyword::LAST]) {
14958                    Some(AfterMatchSkip::ToLast(self.parse_identifier()?))
14959                } else {
14960                    let found = self.next_token();
14961                    return self.expected("after match skip option", found);
14962                }
14963            } else {
14964                None
14965            };
14966
14967        self.expect_keyword_is(Keyword::PATTERN)?;
14968        let pattern = self.parse_parenthesized(Self::parse_pattern)?;
14969
14970        self.expect_keyword_is(Keyword::DEFINE)?;
14971
14972        let symbols = self.parse_comma_separated(|p| {
14973            let symbol = p.parse_identifier()?;
14974            p.expect_keyword_is(Keyword::AS)?;
14975            let definition = p.parse_expr()?;
14976            Ok(SymbolDefinition { symbol, definition })
14977        })?;
14978
14979        self.expect_token(&Token::RParen)?;
14980
14981        let alias = self.maybe_parse_table_alias()?;
14982
14983        Ok(TableFactor::MatchRecognize {
14984            table: Box::new(table),
14985            partition_by,
14986            order_by,
14987            measures,
14988            rows_per_match,
14989            after_match_skip,
14990            pattern,
14991            symbols,
14992            alias,
14993        })
14994    }
14995
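    /// Parses a primary `MATCH_RECOGNIZE` pattern element: the `^` / `$`
    /// anchors, an exclusion `{- symbol -}`, `PERMUTE(a, b, ...)`, a
    /// parenthesized group, or a plain symbol name.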
14996    fn parse_base_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
14997        match self.next_token().token {
14998            Token::Caret => Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::Start)),
14999            Token::Placeholder(s) if s == "$" => {
15000                Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::End))
15001            }
15002            Token::LBrace => {
15003                self.expect_token(&Token::Minus)?;
15004                let symbol = self.parse_identifier().map(MatchRecognizeSymbol::Named)?;
15005                self.expect_token(&Token::Minus)?;
15006                self.expect_token(&Token::RBrace)?;
15007                Ok(MatchRecognizePattern::Exclude(symbol))
15008            }
15009            Token::Word(Word {
15010                value,
15011                quote_style: None,
15012                ..
15013            }) if value == "PERMUTE" => {
15014                self.expect_token(&Token::LParen)?;
15015                let symbols = self.parse_comma_separated(|p| {
15016                    p.parse_identifier().map(MatchRecognizeSymbol::Named)
15017                })?;
15018                self.expect_token(&Token::RParen)?;
15019                Ok(MatchRecognizePattern::Permute(symbols))
15020            }
15021            Token::LParen => {
15022                let pattern = self.parse_pattern()?;
15023                self.expect_token(&Token::RParen)?;
15024                Ok(MatchRecognizePattern::Group(Box::new(pattern)))
15025            }
15026            _ => {
15027                self.prev_token();
15028                self.parse_identifier()
15029                    .map(MatchRecognizeSymbol::Named)
15030                    .map(MatchRecognizePattern::Symbol)
15031            }
15032        }
15033    }
15034
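    /// Parses a base pattern followed by any number of repetition quantifiers:
    /// `*`, `+`, `?`, `{n}`, `{n,}`, `{,m}` or `{n,m}`.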
15035    fn parse_repetition_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
15036        let mut pattern = self.parse_base_pattern()?;
15037        loop {
15038            let token = self.next_token();
15039            let quantifier = match token.token {
15040                Token::Mul => RepetitionQuantifier::ZeroOrMore,
15041                Token::Plus => RepetitionQuantifier::OneOrMore,
15042                Token::Placeholder(s) if s == "?" => RepetitionQuantifier::AtMostOne,
15043                Token::LBrace => {
15044                    // quantifier is a range like {n} or {n,} or {,m} or {n,m}
15045                    let token = self.next_token();
15046                    match token.token {
15047                        Token::Comma => {
15048                            let next_token = self.next_token();
15049                            let Token::Number(n, _) = next_token.token else {
15050                                return self.expected("literal number", next_token);
15051                            };
15052                            self.expect_token(&Token::RBrace)?;
15053                            RepetitionQuantifier::AtMost(Self::parse(n, token.span.start)?)
15054                        }
15055                        Token::Number(n, _) if self.consume_token(&Token::Comma) => {
15056                            let next_token = self.next_token();
15057                            match next_token.token {
15058                                Token::Number(m, _) => {
15059                                    self.expect_token(&Token::RBrace)?;
15060                                    RepetitionQuantifier::Range(
15061                                        Self::parse(n, token.span.start)?,
15062                                        Self::parse(m, token.span.start)?,
15063                                    )
15064                                }
15065                                Token::RBrace => {
15066                                    RepetitionQuantifier::AtLeast(Self::parse(n, token.span.start)?)
15067                                }
15068                                _ => {
15069                                    return self.expected("} or upper bound", next_token);
15070                                }
15071                            }
15072                        }
15073                        Token::Number(n, _) => {
15074                            self.expect_token(&Token::RBrace)?;
15075                            RepetitionQuantifier::Exactly(Self::parse(n, token.span.start)?)
15076                        }
15077                        _ => return self.expected("quantifier range", token),
15078                    }
15079                }
15080                _ => {
15081                    self.prev_token();
15082                    break;
15083                }
15084            };
15085            pattern = MatchRecognizePattern::Repetition(Box::new(pattern), quantifier);
15086        }
15087        Ok(pattern)
15088    }
15089
15090    fn parse_concat_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
15091        let mut patterns = vec![self.parse_repetition_pattern()?];
15092        while !matches!(self.peek_token().token, Token::RParen | Token::Pipe) {
15093            patterns.push(self.parse_repetition_pattern()?);
15094        }
15095        match <[MatchRecognizePattern; 1]>::try_from(patterns) {
15096            Ok([pattern]) => Ok(pattern),
15097            Err(patterns) => Ok(MatchRecognizePattern::Concat(patterns)),
15098        }
15099    }
15100
15101    fn parse_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
15102        let pattern = self.parse_concat_pattern()?;
15103        if self.consume_token(&Token::Pipe) {
15104            match self.parse_pattern()? {
15105                // flatten nested alternations
15106                MatchRecognizePattern::Alternation(mut patterns) => {
15107                    patterns.insert(0, pattern);
15108                    Ok(MatchRecognizePattern::Alternation(patterns))
15109                }
15110                next => Ok(MatchRecognizePattern::Alternation(vec![pattern, next])),
15111            }
15112        } else {
15113            Ok(pattern)
15114        }
15115    }
15116
15117    /// Parses the timestamp version specifier (i.e., to query historical data)
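    ///
    /// For example (illustrative), `FOR SYSTEM_TIME AS OF '2011-01-01 00:00:00'`
    /// following a table name, or an `AT(...)` / `BEFORE(...)` function-call form.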
15118    pub fn maybe_parse_table_version(&mut self) -> Result<Option<TableVersion>, ParserError> {
15119        if self.dialect.supports_timestamp_versioning() {
15120            if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
15121            {
15122                let expr = self.parse_expr()?;
15123                return Ok(Some(TableVersion::ForSystemTimeAsOf(expr)));
15124            } else if self.peek_keyword(Keyword::AT) || self.peek_keyword(Keyword::BEFORE) {
15125                let func_name = self.parse_object_name(true)?;
15126                let func = self.parse_function(func_name)?;
15127                return Ok(Some(TableVersion::Function(func)));
15128            }
15129        }
15130        Ok(None)
15131    }
15132
15133    /// Parses MySQL's JSON_TABLE column definition.
15134    /// For example: `id INT EXISTS PATH '$' DEFAULT '0' ON EMPTY ERROR ON ERROR`
15135    pub fn parse_json_table_column_def(&mut self) -> Result<JsonTableColumn, ParserError> {
15136        if self.parse_keyword(Keyword::NESTED) {
15137            let _has_path_keyword = self.parse_keyword(Keyword::PATH);
15138            let path = self.parse_value()?.value;
15139            self.expect_keyword_is(Keyword::COLUMNS)?;
15140            let columns = self.parse_parenthesized(|p| {
15141                p.parse_comma_separated(Self::parse_json_table_column_def)
15142            })?;
15143            return Ok(JsonTableColumn::Nested(JsonTableNestedColumn {
15144                path,
15145                columns,
15146            }));
15147        }
15148        let name = self.parse_identifier()?;
15149        if self.parse_keyword(Keyword::FOR) {
15150            self.expect_keyword_is(Keyword::ORDINALITY)?;
15151            return Ok(JsonTableColumn::ForOrdinality(name));
15152        }
15153        let r#type = self.parse_data_type()?;
15154        let exists = self.parse_keyword(Keyword::EXISTS);
15155        self.expect_keyword_is(Keyword::PATH)?;
15156        let path = self.parse_value()?.value;
15157        let mut on_empty = None;
15158        let mut on_error = None;
15159        while let Some(error_handling) = self.parse_json_table_column_error_handling()? {
15160            if self.parse_keyword(Keyword::EMPTY) {
15161                on_empty = Some(error_handling);
15162            } else {
15163                self.expect_keyword_is(Keyword::ERROR)?;
15164                on_error = Some(error_handling);
15165            }
15166        }
15167        Ok(JsonTableColumn::Named(JsonTableNamedColumn {
15168            name,
15169            r#type,
15170            path,
15171            exists,
15172            on_empty,
15173            on_error,
15174        }))
15175    }
15176
15177    /// Parses MSSQL's `OPENJSON WITH` column definition.
15178    ///
15179    /// ```sql
15180    /// colName type [ column_path ] [ AS JSON ]
15181    /// ```
15182    ///
15183    /// Reference: <https://learn.microsoft.com/en-us/sql/t-sql/functions/openjson-transact-sql?view=sql-server-ver16#syntax>
15184    pub fn parse_openjson_table_column_def(&mut self) -> Result<OpenJsonTableColumn, ParserError> {
15185        let name = self.parse_identifier()?;
15186        let r#type = self.parse_data_type()?;
15187        let path = if let Token::SingleQuotedString(path) = self.peek_token().token {
15188            self.next_token();
15189            Some(path)
15190        } else {
15191            None
15192        };
15193        let as_json = self.parse_keyword(Keyword::AS);
15194        if as_json {
15195            self.expect_keyword_is(Keyword::JSON)?;
15196        }
15197        Ok(OpenJsonTableColumn {
15198            name,
15199            r#type,
15200            path,
15201            as_json,
15202        })
15203    }
15204
15205    fn parse_json_table_column_error_handling(
15206        &mut self,
15207    ) -> Result<Option<JsonTableColumnErrorHandling>, ParserError> {
15208        let res = if self.parse_keyword(Keyword::NULL) {
15209            JsonTableColumnErrorHandling::Null
15210        } else if self.parse_keyword(Keyword::ERROR) {
15211            JsonTableColumnErrorHandling::Error
15212        } else if self.parse_keyword(Keyword::DEFAULT) {
15213            JsonTableColumnErrorHandling::Default(self.parse_value()?.value)
15214        } else {
15215            return Ok(None);
15216        };
15217        self.expect_keyword_is(Keyword::ON)?;
15218        Ok(Some(res))
15219    }
15220
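    /// Parses the body of a derived table (a subquery) after the opening `(`
    /// has already been consumed, e.g. (illustrative) `(SELECT 1) AS t`;
    /// consumes the closing `)` and an optional alias.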
15221    pub fn parse_derived_table_factor(
15222        &mut self,
15223        lateral: IsLateral,
15224    ) -> Result<TableFactor, ParserError> {
15225        let subquery = self.parse_query()?;
15226        self.expect_token(&Token::RParen)?;
15227        let alias = self.maybe_parse_table_alias()?;
15228        Ok(TableFactor::Derived {
15229            lateral: match lateral {
15230                Lateral => true,
15231                NotLateral => false,
15232            },
15233            subquery,
15234            alias,
15235        })
15236    }
15237
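    /// Parses a function call with an optional `AS <alias>`, e.g.
    /// (illustrative) `SUM(amount) AS total`, as used in `PIVOT` aggregate lists.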
15238    fn parse_aliased_function_call(&mut self) -> Result<ExprWithAlias, ParserError> {
15239        let function_name = match self.next_token().token {
15240            Token::Word(w) => Ok(w.value),
15241            _ => self.expected("a function identifier", self.peek_token()),
15242        }?;
15243        let expr = self.parse_function(ObjectName::from(vec![Ident::new(function_name)]))?;
15244        let alias = if self.parse_keyword(Keyword::AS) {
15245            Some(self.parse_identifier()?)
15246        } else {
15247            None
15248        };
15249
15250        Ok(ExprWithAlias { expr, alias })
15251    }

15252    /// Parses an expression with an optional alias
15253    ///
15254    /// Examples:
15255    ///
15256    /// ```sql
15257    /// SUM(price) AS total_price
15258    /// ```
15259    /// ```sql
15260    /// SUM(price)
15261    /// ```
15262    ///
15263    /// Example
15264    /// ```
15265    /// # use sqlparser::parser::{Parser, ParserError};
15266    /// # use sqlparser::dialect::GenericDialect;
15267    /// # fn main() ->Result<(), ParserError> {
15268    /// let sql = r#"SUM("a") as "b""#;
15269    /// let mut parser = Parser::new(&GenericDialect).try_with_sql(sql)?;
15270    /// let expr_with_alias = parser.parse_expr_with_alias()?;
15271    /// assert_eq!(Some("b".to_string()), expr_with_alias.alias.map(|x|x.value));
15272    /// # Ok(())
15273    /// # }
    /// ```
15274    pub fn parse_expr_with_alias(&mut self) -> Result<ExprWithAlias, ParserError> {
15275        let expr = self.parse_expr()?;
15276        let alias = if self.parse_keyword(Keyword::AS) {
15277            Some(self.parse_identifier()?)
15278        } else {
15279            None
15280        };
15281
15282        Ok(ExprWithAlias { expr, alias })
15283    }
15284
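    /// Parses a `PIVOT (...)` clause applied to `table`, assuming the `PIVOT`
    /// keyword was already consumed. For example (illustrative):
    ///
    /// ```sql
    /// PIVOT(SUM(amount) FOR category IN ('A', 'B')) AS p
    /// ```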
15285    pub fn parse_pivot_table_factor(
15286        &mut self,
15287        table: TableFactor,
15288    ) -> Result<TableFactor, ParserError> {
15289        self.expect_token(&Token::LParen)?;
15290        let aggregate_functions = self.parse_comma_separated(Self::parse_aliased_function_call)?;
15291        self.expect_keyword_is(Keyword::FOR)?;
15292        let value_column = if self.peek_token_ref().token == Token::LParen {
15293            self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
15294                p.parse_subexpr(self.dialect.prec_value(Precedence::Between))
15295            })?
15296        } else {
15297            vec![self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?]
15298        };
15299        self.expect_keyword_is(Keyword::IN)?;
15300
15301        self.expect_token(&Token::LParen)?;
15302        let value_source = if self.parse_keyword(Keyword::ANY) {
15303            let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
15304                self.parse_comma_separated(Parser::parse_order_by_expr)?
15305            } else {
15306                vec![]
15307            };
15308            PivotValueSource::Any(order_by)
15309        } else if self.peek_sub_query() {
15310            PivotValueSource::Subquery(self.parse_query()?)
15311        } else {
15312            PivotValueSource::List(self.parse_comma_separated(Self::parse_expr_with_alias)?)
15313        };
15314        self.expect_token(&Token::RParen)?;
15315
15316        let default_on_null =
15317            if self.parse_keywords(&[Keyword::DEFAULT, Keyword::ON, Keyword::NULL]) {
15318                self.expect_token(&Token::LParen)?;
15319                let expr = self.parse_expr()?;
15320                self.expect_token(&Token::RParen)?;
15321                Some(expr)
15322            } else {
15323                None
15324            };
15325
15326        self.expect_token(&Token::RParen)?;
15327        let alias = self.maybe_parse_table_alias()?;
15328        Ok(TableFactor::Pivot {
15329            table: Box::new(table),
15330            aggregate_functions,
15331            value_column,
15332            value_source,
15333            default_on_null,
15334            alias,
15335        })
15336    }
15337
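    /// Parses an `UNPIVOT (...)` clause applied to `table`, assuming the
    /// `UNPIVOT` keyword was already consumed. An optional `INCLUDE NULLS` /
    /// `EXCLUDE NULLS` may precede the parentheses. For example (illustrative):
    ///
    /// ```sql
    /// UNPIVOT(qty FOR prod IN (a, b, c)) AS u
    /// ```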
15338    pub fn parse_unpivot_table_factor(
15339        &mut self,
15340        table: TableFactor,
15341    ) -> Result<TableFactor, ParserError> {
15342        let null_inclusion = if self.parse_keyword(Keyword::INCLUDE) {
15343            self.expect_keyword_is(Keyword::NULLS)?;
15344            Some(NullInclusion::IncludeNulls)
15345        } else if self.parse_keyword(Keyword::EXCLUDE) {
15346            self.expect_keyword_is(Keyword::NULLS)?;
15347            Some(NullInclusion::ExcludeNulls)
15348        } else {
15349            None
15350        };
15351        self.expect_token(&Token::LParen)?;
15352        let value = self.parse_expr()?;
15353        self.expect_keyword_is(Keyword::FOR)?;
15354        let name = self.parse_identifier()?;
15355        self.expect_keyword_is(Keyword::IN)?;
15356        let columns = self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
15357            p.parse_expr_with_alias()
15358        })?;
15359        self.expect_token(&Token::RParen)?;
15360        let alias = self.maybe_parse_table_alias()?;
15361        Ok(TableFactor::Unpivot {
15362            table: Box::new(table),
15363            value,
15364            null_inclusion,
15365            name,
15366            columns,
15367            alias,
15368        })
15369    }
15370
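    /// Parses the constraint following a joined table: `NATURAL` (signalled by
    /// the caller via `natural`), `ON <expr>`, `USING (<columns>)`, or no
    /// constraint at all.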
15371    pub fn parse_join_constraint(&mut self, natural: bool) -> Result<JoinConstraint, ParserError> {
15372        if natural {
15373            Ok(JoinConstraint::Natural)
15374        } else if self.parse_keyword(Keyword::ON) {
15375            let constraint = self.parse_expr()?;
15376            Ok(JoinConstraint::On(constraint))
15377        } else if self.parse_keyword(Keyword::USING) {
15378            let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
15379            Ok(JoinConstraint::Using(columns))
15380        } else {
15381            Ok(JoinConstraint::None)
15382            //self.expected("ON, or USING after JOIN", self.peek_token())
15383        }
15384    }
15385
15386    /// Parse a GRANT statement.
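    ///
    /// For example (illustrative):
    ///
    /// ```sql
    /// GRANT SELECT, INSERT ON orders TO analyst WITH GRANT OPTION
    /// ```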
15387    pub fn parse_grant(&mut self) -> Result<Statement, ParserError> {
15388        let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
15389
15390        self.expect_keyword_is(Keyword::TO)?;
15391        let grantees = self.parse_grantees()?;
15392
15393        let with_grant_option =
15394            self.parse_keywords(&[Keyword::WITH, Keyword::GRANT, Keyword::OPTION]);
15395
15396        let current_grants =
15397            if self.parse_keywords(&[Keyword::COPY, Keyword::CURRENT, Keyword::GRANTS]) {
15398                Some(CurrentGrantsKind::CopyCurrentGrants)
15399            } else if self.parse_keywords(&[Keyword::REVOKE, Keyword::CURRENT, Keyword::GRANTS]) {
15400                Some(CurrentGrantsKind::RevokeCurrentGrants)
15401            } else {
15402                None
15403            };
15404
15405        let as_grantor = if self.parse_keywords(&[Keyword::AS]) {
15406            Some(self.parse_identifier()?)
15407        } else {
15408            None
15409        };
15410
15411        let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
15412            Some(self.parse_identifier()?)
15413        } else {
15414            None
15415        };
15416
15417        Ok(Statement::Grant {
15418            privileges,
15419            objects,
15420            grantees,
15421            with_grant_option,
15422            as_grantor,
15423            granted_by,
15424            current_grants,
15425        })
15426    }
15427
15428    fn parse_grantees(&mut self) -> Result<Vec<Grantee>, ParserError> {
15429        let mut values = vec![];
15430        let mut grantee_type = GranteesType::None;
15431        loop {
15432            let new_grantee_type = if self.parse_keyword(Keyword::ROLE) {
15433                GranteesType::Role
15434            } else if self.parse_keyword(Keyword::USER) {
15435                GranteesType::User
15436            } else if self.parse_keyword(Keyword::SHARE) {
15437                GranteesType::Share
15438            } else if self.parse_keyword(Keyword::GROUP) {
15439                GranteesType::Group
15440            } else if self.parse_keyword(Keyword::PUBLIC) {
15441                GranteesType::Public
15442            } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
15443                GranteesType::DatabaseRole
15444            } else if self.parse_keywords(&[Keyword::APPLICATION, Keyword::ROLE]) {
15445                GranteesType::ApplicationRole
15446            } else if self.parse_keyword(Keyword::APPLICATION) {
15447                GranteesType::Application
15448            } else {
15449                grantee_type.clone() // keep from previous iteration, if not specified
15450            };
15451
15452            if self
15453                .dialect
15454                .get_reserved_grantees_types()
15455                .contains(&new_grantee_type)
15456            {
15457                self.prev_token();
15458            } else {
15459                grantee_type = new_grantee_type;
15460            }
15461
15462            let grantee = if grantee_type == GranteesType::Public {
15463                Grantee {
15464                    grantee_type: grantee_type.clone(),
15465                    name: None,
15466                }
15467            } else {
15468                let mut name = self.parse_grantee_name()?;
15469                if self.consume_token(&Token::Colon) {
15470                    // Redshift supports namespace prefix for external users and groups:
15471                    // <Namespace>:<GroupName> or <Namespace>:<UserName>
15472                    // https://docs.aws.amazon.com/redshift/latest/mgmt/redshift-iam-access-control-native-idp.html
15473                    let ident = self.parse_identifier()?;
15474                    if let GranteeName::ObjectName(namespace) = name {
15475                        name = GranteeName::ObjectName(ObjectName::from(vec![Ident::new(
15476                            format!("{namespace}:{ident}"),
15477                        )]));
15478                    };
15479                }
15480                Grantee {
15481                    grantee_type: grantee_type.clone(),
15482                    name: Some(name),
15483                }
15484            };
15485
15486            values.push(grantee);
15487
15488            if !self.consume_token(&Token::Comma) {
15489                break;
15490            }
15491        }
15492
15493        Ok(values)
15494    }
15495
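    /// Parses the `<privileges> [ON <objects>]` portion shared by `GRANT`,
    /// `DENY` and `REVOKE` statements.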
15496    pub fn parse_grant_deny_revoke_privileges_objects(
15497        &mut self,
15498    ) -> Result<(Privileges, Option<GrantObjects>), ParserError> {
15499        let privileges = if self.parse_keyword(Keyword::ALL) {
15500            Privileges::All {
15501                with_privileges_keyword: self.parse_keyword(Keyword::PRIVILEGES),
15502            }
15503        } else {
15504            let actions = self.parse_actions_list()?;
15505            Privileges::Actions(actions)
15506        };
15507
15508        let objects = if self.parse_keyword(Keyword::ON) {
15509            if self.parse_keywords(&[Keyword::ALL, Keyword::TABLES, Keyword::IN, Keyword::SCHEMA]) {
15510                Some(GrantObjects::AllTablesInSchema {
15511                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15512                })
15513            } else if self.parse_keywords(&[
15514                Keyword::ALL,
15515                Keyword::EXTERNAL,
15516                Keyword::TABLES,
15517                Keyword::IN,
15518                Keyword::SCHEMA,
15519            ]) {
15520                Some(GrantObjects::AllExternalTablesInSchema {
15521                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15522                })
15523            } else if self.parse_keywords(&[
15524                Keyword::ALL,
15525                Keyword::VIEWS,
15526                Keyword::IN,
15527                Keyword::SCHEMA,
15528            ]) {
15529                Some(GrantObjects::AllViewsInSchema {
15530                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15531                })
15532            } else if self.parse_keywords(&[
15533                Keyword::ALL,
15534                Keyword::MATERIALIZED,
15535                Keyword::VIEWS,
15536                Keyword::IN,
15537                Keyword::SCHEMA,
15538            ]) {
15539                Some(GrantObjects::AllMaterializedViewsInSchema {
15540                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15541                })
15542            } else if self.parse_keywords(&[
15543                Keyword::ALL,
15544                Keyword::FUNCTIONS,
15545                Keyword::IN,
15546                Keyword::SCHEMA,
15547            ]) {
15548                Some(GrantObjects::AllFunctionsInSchema {
15549                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15550                })
15551            } else if self.parse_keywords(&[
15552                Keyword::FUTURE,
15553                Keyword::SCHEMAS,
15554                Keyword::IN,
15555                Keyword::DATABASE,
15556            ]) {
15557                Some(GrantObjects::FutureSchemasInDatabase {
15558                    databases: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15559                })
15560            } else if self.parse_keywords(&[
15561                Keyword::FUTURE,
15562                Keyword::TABLES,
15563                Keyword::IN,
15564                Keyword::SCHEMA,
15565            ]) {
15566                Some(GrantObjects::FutureTablesInSchema {
15567                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15568                })
15569            } else if self.parse_keywords(&[
15570                Keyword::FUTURE,
15571                Keyword::EXTERNAL,
15572                Keyword::TABLES,
15573                Keyword::IN,
15574                Keyword::SCHEMA,
15575            ]) {
15576                Some(GrantObjects::FutureExternalTablesInSchema {
15577                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15578                })
15579            } else if self.parse_keywords(&[
15580                Keyword::FUTURE,
15581                Keyword::VIEWS,
15582                Keyword::IN,
15583                Keyword::SCHEMA,
15584            ]) {
15585                Some(GrantObjects::FutureViewsInSchema {
15586                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15587                })
15588            } else if self.parse_keywords(&[
15589                Keyword::FUTURE,
15590                Keyword::MATERIALIZED,
15591                Keyword::VIEWS,
15592                Keyword::IN,
15593                Keyword::SCHEMA,
15594            ]) {
15595                Some(GrantObjects::FutureMaterializedViewsInSchema {
15596                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15597                })
15598            } else if self.parse_keywords(&[
15599                Keyword::ALL,
15600                Keyword::SEQUENCES,
15601                Keyword::IN,
15602                Keyword::SCHEMA,
15603            ]) {
15604                Some(GrantObjects::AllSequencesInSchema {
15605                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15606                })
15607            } else if self.parse_keywords(&[
15608                Keyword::FUTURE,
15609                Keyword::SEQUENCES,
15610                Keyword::IN,
15611                Keyword::SCHEMA,
15612            ]) {
15613                Some(GrantObjects::FutureSequencesInSchema {
15614                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
15615                })
15616            } else if self.parse_keywords(&[Keyword::RESOURCE, Keyword::MONITOR]) {
15617                Some(GrantObjects::ResourceMonitors(
15618                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
15619                ))
15620            } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
15621                Some(GrantObjects::ComputePools(
15622                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
15623                ))
15624            } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
15625                Some(GrantObjects::FailoverGroup(
15626                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
15627                ))
15628            } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
15629                Some(GrantObjects::ReplicationGroup(
15630                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
15631                ))
15632            } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
15633                Some(GrantObjects::ExternalVolumes(
15634                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
15635                ))
15636            } else {
15637                let object_type = self.parse_one_of_keywords(&[
15638                    Keyword::SEQUENCE,
15639                    Keyword::DATABASE,
15640                    Keyword::SCHEMA,
15641                    Keyword::TABLE,
15642                    Keyword::VIEW,
15643                    Keyword::WAREHOUSE,
15644                    Keyword::INTEGRATION,
15648                    Keyword::USER,
15649                    Keyword::CONNECTION,
15650                    Keyword::PROCEDURE,
15651                    Keyword::FUNCTION,
15652                ]);
15653                let objects =
15654                    self.parse_comma_separated(|p| p.parse_object_name_inner(false, true));
15655                match object_type {
15656                    Some(Keyword::DATABASE) => Some(GrantObjects::Databases(objects?)),
15657                    Some(Keyword::SCHEMA) => Some(GrantObjects::Schemas(objects?)),
15658                    Some(Keyword::SEQUENCE) => Some(GrantObjects::Sequences(objects?)),
15659                    Some(Keyword::WAREHOUSE) => Some(GrantObjects::Warehouses(objects?)),
15660                    Some(Keyword::INTEGRATION) => Some(GrantObjects::Integrations(objects?)),
15661                    Some(Keyword::VIEW) => Some(GrantObjects::Views(objects?)),
15662                    Some(Keyword::USER) => Some(GrantObjects::Users(objects?)),
15663                    Some(Keyword::CONNECTION) => Some(GrantObjects::Connections(objects?)),
15664                    kw @ (Some(Keyword::PROCEDURE) | Some(Keyword::FUNCTION)) => {
15665                        if let Some(name) = objects?.first() {
15666                            self.parse_grant_procedure_or_function(name, &kw)?
15667                        } else {
15668                            self.expected("procedure or function name", self.peek_token())?
15669                        }
15670                    }
15671                    Some(Keyword::TABLE) | None => Some(GrantObjects::Tables(objects?)),
15672                    _ => unreachable!(),
15673                }
15674            }
15675        } else {
15676            None
15677        };
15678
15679        Ok((privileges, objects))
15680    }
15681
15682    fn parse_grant_procedure_or_function(
15683        &mut self,
15684        name: &ObjectName,
15685        kw: &Option<Keyword>,
15686    ) -> Result<Option<GrantObjects>, ParserError> {
15687        let arg_types = if self.consume_token(&Token::LParen) {
15688            let list = self.parse_comma_separated0(Self::parse_data_type, Token::RParen)?;
15689            self.expect_token(&Token::RParen)?;
15690            list
15691        } else {
15692            vec![]
15693        };
15694        match kw {
15695            Some(Keyword::PROCEDURE) => Ok(Some(GrantObjects::Procedure {
15696                name: name.clone(),
15697                arg_types,
15698            })),
15699            Some(Keyword::FUNCTION) => Ok(Some(GrantObjects::Function {
15700                name: name.clone(),
15701                arg_types,
15702            })),
15703            _ => self.expected("procedure or function keywords", self.peek_token())?,
15704        }
15705    }
15706
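    /// Parse a single privilege in a GRANT/REVOKE/DENY privilege list.
    ///
    /// Illustrative statements whose privilege lists are handled by the keyword
    /// matching below (object and grantee names are made up; exact support
    /// depends on the active dialect):
    ///
    /// ```sql
    /// GRANT SELECT (id, name), INSERT ON customers TO analyst;
    /// GRANT IMPORTED PRIVILEGES ON DATABASE shared_db TO reader;
    /// ```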
15707    pub fn parse_grant_permission(&mut self) -> Result<Action, ParserError> {
15708        fn parse_columns(parser: &mut Parser) -> Result<Option<Vec<Ident>>, ParserError> {
15709            let columns = parser.parse_parenthesized_column_list(Optional, false)?;
15710            if columns.is_empty() {
15711                Ok(None)
15712            } else {
15713                Ok(Some(columns))
15714            }
15715        }
15716
15717        // Multi-word privileges
15718        if self.parse_keywords(&[Keyword::IMPORTED, Keyword::PRIVILEGES]) {
15719            Ok(Action::ImportedPrivileges)
15720        } else if self.parse_keywords(&[Keyword::ADD, Keyword::SEARCH, Keyword::OPTIMIZATION]) {
15721            Ok(Action::AddSearchOptimization)
15722        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::LISTING]) {
15723            Ok(Action::AttachListing)
15724        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::POLICY]) {
15725            Ok(Action::AttachPolicy)
15726        } else if self.parse_keywords(&[Keyword::BIND, Keyword::SERVICE, Keyword::ENDPOINT]) {
15727            Ok(Action::BindServiceEndpoint)
15728        } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
15729            let role = self.parse_object_name(false)?;
15730            Ok(Action::DatabaseRole { role })
15731        } else if self.parse_keywords(&[Keyword::EVOLVE, Keyword::SCHEMA]) {
15732            Ok(Action::EvolveSchema)
15733        } else if self.parse_keywords(&[Keyword::IMPORT, Keyword::SHARE]) {
15734            Ok(Action::ImportShare)
15735        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::VERSIONS]) {
15736            Ok(Action::ManageVersions)
15737        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::RELEASES]) {
15738            Ok(Action::ManageReleases)
15739        } else if self.parse_keywords(&[Keyword::OVERRIDE, Keyword::SHARE, Keyword::RESTRICTIONS]) {
15740            Ok(Action::OverrideShareRestrictions)
15741        } else if self.parse_keywords(&[
15742            Keyword::PURCHASE,
15743            Keyword::DATA,
15744            Keyword::EXCHANGE,
15745            Keyword::LISTING,
15746        ]) {
15747            Ok(Action::PurchaseDataExchangeListing)
15748        } else if self.parse_keywords(&[Keyword::RESOLVE, Keyword::ALL]) {
15749            Ok(Action::ResolveAll)
15750        } else if self.parse_keywords(&[Keyword::READ, Keyword::SESSION]) {
15751            Ok(Action::ReadSession)
15752
15753        // Single-word privileges
15754        } else if self.parse_keyword(Keyword::APPLY) {
15755            let apply_type = self.parse_action_apply_type()?;
15756            Ok(Action::Apply { apply_type })
15757        } else if self.parse_keyword(Keyword::APPLYBUDGET) {
15758            Ok(Action::ApplyBudget)
15759        } else if self.parse_keyword(Keyword::AUDIT) {
15760            Ok(Action::Audit)
15761        } else if self.parse_keyword(Keyword::CONNECT) {
15762            Ok(Action::Connect)
15763        } else if self.parse_keyword(Keyword::CREATE) {
15764            let obj_type = self.maybe_parse_action_create_object_type();
15765            Ok(Action::Create { obj_type })
15766        } else if self.parse_keyword(Keyword::DELETE) {
15767            Ok(Action::Delete)
15768        } else if self.parse_keyword(Keyword::EXEC) {
15769            let obj_type = self.maybe_parse_action_execute_obj_type();
15770            Ok(Action::Exec { obj_type })
15771        } else if self.parse_keyword(Keyword::EXECUTE) {
15772            let obj_type = self.maybe_parse_action_execute_obj_type();
15773            Ok(Action::Execute { obj_type })
15774        } else if self.parse_keyword(Keyword::FAILOVER) {
15775            Ok(Action::Failover)
15776        } else if self.parse_keyword(Keyword::INSERT) {
15777            Ok(Action::Insert {
15778                columns: parse_columns(self)?,
15779            })
15780        } else if self.parse_keyword(Keyword::MANAGE) {
15781            let manage_type = self.parse_action_manage_type()?;
15782            Ok(Action::Manage { manage_type })
15783        } else if self.parse_keyword(Keyword::MODIFY) {
15784            let modify_type = self.parse_action_modify_type();
15785            Ok(Action::Modify { modify_type })
15786        } else if self.parse_keyword(Keyword::MONITOR) {
15787            let monitor_type = self.parse_action_monitor_type();
15788            Ok(Action::Monitor { monitor_type })
15789        } else if self.parse_keyword(Keyword::OPERATE) {
15790            Ok(Action::Operate)
15791        } else if self.parse_keyword(Keyword::REFERENCES) {
15792            Ok(Action::References {
15793                columns: parse_columns(self)?,
15794            })
15795        } else if self.parse_keyword(Keyword::READ) {
15796            Ok(Action::Read)
15797        } else if self.parse_keyword(Keyword::REPLICATE) {
15798            Ok(Action::Replicate)
15799        } else if self.parse_keyword(Keyword::ROLE) {
15800            let role = self.parse_object_name(false)?;
15801            Ok(Action::Role { role })
15802        } else if self.parse_keyword(Keyword::SELECT) {
15803            Ok(Action::Select {
15804                columns: parse_columns(self)?,
15805            })
15806        } else if self.parse_keyword(Keyword::TEMPORARY) {
15807            Ok(Action::Temporary)
15808        } else if self.parse_keyword(Keyword::TRIGGER) {
15809            Ok(Action::Trigger)
15810        } else if self.parse_keyword(Keyword::TRUNCATE) {
15811            Ok(Action::Truncate)
15812        } else if self.parse_keyword(Keyword::UPDATE) {
15813            Ok(Action::Update {
15814                columns: parse_columns(self)?,
15815            })
15816        } else if self.parse_keyword(Keyword::USAGE) {
15817            Ok(Action::Usage)
15818        } else if self.parse_keyword(Keyword::OWNERSHIP) {
15819            Ok(Action::Ownership)
15820        } else if self.parse_keyword(Keyword::DROP) {
15821            Ok(Action::Drop)
15822        } else if matches!(
15823            &self.peek_token().token,
15824            Token::Word(w) if w.quote_style.is_some()
15825        ) {
15826            let role = self.parse_object_name(false)?;
15827            Ok(Action::Role { role })
15828        } else {
15829            self.expected("a privilege keyword", self.peek_token())?
15830        }
15831    }
15832
15833    fn maybe_parse_action_create_object_type(&mut self) -> Option<ActionCreateObjectType> {
15834        // Multi-word object types
15835        if self.parse_keywords(&[Keyword::APPLICATION, Keyword::PACKAGE]) {
15836            Some(ActionCreateObjectType::ApplicationPackage)
15837        } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
15838            Some(ActionCreateObjectType::ComputePool)
15839        } else if self.parse_keywords(&[Keyword::DATA, Keyword::EXCHANGE, Keyword::LISTING]) {
15840            Some(ActionCreateObjectType::DataExchangeListing)
15841        } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
15842            Some(ActionCreateObjectType::ExternalVolume)
15843        } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
15844            Some(ActionCreateObjectType::FailoverGroup)
15845        } else if self.parse_keywords(&[Keyword::NETWORK, Keyword::POLICY]) {
15846            Some(ActionCreateObjectType::NetworkPolicy)
15847        } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::LISTING]) {
15848            Some(ActionCreateObjectType::OrganiationListing)
15849        } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
15850            Some(ActionCreateObjectType::ReplicationGroup)
15851        }
15852        // Single-word object types
15853        else if self.parse_keyword(Keyword::ACCOUNT) {
15854            Some(ActionCreateObjectType::Account)
15855        } else if self.parse_keyword(Keyword::APPLICATION) {
15856            Some(ActionCreateObjectType::Application)
15857        } else if self.parse_keyword(Keyword::DATABASE) {
15858            Some(ActionCreateObjectType::Database)
15859        } else if self.parse_keyword(Keyword::INTEGRATION) {
15860            Some(ActionCreateObjectType::Integration)
15861        } else if self.parse_keyword(Keyword::ROLE) {
15862            Some(ActionCreateObjectType::Role)
15863        } else if self.parse_keyword(Keyword::SCHEMA) {
15864            Some(ActionCreateObjectType::Schema)
15865        } else if self.parse_keyword(Keyword::SHARE) {
15866            Some(ActionCreateObjectType::Share)
15867        } else if self.parse_keyword(Keyword::USER) {
15868            Some(ActionCreateObjectType::User)
15869        } else if self.parse_keyword(Keyword::WAREHOUSE) {
15870            Some(ActionCreateObjectType::Warehouse)
15871        } else {
15872            None
15873        }
15874    }
15875
15876    fn parse_action_apply_type(&mut self) -> Result<ActionApplyType, ParserError> {
15877        if self.parse_keywords(&[Keyword::AGGREGATION, Keyword::POLICY]) {
15878            Ok(ActionApplyType::AggregationPolicy)
15879        } else if self.parse_keywords(&[Keyword::AUTHENTICATION, Keyword::POLICY]) {
15880            Ok(ActionApplyType::AuthenticationPolicy)
15881        } else if self.parse_keywords(&[Keyword::JOIN, Keyword::POLICY]) {
15882            Ok(ActionApplyType::JoinPolicy)
15883        } else if self.parse_keywords(&[Keyword::MASKING, Keyword::POLICY]) {
15884            Ok(ActionApplyType::MaskingPolicy)
15885        } else if self.parse_keywords(&[Keyword::PACKAGES, Keyword::POLICY]) {
15886            Ok(ActionApplyType::PackagesPolicy)
15887        } else if self.parse_keywords(&[Keyword::PASSWORD, Keyword::POLICY]) {
15888            Ok(ActionApplyType::PasswordPolicy)
15889        } else if self.parse_keywords(&[Keyword::PROJECTION, Keyword::POLICY]) {
15890            Ok(ActionApplyType::ProjectionPolicy)
15891        } else if self.parse_keywords(&[Keyword::ROW, Keyword::ACCESS, Keyword::POLICY]) {
15892            Ok(ActionApplyType::RowAccessPolicy)
15893        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::POLICY]) {
15894            Ok(ActionApplyType::SessionPolicy)
15895        } else if self.parse_keyword(Keyword::TAG) {
15896            Ok(ActionApplyType::Tag)
15897        } else {
15898            self.expected("GRANT APPLY type", self.peek_token())
15899        }
15900    }
15901
15902    fn maybe_parse_action_execute_obj_type(&mut self) -> Option<ActionExecuteObjectType> {
15903        if self.parse_keywords(&[Keyword::DATA, Keyword::METRIC, Keyword::FUNCTION]) {
15904            Some(ActionExecuteObjectType::DataMetricFunction)
15905        } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::ALERT]) {
15906            Some(ActionExecuteObjectType::ManagedAlert)
15907        } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::TASK]) {
15908            Some(ActionExecuteObjectType::ManagedTask)
15909        } else if self.parse_keyword(Keyword::ALERT) {
15910            Some(ActionExecuteObjectType::Alert)
15911        } else if self.parse_keyword(Keyword::TASK) {
15912            Some(ActionExecuteObjectType::Task)
15913        } else {
15914            None
15915        }
15916    }
15917
15918    fn parse_action_manage_type(&mut self) -> Result<ActionManageType, ParserError> {
15919        if self.parse_keywords(&[Keyword::ACCOUNT, Keyword::SUPPORT, Keyword::CASES]) {
15920            Ok(ActionManageType::AccountSupportCases)
15921        } else if self.parse_keywords(&[Keyword::EVENT, Keyword::SHARING]) {
15922            Ok(ActionManageType::EventSharing)
15923        } else if self.parse_keywords(&[Keyword::LISTING, Keyword::AUTO, Keyword::FULFILLMENT]) {
15924            Ok(ActionManageType::ListingAutoFulfillment)
15925        } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::SUPPORT, Keyword::CASES]) {
15926            Ok(ActionManageType::OrganizationSupportCases)
15927        } else if self.parse_keywords(&[Keyword::USER, Keyword::SUPPORT, Keyword::CASES]) {
15928            Ok(ActionManageType::UserSupportCases)
15929        } else if self.parse_keyword(Keyword::GRANTS) {
15930            Ok(ActionManageType::Grants)
15931        } else if self.parse_keyword(Keyword::WAREHOUSES) {
15932            Ok(ActionManageType::Warehouses)
15933        } else {
15934            self.expected("GRANT MANAGE type", self.peek_token())
15935        }
15936    }
15937
15938    fn parse_action_modify_type(&mut self) -> Option<ActionModifyType> {
15939        if self.parse_keywords(&[Keyword::LOG, Keyword::LEVEL]) {
15940            Some(ActionModifyType::LogLevel)
15941        } else if self.parse_keywords(&[Keyword::TRACE, Keyword::LEVEL]) {
15942            Some(ActionModifyType::TraceLevel)
15943        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::LOG, Keyword::LEVEL]) {
15944            Some(ActionModifyType::SessionLogLevel)
15945        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::TRACE, Keyword::LEVEL]) {
15946            Some(ActionModifyType::SessionTraceLevel)
15947        } else {
15948            None
15949        }
15950    }
15951
15952    fn parse_action_monitor_type(&mut self) -> Option<ActionMonitorType> {
15953        if self.parse_keyword(Keyword::EXECUTION) {
15954            Some(ActionMonitorType::Execution)
15955        } else if self.parse_keyword(Keyword::SECURITY) {
15956            Some(ActionMonitorType::Security)
15957        } else if self.parse_keyword(Keyword::USAGE) {
15958            Some(ActionMonitorType::Usage)
15959        } else {
15960            None
15961        }
15962    }
15963
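    /// Parse the name of a grantee in a GRANT/REVOKE statement.
    ///
    /// For dialects that support user-host grantees (MySQL-style), a name of the
    /// form `user@host` is split into its two parts; otherwise the whole name is
    /// kept as an object name. Illustrative:
    ///
    /// ```sql
    /// GRANT SELECT ON db.t TO app_user@localhost;
    /// ```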
15964    pub fn parse_grantee_name(&mut self) -> Result<GranteeName, ParserError> {
15965        let mut name = self.parse_object_name(false)?;
15966        if self.dialect.supports_user_host_grantee()
15967            && name.0.len() == 1
15968            && name.0[0].as_ident().is_some()
15969            && self.consume_token(&Token::AtSign)
15970        {
15971            let user = name.0.pop().unwrap().as_ident().unwrap().clone();
15972            let host = self.parse_identifier()?;
15973            Ok(GranteeName::UserHost { user, host })
15974        } else {
15975            Ok(GranteeName::ObjectName(name))
15976        }
15977    }
15978
15979    /// Parse [`Statement::Deny`]
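    ///
    /// Roughly the shape of statement this handles (illustrative names):
    ///
    /// ```sql
    /// DENY INSERT, UPDATE ON customers TO reporting_user;
    /// ```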
15980    pub fn parse_deny(&mut self) -> Result<Statement, ParserError> {
15981        self.expect_keyword(Keyword::DENY)?;
15982
15983        let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
15984        let objects = match objects {
15985            Some(o) => o,
15986            None => {
15987                return parser_err!(
15988                    "DENY statements must specify an object",
15989                    self.peek_token().span.start
15990                )
15991            }
15992        };
15993
15994        self.expect_keyword_is(Keyword::TO)?;
15995        let grantees = self.parse_grantees()?;
15996        let cascade = self.parse_cascade_option();
15997        let granted_by = if self.parse_keywords(&[Keyword::AS]) {
15998            Some(self.parse_identifier()?)
15999        } else {
16000            None
16001        };
16002
16003        Ok(Statement::Deny(DenyStatement {
16004            privileges,
16005            objects,
16006            grantees,
16007            cascade,
16008            granted_by,
16009        }))
16010    }
16011
16012    /// Parse a REVOKE statement
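    ///
    /// Illustrative input (the GRANTED BY and CASCADE parts are optional):
    ///
    /// ```sql
    /// REVOKE SELECT, INSERT ON customers FROM analyst CASCADE;
    /// ```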
16013    pub fn parse_revoke(&mut self) -> Result<Statement, ParserError> {
16014        let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
16015
16016        self.expect_keyword_is(Keyword::FROM)?;
16017        let grantees = self.parse_grantees()?;
16018
16019        let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
16020            Some(self.parse_identifier()?)
16021        } else {
16022            None
16023        };
16024
16025        let cascade = self.parse_cascade_option();
16026
16027        Ok(Statement::Revoke {
16028            privileges,
16029            objects,
16030            grantees,
16031            granted_by,
16032            cascade,
16033        })
16034    }
16035
16036    /// Parse a REPLACE statement
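    ///
    /// MySQL-style REPLACE is parsed as an INSERT with `replace_into` set, e.g.
    /// (illustrative):
    ///
    /// ```sql
    /// REPLACE INTO t (a, b) VALUES (1, 2);
    /// ```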
16037    pub fn parse_replace(&mut self) -> Result<Statement, ParserError> {
16038        if !dialect_of!(self is MySqlDialect | GenericDialect) {
16039            return parser_err!(
16040                "Unsupported statement REPLACE",
16041                self.peek_token().span.start
16042            );
16043        }
16044
16045        let mut insert = self.parse_insert()?;
16046        if let Statement::Insert(Insert { replace_into, .. }) = &mut insert {
16047            *replace_into = true;
16048        }
16049
16050        Ok(insert)
16051    }
16052
16053    /// Parse an INSERT statement, returning a `Box`ed SetExpr
16054    ///
16055    /// This is used to reduce the size of the stack frames in debug builds
16056    fn parse_insert_setexpr_boxed(&mut self) -> Result<Box<SetExpr>, ParserError> {
16057        Ok(Box::new(SetExpr::Insert(self.parse_insert()?)))
16058    }
16059
16060    /// Parse an INSERT statement
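    ///
    /// Illustrative inputs covered by the branches below (the MySQL, Hive and
    /// ClickHouse specific pieces depend on the active dialect):
    ///
    /// ```sql
    /// INSERT INTO t (a, b) VALUES (1, 2);
    /// INSERT INTO t DEFAULT VALUES;
    /// INSERT INTO t (a) VALUES (1) ON CONFLICT (a) DO NOTHING;
    /// ```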
16061    pub fn parse_insert(&mut self) -> Result<Statement, ParserError> {
16062        let or = self.parse_conflict_clause();
16063        let priority = if !dialect_of!(self is MySqlDialect | GenericDialect) {
16064            None
16065        } else if self.parse_keyword(Keyword::LOW_PRIORITY) {
16066            Some(MysqlInsertPriority::LowPriority)
16067        } else if self.parse_keyword(Keyword::DELAYED) {
16068            Some(MysqlInsertPriority::Delayed)
16069        } else if self.parse_keyword(Keyword::HIGH_PRIORITY) {
16070            Some(MysqlInsertPriority::HighPriority)
16071        } else {
16072            None
16073        };
16074
16075        let ignore = dialect_of!(self is MySqlDialect | GenericDialect)
16076            && self.parse_keyword(Keyword::IGNORE);
16077
16078        let replace_into = false;
16079
16080        let overwrite = self.parse_keyword(Keyword::OVERWRITE);
16081        let into = self.parse_keyword(Keyword::INTO);
16082
16083        let local = self.parse_keyword(Keyword::LOCAL);
16084
16085        if self.parse_keyword(Keyword::DIRECTORY) {
16086            let path = self.parse_literal_string()?;
16087            let file_format = if self.parse_keywords(&[Keyword::STORED, Keyword::AS]) {
16088                Some(self.parse_file_format()?)
16089            } else {
16090                None
16091            };
16092            let source = self.parse_query()?;
16093            Ok(Statement::Directory {
16094                local,
16095                path,
16096                overwrite,
16097                file_format,
16098                source,
16099            })
16100        } else {
16101            // Hive lets you put table here regardless
16102            let table = self.parse_keyword(Keyword::TABLE);
16103            let table_object = self.parse_table_object()?;
16104
16105            let table_alias =
16106                if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::AS) {
16107                    Some(self.parse_identifier()?)
16108                } else {
16109                    None
16110                };
16111
16112            let is_mysql = dialect_of!(self is MySqlDialect);
16113
16114            let (columns, partitioned, after_columns, source, assignments) = if self
16115                .parse_keywords(&[Keyword::DEFAULT, Keyword::VALUES])
16116            {
16117                (vec![], None, vec![], None, vec![])
16118            } else {
16119                let (columns, partitioned, after_columns) = if !self.peek_subquery_start() {
16120                    let columns = self.parse_parenthesized_column_list(Optional, is_mysql)?;
16121
16122                    let partitioned = self.parse_insert_partition()?;
16123                    // Hive allows you to specify columns after partitions as well if you want.
16124                    let after_columns = if dialect_of!(self is HiveDialect) {
16125                        self.parse_parenthesized_column_list(Optional, false)?
16126                    } else {
16127                        vec![]
16128                    };
16129                    (columns, partitioned, after_columns)
16130                } else {
16131                    Default::default()
16132                };
16133
16134                let (source, assignments) = if self.peek_keyword(Keyword::FORMAT)
16135                    || self.peek_keyword(Keyword::SETTINGS)
16136                {
16137                    (None, vec![])
16138                } else if self.dialect.supports_insert_set() && self.parse_keyword(Keyword::SET) {
16139                    (None, self.parse_comma_separated(Parser::parse_assignment)?)
16140                } else {
16141                    (Some(self.parse_query()?), vec![])
16142                };
16143
16144                (columns, partitioned, after_columns, source, assignments)
16145            };
16146
16147            let (format_clause, settings) = if self.dialect.supports_insert_format() {
16148                // Settings always comes before `FORMAT` for ClickHouse:
16149                // <https://clickhouse.com/docs/en/sql-reference/statements/insert-into>
16150                let settings = self.parse_settings()?;
16151
16152                let format = if self.parse_keyword(Keyword::FORMAT) {
16153                    Some(self.parse_input_format_clause()?)
16154                } else {
16155                    None
16156                };
16157
16158                (format, settings)
16159            } else {
16160                Default::default()
16161            };
16162
16163            let insert_alias = if dialect_of!(self is MySqlDialect | GenericDialect)
16164                && self.parse_keyword(Keyword::AS)
16165            {
16166                let row_alias = self.parse_object_name(false)?;
16167                let col_aliases = Some(self.parse_parenthesized_column_list(Optional, false)?);
16168                Some(InsertAliases {
16169                    row_alias,
16170                    col_aliases,
16171                })
16172            } else {
16173                None
16174            };
16175
16176            let on = if self.parse_keyword(Keyword::ON) {
16177                if self.parse_keyword(Keyword::CONFLICT) {
16178                    let conflict_target =
16179                        if self.parse_keywords(&[Keyword::ON, Keyword::CONSTRAINT]) {
16180                            Some(ConflictTarget::OnConstraint(self.parse_object_name(false)?))
16181                        } else if self.peek_token() == Token::LParen {
16182                            Some(ConflictTarget::Columns(
16183                                self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
16184                            ))
16185                        } else {
16186                            None
16187                        };
16188
16189                    self.expect_keyword_is(Keyword::DO)?;
16190                    let action = if self.parse_keyword(Keyword::NOTHING) {
16191                        OnConflictAction::DoNothing
16192                    } else {
16193                        self.expect_keyword_is(Keyword::UPDATE)?;
16194                        self.expect_keyword_is(Keyword::SET)?;
16195                        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
16196                        let selection = if self.parse_keyword(Keyword::WHERE) {
16197                            Some(self.parse_expr()?)
16198                        } else {
16199                            None
16200                        };
16201                        OnConflictAction::DoUpdate(DoUpdate {
16202                            assignments,
16203                            selection,
16204                        })
16205                    };
16206
16207                    Some(OnInsert::OnConflict(OnConflict {
16208                        conflict_target,
16209                        action,
16210                    }))
16211                } else {
16212                    self.expect_keyword_is(Keyword::DUPLICATE)?;
16213                    self.expect_keyword_is(Keyword::KEY)?;
16214                    self.expect_keyword_is(Keyword::UPDATE)?;
16215                    let l = self.parse_comma_separated(Parser::parse_assignment)?;
16216
16217                    Some(OnInsert::DuplicateKeyUpdate(l))
16218                }
16219            } else {
16220                None
16221            };
16222
16223            let returning = if self.parse_keyword(Keyword::RETURNING) {
16224                Some(self.parse_comma_separated(Parser::parse_select_item)?)
16225            } else {
16226                None
16227            };
16228
16229            Ok(Statement::Insert(Insert {
16230                or,
16231                table: table_object,
16232                table_alias,
16233                ignore,
16234                into,
16235                overwrite,
16236                partitioned,
16237                columns,
16238                after_columns,
16239                source,
16240                assignments,
16241                has_table_keyword: table,
16242                on,
16243                returning,
16244                replace_into,
16245                priority,
16246                insert_alias,
16247                settings,
16248                format_clause,
16249            }))
16250        }
16251    }
16252
16253    // Parses the input format clause used by [ClickHouse].
16254    //
16255    // <https://clickhouse.com/docs/en/interfaces/formats>
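    //
    // Illustrative ClickHouse usage, where `CSV` is the format identifier parsed
    // here:
    //
    //     INSERT INTO t FORMAT CSV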
16256    pub fn parse_input_format_clause(&mut self) -> Result<InputFormatClause, ParserError> {
16257        let ident = self.parse_identifier()?;
16258        let values = self
16259            .maybe_parse(|p| p.parse_comma_separated(|p| p.parse_expr()))?
16260            .unwrap_or_default();
16261
16262        Ok(InputFormatClause { ident, values })
16263    }
16264
16265    /// Returns true if the immediate tokens look like the
16266    /// beginning of a subquery. `(SELECT ...`
16267    fn peek_subquery_start(&mut self) -> bool {
16268        let [maybe_lparen, maybe_select] = self.peek_tokens();
16269        Token::LParen == maybe_lparen
16270            && matches!(maybe_select, Token::Word(w) if w.keyword == Keyword::SELECT)
16271    }
16272
16273    fn parse_conflict_clause(&mut self) -> Option<SqliteOnConflict> {
16274        if self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]) {
16275            Some(SqliteOnConflict::Replace)
16276        } else if self.parse_keywords(&[Keyword::OR, Keyword::ROLLBACK]) {
16277            Some(SqliteOnConflict::Rollback)
16278        } else if self.parse_keywords(&[Keyword::OR, Keyword::ABORT]) {
16279            Some(SqliteOnConflict::Abort)
16280        } else if self.parse_keywords(&[Keyword::OR, Keyword::FAIL]) {
16281            Some(SqliteOnConflict::Fail)
16282        } else if self.parse_keywords(&[Keyword::OR, Keyword::IGNORE]) {
16283            Some(SqliteOnConflict::Ignore)
16284        } else if self.parse_keyword(Keyword::REPLACE) {
16285            Some(SqliteOnConflict::Replace)
16286        } else {
16287            None
16288        }
16289    }
16290
16291    pub fn parse_insert_partition(&mut self) -> Result<Option<Vec<Expr>>, ParserError> {
16292        if self.parse_keyword(Keyword::PARTITION) {
16293            self.expect_token(&Token::LParen)?;
16294            let partition_cols = Some(self.parse_comma_separated(Parser::parse_expr)?);
16295            self.expect_token(&Token::RParen)?;
16296            Ok(partition_cols)
16297        } else {
16298            Ok(None)
16299        }
16300    }
16301
16302    pub fn parse_load_data_table_format(
16303        &mut self,
16304    ) -> Result<Option<HiveLoadDataFormat>, ParserError> {
16305        if self.parse_keyword(Keyword::INPUTFORMAT) {
16306            let input_format = self.parse_expr()?;
16307            self.expect_keyword_is(Keyword::SERDE)?;
16308            let serde = self.parse_expr()?;
16309            Ok(Some(HiveLoadDataFormat {
16310                input_format,
16311                serde,
16312            }))
16313        } else {
16314            Ok(None)
16315        }
16316    }
16317
16318    /// Parse an UPDATE statement, returning a `Box`ed SetExpr
16319    ///
16320    /// This is used to reduce the size of the stack frames in debug builds
16321    fn parse_update_setexpr_boxed(&mut self) -> Result<Box<SetExpr>, ParserError> {
16322        Ok(Box::new(SetExpr::Update(self.parse_update()?)))
16323    }
16324
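    /// Parse an UPDATE statement.
    ///
    /// Illustrative input (the FROM, WHERE, RETURNING and LIMIT parts are
    /// optional and dialect dependent):
    ///
    /// ```sql
    /// UPDATE t SET a = 1, b = b + 1 FROM u WHERE t.id = u.id RETURNING t.id;
    /// ```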
16325    pub fn parse_update(&mut self) -> Result<Statement, ParserError> {
16326        let or = self.parse_conflict_clause();
16327        let table = self.parse_table_and_joins()?;
16328        let from_before_set = if self.parse_keyword(Keyword::FROM) {
16329            Some(UpdateTableFromKind::BeforeSet(
16330                self.parse_table_with_joins()?,
16331            ))
16332        } else {
16333            None
16334        };
16335        self.expect_keyword(Keyword::SET)?;
16336        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
16337        let from = if from_before_set.is_none() && self.parse_keyword(Keyword::FROM) {
16338            Some(UpdateTableFromKind::AfterSet(
16339                self.parse_table_with_joins()?,
16340            ))
16341        } else {
16342            from_before_set
16343        };
16344        let selection = if self.parse_keyword(Keyword::WHERE) {
16345            Some(self.parse_expr()?)
16346        } else {
16347            None
16348        };
16349        let returning = if self.parse_keyword(Keyword::RETURNING) {
16350            Some(self.parse_comma_separated(Parser::parse_select_item)?)
16351        } else {
16352            None
16353        };
16354        let limit = if self.parse_keyword(Keyword::LIMIT) {
16355            Some(self.parse_expr()?)
16356        } else {
16357            None
16358        };
16359        Ok(Statement::Update {
16360            table,
16361            assignments,
16362            from,
16363            selection,
16364            returning,
16365            or,
16366            limit,
16367        })
16368    }
16369
16370    /// Parse a `var = expr` assignment, used in an UPDATE statement
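    ///
    /// For example, each element of the SET list below is one assignment
    /// (illustrative):
    ///
    /// ```sql
    /// UPDATE t SET a = 1, b = b + 1;
    /// ```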
16371    pub fn parse_assignment(&mut self) -> Result<Assignment, ParserError> {
16372        let target = self.parse_assignment_target()?;
16373        self.expect_token(&Token::Eq)?;
16374        let value = self.parse_expr()?;
16375        Ok(Assignment { target, value })
16376    }
16377
16378    /// Parse the left-hand side of an assignment, used in an UPDATE statement
16379    pub fn parse_assignment_target(&mut self) -> Result<AssignmentTarget, ParserError> {
16380        if self.consume_token(&Token::LParen) {
16381            let columns = self.parse_comma_separated(|p| p.parse_object_name(false))?;
16382            self.expect_token(&Token::RParen)?;
16383            Ok(AssignmentTarget::Tuple(columns))
16384        } else {
16385            let column = self.parse_object_name(false)?;
16386            Ok(AssignmentTarget::ColumnName(column))
16387        }
16388    }
16389
16390    pub fn parse_function_args(&mut self) -> Result<FunctionArg, ParserError> {
16391        if self.parse_keyword(Keyword::TABLE) {
16392            let table_name = self.parse_object_name(false)?;
16393            return Ok(FunctionArg::Unnamed(FunctionArgExpr::TableRef(table_name)));
16394        }
16395        let arg = if self.dialect.supports_named_fn_args_with_expr_name() {
16396            self.maybe_parse(|p| {
16397                let name = p.parse_expr()?;
16398                let operator = p.parse_function_named_arg_operator()?;
16399                let arg = p.parse_wildcard_expr()?.into();
16400                Ok(FunctionArg::ExprNamed {
16401                    name,
16402                    arg,
16403                    operator,
16404                })
16405            })?
16406        } else {
16407            self.maybe_parse(|p| {
16408                let name = p.parse_identifier()?;
16409                let operator = p.parse_function_named_arg_operator()?;
16410                let arg = p.parse_wildcard_expr()?.into();
16411                Ok(FunctionArg::Named {
16412                    name,
16413                    arg,
16414                    operator,
16415                })
16416            })?
16417        };
16418        if let Some(arg) = arg {
16419            return Ok(arg);
16420        }
16421        Ok(FunctionArg::Unnamed(self.parse_wildcard_expr()?.into()))
16422    }
16423
16424    fn parse_function_named_arg_operator(&mut self) -> Result<FunctionArgOperator, ParserError> {
16425        if self.parse_keyword(Keyword::VALUE) {
16426            return Ok(FunctionArgOperator::Value);
16427        }
16428        let tok = self.next_token();
16429        match tok.token {
16430            Token::RArrow if self.dialect.supports_named_fn_args_with_rarrow_operator() => {
16431                Ok(FunctionArgOperator::RightArrow)
16432            }
16433            Token::Eq if self.dialect.supports_named_fn_args_with_eq_operator() => {
16434                Ok(FunctionArgOperator::Equals)
16435            }
16436            Token::Assignment
16437                if self
16438                    .dialect
16439                    .supports_named_fn_args_with_assignment_operator() =>
16440            {
16441                Ok(FunctionArgOperator::Assignment)
16442            }
16443            Token::Colon if self.dialect.supports_named_fn_args_with_colon_operator() => {
16444                Ok(FunctionArgOperator::Colon)
16445            }
16446            _ => {
16447                self.prev_token();
16448                self.expected("argument operator", tok)
16449            }
16450        }
16451    }
16452
16453    pub fn parse_optional_args(&mut self) -> Result<Vec<FunctionArg>, ParserError> {
16454        if self.consume_token(&Token::RParen) {
16455            Ok(vec![])
16456        } else {
16457            let args = self.parse_comma_separated(Parser::parse_function_args)?;
16458            self.expect_token(&Token::RParen)?;
16459            Ok(args)
16460        }
16461    }
16462
16463    fn parse_table_function_args(&mut self) -> Result<TableFunctionArgs, ParserError> {
16464        if self.consume_token(&Token::RParen) {
16465            return Ok(TableFunctionArgs {
16466                args: vec![],
16467                settings: None,
16468            });
16469        }
16470        let mut args = vec![];
16471        let settings = loop {
16472            if let Some(settings) = self.parse_settings()? {
16473                break Some(settings);
16474            }
16475            args.push(self.parse_function_args()?);
16476            if self.is_parse_comma_separated_end() {
16477                break None;
16478            }
16479        };
16480        self.expect_token(&Token::RParen)?;
16481        Ok(TableFunctionArgs { args, settings })
16482    }
16483
16484    /// Parses a potentially empty list of arguments to a function
16485    /// (including the closing parenthesis).
16486    ///
16487    /// Examples:
16488    /// ```sql
16489    /// FIRST_VALUE(x ORDER BY 1,2,3);
16490    /// FIRST_VALUE(x IGNORE NULLS);
16491    /// ```
16492    fn parse_function_argument_list(&mut self) -> Result<FunctionArgumentList, ParserError> {
16493        let mut clauses = vec![];
16494
16495        // Handle clauses that may exist with an empty argument list
16496
16497        if let Some(null_clause) = self.parse_json_null_clause() {
16498            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
16499        }
16500
16501        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
16502            clauses.push(FunctionArgumentClause::JsonReturningClause(
16503                json_returning_clause,
16504            ));
16505        }
16506
16507        if self.consume_token(&Token::RParen) {
16508            return Ok(FunctionArgumentList {
16509                duplicate_treatment: None,
16510                args: vec![],
16511                clauses,
16512            });
16513        }
16514
16515        let duplicate_treatment = self.parse_duplicate_treatment()?;
16516        let args = self.parse_comma_separated(Parser::parse_function_args)?;
16517
16518        if self.dialect.supports_window_function_null_treatment_arg() {
16519            if let Some(null_treatment) = self.parse_null_treatment()? {
16520                clauses.push(FunctionArgumentClause::IgnoreOrRespectNulls(null_treatment));
16521            }
16522        }
16523
16524        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
16525            clauses.push(FunctionArgumentClause::OrderBy(
16526                self.parse_comma_separated(Parser::parse_order_by_expr)?,
16527            ));
16528        }
16529
16530        if self.parse_keyword(Keyword::LIMIT) {
16531            clauses.push(FunctionArgumentClause::Limit(self.parse_expr()?));
16532        }
16533
16534        if dialect_of!(self is GenericDialect | BigQueryDialect)
16535            && self.parse_keyword(Keyword::HAVING)
16536        {
16537            let kind = match self.expect_one_of_keywords(&[Keyword::MIN, Keyword::MAX])? {
16538                Keyword::MIN => HavingBoundKind::Min,
16539                Keyword::MAX => HavingBoundKind::Max,
16540                _ => unreachable!(),
16541            };
16542            clauses.push(FunctionArgumentClause::Having(HavingBound(
16543                kind,
16544                self.parse_expr()?,
16545            )))
16546        }
16547
16548        if dialect_of!(self is GenericDialect | MySqlDialect)
16549            && self.parse_keyword(Keyword::SEPARATOR)
16550        {
16551            clauses.push(FunctionArgumentClause::Separator(self.parse_value()?.value));
16552        }
16553
16554        if let Some(on_overflow) = self.parse_listagg_on_overflow()? {
16555            clauses.push(FunctionArgumentClause::OnOverflow(on_overflow));
16556        }
16557
16558        if let Some(null_clause) = self.parse_json_null_clause() {
16559            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
16560        }
16561
16562        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
16563            clauses.push(FunctionArgumentClause::JsonReturningClause(
16564                json_returning_clause,
16565            ));
16566        }
16567
16568        self.expect_token(&Token::RParen)?;
16569        Ok(FunctionArgumentList {
16570            duplicate_treatment,
16571            args,
16572            clauses,
16573        })
16574    }
16575
16576    fn parse_json_null_clause(&mut self) -> Option<JsonNullClause> {
16577        if self.parse_keywords(&[Keyword::ABSENT, Keyword::ON, Keyword::NULL]) {
16578            Some(JsonNullClause::AbsentOnNull)
16579        } else if self.parse_keywords(&[Keyword::NULL, Keyword::ON, Keyword::NULL]) {
16580            Some(JsonNullClause::NullOnNull)
16581        } else {
16582            None
16583        }
16584    }
16585
16586    fn maybe_parse_json_returning_clause(
16587        &mut self,
16588    ) -> Result<Option<JsonReturningClause>, ParserError> {
16589        if self.parse_keyword(Keyword::RETURNING) {
16590            let data_type = self.parse_data_type()?;
16591            Ok(Some(JsonReturningClause { data_type }))
16592        } else {
16593            Ok(None)
16594        }
16595    }
16596
16597    fn parse_duplicate_treatment(&mut self) -> Result<Option<DuplicateTreatment>, ParserError> {
16598        let loc = self.peek_token().span.start;
16599        match (
16600            self.parse_keyword(Keyword::ALL),
16601            self.parse_keyword(Keyword::DISTINCT),
16602        ) {
16603            (true, false) => Ok(Some(DuplicateTreatment::All)),
16604            (false, true) => Ok(Some(DuplicateTreatment::Distinct)),
16605            (false, false) => Ok(None),
16606            (true, true) => parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc),
16607        }
16608    }
16609
16610    /// Parse one item of the comma-delimited list of projections after SELECT
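    ///
    /// Illustrative projection items handled here: wildcards, qualified
    /// wildcards, and plain or aliased expressions:
    ///
    /// ```sql
    /// SELECT *, t.*, a + b AS total, c FROM t;
    /// ```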
16611    pub fn parse_select_item(&mut self) -> Result<SelectItem, ParserError> {
16612        let prefix = self
16613            .parse_one_of_keywords(
16614                self.dialect
16615                    .get_reserved_keywords_for_select_item_operator(),
16616            )
16617            .map(|keyword| Ident::new(format!("{keyword:?}")));
16618
16619        match self.parse_wildcard_expr()? {
16620            Expr::QualifiedWildcard(prefix, token) => Ok(SelectItem::QualifiedWildcard(
16621                SelectItemQualifiedWildcardKind::ObjectName(prefix),
16622                self.parse_wildcard_additional_options(token.0)?,
16623            )),
16624            Expr::Wildcard(token) => Ok(SelectItem::Wildcard(
16625                self.parse_wildcard_additional_options(token.0)?,
16626            )),
16627            Expr::Identifier(v) if v.value.to_lowercase() == "from" && v.quote_style.is_none() => {
16628                parser_err!(
16629                    format!("Expected an expression, found: {}", v),
16630                    self.peek_token().span.start
16631                )
16632            }
16633            Expr::BinaryOp {
16634                left,
16635                op: BinaryOperator::Eq,
16636                right,
16637            } if self.dialect.supports_eq_alias_assignment()
16638                && matches!(left.as_ref(), Expr::Identifier(_)) =>
16639            {
16640                let Expr::Identifier(alias) = *left else {
16641                    return parser_err!(
16642                        "BUG: expected identifier expression as alias",
16643                        self.peek_token().span.start
16644                    );
16645                };
16646                Ok(SelectItem::ExprWithAlias {
16647                    expr: *right,
16648                    alias,
16649                })
16650            }
16651            expr if self.dialect.supports_select_expr_star()
16652                && self.consume_tokens(&[Token::Period, Token::Mul]) =>
16653            {
16654                let wildcard_token = self.get_previous_token().clone();
16655                Ok(SelectItem::QualifiedWildcard(
16656                    SelectItemQualifiedWildcardKind::Expr(expr),
16657                    self.parse_wildcard_additional_options(wildcard_token)?,
16658                ))
16659            }
16660            expr => self
16661                .maybe_parse_select_item_alias()
16662                .map(|alias| match alias {
16663                    Some(alias) => SelectItem::ExprWithAlias {
16664                        expr: maybe_prefixed_expr(expr, prefix),
16665                        alias,
16666                    },
16667                    None => SelectItem::UnnamedExpr(maybe_prefixed_expr(expr, prefix)),
16668                }),
16669        }
16670    }
16671
16672    /// Parse the [`WildcardAdditionalOptions`] for wildcard select items.
16673    ///
16674    /// Clauses that cannot be parsed are left as `None` in the returned options.
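    ///
    /// Dialect dependent examples of the clauses handled here (illustrative):
    ///
    /// ```sql
    /// SELECT * EXCLUDE (a, b) FROM t;
    /// SELECT * EXCEPT (a) REPLACE (c + 1 AS c) FROM t;
    /// ```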
16675    pub fn parse_wildcard_additional_options(
16676        &mut self,
16677        wildcard_token: TokenWithSpan,
16678    ) -> Result<WildcardAdditionalOptions, ParserError> {
16679        let opt_ilike = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
16680            self.parse_optional_select_item_ilike()?
16681        } else {
16682            None
16683        };
16684        let opt_exclude = if opt_ilike.is_none() && self.dialect.supports_select_wildcard_exclude()
16685        {
16686            self.parse_optional_select_item_exclude()?
16687        } else {
16688            None
16689        };
16690        let opt_except = if self.dialect.supports_select_wildcard_except() {
16691            self.parse_optional_select_item_except()?
16692        } else {
16693            None
16694        };
16695        let opt_replace = if dialect_of!(self is GenericDialect | BigQueryDialect | ClickHouseDialect | DuckDbDialect | SnowflakeDialect)
16696        {
16697            self.parse_optional_select_item_replace()?
16698        } else {
16699            None
16700        };
16701        let opt_rename = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
16702            self.parse_optional_select_item_rename()?
16703        } else {
16704            None
16705        };
16706
16707        Ok(WildcardAdditionalOptions {
16708            wildcard_token: wildcard_token.into(),
16709            opt_ilike,
16710            opt_exclude,
16711            opt_except,
16712            opt_rename,
16713            opt_replace,
16714        })
16715    }
16716
16717    /// Parse the [`Ilike`](IlikeSelectItem) clause for wildcard select items.
16718    ///
16719    /// Returns `None` if the clause is not present.
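    ///
    /// Snowflake-style example (illustrative):
    ///
    /// ```sql
    /// SELECT * ILIKE '%id%' FROM t;
    /// ```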
16720    pub fn parse_optional_select_item_ilike(
16721        &mut self,
16722    ) -> Result<Option<IlikeSelectItem>, ParserError> {
16723        let opt_ilike = if self.parse_keyword(Keyword::ILIKE) {
16724            let next_token = self.next_token();
16725            let pattern = match next_token.token {
16726                Token::SingleQuotedString(s) => s,
16727                _ => return self.expected("ilike pattern", next_token),
16728            };
16729            Some(IlikeSelectItem { pattern })
16730        } else {
16731            None
16732        };
16733        Ok(opt_ilike)
16734    }
16735
16736    /// Parse the [`Exclude`](ExcludeSelectItem) clause for wildcard select items.
16737    ///
16738    /// Returns `None` if the clause is not present.
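    ///
    /// Both accepted forms, for illustration:
    ///
    /// ```sql
    /// SELECT * EXCLUDE department_id FROM employees;
    /// SELECT * EXCLUDE (department_id, manager_id) FROM employees;
    /// ```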
16739    pub fn parse_optional_select_item_exclude(
16740        &mut self,
16741    ) -> Result<Option<ExcludeSelectItem>, ParserError> {
16742        let opt_exclude = if self.parse_keyword(Keyword::EXCLUDE) {
16743            if self.consume_token(&Token::LParen) {
16744                let columns = self.parse_comma_separated(|parser| parser.parse_identifier())?;
16745                self.expect_token(&Token::RParen)?;
16746                Some(ExcludeSelectItem::Multiple(columns))
16747            } else {
16748                let column = self.parse_identifier()?;
16749                Some(ExcludeSelectItem::Single(column))
16750            }
16751        } else {
16752            None
16753        };
16754
16755        Ok(opt_exclude)
16756    }
16757
16758    /// Parse the [`Except`](ExceptSelectItem) clause for wildcard select items.
16759    ///
16760    /// Returns `None` if the clause is not present.
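    ///
    /// Examples (the bare column form is ClickHouse-style; illustrative):
    ///
    /// ```sql
    /// SELECT * EXCEPT (department_id, manager_id) FROM employees;
    /// SELECT * EXCEPT department_id FROM employees;
    /// ```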
16761    pub fn parse_optional_select_item_except(
16762        &mut self,
16763    ) -> Result<Option<ExceptSelectItem>, ParserError> {
16764        let opt_except = if self.parse_keyword(Keyword::EXCEPT) {
16765            if self.peek_token().token == Token::LParen {
16766                let idents = self.parse_parenthesized_column_list(Mandatory, false)?;
16767                match &idents[..] {
16768                    [] => {
16769                        return self.expected(
16770                            "at least one column should be parsed by the except clause",
16771                            self.peek_token(),
16772                        )?;
16773                    }
16774                    [first, idents @ ..] => Some(ExceptSelectItem {
16775                        first_element: first.clone(),
16776                        additional_elements: idents.to_vec(),
16777                    }),
16778                }
16779            } else {
16780                // Clickhouse allows EXCEPT column_name
16781                let ident = self.parse_identifier()?;
16782                Some(ExceptSelectItem {
16783                    first_element: ident,
16784                    additional_elements: vec![],
16785                })
16786            }
16787        } else {
16788            None
16789        };
16790
16791        Ok(opt_except)
16792    }
16793
16794    /// Parse the [`Rename`](RenameSelectItem) clause for wildcard select items.
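    ///
    /// Snowflake-style example (illustrative):
    ///
    /// ```sql
    /// SELECT * RENAME (department_id AS dept_id) FROM employees;
    /// ```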
16795    pub fn parse_optional_select_item_rename(
16796        &mut self,
16797    ) -> Result<Option<RenameSelectItem>, ParserError> {
16798        let opt_rename = if self.parse_keyword(Keyword::RENAME) {
16799            if self.consume_token(&Token::LParen) {
16800                let idents =
16801                    self.parse_comma_separated(|parser| parser.parse_identifier_with_alias())?;
16802                self.expect_token(&Token::RParen)?;
16803                Some(RenameSelectItem::Multiple(idents))
16804            } else {
16805                let ident = self.parse_identifier_with_alias()?;
16806                Some(RenameSelectItem::Single(ident))
16807            }
16808        } else {
16809            None
16810        };
16811
16812        Ok(opt_rename)
16813    }
16814
16815    /// Parse the [`Replace`](ReplaceSelectItem) clause for wildcard select items.
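    ///
    /// Example of the parenthesized form expected here (illustrative):
    ///
    /// ```sql
    /// SELECT * REPLACE (quantity * 2 AS quantity) FROM orders;
    /// ```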
16816    pub fn parse_optional_select_item_replace(
16817        &mut self,
16818    ) -> Result<Option<ReplaceSelectItem>, ParserError> {
16819        let opt_replace = if self.parse_keyword(Keyword::REPLACE) {
16820            if self.consume_token(&Token::LParen) {
16821                let items = self.parse_comma_separated(|parser| {
16822                    Ok(Box::new(parser.parse_replace_elements()?))
16823                })?;
16824                self.expect_token(&Token::RParen)?;
16825                Some(ReplaceSelectItem { items })
16826            } else {
16827                let tok = self.next_token();
16828                return self.expected("( after REPLACE", tok);
16829            }
16830        } else {
16831            None
16832        };
16833
16834        Ok(opt_replace)
16835    }
16836    pub fn parse_replace_elements(&mut self) -> Result<ReplaceSelectElement, ParserError> {
16837        let expr = self.parse_expr()?;
16838        let as_keyword = self.parse_keyword(Keyword::AS);
16839        let ident = self.parse_identifier()?;
16840        Ok(ReplaceSelectElement {
16841            expr,
16842            column_name: ident,
16843            as_keyword,
16844        })
16845    }
16846
16847    /// Parse ASC or DESC, returning `Some(true)` for ASC, `Some(false)` for DESC,
16848    /// or `None` if neither keyword is present.
16849    pub fn parse_asc_desc(&mut self) -> Option<bool> {
16850        if self.parse_keyword(Keyword::ASC) {
16851            Some(true)
16852        } else if self.parse_keyword(Keyword::DESC) {
16853            Some(false)
16854        } else {
16855            None
16856        }
16857    }
16858
16859    /// Parse an [OrderByExpr] expression.
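    ///
    /// For example, a single element of an `ORDER BY` list such as (illustrative):
    ///
    /// ```sql
    /// total_sales DESC NULLS LAST
    /// ```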
16860    pub fn parse_order_by_expr(&mut self) -> Result<OrderByExpr, ParserError> {
16861        self.parse_order_by_expr_inner(false)
16862            .map(|(order_by, _)| order_by)
16863    }
16864
16865    /// Parse an [IndexColumn].
16866    pub fn parse_create_index_expr(&mut self) -> Result<IndexColumn, ParserError> {
16867        self.parse_order_by_expr_inner(true)
16868            .map(|(column, operator_class)| IndexColumn {
16869                column,
16870                operator_class,
16871            })
16872    }
16873
16874    fn parse_order_by_expr_inner(
16875        &mut self,
16876        with_operator_class: bool,
16877    ) -> Result<(OrderByExpr, Option<Ident>), ParserError> {
16878        let expr = self.parse_expr()?;
16879
16880        let operator_class: Option<Ident> = if with_operator_class {
16881            // If none of the following keywords is present, parse an
16882            // identifier as the operator class.
16883            if self
16884                .peek_one_of_keywords(&[Keyword::ASC, Keyword::DESC, Keyword::NULLS, Keyword::WITH])
16885                .is_some()
16886            {
16887                None
16888            } else {
16889                self.maybe_parse(|parser| parser.parse_identifier())?
16890            }
16891        } else {
16892            None
16893        };
16894
16895        let options = self.parse_order_by_options()?;
16896
16897        let with_fill = if dialect_of!(self is ClickHouseDialect | GenericDialect)
16898            && self.parse_keywords(&[Keyword::WITH, Keyword::FILL])
16899        {
16900            Some(self.parse_with_fill()?)
16901        } else {
16902            None
16903        };
16904
16905        Ok((
16906            OrderByExpr {
16907                expr,
16908                options,
16909                with_fill,
16910            },
16911            operator_class,
16912        ))
16913    }
16914
16915    fn parse_order_by_options(&mut self) -> Result<OrderByOptions, ParserError> {
16916        let asc = self.parse_asc_desc();
16917
16918        let nulls_first = if self.parse_keywords(&[Keyword::NULLS, Keyword::FIRST]) {
16919            Some(true)
16920        } else if self.parse_keywords(&[Keyword::NULLS, Keyword::LAST]) {
16921            Some(false)
16922        } else {
16923            None
16924        };
16925
16926        Ok(OrderByOptions { asc, nulls_first })
16927    }
16928
16929    // Parse a WITH FILL clause (ClickHouse dialect), i.e. the part
16930    // that follows the WITH FILL keywords in an ORDER BY clause.
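    // e.g. (illustrative): ORDER BY n WITH FILL FROM 0 TO 10 STEP 2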
16931    pub fn parse_with_fill(&mut self) -> Result<WithFill, ParserError> {
16932        let from = if self.parse_keyword(Keyword::FROM) {
16933            Some(self.parse_expr()?)
16934        } else {
16935            None
16936        };
16937
16938        let to = if self.parse_keyword(Keyword::TO) {
16939            Some(self.parse_expr()?)
16940        } else {
16941            None
16942        };
16943
16944        let step = if self.parse_keyword(Keyword::STEP) {
16945            Some(self.parse_expr()?)
16946        } else {
16947            None
16948        };
16949
16950        Ok(WithFill { from, to, step })
16951    }
16952
16953    // Parse a set of comma-separated INTERPOLATE expressions (ClickHouse dialect)
16954    // that follow the INTERPOLATE keyword in an ORDER BY clause with the WITH FILL modifier.
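    // e.g. (illustrative): ORDER BY n WITH FILL INTERPOLATE (col1 AS col1 + 1, col2)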
16955    pub fn parse_interpolations(&mut self) -> Result<Option<Interpolate>, ParserError> {
16956        if !self.parse_keyword(Keyword::INTERPOLATE) {
16957            return Ok(None);
16958        }
16959
16960        if self.consume_token(&Token::LParen) {
16961            let interpolations =
16962                self.parse_comma_separated0(|p| p.parse_interpolation(), Token::RParen)?;
16963            self.expect_token(&Token::RParen)?;
16964            // INTERPOLATE () and INTERPOLATE ( ... ) variants
16965            return Ok(Some(Interpolate {
16966                exprs: Some(interpolations),
16967            }));
16968        }
16969
16970        // INTERPOLATE
16971        Ok(Some(Interpolate { exprs: None }))
16972    }
16973
16974    // Parse an INTERPOLATE expression (ClickHouse dialect)
16975    pub fn parse_interpolation(&mut self) -> Result<InterpolateExpr, ParserError> {
16976        let column = self.parse_identifier()?;
16977        let expr = if self.parse_keyword(Keyword::AS) {
16978            Some(self.parse_expr()?)
16979        } else {
16980            None
16981        };
16982        Ok(InterpolateExpr { column, expr })
16983    }
16984
16985    /// Parse a TOP clause, the MSSQL equivalent of LIMIT,
16986    /// which follows `SELECT [DISTINCT]`.
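    ///
    /// For example (illustrative):
    ///
    /// ```sql
    /// SELECT TOP 10 * FROM t
    /// SELECT TOP (10) PERCENT WITH TIES * FROM t
    /// ```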
16987    pub fn parse_top(&mut self) -> Result<Top, ParserError> {
16988        let quantity = if self.consume_token(&Token::LParen) {
16989            let quantity = self.parse_expr()?;
16990            self.expect_token(&Token::RParen)?;
16991            Some(TopQuantity::Expr(quantity))
16992        } else {
16993            let next_token = self.next_token();
16994            let quantity = match next_token.token {
16995                Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start)?,
16996                _ => self.expected("literal int", next_token)?,
16997            };
16998            Some(TopQuantity::Constant(quantity))
16999        };
17000
17001        let percent = self.parse_keyword(Keyword::PERCENT);
17002
17003        let with_ties = self.parse_keywords(&[Keyword::WITH, Keyword::TIES]);
17004
17005        Ok(Top {
17006            with_ties,
17007            percent,
17008            quantity,
17009        })
17010    }
17011
17012    /// Parse a LIMIT clause
17013    pub fn parse_limit(&mut self) -> Result<Option<Expr>, ParserError> {
17014        if self.parse_keyword(Keyword::ALL) {
17015            Ok(None)
17016        } else {
17017            Ok(Some(self.parse_expr()?))
17018        }
17019    }
17020
17021    /// Parse an OFFSET clause
17022    pub fn parse_offset(&mut self) -> Result<Offset, ParserError> {
17023        let value = self.parse_expr()?;
17024        let rows = if self.parse_keyword(Keyword::ROW) {
17025            OffsetRows::Row
17026        } else if self.parse_keyword(Keyword::ROWS) {
17027            OffsetRows::Rows
17028        } else {
17029            OffsetRows::None
17030        };
17031        Ok(Offset { value, rows })
17032    }
17033
17034    /// Parse a FETCH clause
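    ///
    /// For example (illustrative):
    ///
    /// ```sql
    /// FETCH FIRST 10 ROWS ONLY
    /// ```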
17035    pub fn parse_fetch(&mut self) -> Result<Fetch, ParserError> {
17036        let _ = self.parse_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT]);
17037
17038        let (quantity, percent) = if self
17039            .parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])
17040            .is_some()
17041        {
17042            (None, false)
17043        } else {
17044            let quantity = Expr::Value(self.parse_value()?);
17045            let percent = self.parse_keyword(Keyword::PERCENT);
17046            let _ = self.parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS]);
17047            (Some(quantity), percent)
17048        };
17049
17050        let with_ties = if self.parse_keyword(Keyword::ONLY) {
17051            false
17052        } else {
17053            self.parse_keywords(&[Keyword::WITH, Keyword::TIES])
17054        };
17055
17056        Ok(Fetch {
17057            with_ties,
17058            percent,
17059            quantity,
17060        })
17061    }
17062
17063    /// Parse a FOR UPDATE/FOR SHARE clause
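    ///
    /// For example (illustrative):
    ///
    /// ```sql
    /// FOR UPDATE OF t SKIP LOCKED
    /// ```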
17064    pub fn parse_lock(&mut self) -> Result<LockClause, ParserError> {
17065        let lock_type = match self.expect_one_of_keywords(&[Keyword::UPDATE, Keyword::SHARE])? {
17066            Keyword::UPDATE => LockType::Update,
17067            Keyword::SHARE => LockType::Share,
17068            _ => unreachable!(),
17069        };
17070        let of = if self.parse_keyword(Keyword::OF) {
17071            Some(self.parse_object_name(false)?)
17072        } else {
17073            None
17074        };
17075        let nonblock = if self.parse_keyword(Keyword::NOWAIT) {
17076            Some(NonBlock::Nowait)
17077        } else if self.parse_keywords(&[Keyword::SKIP, Keyword::LOCKED]) {
17078            Some(NonBlock::SkipLocked)
17079        } else {
17080            None
17081        };
17082        Ok(LockClause {
17083            lock_type,
17084            of,
17085            nonblock,
17086        })
17087    }
17088
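
    /// Parse a `VALUES` list, e.g. (illustrative):
    ///
    /// ```sql
    /// VALUES (1, 'one'), (2, 'two')
    /// ```
    ///
    /// When `allow_empty` is true, empty rows such as `VALUES ()` are accepted.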
17089    pub fn parse_values(&mut self, allow_empty: bool) -> Result<Values, ParserError> {
17090        let mut explicit_row = false;
17091
17092        let rows = self.parse_comma_separated(|parser| {
17093            if parser.parse_keyword(Keyword::ROW) {
17094                explicit_row = true;
17095            }
17096
17097            parser.expect_token(&Token::LParen)?;
17098            if allow_empty && parser.peek_token().token == Token::RParen {
17099                parser.next_token();
17100                Ok(vec![])
17101            } else {
17102                let exprs = parser.parse_comma_separated(Parser::parse_expr)?;
17103                parser.expect_token(&Token::RParen)?;
17104                Ok(exprs)
17105            }
17106        })?;
17107        Ok(Values { explicit_row, rows })
17108    }
17109
17110    pub fn parse_start_transaction(&mut self) -> Result<Statement, ParserError> {
17111        self.expect_keyword_is(Keyword::TRANSACTION)?;
17112        Ok(Statement::StartTransaction {
17113            modes: self.parse_transaction_modes()?,
17114            begin: false,
17115            transaction: Some(BeginTransactionKind::Transaction),
17116            modifier: None,
17117            statements: vec![],
17118            exception: None,
17119            has_end_keyword: false,
17120            label: None,
17121        })
17122    }
17123
17124    pub fn parse_begin(&mut self) -> Result<Statement, ParserError> {
17125        let modifier = if !self.dialect.supports_start_transaction_modifier() {
17126            None
17127        } else if self.parse_keyword(Keyword::DEFERRED) {
17128            Some(TransactionModifier::Deferred)
17129        } else if self.parse_keyword(Keyword::IMMEDIATE) {
17130            Some(TransactionModifier::Immediate)
17131        } else if self.parse_keyword(Keyword::EXCLUSIVE) {
17132            Some(TransactionModifier::Exclusive)
17133        } else if self.parse_keyword(Keyword::TRY) {
17134            Some(TransactionModifier::Try)
17135        } else if self.parse_keyword(Keyword::CATCH) {
17136            Some(TransactionModifier::Catch)
17137        } else {
17138            None
17139        };
17140        let transaction = match self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]) {
17141            Some(Keyword::TRANSACTION) => Some(BeginTransactionKind::Transaction),
17142            Some(Keyword::WORK) => Some(BeginTransactionKind::Work),
17143            _ => None,
17144        };
17145        Ok(Statement::StartTransaction {
17146            modes: self.parse_transaction_modes()?,
17147            begin: true,
17148            transaction,
17149            modifier,
17150            statements: vec![],
17151            exception: None,
17152            has_end_keyword: false,
17153            label: None,
17154        })
17155    }
17156
17157    pub fn parse_begin_with_label(
        &mut self,
        label: Option<Ident>,
    ) -> Result<Statement, ParserError> {
17158        let statements = self.parse_statement_list(&[Keyword::EXCEPTION, Keyword::END])?;
17159
17160        let exception = if self.parse_keyword(Keyword::EXCEPTION) {
17161            let mut when = Vec::new();
17162            while !self.peek_keyword(Keyword::END) {
17163                self.expect_keyword(Keyword::WHEN)?;
17164                let mut idents = Vec::new();
17165                while !self.parse_keyword(Keyword::THEN) {
17166                    let ident = self.parse_identifier()?;
17167                    idents.push(ident);
17168                    self.maybe_parse(|p| p.expect_keyword(Keyword::OR))?;
17169                }
17170                let statements = self.parse_statement_list(&[Keyword::WHEN, Keyword::END])?;
17171                when.push(ExceptionWhen { idents, statements });
17172            }
17173            Some(when)
17174        } else {
17175            None
17176        };
17177
17178        self.expect_keyword(Keyword::END)?;
17179        if label.is_some() {
17180            let _ = self.parse_identifier();
17181        }
17182
17183        Ok(Statement::StartTransaction {
17184            begin: true,
17185            statements,
17186            exception,
17187            has_end_keyword: true,
17188            transaction: None,
17189            modifier: None,
17190            modes: Default::default(),
17191            label,
17192        })
17193    }
17194
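    /// Parse the body of a `BEGIN ... EXCEPTION ... END` block, e.g. the BigQuery-style
    /// statement below (illustrative; the leading `BEGIN` is expected to have been
    /// consumed by the caller):
    ///
    /// ```sql
    /// BEGIN
    ///     SELECT 1;
    /// EXCEPTION WHEN ERROR THEN
    ///     SELECT 2;
    /// END;
    /// ```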
17195    pub fn parse_begin_exception_end(&mut self) -> Result<Statement, ParserError> {
17196        let statements = self.parse_statement_list(&[Keyword::EXCEPTION, Keyword::END])?;
17197
17198        let exception = if self.parse_keyword(Keyword::EXCEPTION) {
17199            let mut when = Vec::new();
17200
17201            // We can have multiple `WHEN` arms so we consume all cases until `END`
17202            while !self.peek_keyword(Keyword::END) {
17203                self.expect_keyword(Keyword::WHEN)?;
17204
17205                // Each `WHEN` case can have one or more conditions, e.g.
17206                // WHEN EXCEPTION_1 [OR EXCEPTION_2] THEN
17207                // So we parse identifiers until the `THEN` keyword.
17208                let mut idents = Vec::new();
17209
17210                while !self.parse_keyword(Keyword::THEN) {
17211                    let ident = self.parse_identifier()?;
17212                    idents.push(ident);
17213
17214                    self.maybe_parse(|p| p.expect_keyword(Keyword::OR))?;
17215                }
17216
17217                let statements = self.parse_statement_list(&[Keyword::WHEN, Keyword::END])?;
17218
17219                when.push(ExceptionWhen { idents, statements });
17220            }
17221
17222            Some(when)
17223        } else {
17224            None
17225        };
17226
17227        self.expect_keyword(Keyword::END)?;
17228
17229        Ok(Statement::StartTransaction {
17230            begin: true,
17231            statements,
17232            exception,
17233            has_end_keyword: true,
17234            transaction: None,
17235            modifier: None,
17236            modes: Default::default(),
17237            label: None,
17238        })
17239    }
17240
17241    pub fn parse_end(&mut self) -> Result<Statement, ParserError> {
17242        let modifier = if !self.dialect.supports_end_transaction_modifier() {
17243            None
17244        } else if self.parse_keyword(Keyword::TRY) {
17245            Some(TransactionModifier::Try)
17246        } else if self.parse_keyword(Keyword::CATCH) {
17247            Some(TransactionModifier::Catch)
17248        } else {
17249            None
17250        };
17251        Ok(Statement::Commit {
17252            chain: self.parse_commit_rollback_chain()?,
17253            end: true,
17254            modifier,
17255        })
17256    }
17257
17258    pub fn parse_transaction_modes(&mut self) -> Result<Vec<TransactionMode>, ParserError> {
17259        let mut modes = vec![];
17260        let mut required = false;
17261        loop {
17262            let mode = if self.parse_keywords(&[Keyword::ISOLATION, Keyword::LEVEL]) {
17263                let iso_level = if self.parse_keywords(&[Keyword::READ, Keyword::UNCOMMITTED]) {
17264                    TransactionIsolationLevel::ReadUncommitted
17265                } else if self.parse_keywords(&[Keyword::READ, Keyword::COMMITTED]) {
17266                    TransactionIsolationLevel::ReadCommitted
17267                } else if self.parse_keywords(&[Keyword::REPEATABLE, Keyword::READ]) {
17268                    TransactionIsolationLevel::RepeatableRead
17269                } else if self.parse_keyword(Keyword::SERIALIZABLE) {
17270                    TransactionIsolationLevel::Serializable
17271                } else if self.parse_keyword(Keyword::SNAPSHOT) {
17272                    TransactionIsolationLevel::Snapshot
17273                } else {
17274                    self.expected("isolation level", self.peek_token())?
17275                };
17276                TransactionMode::IsolationLevel(iso_level)
17277            } else if self.parse_keywords(&[Keyword::READ, Keyword::ONLY]) {
17278                TransactionMode::AccessMode(TransactionAccessMode::ReadOnly)
17279            } else if self.parse_keywords(&[Keyword::READ, Keyword::WRITE]) {
17280                TransactionMode::AccessMode(TransactionAccessMode::ReadWrite)
17281            } else if required {
17282                self.expected("transaction mode", self.peek_token())?
17283            } else {
17284                break;
17285            };
17286            modes.push(mode);
17287            // ANSI requires a comma after each transaction mode, but
17288            // PostgreSQL, for historical reasons, does not. We follow
17289            // PostgreSQL in making the comma optional, since that is strictly
17290            // more general.
17291            required = self.consume_token(&Token::Comma);
17292        }
17293        Ok(modes)
17294    }
17295
17296    pub fn parse_commit(&mut self) -> Result<Statement, ParserError> {
17297        Ok(Statement::Commit {
17298            chain: self.parse_commit_rollback_chain()?,
17299            end: false,
17300            modifier: None,
17301        })
17302    }
17303
17304    pub fn parse_rollback(&mut self) -> Result<Statement, ParserError> {
17305        let chain = self.parse_commit_rollback_chain()?;
17306        let savepoint = self.parse_rollback_savepoint()?;
17307
17308        Ok(Statement::Rollback { chain, savepoint })
17309    }
17310
17311    pub fn parse_commit_rollback_chain(&mut self) -> Result<bool, ParserError> {
17312        let _ = self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]);
17313        if self.parse_keyword(Keyword::AND) {
17314            let chain = !self.parse_keyword(Keyword::NO);
17315            self.expect_keyword_is(Keyword::CHAIN)?;
17316            Ok(chain)
17317        } else {
17318            Ok(false)
17319        }
17320    }
17321
17322    pub fn parse_rollback_savepoint(&mut self) -> Result<Option<Ident>, ParserError> {
17323        if self.parse_keyword(Keyword::TO) {
17324            let _ = self.parse_keyword(Keyword::SAVEPOINT);
17325            let savepoint = self.parse_identifier()?;
17326
17327            Ok(Some(savepoint))
17328        } else {
17329            Ok(None)
17330        }
17331    }
17332
17333    /// Parse a `RAISERROR` statement
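    ///
    /// For example (illustrative, MSSQL-style):
    ///
    /// ```sql
    /// RAISERROR('This is a test message', 16, 1) WITH NOWAIT
    /// ```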
17334    pub fn parse_raiserror(&mut self) -> Result<Statement, ParserError> {
17335        self.expect_token(&Token::LParen)?;
17336        let message = Box::new(self.parse_expr()?);
17337        self.expect_token(&Token::Comma)?;
17338        let severity = Box::new(self.parse_expr()?);
17339        self.expect_token(&Token::Comma)?;
17340        let state = Box::new(self.parse_expr()?);
17341        let arguments = if self.consume_token(&Token::Comma) {
17342            self.parse_comma_separated(Parser::parse_expr)?
17343        } else {
17344            vec![]
17345        };
17346        self.expect_token(&Token::RParen)?;
17347        let options = if self.parse_keyword(Keyword::WITH) {
17348            self.parse_comma_separated(Parser::parse_raiserror_option)?
17349        } else {
17350            vec![]
17351        };
17352        Ok(Statement::RaisError {
17353            message,
17354            severity,
17355            state,
17356            arguments,
17357            options,
17358        })
17359    }
17360
17361    pub fn parse_raiserror_option(&mut self) -> Result<RaisErrorOption, ParserError> {
17362        match self.expect_one_of_keywords(&[Keyword::LOG, Keyword::NOWAIT, Keyword::SETERROR])? {
17363            Keyword::LOG => Ok(RaisErrorOption::Log),
17364            Keyword::NOWAIT => Ok(RaisErrorOption::NoWait),
17365            Keyword::SETERROR => Ok(RaisErrorOption::SetError),
17366            _ => self.expected(
17367                "LOG, NOWAIT or SETERROR raiserror option",
17368                self.peek_token(),
17369            ),
17370        }
17371    }
17372
17373    pub fn parse_deallocate(&mut self) -> Result<Statement, ParserError> {
17374        let prepare = self.parse_keyword(Keyword::PREPARE);
17375        let name = self.parse_identifier()?;
17376        Ok(Statement::Deallocate { name, prepare })
17377    }
17378
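    /// Parse an `EXECUTE` statement, e.g. (illustrative):
    ///
    /// ```sql
    /// EXECUTE my_prepared_statement(1, 'two')
    /// ```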
17379    pub fn parse_execute(&mut self) -> Result<Statement, ParserError> {
17380        let name = if self.dialect.supports_execute_immediate()
17381            && self.parse_keyword(Keyword::IMMEDIATE)
17382        {
17383            None
17384        } else {
17385            let name = self.parse_object_name(false)?;
17386            Some(name)
17387        };
17388
17389        let has_parentheses = self.consume_token(&Token::LParen);
17390
17391        let end_kws = &[Keyword::USING, Keyword::OUTPUT, Keyword::DEFAULT];
17392        let end_token = match (has_parentheses, self.peek_token().token) {
17393            (true, _) => Token::RParen,
17394            (false, Token::EOF) => Token::EOF,
17395            (false, Token::Word(w)) if end_kws.contains(&w.keyword) => Token::Word(w),
17396            (false, _) => Token::SemiColon,
17397        };
17398
17399        let parameters = self.parse_comma_separated0(Parser::parse_expr, end_token)?;
17400
17401        if has_parentheses {
17402            self.expect_token(&Token::RParen)?;
17403        }
17404
17405        let into = if self.parse_keyword(Keyword::INTO) {
17406            self.parse_comma_separated(Self::parse_identifier)?
17407        } else {
17408            vec![]
17409        };
17410
17411        let using = if self.parse_keyword(Keyword::USING) {
17412            self.parse_comma_separated(Self::parse_expr_with_alias)?
17413        } else {
17414            vec![]
17415        };
17416
17417        let output = self.parse_keyword(Keyword::OUTPUT);
17418
17419        let default = self.parse_keyword(Keyword::DEFAULT);
17420
17421        Ok(Statement::Execute {
17422            immediate: name.is_none(),
17423            name,
17424            parameters,
17425            has_parentheses,
17426            into,
17427            using,
17428            output,
17429            default,
17430        })
17431    }
17432
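    /// Parse the remainder of a `PREPARE` statement, e.g. (illustrative,
    /// PostgreSQL-style):
    ///
    /// ```sql
    /// PREPARE fooplan (INT, TEXT) AS INSERT INTO foo VALUES ($1, $2)
    /// ```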
17433    pub fn parse_prepare(&mut self) -> Result<Statement, ParserError> {
17434        let name = self.parse_identifier()?;
17435
17436        let mut data_types = vec![];
17437        if self.consume_token(&Token::LParen) {
17438            data_types = self.parse_comma_separated(Parser::parse_data_type)?;
17439            self.expect_token(&Token::RParen)?;
17440        }
17441
17442        self.expect_keyword_is(Keyword::AS)?;
17443        let statement = Box::new(self.parse_statement()?);
17444        Ok(Statement::Prepare {
17445            name,
17446            data_types,
17447            statement,
17448        })
17449    }
17450
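    /// Parse an `UNLOAD` statement, e.g. (illustrative, Redshift-style):
    ///
    /// ```sql
    /// UNLOAD ('SELECT * FROM venue') TO 's3://my-bucket/unload/'
    /// ```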
17451    pub fn parse_unload(&mut self) -> Result<Statement, ParserError> {
17452        self.expect_keyword(Keyword::UNLOAD)?;
17453        self.expect_token(&Token::LParen)?;
17454        let (query, query_text) = if matches!(self.peek_token().token, Token::SingleQuotedString(_))
17455        {
17456            (None, Some(self.parse_literal_string()?))
17457        } else {
17458            (Some(self.parse_query()?), None)
17459        };
17460        self.expect_token(&Token::RParen)?;
17461
17462        self.expect_keyword_is(Keyword::TO)?;
17463        let to = self.parse_identifier()?;
17464        let auth = if self.parse_keyword(Keyword::IAM_ROLE) {
17465            Some(self.parse_iam_role_kind()?)
17466        } else {
17467            None
17468        };
17469        let with = self.parse_options(Keyword::WITH)?;
17470        let mut options = vec![];
17471        while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
17472            options.push(opt);
17473        }
17474        Ok(Statement::Unload {
17475            query,
17476            query_text,
17477            to,
17478            auth,
17479            with,
17480            options,
17481        })
17482    }
17483
17484    pub fn parse_merge_clauses(&mut self) -> Result<Vec<MergeClause>, ParserError> {
17485        let mut clauses = vec![];
17486        loop {
17487            if !(self.parse_keyword(Keyword::WHEN)) {
17488                break;
17489            }
17490
17491            let mut clause_kind = MergeClauseKind::Matched;
17492            if self.parse_keyword(Keyword::NOT) {
17493                clause_kind = MergeClauseKind::NotMatched;
17494            }
17495            self.expect_keyword_is(Keyword::MATCHED)?;
17496
17497            if matches!(clause_kind, MergeClauseKind::NotMatched)
17498                && self.parse_keywords(&[Keyword::BY, Keyword::SOURCE])
17499            {
17500                clause_kind = MergeClauseKind::NotMatchedBySource;
17501            } else if matches!(clause_kind, MergeClauseKind::NotMatched)
17502                && self.parse_keywords(&[Keyword::BY, Keyword::TARGET])
17503            {
17504                clause_kind = MergeClauseKind::NotMatchedByTarget;
17505            }
17506
17507            let predicate = if self.parse_keyword(Keyword::AND) {
17508                Some(self.parse_expr()?)
17509            } else {
17510                None
17511            };
17512
17513            self.expect_keyword_is(Keyword::THEN)?;
17514
17515            let merge_clause = match self.parse_one_of_keywords(&[
17516                Keyword::UPDATE,
17517                Keyword::INSERT,
17518                Keyword::DELETE,
17519            ]) {
17520                Some(Keyword::UPDATE) => {
17521                    if matches!(
17522                        clause_kind,
17523                        MergeClauseKind::NotMatched | MergeClauseKind::NotMatchedByTarget
17524                    ) {
17525                        return Err(ParserError::ParserError(format!(
17526                            "UPDATE is not allowed in a {clause_kind} merge clause"
17527                        )));
17528                    }
17529                    self.expect_keyword_is(Keyword::SET)?;
17530                    MergeAction::Update {
17531                        assignments: self.parse_comma_separated(Parser::parse_assignment)?,
17532                    }
17533                }
17534                Some(Keyword::DELETE) => {
17535                    if matches!(
17536                        clause_kind,
17537                        MergeClauseKind::NotMatched | MergeClauseKind::NotMatchedByTarget
17538                    ) {
17539                        return Err(ParserError::ParserError(format!(
17540                            "DELETE is not allowed in a {clause_kind} merge clause"
17541                        )));
17542                    }
17543                    MergeAction::Delete
17544                }
17545                Some(Keyword::INSERT) => {
17546                    if !matches!(
17547                        clause_kind,
17548                        MergeClauseKind::NotMatched | MergeClauseKind::NotMatchedByTarget
17549                    ) {
17550                        return Err(ParserError::ParserError(format!(
17551                            "INSERT is not allowed in a {clause_kind} merge clause"
17552                        )));
17553                    }
17554                    let is_mysql = dialect_of!(self is MySqlDialect);
17555
17556                    let columns = self.parse_parenthesized_column_list(Optional, is_mysql)?;
17557                    let kind = if dialect_of!(self is BigQueryDialect | GenericDialect)
17558                        && self.parse_keyword(Keyword::ROW)
17559                    {
17560                        MergeInsertKind::Row
17561                    } else {
17562                        self.expect_keyword_is(Keyword::VALUES)?;
17563                        let values = self.parse_values(is_mysql)?;
17564                        MergeInsertKind::Values(values)
17565                    };
17566                    MergeAction::Insert(MergeInsertExpr { columns, kind })
17567                }
17568                _ => {
17569                    return Err(ParserError::ParserError(
17570                        "expected UPDATE, DELETE or INSERT in merge clause".to_string(),
17571                    ));
17572                }
17573            };
17574            clauses.push(MergeClause {
17575                clause_kind,
17576                predicate,
17577                action: merge_clause,
17578            });
17579        }
17580        Ok(clauses)
17581    }
17582
17583    fn parse_output(&mut self, start_keyword: Keyword) -> Result<OutputClause, ParserError> {
17584        let select_items = self.parse_projection()?;
17585        let into_table = if start_keyword == Keyword::OUTPUT && self.peek_keyword(Keyword::INTO) {
17586            self.expect_keyword_is(Keyword::INTO)?;
17587            Some(self.parse_select_into()?)
17588        } else {
17589            None
17590        };
17591
17592        Ok(if start_keyword == Keyword::OUTPUT {
17593            OutputClause::Output {
17594                select_items,
17595                into_table,
17596            }
17597        } else {
17598            OutputClause::Returning { select_items }
17599        })
17600    }
17601
17602    fn parse_select_into(&mut self) -> Result<SelectInto, ParserError> {
17603        let temporary = self
17604            .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
17605            .is_some();
17606        let unlogged = self.parse_keyword(Keyword::UNLOGGED);
17607        let table = self.parse_keyword(Keyword::TABLE);
17608        let name = self.parse_object_name(false)?;
17609
17610        Ok(SelectInto {
17611            temporary,
17612            unlogged,
17613            table,
17614            name,
17615        })
17616    }
17617
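    /// Parse a `MERGE` statement (the leading `MERGE` keyword is expected to have been
    /// consumed by the caller), e.g. (illustrative):
    ///
    /// ```sql
    /// MERGE INTO target AS t USING source AS s ON t.id = s.id
    /// WHEN MATCHED THEN UPDATE SET t.value = s.value
    /// WHEN NOT MATCHED THEN INSERT (id, value) VALUES (s.id, s.value)
    /// ```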
17618    pub fn parse_merge(&mut self) -> Result<Statement, ParserError> {
17619        let into = self.parse_keyword(Keyword::INTO);
17620
17621        let table = self.parse_table_factor()?;
17622
17623        self.expect_keyword_is(Keyword::USING)?;
17624        let source = self.parse_table_factor()?;
17625        self.expect_keyword_is(Keyword::ON)?;
17626        let on = self.parse_expr()?;
17627        let clauses = self.parse_merge_clauses()?;
17628        let output = match self.parse_one_of_keywords(&[Keyword::OUTPUT, Keyword::RETURNING]) {
17629            Some(start_keyword) => Some(self.parse_output(start_keyword)?),
17630            None => None,
17631        };
17632
17633        Ok(Statement::Merge {
17634            into,
17635            table,
17636            source,
17637            on: Box::new(on),
17638            clauses,
17639            output,
17640        })
17641    }
17642
17643    fn parse_pragma_value(&mut self) -> Result<Value, ParserError> {
17644        match self.parse_value()?.value {
17645            v @ Value::SingleQuotedString(_) => Ok(v),
17646            v @ Value::DoubleQuotedString(_) => Ok(v),
17647            v @ Value::Number(_, _) => Ok(v),
17648            v @ Value::Placeholder(_) => Ok(v),
17649            _ => {
17650                self.prev_token();
17651                self.expected("number or string or ? placeholder", self.peek_token())
17652            }
17653        }
17654    }
17655
17656    // PRAGMA [schema-name '.'] pragma-name [('=' pragma-value) | '(' pragma-value ')']
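    // e.g. (illustrative): PRAGMA cache_size = 2000;
    //                      PRAGMA main.journal_mode;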
17657    pub fn parse_pragma(&mut self) -> Result<Statement, ParserError> {
17658        let name = self.parse_object_name(false)?;
17659        if self.consume_token(&Token::LParen) {
17660            let value = self.parse_pragma_value()?;
17661            self.expect_token(&Token::RParen)?;
17662            Ok(Statement::Pragma {
17663                name,
17664                value: Some(value),
17665                is_eq: false,
17666            })
17667        } else if self.consume_token(&Token::Eq) {
17668            Ok(Statement::Pragma {
17669                name,
17670                value: Some(self.parse_pragma_value()?),
17671                is_eq: true,
17672            })
17673        } else {
17674            Ok(Statement::Pragma {
17675                name,
17676                value: None,
17677                is_eq: false,
17678            })
17679        }
17680    }
17681
17682    /// `INSTALL [extension_name]`
17683    pub fn parse_install(&mut self) -> Result<Statement, ParserError> {
17684        let extension_name = self.parse_identifier()?;
17685
17686        Ok(Statement::Install { extension_name })
17687    }
17688
17689    /// Parse a SQL LOAD statement
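    ///
    /// For example (illustrative): DuckDB-style `LOAD my_extension` to load an
    /// extension, or Hive-style:
    ///
    /// ```sql
    /// LOAD DATA LOCAL INPATH '/user/hive/data.txt' OVERWRITE INTO TABLE mytable
    /// ```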
17690    pub fn parse_load(&mut self) -> Result<Statement, ParserError> {
17691        if self.dialect.supports_load_extension() {
17692            let extension_name = self.parse_identifier()?;
17693            Ok(Statement::Load { extension_name })
17694        } else if self.parse_keyword(Keyword::DATA) && self.dialect.supports_load_data() {
17695            let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
17696            self.expect_keyword_is(Keyword::INPATH)?;
17697            let inpath = self.parse_literal_string()?;
17698            let overwrite = self.parse_one_of_keywords(&[Keyword::OVERWRITE]).is_some();
17699            self.expect_keyword_is(Keyword::INTO)?;
17700            self.expect_keyword_is(Keyword::TABLE)?;
17701            let table_name = self.parse_object_name(false)?;
17702            let partitioned = self.parse_insert_partition()?;
17703            let table_format = self.parse_load_data_table_format()?;
17704            Ok(Statement::LoadData {
17705                local,
17706                inpath,
17707                overwrite,
17708                table_name,
17709                partitioned,
17710                table_format,
17711            })
17712        } else {
17713            self.expected(
17714                "`DATA` or an extension name after `LOAD`",
17715                self.peek_token(),
17716            )
17717        }
17718    }
17719
17720    /// ```sql
17721    /// OPTIMIZE TABLE [db.]name [ON CLUSTER cluster] [PARTITION partition | PARTITION ID 'partition_id'] [FINAL] [DEDUPLICATE [BY expression]]
17722    /// ```
17723    /// [ClickHouse](https://clickhouse.com/docs/en/sql-reference/statements/optimize)
17724    pub fn parse_optimize_table(&mut self) -> Result<Statement, ParserError> {
17725        self.expect_keyword_is(Keyword::TABLE)?;
17726        let name = self.parse_object_name(false)?;
17727        let on_cluster = self.parse_optional_on_cluster()?;
17728
17729        let partition = if self.parse_keyword(Keyword::PARTITION) {
17730            if self.parse_keyword(Keyword::ID) {
17731                Some(Partition::Identifier(self.parse_identifier()?))
17732            } else {
17733                Some(Partition::Expr(self.parse_expr()?))
17734            }
17735        } else {
17736            None
17737        };
17738
17739        let include_final = self.parse_keyword(Keyword::FINAL);
17740        let deduplicate = if self.parse_keyword(Keyword::DEDUPLICATE) {
17741            if self.parse_keyword(Keyword::BY) {
17742                Some(Deduplicate::ByExpression(self.parse_expr()?))
17743            } else {
17744                Some(Deduplicate::All)
17745            }
17746        } else {
17747            None
17748        };
17749
17750        Ok(Statement::OptimizeTable {
17751            name,
17752            on_cluster,
17753            partition,
17754            include_final,
17755            deduplicate,
17756        })
17757    }
17758
17759    /// ```sql
17760    /// CREATE [ { TEMPORARY | TEMP } ] SEQUENCE [ IF NOT EXISTS ] <sequence_name>
17761    /// ```
17762    ///
17763    /// See [Postgres docs](https://www.postgresql.org/docs/current/sql-createsequence.html) for more details.
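    ///
    /// For example (illustrative; as written, this implementation checks for the
    /// sequence options in the order shown):
    ///
    /// ```sql
    /// CREATE TEMPORARY SEQUENCE IF NOT EXISTS seq1 AS BIGINT
    ///     INCREMENT BY 2 MINVALUE 1 MAXVALUE 1000 START WITH 10 CACHE 5 NO CYCLE
    ///     OWNED BY t.id
    /// ```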
17764    pub fn parse_create_sequence(&mut self, temporary: bool) -> Result<Statement, ParserError> {
17765        //[ IF NOT EXISTS ]
17766        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
17767        //name
17768        let name = self.parse_object_name(false)?;
17769        //[ AS data_type ]
17770        let mut data_type: Option<DataType> = None;
17771        if self.parse_keywords(&[Keyword::AS]) {
17772            data_type = Some(self.parse_data_type()?)
17773        }
17774        let sequence_options = self.parse_create_sequence_options()?;
17775        // [ OWNED BY { table_name.column_name | NONE } ]
17776        let owned_by = if self.parse_keywords(&[Keyword::OWNED, Keyword::BY]) {
17777            if self.parse_keywords(&[Keyword::NONE]) {
17778                Some(ObjectName::from(vec![Ident::new("NONE")]))
17779            } else {
17780                Some(self.parse_object_name(false)?)
17781            }
17782        } else {
17783            None
17784        };
17785        Ok(Statement::CreateSequence {
17786            temporary,
17787            if_not_exists,
17788            name,
17789            data_type,
17790            sequence_options,
17791            owned_by,
17792        })
17793    }
17794
17795    fn parse_create_sequence_options(&mut self) -> Result<Vec<SequenceOptions>, ParserError> {
17796        let mut sequence_options = vec![];
17797        //[ INCREMENT [ BY ] increment ]
17798        if self.parse_keywords(&[Keyword::INCREMENT]) {
17799            if self.parse_keywords(&[Keyword::BY]) {
17800                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, true));
17801            } else {
17802                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, false));
17803            }
17804        }
17805        //[ MINVALUE minvalue | NO MINVALUE ]
17806        if self.parse_keyword(Keyword::MINVALUE) {
17807            sequence_options.push(SequenceOptions::MinValue(Some(self.parse_number()?)));
17808        } else if self.parse_keywords(&[Keyword::NO, Keyword::MINVALUE]) {
17809            sequence_options.push(SequenceOptions::MinValue(None));
17810        }
17811        //[ MAXVALUE maxvalue | NO MAXVALUE ]
17812        if self.parse_keywords(&[Keyword::MAXVALUE]) {
17813            sequence_options.push(SequenceOptions::MaxValue(Some(self.parse_number()?)));
17814        } else if self.parse_keywords(&[Keyword::NO, Keyword::MAXVALUE]) {
17815            sequence_options.push(SequenceOptions::MaxValue(None));
17816        }
17817
17818        //[ START [ WITH ] start ]
17819        if self.parse_keywords(&[Keyword::START]) {
17820            if self.parse_keywords(&[Keyword::WITH]) {
17821                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, true));
17822            } else {
17823                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, false));
17824            }
17825        }
17826        //[ CACHE cache ]
17827        if self.parse_keywords(&[Keyword::CACHE]) {
17828            sequence_options.push(SequenceOptions::Cache(self.parse_number()?));
17829        }
17830        // [ [ NO ] CYCLE ]
17831        if self.parse_keywords(&[Keyword::NO, Keyword::CYCLE]) {
17832            sequence_options.push(SequenceOptions::Cycle(true));
17833        } else if self.parse_keywords(&[Keyword::CYCLE]) {
17834            sequence_options.push(SequenceOptions::Cycle(false));
17835        }
17836
17837        Ok(sequence_options)
17838    }
17839
17840    /// Parse a `CREATE SERVER` statement.
17841    ///
17842    /// See [Statement::CreateServer]
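    ///
    /// For example (illustrative, PostgreSQL-style):
    ///
    /// ```sql
    /// CREATE SERVER myserver FOREIGN DATA WRAPPER postgres_fdw
    ///     OPTIONS (host 'foo', dbname 'foodb', port '5432')
    /// ```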
17843    pub fn parse_pg_create_server(&mut self) -> Result<Statement, ParserError> {
17844        let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
17845        let name = self.parse_object_name(false)?;
17846
17847        let server_type = if self.parse_keyword(Keyword::TYPE) {
17848            Some(self.parse_identifier()?)
17849        } else {
17850            None
17851        };
17852
17853        let version = if self.parse_keyword(Keyword::VERSION) {
17854            Some(self.parse_identifier()?)
17855        } else {
17856            None
17857        };
17858
17859        self.expect_keywords(&[Keyword::FOREIGN, Keyword::DATA, Keyword::WRAPPER])?;
17860        let foreign_data_wrapper = self.parse_object_name(false)?;
17861
17862        let mut options = None;
17863        if self.parse_keyword(Keyword::OPTIONS) {
17864            self.expect_token(&Token::LParen)?;
17865            options = Some(self.parse_comma_separated(|p| {
17866                let key = p.parse_identifier()?;
17867                let value = p.parse_identifier()?;
17868                Ok(CreateServerOption { key, value })
17869            })?);
17870            self.expect_token(&Token::RParen)?;
17871        }
17872
17873        Ok(Statement::CreateServer(CreateServerStatement {
17874            name,
17875            if_not_exists: ine,
17876            server_type,
17877            version,
17878            foreign_data_wrapper,
17879            options,
17880        }))
17881    }
17882
17883    /// The index of the first unprocessed token.
17884    pub fn index(&self) -> usize {
17885        self.index
17886    }
17887
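    /// Parse a single named window definition from a `WINDOW` clause, e.g. the
    /// `w AS (PARTITION BY y ORDER BY z)` part of (illustrative):
    ///
    /// ```sql
    /// SELECT SUM(x) OVER w FROM t WINDOW w AS (PARTITION BY y ORDER BY z)
    /// ```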
17888    pub fn parse_named_window(&mut self) -> Result<NamedWindowDefinition, ParserError> {
17889        let ident = self.parse_identifier()?;
17890        self.expect_keyword_is(Keyword::AS)?;
17891
17892        let window_expr = if self.consume_token(&Token::LParen) {
17893            NamedWindowExpr::WindowSpec(self.parse_window_spec()?)
17894        } else if self.dialect.supports_window_clause_named_window_reference() {
17895            NamedWindowExpr::NamedWindow(self.parse_identifier()?)
17896        } else {
17897            return self.expected("(", self.peek_token());
17898        };
17899
17900        Ok(NamedWindowDefinition(ident, window_expr))
17901    }
17902
17903    pub fn parse_create_procedure(&mut self, or_alter: bool) -> Result<Statement, ParserError> {
17904        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
17905        let name = self.parse_object_name(false)?;
17906        let params = self.parse_optional_procedure_parameters()?;
17907
17908        let language = if self.parse_keyword(Keyword::LANGUAGE) {
17909            Some(self.parse_identifier()?)
17910        } else {
17911            None
17912        };
17913
17914        let options = self.maybe_parse_options(Keyword::OPTIONS)?;
17915
17916        let _ = self.parse_keyword(Keyword::AS);
17917
17918        let body = self.parse_conditional_statements(&[Keyword::END])?;
17919
17920        Ok(Statement::CreateProcedure {
17921            name,
17922            or_alter,
17923            if_not_exists,
17924            params,
17925            language,
17926            options,
17927            body,
17928        })
17929    }
17930
17931    pub fn parse_window_spec(&mut self) -> Result<WindowSpec, ParserError> {
17932        let window_name = match self.peek_token().token {
17933            Token::Word(word) if word.keyword == Keyword::NoKeyword => {
17934                self.parse_optional_ident()?
17935            }
17936            _ => None,
17937        };
17938
17939        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
17940            self.parse_comma_separated(Parser::parse_expr)?
17941        } else {
17942            vec![]
17943        };
17944        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
17945            self.parse_comma_separated(Parser::parse_order_by_expr)?
17946        } else {
17947            vec![]
17948        };
17949
17950        let window_frame = if !self.consume_token(&Token::RParen) {
17951            let window_frame = self.parse_window_frame()?;
17952            self.expect_token(&Token::RParen)?;
17953            Some(window_frame)
17954        } else {
17955            None
17956        };
17957        Ok(WindowSpec {
17958            window_name,
17959            partition_by,
17960            order_by,
17961            window_frame,
17962        })
17963    }
17964
17965    pub fn parse_create_type(&mut self) -> Result<Statement, ParserError> {
17966        let name = self.parse_object_name(false)?;
17967        self.expect_keyword_is(Keyword::AS)?;
17968
17969        if self.parse_keyword(Keyword::ENUM) {
17970            return self.parse_create_type_enum(name);
17971        }
17972
17973        let mut attributes = vec![];
17974        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
17975            return Ok(Statement::CreateType {
17976                name,
17977                representation: UserDefinedTypeRepresentation::Composite { attributes },
17978            });
17979        }
17980
17981        loop {
17982            let attr_name = self.parse_identifier()?;
17983            let attr_data_type = self.parse_data_type()?;
17984            let attr_collation = if self.parse_keyword(Keyword::COLLATE) {
17985                Some(self.parse_object_name(false)?)
17986            } else {
17987                None
17988            };
17989            attributes.push(UserDefinedTypeCompositeAttributeDef {
17990                name: attr_name,
17991                data_type: attr_data_type,
17992                collation: attr_collation,
17993            });
17994            let comma = self.consume_token(&Token::Comma);
17995            if self.consume_token(&Token::RParen) {
17996                // allow a trailing comma
17997                break;
17998            } else if !comma {
17999                return self.expected("',' or ')' after attribute definition", self.peek_token());
18000            }
18001        }
18002
18003        Ok(Statement::CreateType {
18004            name,
18005            representation: UserDefinedTypeRepresentation::Composite { attributes },
18006        })
18007    }
18008
18009    /// Parse remainder of `CREATE TYPE AS ENUM` statement (see [Statement::CreateType] and [Self::parse_create_type])
18010    ///
18011    /// See [PostgreSQL](https://www.postgresql.org/docs/current/sql-createtype.html)
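    ///
    /// For example (illustrative):
    ///
    /// ```sql
    /// CREATE TYPE mood AS ENUM ('sad', 'ok', 'happy')
    /// ```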
18012    pub fn parse_create_type_enum(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
18013        self.expect_token(&Token::LParen)?;
18014        let labels = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
18015        self.expect_token(&Token::RParen)?;
18016
18017        Ok(Statement::CreateType {
18018            name,
18019            representation: UserDefinedTypeRepresentation::Enum { labels },
18020        })
18021    }
18022
18023    fn parse_parenthesized_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
18024        self.expect_token(&Token::LParen)?;
18025        let idents = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
18026        self.expect_token(&Token::RParen)?;
18027        Ok(idents)
18028    }
18029
18030    fn parse_column_position(&mut self) -> Result<Option<MySQLColumnPosition>, ParserError> {
18031        if dialect_of!(self is MySqlDialect | GenericDialect | BigQueryDialect) {
18032            if self.parse_keyword(Keyword::FIRST) {
18033                Ok(Some(MySQLColumnPosition::First))
18034            } else if self.parse_keyword(Keyword::AFTER) {
18035                let ident = self.parse_identifier()?;
18036                Ok(Some(MySQLColumnPosition::After(ident)))
18037            } else {
18038                Ok(None)
18039            }
18040        } else {
18041            Ok(None)
18042        }
18043    }
18044
18045    /// Parse [Statement::Print]
18046    fn parse_print(&mut self) -> Result<Statement, ParserError> {
18047        Ok(Statement::Print(PrintStatement {
18048            message: Box::new(self.parse_expr()?),
18049        }))
18050    }
18051
18052    /// Parse [Statement::Return]
18053    fn parse_return(&mut self) -> Result<Statement, ParserError> {
18054        match self.maybe_parse(|p| p.parse_expr())? {
18055            Some(expr) => Ok(Statement::Return(ReturnStatement {
18056                value: Some(ReturnStatementValue::Expr(expr)),
18057            })),
18058            None => Ok(Statement::Return(ReturnStatement { value: None })),
18059        }
18060    }
18061
18062    /// Parse an `EXPORT DATA` statement.
18063    ///
18064    /// See [Statement::ExportData]
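    ///
    /// For example (illustrative, BigQuery-style):
    ///
    /// ```sql
    /// EXPORT DATA OPTIONS (uri = 'gs://bucket/folder/*.csv', format = 'CSV') AS
    /// SELECT field1, field2 FROM mydataset.table1
    /// ```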
18065    fn parse_export_data(&mut self) -> Result<Statement, ParserError> {
18066        self.expect_keywords(&[Keyword::EXPORT, Keyword::DATA])?;
18067
18068        let connection = if self.parse_keywords(&[Keyword::WITH, Keyword::CONNECTION]) {
18069            Some(self.parse_object_name(false)?)
18070        } else {
18071            None
18072        };
18073        self.expect_keyword(Keyword::OPTIONS)?;
18074        self.expect_token(&Token::LParen)?;
18075        let options = self.parse_comma_separated(|p| p.parse_sql_option())?;
18076        self.expect_token(&Token::RParen)?;
18077        self.expect_keyword(Keyword::AS)?;
18078        let query = self.parse_query()?;
18079        Ok(Statement::ExportData(ExportData {
18080            options,
18081            query,
18082            connection,
18083        }))
18084    }
18085
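    /// Parse a `VACUUM` statement, e.g. (illustrative, Redshift-style):
    ///
    /// ```sql
    /// VACUUM FULL sales TO 100 PERCENT
    /// ```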
18086    fn parse_vacuum(&mut self) -> Result<Statement, ParserError> {
18087        self.expect_keyword(Keyword::VACUUM)?;
18088        let full = self.parse_keyword(Keyword::FULL);
18089        let sort_only = self.parse_keywords(&[Keyword::SORT, Keyword::ONLY]);
18090        let delete_only = self.parse_keywords(&[Keyword::DELETE, Keyword::ONLY]);
18091        let reindex = self.parse_keyword(Keyword::REINDEX);
18092        let recluster = self.parse_keyword(Keyword::RECLUSTER);
18093        let (table_name, threshold, boost) =
18094            match self.maybe_parse(|p| p.parse_object_name(false))? {
18095                Some(table_name) => {
18096                    let threshold = if self.parse_keyword(Keyword::TO) {
18097                        let value = self.parse_value()?;
18098                        self.expect_keyword(Keyword::PERCENT)?;
18099                        Some(value.value)
18100                    } else {
18101                        None
18102                    };
18103                    let boost = self.parse_keyword(Keyword::BOOST);
18104                    (Some(table_name), threshold, boost)
18105                }
18106                _ => (None, None, false),
18107            };
18108        Ok(Statement::Vacuum(VacuumStatement {
18109            full,
18110            sort_only,
18111            delete_only,
18112            reindex,
18113            recluster,
18114            table_name,
18115            threshold,
18116            boost,
18117        }))
18118    }
18119
18120    /// Consume the parser and return its underlying token buffer
18121    pub fn into_tokens(self) -> Vec<TokenWithSpan> {
18122        self.tokens
18123    }
18124
18125    /// Returns true if the next keyword indicates a subquery, i.e. SELECT or WITH
18126    fn peek_sub_query(&mut self) -> bool {
18127        if self
18128            .parse_one_of_keywords(&[Keyword::SELECT, Keyword::WITH])
18129            .is_some()
18130        {
18131            self.prev_token();
18132            return true;
18133        }
18134        false
18135    }
18136
18137    pub(crate) fn parse_show_stmt_options(&mut self) -> Result<ShowStatementOptions, ParserError> {
18138        let show_in;
18139        let mut filter_position = None;
18140        if self.dialect.supports_show_like_before_in() {
18141            if let Some(filter) = self.parse_show_statement_filter()? {
18142                filter_position = Some(ShowStatementFilterPosition::Infix(filter));
18143            }
18144            show_in = self.maybe_parse_show_stmt_in()?;
18145        } else {
18146            show_in = self.maybe_parse_show_stmt_in()?;
18147            if let Some(filter) = self.parse_show_statement_filter()? {
18148                filter_position = Some(ShowStatementFilterPosition::Suffix(filter));
18149            }
18150        }
18151        let starts_with = self.maybe_parse_show_stmt_starts_with()?;
18152        let limit = self.maybe_parse_show_stmt_limit()?;
18153        let from = self.maybe_parse_show_stmt_from()?;
18154        Ok(ShowStatementOptions {
18155            filter_position,
18156            show_in,
18157            starts_with,
18158            limit,
18159            limit_from: from,
18160        })
18161    }
18162
18163    fn maybe_parse_show_stmt_in(&mut self) -> Result<Option<ShowStatementIn>, ParserError> {
18164        let clause = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) {
18165            Some(Keyword::FROM) => ShowStatementInClause::FROM,
18166            Some(Keyword::IN) => ShowStatementInClause::IN,
18167            None => return Ok(None),
18168            _ => return self.expected("FROM or IN", self.peek_token()),
18169        };
18170
18171        let (parent_type, parent_name) = match self.parse_one_of_keywords(&[
18172            Keyword::ACCOUNT,
18173            Keyword::DATABASE,
18174            Keyword::SCHEMA,
18175            Keyword::TABLE,
18176            Keyword::VIEW,
18177        ]) {
18178            // If we see these next keywords it means we don't have a parent name
18179            Some(Keyword::DATABASE)
18180                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
18181                    | self.peek_keyword(Keyword::LIMIT) =>
18182            {
18183                (Some(ShowStatementInParentType::Database), None)
18184            }
18185            Some(Keyword::SCHEMA)
18186                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
18187                    | self.peek_keyword(Keyword::LIMIT) =>
18188            {
18189                (Some(ShowStatementInParentType::Schema), None)
18190            }
18191            Some(parent_kw) => {
18192                // The parent name here is still optional, for example:
18193                // SHOW TABLES IN ACCOUNT, so parsing the object name
18194                // may fail because the statement ends.
18195                let parent_name = self.maybe_parse(|p| p.parse_object_name(false))?;
18196                match parent_kw {
18197                    Keyword::ACCOUNT => (Some(ShowStatementInParentType::Account), parent_name),
18198                    Keyword::DATABASE => (Some(ShowStatementInParentType::Database), parent_name),
18199                    Keyword::SCHEMA => (Some(ShowStatementInParentType::Schema), parent_name),
18200                    Keyword::TABLE => (Some(ShowStatementInParentType::Table), parent_name),
18201                    Keyword::VIEW => (Some(ShowStatementInParentType::View), parent_name),
18202                    _ => {
18203                        return self.expected(
18204                            "one of ACCOUNT, DATABASE, SCHEMA, TABLE or VIEW",
18205                            self.peek_token(),
18206                        )
18207                    }
18208                }
18209            }
18210            None => {
18211                // Parsing MySQL style FROM tbl_name FROM db_name
18212                // which is equivalent to FROM db_name.tbl_name
18213                let mut parent_name = self.parse_object_name(false)?;
18214                if self
18215                    .parse_one_of_keywords(&[Keyword::FROM, Keyword::IN])
18216                    .is_some()
18217                {
18218                    parent_name
18219                        .0
18220                        .insert(0, ObjectNamePart::Identifier(self.parse_identifier()?));
18221                }
18222                (None, Some(parent_name))
18223            }
18224        };
18225
18226        Ok(Some(ShowStatementIn {
18227            clause,
18228            parent_type,
18229            parent_name,
18230        }))
18231    }
18232
18233    fn maybe_parse_show_stmt_starts_with(&mut self) -> Result<Option<Value>, ParserError> {
18234        if self.parse_keywords(&[Keyword::STARTS, Keyword::WITH]) {
18235            Ok(Some(self.parse_value()?.value))
18236        } else {
18237            Ok(None)
18238        }
18239    }
18240
18241    fn maybe_parse_show_stmt_limit(&mut self) -> Result<Option<Expr>, ParserError> {
18242        if self.parse_keyword(Keyword::LIMIT) {
18243            Ok(self.parse_limit()?)
18244        } else {
18245            Ok(None)
18246        }
18247    }
18248
18249    fn maybe_parse_show_stmt_from(&mut self) -> Result<Option<Value>, ParserError> {
18250        if self.parse_keyword(Keyword::FROM) {
18251            Ok(Some(self.parse_value()?.value))
18252        } else {
18253            Ok(None)
18254        }
18255    }
18256
18257    pub(crate) fn in_column_definition_state(&self) -> bool {
18258        matches!(self.state, ColumnDefinition)
18259    }
18260
18261    /// Parses options provided in key-value format.
18262    ///
18263    /// * `parenthesized` - true if the options are enclosed in parentheses
18264    /// * `end_words` - a list of keywords, any of which marks the end of the options section
18265    pub(crate) fn parse_key_value_options(
18266        &mut self,
18267        parenthesized: bool,
18268        end_words: &[Keyword],
18269    ) -> Result<Vec<KeyValueOption>, ParserError> {
18270        let mut options: Vec<KeyValueOption> = Vec::new();
18271        if parenthesized {
18272            self.expect_token(&Token::LParen)?;
18273        }
18274        loop {
18275            match self.next_token().token {
18276                Token::RParen => {
18277                    if parenthesized {
18278                        break;
18279                    } else {
18280                        return self.expected("another option or EOF", self.peek_token());
18281                    }
18282                }
18283                Token::EOF => break,
18284                Token::Comma => continue,
18285                Token::Word(w) if !end_words.contains(&w.keyword) => {
18286                    options.push(self.parse_key_value_option(w)?)
18287                }
18288                Token::Word(w) if end_words.contains(&w.keyword) => {
18289                    self.prev_token();
18290                    break;
18291                }
18292                _ => return self.expected("another option, EOF, Comma or ')'", self.peek_token()),
18293            };
18294        }
18295        Ok(options)
18296    }
18297
18298    /// Parses a `KEY = VALUE` construct based on the specified key
18299    pub(crate) fn parse_key_value_option(
18300        &mut self,
18301        key: Word,
18302    ) -> Result<KeyValueOption, ParserError> {
18303        self.expect_token(&Token::Eq)?;
18304        match self.next_token().token {
18305            Token::SingleQuotedString(value) => Ok(KeyValueOption {
18306                option_name: key.value,
18307                option_type: KeyValueOptionType::STRING,
18308                value,
18309            }),
18310            Token::Word(word)
18311                if word.keyword == Keyword::TRUE || word.keyword == Keyword::FALSE =>
18312            {
18313                Ok(KeyValueOption {
18314                    option_name: key.value,
18315                    option_type: KeyValueOptionType::BOOLEAN,
18316                    value: word.value.to_uppercase(),
18317                })
18318            }
18319            Token::Word(word) => Ok(KeyValueOption {
18320                option_name: key.value,
18321                option_type: KeyValueOptionType::ENUM,
18322                value: word.value,
18323            }),
18324            Token::Number(n, _) => Ok(KeyValueOption {
18325                option_name: key.value,
18326                option_type: KeyValueOptionType::NUMBER,
18327                value: n,
18328            }),
18329            _ => self.expected("option value", self.peek_token()),
18330        }
18331    }
18332}
18333
18334fn maybe_prefixed_expr(expr: Expr, prefix: Option<Ident>) -> Expr {
18335    if let Some(prefix) = prefix {
18336        Expr::Prefixed {
18337            prefix,
18338            value: Box::new(expr),
18339        }
18340    } else {
18341        expr
18342    }
18343}
18344
18345impl Word {
18346    #[deprecated(since = "0.54.0", note = "please use `into_ident` instead")]
18347    pub fn to_ident(&self, span: Span) -> Ident {
18348        Ident {
18349            value: self.value.clone(),
18350            quote_style: self.quote_style,
18351            span,
18352        }
18353    }
18354
18355    /// Convert this word into an [`Ident`] identifier
18356    pub fn into_ident(self, span: Span) -> Ident {
18357        Ident {
18358            value: self.value,
18359            quote_style: self.quote_style,
18360            span,
18361        }
18362    }
18363}
18364
18365#[cfg(test)]
18366mod tests {
18367    use crate::test_utils::{all_dialects, TestedDialects};
18368
18369    use super::*;
18370
18371    #[test]
18372    fn test_prev_index() {
18373        let sql = "SELECT version";
18374        all_dialects().run_parser_method(sql, |parser| {
18375            assert_eq!(parser.peek_token(), Token::make_keyword("SELECT"));
18376            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
18377            parser.prev_token();
18378            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
18379            assert_eq!(parser.next_token(), Token::make_word("version", None));
18380            parser.prev_token();
18381            assert_eq!(parser.peek_token(), Token::make_word("version", None));
18382            assert_eq!(parser.next_token(), Token::make_word("version", None));
18383            assert_eq!(parser.peek_token(), Token::EOF);
18384            parser.prev_token();
18385            assert_eq!(parser.next_token(), Token::make_word("version", None));
18386            assert_eq!(parser.next_token(), Token::EOF);
18387            assert_eq!(parser.next_token(), Token::EOF);
18388            parser.prev_token();
18389        });
18390    }
18391
18392    #[test]
18393    fn test_peek_tokens() {
18394        all_dialects().run_parser_method("SELECT foo AS bar FROM baz", |parser| {
18395            assert!(matches!(
18396                parser.peek_tokens(),
18397                [Token::Word(Word {
18398                    keyword: Keyword::SELECT,
18399                    ..
18400                })]
18401            ));
18402
18403            assert!(matches!(
18404                parser.peek_tokens(),
18405                [
18406                    Token::Word(Word {
18407                        keyword: Keyword::SELECT,
18408                        ..
18409                    }),
18410                    Token::Word(_),
18411                    Token::Word(Word {
18412                        keyword: Keyword::AS,
18413                        ..
18414                    }),
18415                ]
18416            ));
18417
18418            for _ in 0..4 {
18419                parser.next_token();
18420            }
18421
18422            assert!(matches!(
18423                parser.peek_tokens(),
18424                [
18425                    Token::Word(Word {
18426                        keyword: Keyword::FROM,
18427                        ..
18428                    }),
18429                    Token::Word(_),
18430                    Token::EOF,
18431                    Token::EOF,
18432                ]
18433            ))
18434        })
18435    }
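
    // Illustrative sketch, not part of the upstream test suite: shows that
    // `Word::into_ident` carries the word's value and quote style over to the
    // resulting `Ident` while attaching the caller-supplied span. The word is
    // constructed by hand purely for the example.
    #[test]
    fn test_word_into_ident_sketch() {
        let word = Word {
            value: "foo".to_string(),
            quote_style: Some('"'),
            keyword: Keyword::NoKeyword,
        };
        let ident = word.into_ident(Span::empty());
        assert_eq!(ident.value, "foo");
        assert_eq!(ident.quote_style, Some('"'));
    }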
18436
18437    #[cfg(test)]
18438    mod test_parse_data_type {
18439        use crate::ast::{
18440            CharLengthUnits, CharacterLength, DataType, ExactNumberInfo, ObjectName, TimezoneInfo,
18441        };
18442        use crate::dialect::{AnsiDialect, GenericDialect, PostgreSqlDialect};
18443        use crate::test_utils::TestedDialects;
18444
18445        macro_rules! test_parse_data_type {
18446            ($dialect:expr, $input:expr, $expected_type:expr $(,)?) => {{
18447                $dialect.run_parser_method(&*$input, |parser| {
18448                    let data_type = parser.parse_data_type().unwrap();
18449                    assert_eq!($expected_type, data_type);
18450                    assert_eq!($input.to_string(), data_type.to_string());
18451                });
18452            }};
18453        }
18454
18455        #[test]
18456        fn test_ansi_character_string_types() {
18457            // Character string types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#character-string-type>
18458            let dialect =
18459                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
18460
18461            test_parse_data_type!(dialect, "CHARACTER", DataType::Character(None));
18462
18463            test_parse_data_type!(
18464                dialect,
18465                "CHARACTER(20)",
18466                DataType::Character(Some(CharacterLength::IntegerLength {
18467                    length: 20,
18468                    unit: None
18469                }))
18470            );
18471
18472            test_parse_data_type!(
18473                dialect,
18474                "CHARACTER(20 CHARACTERS)",
18475                DataType::Character(Some(CharacterLength::IntegerLength {
18476                    length: 20,
18477                    unit: Some(CharLengthUnits::Characters)
18478                }))
18479            );
18480
18481            test_parse_data_type!(
18482                dialect,
18483                "CHARACTER(20 OCTETS)",
18484                DataType::Character(Some(CharacterLength::IntegerLength {
18485                    length: 20,
18486                    unit: Some(CharLengthUnits::Octets)
18487                }))
18488            );
18489
18490            test_parse_data_type!(dialect, "CHAR", DataType::Char(None));
18491
18492            test_parse_data_type!(
18493                dialect,
18494                "CHAR(20)",
18495                DataType::Char(Some(CharacterLength::IntegerLength {
18496                    length: 20,
18497                    unit: None
18498                }))
18499            );
18500
18501            test_parse_data_type!(
18502                dialect,
18503                "CHAR(20 CHARACTERS)",
18504                DataType::Char(Some(CharacterLength::IntegerLength {
18505                    length: 20,
18506                    unit: Some(CharLengthUnits::Characters)
18507                }))
18508            );
18509
18510            test_parse_data_type!(
18511                dialect,
18512                "CHAR(20 OCTETS)",
18513                DataType::Char(Some(CharacterLength::IntegerLength {
18514                    length: 20,
18515                    unit: Some(CharLengthUnits::Octets)
18516                }))
18517            );
18518
18519            test_parse_data_type!(
18520                dialect,
18521                "CHARACTER VARYING(20)",
18522                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
18523                    length: 20,
18524                    unit: None
18525                }))
18526            );
18527
18528            test_parse_data_type!(
18529                dialect,
18530                "CHARACTER VARYING(20 CHARACTERS)",
18531                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
18532                    length: 20,
18533                    unit: Some(CharLengthUnits::Characters)
18534                }))
18535            );
18536
18537            test_parse_data_type!(
18538                dialect,
18539                "CHARACTER VARYING(20 OCTETS)",
18540                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
18541                    length: 20,
18542                    unit: Some(CharLengthUnits::Octets)
18543                }))
18544            );
18545
18546            test_parse_data_type!(
18547                dialect,
18548                "CHAR VARYING(20)",
18549                DataType::CharVarying(Some(CharacterLength::IntegerLength {
18550                    length: 20,
18551                    unit: None
18552                }))
18553            );
18554
18555            test_parse_data_type!(
18556                dialect,
18557                "CHAR VARYING(20 CHARACTERS)",
18558                DataType::CharVarying(Some(CharacterLength::IntegerLength {
18559                    length: 20,
18560                    unit: Some(CharLengthUnits::Characters)
18561                }))
18562            );
18563
18564            test_parse_data_type!(
18565                dialect,
18566                "CHAR VARYING(20 OCTETS)",
18567                DataType::CharVarying(Some(CharacterLength::IntegerLength {
18568                    length: 20,
18569                    unit: Some(CharLengthUnits::Octets)
18570                }))
18571            );
18572
18573            test_parse_data_type!(
18574                dialect,
18575                "VARCHAR(20)",
18576                DataType::Varchar(Some(CharacterLength::IntegerLength {
18577                    length: 20,
18578                    unit: None
18579                }))
18580            );
18581        }
18582
18583        #[test]
18584        fn test_ansi_character_large_object_types() {
18585            // Character large object types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#character-large-object-length>
18586            let dialect =
18587                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
18588
18589            test_parse_data_type!(
18590                dialect,
18591                "CHARACTER LARGE OBJECT",
18592                DataType::CharacterLargeObject(None)
18593            );
18594            test_parse_data_type!(
18595                dialect,
18596                "CHARACTER LARGE OBJECT(20)",
18597                DataType::CharacterLargeObject(Some(20))
18598            );
18599
18600            test_parse_data_type!(
18601                dialect,
18602                "CHAR LARGE OBJECT",
18603                DataType::CharLargeObject(None)
18604            );
18605            test_parse_data_type!(
18606                dialect,
18607                "CHAR LARGE OBJECT(20)",
18608                DataType::CharLargeObject(Some(20))
18609            );
18610
18611            test_parse_data_type!(dialect, "CLOB", DataType::Clob(None));
18612            test_parse_data_type!(dialect, "CLOB(20)", DataType::Clob(Some(20)));
18613        }
18614
18615        #[test]
18616        fn test_parse_custom_types() {
18617            let dialect =
18618                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
18619
18620            test_parse_data_type!(
18621                dialect,
18622                "GEOMETRY",
18623                DataType::Custom(ObjectName::from(vec!["GEOMETRY".into()]), vec![])
18624            );
18625
18626            test_parse_data_type!(
18627                dialect,
18628                "GEOMETRY(POINT)",
18629                DataType::Custom(
18630                    ObjectName::from(vec!["GEOMETRY".into()]),
18631                    vec!["POINT".to_string()]
18632                )
18633            );
18634
18635            test_parse_data_type!(
18636                dialect,
18637                "GEOMETRY(POINT, 4326)",
18638                DataType::Custom(
18639                    ObjectName::from(vec!["GEOMETRY".into()]),
18640                    vec!["POINT".to_string(), "4326".to_string()]
18641                )
18642            );
18643        }
18644
18645        #[test]
18646        fn test_ansi_exact_numeric_types() {
18647            // Exact numeric types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#exact-numeric-type>
18648            let dialect = TestedDialects::new(vec![
18649                Box::new(GenericDialect {}),
18650                Box::new(AnsiDialect {}),
18651                Box::new(PostgreSqlDialect {}),
18652            ]);
18653
18654            test_parse_data_type!(dialect, "NUMERIC", DataType::Numeric(ExactNumberInfo::None));
18655
18656            test_parse_data_type!(
18657                dialect,
18658                "NUMERIC(2)",
18659                DataType::Numeric(ExactNumberInfo::Precision(2))
18660            );
18661
18662            test_parse_data_type!(
18663                dialect,
18664                "NUMERIC(2,10)",
18665                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(2, 10))
18666            );
18667
18668            test_parse_data_type!(dialect, "DECIMAL", DataType::Decimal(ExactNumberInfo::None));
18669
18670            test_parse_data_type!(
18671                dialect,
18672                "DECIMAL(2)",
18673                DataType::Decimal(ExactNumberInfo::Precision(2))
18674            );
18675
18676            test_parse_data_type!(
18677                dialect,
18678                "DECIMAL(2,10)",
18679                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(2, 10))
18680            );
18681
18682            test_parse_data_type!(dialect, "DEC", DataType::Dec(ExactNumberInfo::None));
18683
18684            test_parse_data_type!(
18685                dialect,
18686                "DEC(2)",
18687                DataType::Dec(ExactNumberInfo::Precision(2))
18688            );
18689
18690            test_parse_data_type!(
18691                dialect,
18692                "DEC(2,10)",
18693                DataType::Dec(ExactNumberInfo::PrecisionAndScale(2, 10))
18694            );
18695
18696            // Test negative scale values.
18697            test_parse_data_type!(
18698                dialect,
18699                "NUMERIC(10,-2)",
18700                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -2))
18701            );
18702
18703            test_parse_data_type!(
18704                dialect,
18705                "DECIMAL(1000,-10)",
18706                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(1000, -10))
18707            );
18708
18709            test_parse_data_type!(
18710                dialect,
18711                "DEC(5,-1000)",
18712                DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -1000))
18713            );
18714
18715            test_parse_data_type!(
18716                dialect,
18717                "NUMERIC(10,-5)",
18718                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -5))
18719            );
18720
18721            test_parse_data_type!(
18722                dialect,
18723                "DECIMAL(20,-10)",
18724                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(20, -10))
18725            );
18726
18727            test_parse_data_type!(
18728                dialect,
18729                "DEC(5,-2)",
18730                DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -2))
18731            );
18732
18733            dialect.run_parser_method("NUMERIC(10,+5)", |parser| {
18734                let data_type = parser.parse_data_type().unwrap();
18735                assert_eq!(
18736                    DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, 5)),
18737                    data_type
18738                );
18739                // Note: Explicit '+' sign is not preserved in output, which is correct
18740                assert_eq!("NUMERIC(10,5)", data_type.to_string());
18741            });
18742        }
18743
18744        #[test]
18745        fn test_ansi_date_type() {
18746            // Datetime types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#datetime-type>
18747            let dialect =
18748                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
18749
18750            test_parse_data_type!(dialect, "DATE", DataType::Date);
18751
18752            test_parse_data_type!(dialect, "TIME", DataType::Time(None, TimezoneInfo::None));
18753
18754            test_parse_data_type!(
18755                dialect,
18756                "TIME(6)",
18757                DataType::Time(Some(6), TimezoneInfo::None)
18758            );
18759
18760            test_parse_data_type!(
18761                dialect,
18762                "TIME WITH TIME ZONE",
18763                DataType::Time(None, TimezoneInfo::WithTimeZone)
18764            );
18765
18766            test_parse_data_type!(
18767                dialect,
18768                "TIME(6) WITH TIME ZONE",
18769                DataType::Time(Some(6), TimezoneInfo::WithTimeZone)
18770            );
18771
18772            test_parse_data_type!(
18773                dialect,
18774                "TIME WITHOUT TIME ZONE",
18775                DataType::Time(None, TimezoneInfo::WithoutTimeZone)
18776            );
18777
18778            test_parse_data_type!(
18779                dialect,
18780                "TIME(6) WITHOUT TIME ZONE",
18781                DataType::Time(Some(6), TimezoneInfo::WithoutTimeZone)
18782            );
18783
18784            test_parse_data_type!(
18785                dialect,
18786                "TIMESTAMP",
18787                DataType::Timestamp(None, TimezoneInfo::None)
18788            );
18789
18790            test_parse_data_type!(
18791                dialect,
18792                "TIMESTAMP(22)",
18793                DataType::Timestamp(Some(22), TimezoneInfo::None)
18794            );
18795
18796            test_parse_data_type!(
18797                dialect,
18798                "TIMESTAMP(22) WITH TIME ZONE",
18799                DataType::Timestamp(Some(22), TimezoneInfo::WithTimeZone)
18800            );
18801
18802            test_parse_data_type!(
18803                dialect,
18804                "TIMESTAMP(33) WITHOUT TIME ZONE",
18805                DataType::Timestamp(Some(33), TimezoneInfo::WithoutTimeZone)
18806            );
18807        }
18808    }
18809
18810    #[test]
18811    fn test_parse_schema_name() {
18812        // The parsed name should serialize back to exactly the input string, so the macro does not take a separate expected string
18813        macro_rules! test_parse_schema_name {
18814            ($input:expr, $expected_name:expr $(,)?) => {{
18815                all_dialects().run_parser_method(&*$input, |parser| {
18816                    let schema_name = parser.parse_schema_name().unwrap();
18817                    // Validate that the structure is the same as expected
18818                    assert_eq!(schema_name, $expected_name);
18819                    // Validate that the input and the expected structure serialization are the same
18820                    assert_eq!(schema_name.to_string(), $input.to_string());
18821                });
18822            }};
18823        }
18824
18825        let dummy_name = ObjectName::from(vec![Ident::new("dummy_name")]);
18826        let dummy_authorization = Ident::new("dummy_authorization");
18827
18828        test_parse_schema_name!(
18829            format!("{dummy_name}"),
18830            SchemaName::Simple(dummy_name.clone())
18831        );
18832
18833        test_parse_schema_name!(
18834            format!("AUTHORIZATION {dummy_authorization}"),
18835            SchemaName::UnnamedAuthorization(dummy_authorization.clone()),
18836        );
18837        test_parse_schema_name!(
18838            format!("{dummy_name} AUTHORIZATION {dummy_authorization}"),
18839            SchemaName::NamedAuthorization(dummy_name.clone(), dummy_authorization.clone()),
18840        );
18841    }
18842
18843    #[test]
18844    fn mysql_parse_index_table_constraint() {
18845        macro_rules! test_parse_table_constraint {
18846            ($dialect:expr, $input:expr, $expected:expr $(,)?) => {{
18847                $dialect.run_parser_method(&*$input, |parser| {
18848                    let constraint = parser.parse_optional_table_constraint().unwrap().unwrap();
18849                    // Validate that the structure is the same as expected
18850                    assert_eq!(constraint, $expected);
18851                    // Validate that the input and the expected structure serialization are the same
18852                    assert_eq!(constraint.to_string(), $input.to_string());
18853                });
18854            }};
18855        }
18856
18857        fn mk_expected_col(name: &str) -> IndexColumn {
18858            IndexColumn {
18859                column: OrderByExpr {
18860                    expr: Expr::Identifier(name.into()),
18861                    options: OrderByOptions {
18862                        asc: None,
18863                        nulls_first: None,
18864                    },
18865                    with_fill: None,
18866                },
18867                operator_class: None,
18868            }
18869        }
18870
18871        let dialect =
18872            TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(MySqlDialect {})]);
18873
18874        test_parse_table_constraint!(
18875            dialect,
18876            "INDEX (c1)",
18877            TableConstraint::Index {
18878                display_as_key: false,
18879                name: None,
18880                index_type: None,
18881                columns: vec![mk_expected_col("c1")],
18882                index_options: vec![],
18883            }
18884        );
18885
18886        test_parse_table_constraint!(
18887            dialect,
18888            "KEY (c1)",
18889            TableConstraint::Index {
18890                display_as_key: true,
18891                name: None,
18892                index_type: None,
18893                columns: vec![mk_expected_col("c1")],
18894                index_options: vec![],
18895            }
18896        );
18897
18898        test_parse_table_constraint!(
18899            dialect,
18900            "INDEX 'index' (c1, c2)",
18901            TableConstraint::Index {
18902                display_as_key: false,
18903                name: Some(Ident::with_quote('\'', "index")),
18904                index_type: None,
18905                columns: vec![mk_expected_col("c1"), mk_expected_col("c2")],
18906                index_options: vec![],
18907            }
18908        );
18909
18910        test_parse_table_constraint!(
18911            dialect,
18912            "INDEX USING BTREE (c1)",
18913            TableConstraint::Index {
18914                display_as_key: false,
18915                name: None,
18916                index_type: Some(IndexType::BTree),
18917                columns: vec![mk_expected_col("c1")],
18918                index_options: vec![],
18919            }
18920        );
18921
18922        test_parse_table_constraint!(
18923            dialect,
18924            "INDEX USING HASH (c1)",
18925            TableConstraint::Index {
18926                display_as_key: false,
18927                name: None,
18928                index_type: Some(IndexType::Hash),
18929                columns: vec![mk_expected_col("c1")],
18930                index_options: vec![],
18931            }
18932        );
18933
18934        test_parse_table_constraint!(
18935            dialect,
18936            "INDEX idx_name USING BTREE (c1)",
18937            TableConstraint::Index {
18938                display_as_key: false,
18939                name: Some(Ident::new("idx_name")),
18940                index_type: Some(IndexType::BTree),
18941                columns: vec![mk_expected_col("c1")],
18942                index_options: vec![],
18943            }
18944        );
18945
18946        test_parse_table_constraint!(
18947            dialect,
18948            "INDEX idx_name USING HASH (c1)",
18949            TableConstraint::Index {
18950                display_as_key: false,
18951                name: Some(Ident::new("idx_name")),
18952                index_type: Some(IndexType::Hash),
18953                columns: vec![mk_expected_col("c1")],
18954                index_options: vec![],
18955            }
18956        );
18957    }
18958
18959    #[test]
18960    fn test_tokenizer_error_loc() {
18961        let sql = "foo '";
18962        let ast = Parser::parse_sql(&GenericDialect, sql);
18963        assert_eq!(
18964            ast,
18965            Err(ParserError::TokenizerError(
18966                "Unterminated string literal at Line: 1, Column: 5".to_string()
18967            ))
18968        );
18969    }
18970
18971    #[test]
18972    fn test_parser_error_loc() {
18973        let sql = "SELECT this is a syntax error";
18974        let ast = Parser::parse_sql(&GenericDialect, sql);
18975        assert_eq!(
18976            ast,
18977            Err(ParserError::ParserError(
18978                "Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS, found: a at Line: 1, Column: 16"
18979                    .to_string()
18980            ))
18981        );
18982    }
18983
18984    #[test]
18985    fn test_nested_explain_error() {
18986        let sql = "EXPLAIN EXPLAIN SELECT 1";
18987        let ast = Parser::parse_sql(&GenericDialect, sql);
18988        assert_eq!(
18989            ast,
18990            Err(ParserError::ParserError(
18991                "Explain must be root of the plan".to_string()
18992            ))
18993        );
18994    }
18995
18996    #[test]
18997    fn test_parse_multipart_identifier_positive() {
18998        let dialect = TestedDialects::new(vec![Box::new(GenericDialect {})]);
18999
19000        // parse multipart with quotes
19001        let expected = vec![
19002            Ident {
19003                value: "CATALOG".to_string(),
19004                quote_style: None,
19005                span: Span::empty(),
19006            },
19007            Ident {
19008                value: "F(o)o. \"bar".to_string(),
19009                quote_style: Some('"'),
19010                span: Span::empty(),
19011            },
19012            Ident {
19013                value: "table".to_string(),
19014                quote_style: None,
19015                span: Span::empty(),
19016            },
19017        ];
19018        dialect.run_parser_method(r#"CATALOG."F(o)o. ""bar".table"#, |parser| {
19019            let actual = parser.parse_multipart_identifier().unwrap();
19020            assert_eq!(expected, actual);
19021        });
19022
19023        // allow whitespace between ident parts
19024        let expected = vec![
19025            Ident {
19026                value: "CATALOG".to_string(),
19027                quote_style: None,
19028                span: Span::empty(),
19029            },
19030            Ident {
19031                value: "table".to_string(),
19032                quote_style: None,
19033                span: Span::empty(),
19034            },
19035        ];
19036        dialect.run_parser_method("CATALOG . table", |parser| {
19037            let actual = parser.parse_multipart_identifier().unwrap();
19038            assert_eq!(expected, actual);
19039        });
19040    }
19041
19042    #[test]
19043    fn test_parse_multipart_identifier_negative() {
19044        macro_rules! test_parse_multipart_identifier_error {
19045            ($input:expr, $expected_err:expr $(,)?) => {{
19046                all_dialects().run_parser_method(&*$input, |parser| {
19047                    let actual_err = parser.parse_multipart_identifier().unwrap_err();
19048                    assert_eq!(actual_err.to_string(), $expected_err);
19049                });
19050            }};
19051        }
19052
19053        test_parse_multipart_identifier_error!(
19054            "",
19055            "sql parser error: Empty input when parsing identifier",
19056        );
19057
19058        test_parse_multipart_identifier_error!(
19059            "*schema.table",
19060            "sql parser error: Unexpected token in identifier: *",
19061        );
19062
19063        test_parse_multipart_identifier_error!(
19064            "schema.table*",
19065            "sql parser error: Unexpected token in identifier: *",
19066        );
19067
19068        test_parse_multipart_identifier_error!(
19069            "schema.table.",
19070            "sql parser error: Trailing period in identifier",
19071        );
19072
19073        test_parse_multipart_identifier_error!(
19074            "schema.*",
19075            "sql parser error: Unexpected token following period in identifier: *",
19076        );
19077    }
19078
19079    #[test]
19080    fn test_mysql_partition_selection() {
19081        let sql = "SELECT * FROM employees PARTITION (p0, p2)";
19082        let expected = vec!["p0", "p2"];
19083
19084        let ast: Vec<Statement> = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
19085        assert_eq!(ast.len(), 1);
19086        if let Statement::Query(v) = &ast[0] {
19087            if let SetExpr::Select(select) = &*v.body {
19088                assert_eq!(select.from.len(), 1);
19089                let from: &TableWithJoins = &select.from[0];
19090                let table_factor = &from.relation;
19091                if let TableFactor::Table { partitions, .. } = table_factor {
19092                    let actual: Vec<&str> = partitions
19093                        .iter()
19094                        .map(|ident| ident.value.as_str())
19095                        .collect();
19096                    assert_eq!(expected, actual);
19097                }
19098            }
19099        } else {
19100            panic!("failed to parse MySQL partition selection");
19101        }
19102    }
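
    // Illustrative sketch, not part of the upstream test suite: drives the
    // private `maybe_parse_show_stmt_in` and `maybe_parse_show_stmt_starts_with`
    // helpers directly on hypothetical SHOW-statement fragments. It assumes the
    // `ShowStatementIn` struct and the related enums compare with `PartialEq`
    // like the rest of the AST nodes.
    #[test]
    fn test_show_statement_option_helpers_sketch() {
        let dialect = TestedDialects::new(vec![Box::new(GenericDialect {})]);

        // `IN ACCOUNT my_account` yields an explicit parent type and parent name.
        dialect.run_parser_method("IN ACCOUNT my_account", |parser| {
            let show_in = parser.maybe_parse_show_stmt_in().unwrap().unwrap();
            assert!(matches!(show_in.clause, ShowStatementInClause::IN));
            assert!(matches!(
                show_in.parent_type,
                Some(ShowStatementInParentType::Account)
            ));
            assert_eq!(
                show_in.parent_name,
                Some(ObjectName::from(vec![Ident::new("my_account")]))
            );
        });

        // `STARTS WITH 'abc'` is returned as a plain string value.
        dialect.run_parser_method("STARTS WITH 'abc'", |parser| {
            let starts_with = parser.maybe_parse_show_stmt_starts_with().unwrap();
            assert_eq!(
                starts_with,
                Some(Value::SingleQuotedString("abc".to_string()))
            );
        });
    }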
19103
19104    #[test]
19105    fn test_replace_into_placeholders() {
19106        let sql = "REPLACE INTO t (a) VALUES (&a)";
19107
19108        assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
19109    }
19110
19111    #[test]
19112    fn test_replace_into_set_placeholder() {
19113        let sql = "REPLACE INTO t SET ?";
19114
19115        assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
19116    }
19117
19118    #[test]
19119    fn test_replace_incomplete() {
19120        let sql = r#"REPLACE"#;
19121
19122        assert!(Parser::parse_sql(&MySqlDialect {}, sql).is_err());
19123    }
19124
19125    #[test]
19126    fn test_placeholder_invalid_whitespace() {
19127        for w in ["  ", "/*invalid*/"] {
19128            let sql = format!("\nSELECT\n  :{w}fooBar");
19129            assert!(Parser::parse_sql(&GenericDialect, &sql).is_err());
19130        }
19131    }
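
    // Illustrative sketch, not part of the upstream test suite: exercises the
    // crate-private `parse_key_value_options` / `parse_key_value_option` helpers
    // on a hypothetical parenthesized option list and checks the inferred option
    // types (STRING for quoted values, NUMBER for numeric literals).
    #[test]
    fn test_parse_key_value_options_sketch() {
        let dialect = TestedDialects::new(vec![Box::new(GenericDialect {})]);
        dialect.run_parser_method("(TYPE='CSV' SKIP_HEADER=1)", |parser| {
            let options = parser.parse_key_value_options(true, &[]).unwrap();
            assert_eq!(options.len(), 2);

            assert_eq!(options[0].option_name, "TYPE");
            assert!(matches!(options[0].option_type, KeyValueOptionType::STRING));
            assert_eq!(options[0].value, "CSV");

            assert_eq!(options[1].option_name, "SKIP_HEADER");
            assert!(matches!(options[1].option_type, KeyValueOptionType::NUMBER));
            assert_eq!(options[1].value, "1");
        });
    }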
19132}