sqlparser/parser/mod.rs

// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

//! SQL Parser

#[cfg(not(feature = "std"))]
use alloc::{
    boxed::Box,
    format,
    string::{String, ToString},
    vec,
    vec::Vec,
};
use core::{
    fmt::{self, Display},
    str::FromStr,
};
use helpers::attached_token::AttachedToken;

use log::debug;

use recursion::RecursionCounter;
use IsLateral::*;
use IsOptional::*;

use crate::ast::*;
use crate::ast::{
    comments,
    helpers::{
        key_value_options::{
            KeyValueOption, KeyValueOptionKind, KeyValueOptions, KeyValueOptionsDelimiter,
        },
        stmt_create_table::{CreateTableBuilder, CreateTableConfiguration},
    },
};
use crate::dialect::*;
use crate::keywords::{Keyword, ALL_KEYWORDS};
use crate::tokenizer::*;
use sqlparser::parser::ParserState::ColumnDefinition;

/// Errors produced by the SQL parser.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ParserError {
    /// Error originating from the tokenizer with a message.
    TokenizerError(String),
    /// Generic parser error with a message.
    ParserError(String),
    /// Raised when a recursion depth limit is exceeded.
    RecursionLimitExceeded,
}

// Use `Parser::expected` instead, if possible
macro_rules! parser_err {
    ($MSG:expr, $loc:expr) => {
        Err(ParserError::ParserError(format!("{}{}", $MSG, $loc)))
    };
}

mod alter;
mod merge;

#[cfg(feature = "std")]
/// Implementation of [`RecursionCounter`] used when std is available
mod recursion {
    use std::cell::Cell;
    use std::rc::Rc;

    use super::ParserError;

    /// Tracks remaining recursion depth. This value is decremented on
    /// each call to [`RecursionCounter::try_decrease()`]; when it reaches 0 an error is
    /// returned.
    ///
    /// Note: Uses an [`std::rc::Rc`] and [`std::cell::Cell`] in order to satisfy the Rust
    /// borrow checker, so that the automatic [`DepthGuard`] can hold a
    /// reference to the counter and restore the depth when it is dropped.
    ///
    /// Note: when the "recursive-protection" feature is enabled, this crate uses additional stack overflow protection
    /// for some of its recursive methods. See [`recursive::recursive`] for more information.
    pub(crate) struct RecursionCounter {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl RecursionCounter {
        /// Creates a [`RecursionCounter`] with the specified maximum
        /// depth
        pub fn new(remaining_depth: usize) -> Self {
            Self {
                remaining_depth: Rc::new(remaining_depth.into()),
            }
        }

        /// Decreases the remaining depth by 1.
        ///
        /// Returns [`Err`] if the remaining depth is already 0.
        ///
        /// Otherwise returns a [`DepthGuard`] that adds 1 back to the
        /// remaining depth when it is dropped.
        pub fn try_decrease(&self) -> Result<DepthGuard, ParserError> {
            let old_value = self.remaining_depth.get();
            // ran out of space
            if old_value == 0 {
                Err(ParserError::RecursionLimitExceeded)
            } else {
                self.remaining_depth.set(old_value - 1);
                Ok(DepthGuard::new(Rc::clone(&self.remaining_depth)))
            }
        }
    }
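
    // A minimal sketch of how the parser uses this counter: each recursive
    // entry point calls `try_decrease` and holds the returned guard for the
    // duration of the call, so the depth is restored automatically on return.
    //
    //     let counter = RecursionCounter::new(2);
    //     let _g1 = counter.try_decrease().unwrap(); // 1 level remaining
    //     let _g2 = counter.try_decrease().unwrap(); // 0 levels remaining
    //     assert!(counter.try_decrease().is_err());  // RecursionLimitExceeded
    //     drop(_g2);                                 // 1 level available again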

    /// Guard that increases the remaining depth by 1 on drop
    pub struct DepthGuard {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl DepthGuard {
        fn new(remaining_depth: Rc<Cell<usize>>) -> Self {
            Self { remaining_depth }
        }
    }
    impl Drop for DepthGuard {
        fn drop(&mut self) {
            let old_value = self.remaining_depth.get();
            self.remaining_depth.set(old_value + 1);
        }
    }
}

#[cfg(not(feature = "std"))]
mod recursion {
    /// Implementation of [`RecursionCounter`] used when std is NOT available (it does not
    /// guard against stack overflow).
    ///
    /// Has the same API as the std [`RecursionCounter`] implementation
    /// but does not actually limit stack depth.
    pub(crate) struct RecursionCounter {}

    impl RecursionCounter {
        pub fn new(_remaining_depth: usize) -> Self {
            Self {}
        }
        pub fn try_decrease(&self) -> Result<DepthGuard, super::ParserError> {
            Ok(DepthGuard {})
        }
    }

    pub struct DepthGuard {}
}

#[derive(PartialEq, Eq)]
/// Indicates whether a parser element is optional or mandatory.
pub enum IsOptional {
    /// The element is optional.
    Optional,
    /// The element is mandatory.
    Mandatory,
}

/// Indicates if a table expression is lateral.
pub enum IsLateral {
    /// The expression is lateral.
    Lateral,
    /// The expression is not lateral.
    NotLateral,
}

/// Represents a wildcard expression used in SELECT lists.
pub enum WildcardExpr {
    /// A specific expression used instead of a wildcard.
    Expr(Expr),
    /// A qualified wildcard like `table.*`.
    QualifiedWildcard(ObjectName),
    /// An unqualified `*` wildcard.
    Wildcard,
}

impl From<TokenizerError> for ParserError {
    fn from(e: TokenizerError) -> Self {
        ParserError::TokenizerError(e.to_string())
    }
}

impl fmt::Display for ParserError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "sql parser error: {}",
            match self {
                ParserError::TokenizerError(s) => s,
                ParserError::ParserError(s) => s,
                ParserError::RecursionLimitExceeded => "recursion limit exceeded",
            }
        )
    }
}

impl core::error::Error for ParserError {}

// By default, allow expressions to be nested up to this depth before erroring
const DEFAULT_REMAINING_DEPTH: usize = 50;

// A constant EOF token that can be referenced.
const EOF_TOKEN: TokenWithSpan = TokenWithSpan {
    token: Token::EOF,
    span: Span {
        start: Location { line: 0, column: 0 },
        end: Location { line: 0, column: 0 },
    },
};

/// Composite type declarations using angle-bracket syntax can be arbitrarily
/// nested, so that the following declaration is possible:
///      `ARRAY<ARRAY<INT>>`
/// But the tokenizer recognizes the `>>` as a ShiftRight token.
/// We work around that limitation when parsing a data type by accepting
/// either a `>` or `>>` token in such cases, remembering which variant we
/// matched.
/// In the latter case, having matched a `>>`, the parent type will not try to
/// match its own closing `>`, since that token was already consumed while
/// parsing the child type.
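///
/// For example, `ARRAY<ARRAY<INT>>` tokenizes roughly as
/// `ARRAY`, `<`, `ARRAY`, `<`, `INT`, `>>` (a `ShiftRight`), so the inner
/// type consumes the `>>` and reports, via `MatchedTrailingBracket(true)`,
/// that the outer closing `>` has already been matched.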
///
/// See [Parser::parse_data_type] for details
struct MatchedTrailingBracket(bool);

impl From<bool> for MatchedTrailingBracket {
    fn from(value: bool) -> Self {
        Self(value)
    }
}

/// Options that control how the [`Parser`] parses SQL text
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ParserOptions {
    /// Allow trailing commas in lists (e.g. `a, b,`).
    pub trailing_commas: bool,
    /// Controls how literal values are unescaped. See
    /// [`Tokenizer::with_unescape`] for more details.
    pub unescape: bool,
    /// Controls if the parser expects a semi-colon token
    /// between statements. Default is `true`.
    pub require_semicolon_stmt_delimiter: bool,
}

impl Default for ParserOptions {
    fn default() -> Self {
        Self {
            trailing_commas: false,
            unescape: true,
            require_semicolon_stmt_delimiter: true,
        }
    }
}

impl ParserOptions {
    /// Create a new [`ParserOptions`]
    pub fn new() -> Self {
        Default::default()
    }

    /// Set if trailing commas are allowed.
    ///
    /// If this option is `false` (the default), the following SQL will
    /// not parse. If the option is `true`, the SQL will parse.
    ///
    /// ```sql
    ///  SELECT
    ///   foo,
    ///   bar,
    ///  FROM baz
    /// ```
    pub fn with_trailing_commas(mut self, trailing_commas: bool) -> Self {
        self.trailing_commas = trailing_commas;
        self
    }

    /// Set if literal values are unescaped. Defaults to true. See
    /// [`Tokenizer::with_unescape`] for more details.
    pub fn with_unescape(mut self, unescape: bool) -> Self {
        self.unescape = unescape;
        self
    }
}

#[derive(Copy, Clone)]
enum ParserState {
    /// The default state of the parser.
    Normal,
    /// The state when parsing a CONNECT BY expression. This allows parsing
    /// PRIOR expressions while still allowing prior as an identifier name
    /// in other contexts.
    ConnectBy,
    /// The state when parsing column definitions.  This state prohibits
    /// NOT NULL as an alias for IS NOT NULL.  For example:
    /// ```sql
    /// CREATE TABLE foo (abc BIGINT NOT NULL);
    /// ```
    ColumnDefinition,
}

/// A SQL Parser
///
/// This struct is the main entry point for parsing SQL queries.
///
/// # Functionality:
/// * Parsing SQL: see examples on [`Parser::new`] and [`Parser::parse_sql`]
/// * Controlling recursion: See [`Parser::with_recursion_limit`]
/// * Controlling parser options: See [`Parser::with_options`]
/// * Providing your own tokens: See [`Parser::with_tokens`]
///
/// # Internals
///
/// The parser uses a [`Tokenizer`] to tokenize the input SQL string into a
/// `Vec` of [`TokenWithSpan`]s and maintains an `index` to the current token
/// being processed. The token vec may contain multiple SQL statements.
///
/// * The "current" token is the token at `index - 1`
/// * The "next" token is the token at `index`
/// * The "previous" token is the token at `index - 2`
///
/// If `index` is equal to the length of the token stream, the 'next' token is
/// [`Token::EOF`].
///
/// For example, the SQL string "SELECT * FROM foo" will be tokenized into
/// the following tokens:
/// ```text
///  [
///    "SELECT", // token index 0
///    " ",      // whitespace
///    "*",
///    " ",
///    "FROM",
///    " ",
///    "foo"
///   ]
/// ```
///
///
pub struct Parser<'a> {
    /// The tokens
    tokens: Vec<TokenWithSpan>,
    /// The index of the first unprocessed token in [`Parser::tokens`].
    index: usize,
    /// The current state of the parser.
    state: ParserState,
    /// The SQL dialect to use.
    dialect: &'a dyn Dialect,
    /// Additional options that allow you to mix & match behavior
    /// otherwise constrained to certain dialects (e.g. trailing
    /// commas) and/or how values are parsed (e.g. unescaping).
    options: ParserOptions,
    /// Ensures the stack does not overflow by limiting recursion depth.
    recursion_counter: RecursionCounter,
}

impl<'a> Parser<'a> {
    /// Create a parser for a [`Dialect`]
    ///
    /// See also [`Parser::parse_sql`]
    ///
    /// Example:
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let statements = Parser::new(&dialect)
    ///   .try_with_sql("SELECT * FROM foo")?
    ///   .parse_statements()?;
    /// # Ok(())
    /// # }
    /// ```
    pub fn new(dialect: &'a dyn Dialect) -> Self {
        Self {
            tokens: vec![],
            index: 0,
            state: ParserState::Normal,
            dialect,
            recursion_counter: RecursionCounter::new(DEFAULT_REMAINING_DEPTH),
            options: ParserOptions::new().with_trailing_commas(dialect.supports_trailing_commas()),
        }
    }

    /// Specify the maximum recursion limit while parsing.
    ///
    /// [`Parser`] prevents stack overflows by returning
    /// [`ParserError::RecursionLimitExceeded`] if the parser exceeds
    /// this depth while processing the query.
    ///
    /// Example:
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let result = Parser::new(&dialect)
    ///   .with_recursion_limit(1)
    ///   .try_with_sql("SELECT * FROM foo WHERE (a OR (b OR (c OR d)))")?
    ///   .parse_statements();
    ///   assert_eq!(result, Err(ParserError::RecursionLimitExceeded));
    /// # Ok(())
    /// # }
    /// ```
    ///
    /// Note: when the "recursive-protection" feature is enabled, this crate uses additional stack overflow protection
    /// for some of its recursive methods. See [`recursive::recursive`] for more information.
    pub fn with_recursion_limit(mut self, recursion_limit: usize) -> Self {
        self.recursion_counter = RecursionCounter::new(recursion_limit);
        self
    }

    /// Specify additional parser options
    ///
    /// [`Parser`] supports additional options ([`ParserOptions`])
    /// that allow you to mix & match behavior otherwise constrained
    /// to certain dialects (e.g. trailing commas).
    ///
    /// Example:
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError, ParserOptions}, dialect::GenericDialect};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let options = ParserOptions::new()
    ///    .with_trailing_commas(true)
    ///    .with_unescape(false);
    /// let result = Parser::new(&dialect)
    ///   .with_options(options)
    ///   .try_with_sql("SELECT a, b, COUNT(*), FROM foo GROUP BY a, b,")?
    ///   .parse_statements();
    ///   assert!(matches!(result, Ok(_)));
    /// # Ok(())
    /// # }
    /// ```
    pub fn with_options(mut self, options: ParserOptions) -> Self {
        self.options = options;
        self
    }

    /// Reset this parser to parse the specified token stream
    pub fn with_tokens_with_locations(mut self, tokens: Vec<TokenWithSpan>) -> Self {
        self.tokens = tokens;
        self.index = 0;
        self
    }

    /// Reset this parser state to parse the specified tokens
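    ///
    /// Example (a minimal sketch that tokenizes the SQL separately and then
    /// hands the tokens to this parser):
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect, tokenizer::Tokenizer};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let tokens = Tokenizer::new(&dialect, "SELECT * FROM foo").tokenize()?;
    /// let statements = Parser::new(&dialect)
    ///   .with_tokens(tokens)
    ///   .parse_statements()?;
    /// assert_eq!(statements.len(), 1);
    /// # Ok(())
    /// # }
    /// ```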
    pub fn with_tokens(self, tokens: Vec<Token>) -> Self {
        // Put in dummy locations
        let tokens_with_locations: Vec<TokenWithSpan> = tokens
            .into_iter()
            .map(|token| TokenWithSpan {
                token,
                span: Span::empty(),
            })
            .collect();
        self.with_tokens_with_locations(tokens_with_locations)
    }

    /// Tokenizes the sql string and sets this [`Parser`]'s state to
    /// parse the resulting tokens.
    ///
    /// Returns an error if there was an error tokenizing the SQL string.
    ///
    /// See [`Parser::new()`] for an example.
    pub fn try_with_sql(self, sql: &str) -> Result<Self, ParserError> {
        debug!("Parsing sql '{sql}'...");
        let tokens = Tokenizer::new(self.dialect, sql)
            .with_unescape(self.options.unescape)
            .tokenize_with_location()?;
        Ok(self.with_tokens_with_locations(tokens))
    }

    /// Parse potentially multiple statements
    ///
    /// Example
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let statements = Parser::new(&dialect)
    ///   // Parse a SQL string with 2 separate statements
    ///   .try_with_sql("SELECT * FROM foo; SELECT * FROM bar;")?
    ///   .parse_statements()?;
    /// assert_eq!(statements.len(), 2);
    /// # Ok(())
    /// # }
    /// ```
    pub fn parse_statements(&mut self) -> Result<Vec<Statement>, ParserError> {
        let mut stmts = Vec::new();
        let mut expecting_statement_delimiter = false;
        loop {
            // ignore empty statements (between successive statement delimiters)
            while self.consume_token(&Token::SemiColon) {
                expecting_statement_delimiter = false;
            }

            if !self.options.require_semicolon_stmt_delimiter {
                expecting_statement_delimiter = false;
            }

            match self.peek_token().token {
                Token::EOF => break,

                // end of statement
                Token::Word(word) => {
                    if expecting_statement_delimiter && word.keyword == Keyword::END {
                        break;
                    }
                }
                _ => {}
            }

            if expecting_statement_delimiter {
                return self.expected("end of statement", self.peek_token());
            }

            let statement = self.parse_statement()?;
            stmts.push(statement);
            expecting_statement_delimiter = true;
        }
        Ok(stmts)
    }

    /// Convenience method to parse a string with one or more SQL
    /// statements into an Abstract Syntax Tree (AST).
    ///
    /// Example
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let statements = Parser::parse_sql(
    ///   &dialect, "SELECT * FROM foo"
    /// )?;
    /// assert_eq!(statements.len(), 1);
    /// # Ok(())
    /// # }
    /// ```
    pub fn parse_sql(dialect: &dyn Dialect, sql: &str) -> Result<Vec<Statement>, ParserError> {
        Parser::new(dialect).try_with_sql(sql)?.parse_statements()
    }

    /// Parses the given `sql` into an Abstract Syntax Tree (AST), also
    /// returning any source code comments that were encountered.
    ///
    /// See [Parser::parse_sql].
    pub fn parse_sql_with_comments(
        dialect: &'a dyn Dialect,
        sql: &str,
    ) -> Result<(Vec<Statement>, comments::Comments), ParserError> {
        let mut p = Parser::new(dialect).try_with_sql(sql)?;
        p.parse_statements().map(|stmts| (stmts, p.into_comments()))
    }

    /// Consumes this parser, returning the comments found in the parsed token stream.
    fn into_comments(self) -> comments::Comments {
        let mut comments = comments::Comments::default();
        for t in self.tokens.into_iter() {
            match t.token {
                Token::Whitespace(Whitespace::SingleLineComment { comment, prefix }) => {
                    comments.offer(comments::CommentWithSpan {
                        comment: comments::Comment::SingleLine {
                            content: comment,
                            prefix,
                        },
                        span: t.span,
                    });
                }
                Token::Whitespace(Whitespace::MultiLineComment(comment)) => {
                    comments.offer(comments::CommentWithSpan {
                        comment: comments::Comment::MultiLine(comment),
                        span: t.span,
                    });
                }
                _ => {}
            }
        }
        comments
    }

    /// Parse a single top-level statement (such as SELECT, INSERT, CREATE, etc.),
    /// stopping before the statement separator, if any.
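    ///
    /// Example (a minimal sketch; only the first statement is consumed):
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let statement = Parser::new(&dialect)
    ///   .try_with_sql("SELECT * FROM foo; SELECT * FROM bar")?
    ///   .parse_statement()?;
    /// assert_eq!(statement.to_string(), "SELECT * FROM foo");
    /// # Ok(())
    /// # }
    /// ```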
    pub fn parse_statement(&mut self) -> Result<Statement, ParserError> {
        let _guard = self.recursion_counter.try_decrease()?;

        // allow the dialect to override statement parsing
        if let Some(statement) = self.dialect.parse_statement(self) {
            return statement;
        }

        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::KILL => self.parse_kill(),
                Keyword::FLUSH => self.parse_flush(),
                Keyword::DESC => self.parse_explain(DescribeAlias::Desc),
                Keyword::DESCRIBE => self.parse_explain(DescribeAlias::Describe),
                Keyword::EXPLAIN => self.parse_explain(DescribeAlias::Explain),
                Keyword::ANALYZE => self.parse_analyze().map(Into::into),
                Keyword::CASE => {
                    self.prev_token();
                    self.parse_case_stmt().map(Into::into)
                }
                Keyword::IF => {
                    self.prev_token();
                    self.parse_if_stmt().map(Into::into)
                }
                Keyword::WHILE => {
                    self.prev_token();
                    self.parse_while().map(Into::into)
                }
                Keyword::RAISE => {
                    self.prev_token();
                    self.parse_raise_stmt().map(Into::into)
                }
                Keyword::SELECT | Keyword::WITH | Keyword::VALUES | Keyword::FROM => {
                    self.prev_token();
                    self.parse_query().map(Into::into)
                }
                Keyword::TRUNCATE => self.parse_truncate().map(Into::into),
                Keyword::ATTACH => {
                    if dialect_of!(self is DuckDbDialect) {
                        self.parse_attach_duckdb_database()
                    } else {
                        self.parse_attach_database()
                    }
                }
                Keyword::DETACH if self.dialect.supports_detach() => {
                    self.parse_detach_duckdb_database()
                }
                Keyword::MSCK => self.parse_msck().map(Into::into),
                Keyword::CREATE => self.parse_create(),
                Keyword::CACHE => self.parse_cache_table(),
                Keyword::DROP => self.parse_drop(),
                Keyword::DISCARD => self.parse_discard(),
                Keyword::DECLARE => self.parse_declare(),
                Keyword::FETCH => self.parse_fetch_statement(),
                Keyword::DELETE => self.parse_delete(next_token),
                Keyword::INSERT => self.parse_insert(next_token),
                Keyword::REPLACE => self.parse_replace(next_token),
                Keyword::UNCACHE => self.parse_uncache_table(),
                Keyword::UPDATE => self.parse_update(next_token),
                Keyword::ALTER => self.parse_alter(),
                Keyword::CALL => self.parse_call(),
                Keyword::COPY => self.parse_copy(),
                Keyword::OPEN => {
                    self.prev_token();
                    self.parse_open()
                }
                Keyword::CLOSE => self.parse_close(),
                Keyword::SET => self.parse_set(),
                Keyword::SHOW => self.parse_show(),
                Keyword::USE => self.parse_use(),
                Keyword::GRANT => self.parse_grant().map(Into::into),
                Keyword::DENY => {
                    self.prev_token();
                    self.parse_deny()
                }
                Keyword::REVOKE => self.parse_revoke().map(Into::into),
                Keyword::START => self.parse_start_transaction(),
                Keyword::BEGIN => self.parse_begin(),
                Keyword::END => self.parse_end(),
                Keyword::SAVEPOINT => self.parse_savepoint(),
                Keyword::RELEASE => self.parse_release(),
                Keyword::COMMIT => self.parse_commit(),
                Keyword::RAISERROR => Ok(self.parse_raiserror()?),
                Keyword::ROLLBACK => self.parse_rollback(),
                Keyword::ASSERT => self.parse_assert(),
                // `PREPARE`, `EXECUTE` and `DEALLOCATE` are Postgres-specific
                // syntaxes. They are used for Postgres prepared statements.
                Keyword::DEALLOCATE => self.parse_deallocate(),
                Keyword::EXECUTE | Keyword::EXEC => self.parse_execute(),
                Keyword::PREPARE => self.parse_prepare(),
                Keyword::MERGE => self.parse_merge(next_token).map(Into::into),
                // `LISTEN`, `UNLISTEN` and `NOTIFY` are Postgres-specific
                // syntaxes. They are used for Postgres asynchronous notifications.
                Keyword::LISTEN if self.dialect.supports_listen_notify() => self.parse_listen(),
                Keyword::UNLISTEN if self.dialect.supports_listen_notify() => self.parse_unlisten(),
                Keyword::NOTIFY if self.dialect.supports_listen_notify() => self.parse_notify(),
                // `PRAGMA` is sqlite specific https://www.sqlite.org/pragma.html
                Keyword::PRAGMA => self.parse_pragma(),
                Keyword::UNLOAD => {
                    self.prev_token();
                    self.parse_unload()
                }
                Keyword::RENAME => self.parse_rename(),
                // `INSTALL` is duckdb specific https://duckdb.org/docs/extensions/overview
                Keyword::INSTALL if self.dialect.supports_install() => self.parse_install(),
                Keyword::LOAD => self.parse_load(),
                // `OPTIMIZE` is clickhouse specific https://clickhouse.tech/docs/en/sql-reference/statements/optimize/
                Keyword::OPTIMIZE if self.dialect.supports_optimize_table() => {
                    self.parse_optimize_table()
                }
                // `COMMENT` is snowflake specific https://docs.snowflake.com/en/sql-reference/sql/comment
                Keyword::COMMENT if self.dialect.supports_comment_on() => self.parse_comment(),
                Keyword::PRINT => self.parse_print(),
                Keyword::RETURN => self.parse_return(),
                Keyword::EXPORT => {
                    self.prev_token();
                    self.parse_export_data()
                }
                Keyword::VACUUM => {
                    self.prev_token();
                    self.parse_vacuum()
                }
                Keyword::RESET => self.parse_reset().map(Into::into),
                _ => self.expected("an SQL statement", next_token),
            },
            Token::LParen => {
                self.prev_token();
                self.parse_query().map(Into::into)
            }
            _ => self.expected("an SQL statement", next_token),
        }
    }

    /// Parse a `CASE` statement.
    ///
    /// See [Statement::Case]
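    ///
    /// For example:
    /// ```sql
    /// CASE
    ///     WHEN x > 0 THEN SELECT 1;
    ///     ELSE SELECT 0;
    /// END CASE;
    /// ```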
    pub fn parse_case_stmt(&mut self) -> Result<CaseStatement, ParserError> {
        let case_token = self.expect_keyword(Keyword::CASE)?;

        let match_expr = if self.peek_keyword(Keyword::WHEN) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        self.expect_keyword_is(Keyword::WHEN)?;
        let when_blocks = self.parse_keyword_separated(Keyword::WHEN, |parser| {
            parser.parse_conditional_statement_block(&[Keyword::WHEN, Keyword::ELSE, Keyword::END])
        })?;

        let else_block = if self.parse_keyword(Keyword::ELSE) {
            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
        } else {
            None
        };

        let mut end_case_token = self.expect_keyword(Keyword::END)?;
        if self.peek_keyword(Keyword::CASE) {
            end_case_token = self.expect_keyword(Keyword::CASE)?;
        }

        Ok(CaseStatement {
            case_token: AttachedToken(case_token),
            match_expr,
            when_blocks,
            else_block,
            end_case_token: AttachedToken(end_case_token),
        })
    }

    /// Parse an `IF` statement.
    ///
    /// See [Statement::If]
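    ///
    /// For example:
    /// ```sql
    /// IF x > 0 THEN SELECT 1;
    /// ELSEIF x < 0 THEN SELECT -1;
    /// ELSE SELECT 0;
    /// END IF;
    /// ```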
    pub fn parse_if_stmt(&mut self) -> Result<IfStatement, ParserError> {
        self.expect_keyword_is(Keyword::IF)?;
        let if_block = self.parse_conditional_statement_block(&[
            Keyword::ELSE,
            Keyword::ELSEIF,
            Keyword::END,
        ])?;

        let elseif_blocks = if self.parse_keyword(Keyword::ELSEIF) {
            self.parse_keyword_separated(Keyword::ELSEIF, |parser| {
                parser.parse_conditional_statement_block(&[
                    Keyword::ELSEIF,
                    Keyword::ELSE,
                    Keyword::END,
                ])
            })?
        } else {
            vec![]
        };

        let else_block = if self.parse_keyword(Keyword::ELSE) {
            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::END)?;
        let end_token = self.expect_keyword(Keyword::IF)?;

        Ok(IfStatement {
            if_block,
            elseif_blocks,
            else_block,
            end_token: Some(AttachedToken(end_token)),
        })
    }

    /// Parse a `WHILE` statement.
    ///
    /// See [Statement::While]
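    ///
    /// For example:
    /// ```sql
    /// WHILE cnt < 10 BEGIN
    ///     SELECT cnt;
    /// END
    /// ```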
    fn parse_while(&mut self) -> Result<WhileStatement, ParserError> {
        self.expect_keyword_is(Keyword::WHILE)?;
        let while_block = self.parse_conditional_statement_block(&[Keyword::END])?;

        Ok(WhileStatement { while_block })
    }

    /// Parses an expression and associated list of statements
    /// belonging to a conditional statement like `IF`, `WHEN`, or `WHILE`.
    ///
    /// Example:
    /// ```sql
    /// IF condition THEN statement1; statement2;
    /// ```
    fn parse_conditional_statement_block(
        &mut self,
        terminal_keywords: &[Keyword],
    ) -> Result<ConditionalStatementBlock, ParserError> {
        let start_token = self.get_current_token().clone(); // self.expect_keyword(keyword)?;
        let mut then_token = None;

        let condition = match &start_token.token {
            Token::Word(w) if w.keyword == Keyword::ELSE => None,
            Token::Word(w) if w.keyword == Keyword::WHILE => {
                let expr = self.parse_expr()?;
                Some(expr)
            }
            _ => {
                let expr = self.parse_expr()?;
                then_token = Some(AttachedToken(self.expect_keyword(Keyword::THEN)?));
                Some(expr)
            }
        };

        let conditional_statements = self.parse_conditional_statements(terminal_keywords)?;

        Ok(ConditionalStatementBlock {
            start_token: AttachedToken(start_token),
            condition,
            then_token,
            conditional_statements,
        })
    }

    /// Parse a BEGIN/END block or a sequence of statements.
    /// This could be inside a conditional (IF, CASE, WHILE, etc.) or an object body that is
    /// optionally delimited by BEGIN/END and contains one or more statements.
    pub(crate) fn parse_conditional_statements(
        &mut self,
        terminal_keywords: &[Keyword],
    ) -> Result<ConditionalStatements, ParserError> {
        let conditional_statements = if self.peek_keyword(Keyword::BEGIN) {
            let begin_token = self.expect_keyword(Keyword::BEGIN)?;
            let statements = self.parse_statement_list(terminal_keywords)?;
            let end_token = self.expect_keyword(Keyword::END)?;

            ConditionalStatements::BeginEnd(BeginEndStatements {
                begin_token: AttachedToken(begin_token),
                statements,
                end_token: AttachedToken(end_token),
            })
        } else {
            ConditionalStatements::Sequence {
                statements: self.parse_statement_list(terminal_keywords)?,
            }
        };
        Ok(conditional_statements)
    }

    /// Parse a `RAISE` statement.
    ///
    /// See [Statement::Raise]
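    ///
    /// For example (BigQuery scripting syntax):
    /// ```sql
    /// RAISE USING MESSAGE = 'error occurred';
    /// ```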
    pub fn parse_raise_stmt(&mut self) -> Result<RaiseStatement, ParserError> {
        self.expect_keyword_is(Keyword::RAISE)?;

        let value = if self.parse_keywords(&[Keyword::USING, Keyword::MESSAGE]) {
            self.expect_token(&Token::Eq)?;
            Some(RaiseStatementValue::UsingMessage(self.parse_expr()?))
        } else {
            self.maybe_parse(|parser| parser.parse_expr().map(RaiseStatementValue::Expr))?
        };

        Ok(RaiseStatement { value })
    }

    /// Parse a COMMENT statement.
    ///
    /// See [Statement::Comment]
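    ///
    /// For example (PostgreSQL syntax):
    /// ```sql
    /// COMMENT ON TABLE my_table IS 'a table comment';
    /// COMMENT ON COLUMN my_table.id IS NULL;
    /// ```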
    pub fn parse_comment(&mut self) -> Result<Statement, ParserError> {
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);

        self.expect_keyword_is(Keyword::ON)?;
        let token = self.next_token();

        let (object_type, object_name) = match token.token {
            Token::Word(w) if w.keyword == Keyword::COLUMN => {
                (CommentObject::Column, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::TABLE => {
                (CommentObject::Table, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::EXTENSION => {
                (CommentObject::Extension, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::SCHEMA => {
                (CommentObject::Schema, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::DATABASE => {
                (CommentObject::Database, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::USER => {
                (CommentObject::User, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::ROLE => {
                (CommentObject::Role, self.parse_object_name(false)?)
            }
            _ => self.expected("comment object_type", token)?,
        };

        self.expect_keyword_is(Keyword::IS)?;
        let comment = if self.parse_keyword(Keyword::NULL) {
            None
        } else {
            Some(self.parse_literal_string()?)
        };
        Ok(Statement::Comment {
            object_type,
            object_name,
            comment,
            if_exists,
        })
    }

    /// Parse `FLUSH` statement.
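    ///
    /// For example (MySQL syntax):
    /// ```sql
    /// FLUSH LOCAL BINARY LOGS;
    /// FLUSH TABLES t1, t2 WITH READ LOCK;
    /// ```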
    pub fn parse_flush(&mut self) -> Result<Statement, ParserError> {
        let mut channel = None;
        let mut tables: Vec<ObjectName> = vec![];
        let mut read_lock = false;
        let mut export = false;

        if !dialect_of!(self is MySqlDialect | GenericDialect) {
            return parser_err!("Unsupported statement FLUSH", self.peek_token().span.start);
        }

        let location = if self.parse_keyword(Keyword::NO_WRITE_TO_BINLOG) {
            Some(FlushLocation::NoWriteToBinlog)
        } else if self.parse_keyword(Keyword::LOCAL) {
            Some(FlushLocation::Local)
        } else {
            None
        };

        let object_type = if self.parse_keywords(&[Keyword::BINARY, Keyword::LOGS]) {
            FlushType::BinaryLogs
        } else if self.parse_keywords(&[Keyword::ENGINE, Keyword::LOGS]) {
            FlushType::EngineLogs
        } else if self.parse_keywords(&[Keyword::ERROR, Keyword::LOGS]) {
            FlushType::ErrorLogs
        } else if self.parse_keywords(&[Keyword::GENERAL, Keyword::LOGS]) {
            FlushType::GeneralLogs
        } else if self.parse_keywords(&[Keyword::HOSTS]) {
            FlushType::Hosts
        } else if self.parse_keyword(Keyword::PRIVILEGES) {
            FlushType::Privileges
        } else if self.parse_keyword(Keyword::OPTIMIZER_COSTS) {
            FlushType::OptimizerCosts
        } else if self.parse_keywords(&[Keyword::RELAY, Keyword::LOGS]) {
            if self.parse_keywords(&[Keyword::FOR, Keyword::CHANNEL]) {
                channel = Some(self.parse_object_name(false).unwrap().to_string());
            }
            FlushType::RelayLogs
        } else if self.parse_keywords(&[Keyword::SLOW, Keyword::LOGS]) {
            FlushType::SlowLogs
        } else if self.parse_keyword(Keyword::STATUS) {
            FlushType::Status
        } else if self.parse_keyword(Keyword::USER_RESOURCES) {
            FlushType::UserResources
        } else if self.parse_keywords(&[Keyword::LOGS]) {
            FlushType::Logs
        } else if self.parse_keywords(&[Keyword::TABLES]) {
            loop {
                let next_token = self.next_token();
                match &next_token.token {
                    Token::Word(w) => match w.keyword {
                        Keyword::WITH => {
                            read_lock = self.parse_keywords(&[Keyword::READ, Keyword::LOCK]);
                        }
                        Keyword::FOR => {
                            export = self.parse_keyword(Keyword::EXPORT);
                        }
                        Keyword::NoKeyword => {
                            self.prev_token();
                            tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
                        }
                        _ => {}
                    },
                    _ => {
                        break;
                    }
                }
            }

            FlushType::Tables
        } else {
            return self.expected(
                "BINARY LOGS, ENGINE LOGS, ERROR LOGS, GENERAL LOGS, HOSTS, LOGS, PRIVILEGES, OPTIMIZER_COSTS,\
                 RELAY LOGS [FOR CHANNEL channel], SLOW LOGS, STATUS, USER_RESOURCES",
                self.peek_token(),
            );
        };

        Ok(Statement::Flush {
            object_type,
            location,
            channel,
            read_lock,
            export,
            tables,
        })
    }

    /// Parse `MSCK` statement.
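    ///
    /// For example (Hive syntax):
    /// ```sql
    /// MSCK REPAIR TABLE my_table ADD PARTITIONS;
    /// ```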
    pub fn parse_msck(&mut self) -> Result<Msck, ParserError> {
        let repair = self.parse_keyword(Keyword::REPAIR);
        self.expect_keyword_is(Keyword::TABLE)?;
        let table_name = self.parse_object_name(false)?;
        let partition_action = self
            .maybe_parse(|parser| {
                let pa = match parser.parse_one_of_keywords(&[
                    Keyword::ADD,
                    Keyword::DROP,
                    Keyword::SYNC,
                ]) {
                    Some(Keyword::ADD) => Some(AddDropSync::ADD),
                    Some(Keyword::DROP) => Some(AddDropSync::DROP),
                    Some(Keyword::SYNC) => Some(AddDropSync::SYNC),
                    _ => None,
                };
                parser.expect_keyword_is(Keyword::PARTITIONS)?;
                Ok(pa)
            })?
            .unwrap_or_default();
        Ok(Msck {
            repair,
            table_name,
            partition_action,
        })
    }

    /// Parse `TRUNCATE` statement.
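    ///
    /// For example (PostgreSQL syntax):
    /// ```sql
    /// TRUNCATE TABLE t1, t2 RESTART IDENTITY CASCADE;
    /// ```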
    pub fn parse_truncate(&mut self) -> Result<Truncate, ParserError> {
        let table = self.parse_keyword(Keyword::TABLE);
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);

        let table_names = self.parse_comma_separated(|p| {
            let only = p.parse_keyword(Keyword::ONLY);
            let name = p.parse_object_name(false)?;
            let has_asterisk = p.consume_token(&Token::Mul);
            Ok(TruncateTableTarget {
                name,
                only,
                has_asterisk,
            })
        })?;

        let mut partitions = None;
        if self.parse_keyword(Keyword::PARTITION) {
            self.expect_token(&Token::LParen)?;
            partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
            self.expect_token(&Token::RParen)?;
        }

        let mut identity = None;
        let mut cascade = None;

        if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
            identity = if self.parse_keywords(&[Keyword::RESTART, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Restart)
            } else if self.parse_keywords(&[Keyword::CONTINUE, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Continue)
            } else {
                None
            };

            cascade = self.parse_cascade_option();
        };

        let on_cluster = self.parse_optional_on_cluster()?;

        Ok(Truncate {
            table_names,
            partitions,
            table,
            if_exists,
            identity,
            cascade,
            on_cluster,
        })
    }

    fn parse_cascade_option(&mut self) -> Option<CascadeOption> {
        if self.parse_keyword(Keyword::CASCADE) {
            Some(CascadeOption::Cascade)
        } else if self.parse_keyword(Keyword::RESTRICT) {
            Some(CascadeOption::Restrict)
        } else {
            None
        }
    }

    /// Parse options for `ATTACH DUCKDB DATABASE` statement.
    pub fn parse_attach_duckdb_database_options(
        &mut self,
    ) -> Result<Vec<AttachDuckDBDatabaseOption>, ParserError> {
        if !self.consume_token(&Token::LParen) {
            return Ok(vec![]);
        }

        let mut options = vec![];
        loop {
            if self.parse_keyword(Keyword::READ_ONLY) {
                let boolean = if self.parse_keyword(Keyword::TRUE) {
                    Some(true)
                } else if self.parse_keyword(Keyword::FALSE) {
                    Some(false)
                } else {
                    None
                };
                options.push(AttachDuckDBDatabaseOption::ReadOnly(boolean));
            } else if self.parse_keyword(Keyword::TYPE) {
                let ident = self.parse_identifier()?;
                options.push(AttachDuckDBDatabaseOption::Type(ident));
            } else {
                return self.expected("expected one of: ), READ_ONLY, TYPE", self.peek_token());
            };

            if self.consume_token(&Token::RParen) {
                return Ok(options);
            } else if self.consume_token(&Token::Comma) {
                continue;
            } else {
                return self.expected("expected one of: ')', ','", self.peek_token());
            }
        }
    }

    /// Parse `ATTACH DUCKDB DATABASE` statement.
    pub fn parse_attach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let database_path = self.parse_identifier()?;
        let database_alias = if self.parse_keyword(Keyword::AS) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        let attach_options = self.parse_attach_duckdb_database_options()?;
        Ok(Statement::AttachDuckDBDatabase {
            if_not_exists,
            database,
            database_path,
            database_alias,
            attach_options,
        })
    }

    /// Parse `DETACH DUCKDB DATABASE` statement.
    pub fn parse_detach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let database_alias = self.parse_identifier()?;
        Ok(Statement::DetachDuckDBDatabase {
            if_exists,
            database,
            database_alias,
        })
    }

    /// Parse `ATTACH DATABASE` statement.
    pub fn parse_attach_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let database_file_name = self.parse_expr()?;
        self.expect_keyword_is(Keyword::AS)?;
        let schema_name = self.parse_identifier()?;
        Ok(Statement::AttachDatabase {
            database,
            schema_name,
            database_file_name,
        })
    }

    /// Parse `ANALYZE` statement.
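    ///
    /// For example:
    /// ```sql
    /// -- PostgreSQL-style column list
    /// ANALYZE my_table (col1, col2);
    /// -- Hive-style statistics collection
    /// ANALYZE TABLE my_table PARTITION (year = 2024) COMPUTE STATISTICS;
    /// ```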
    pub fn parse_analyze(&mut self) -> Result<Analyze, ParserError> {
        let has_table_keyword = self.parse_keyword(Keyword::TABLE);
        let table_name = self.maybe_parse(|parser| parser.parse_object_name(false))?;
        let mut for_columns = false;
        let mut cache_metadata = false;
        let mut noscan = false;
        let mut partitions = None;
        let mut compute_statistics = false;
        let mut columns = vec![];

        // PostgreSQL syntax: ANALYZE t (col1, col2)
        if table_name.is_some() && self.consume_token(&Token::LParen) {
            columns = self.parse_comma_separated(|p| p.parse_identifier())?;
            self.expect_token(&Token::RParen)?;
        }

        loop {
            match self.parse_one_of_keywords(&[
                Keyword::PARTITION,
                Keyword::FOR,
                Keyword::CACHE,
                Keyword::NOSCAN,
                Keyword::COMPUTE,
            ]) {
                Some(Keyword::PARTITION) => {
                    self.expect_token(&Token::LParen)?;
                    partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
                    self.expect_token(&Token::RParen)?;
                }
                Some(Keyword::NOSCAN) => noscan = true,
                Some(Keyword::FOR) => {
                    self.expect_keyword_is(Keyword::COLUMNS)?;

                    columns = self
                        .maybe_parse(|parser| {
                            parser.parse_comma_separated(|p| p.parse_identifier())
                        })?
                        .unwrap_or_default();
                    for_columns = true
                }
                Some(Keyword::CACHE) => {
                    self.expect_keyword_is(Keyword::METADATA)?;
                    cache_metadata = true
                }
                Some(Keyword::COMPUTE) => {
                    self.expect_keyword_is(Keyword::STATISTICS)?;
                    compute_statistics = true
                }
                _ => break,
            }
        }

        Ok(Analyze {
            has_table_keyword,
            table_name,
            for_columns,
            columns,
            partitions,
            cache_metadata,
            noscan,
            compute_statistics,
        })
    }

    /// Parse a new expression including wildcard & qualified wildcard.
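    ///
    /// Example (a minimal sketch; `t.*` becomes a qualified wildcard):
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let expr = Parser::new(&dialect)
    ///   .try_with_sql("t.*")?
    ///   .parse_wildcard_expr()?;
    /// assert_eq!(expr.to_string(), "t.*");
    /// # Ok(())
    /// # }
    /// ```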
    pub fn parse_wildcard_expr(&mut self) -> Result<Expr, ParserError> {
        let index = self.index;

        let next_token = self.next_token();
        match next_token.token {
            t @ (Token::Word(_) | Token::SingleQuotedString(_)) => {
                if self.peek_token().token == Token::Period {
                    let mut id_parts: Vec<Ident> = vec![match t {
                        Token::Word(w) => w.into_ident(next_token.span),
                        Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
                        _ => {
                            return Err(ParserError::ParserError(
                                "Internal parser error: unexpected token type".to_string(),
                            ))
                        }
                    }];

                    while self.consume_token(&Token::Period) {
                        let next_token = self.next_token();
                        match next_token.token {
                            Token::Word(w) => id_parts.push(w.into_ident(next_token.span)),
                            Token::SingleQuotedString(s) => {
                                // SQLite has single-quoted identifiers
                                id_parts.push(Ident::with_quote('\'', s))
                            }
                            Token::Mul => {
                                return Ok(Expr::QualifiedWildcard(
                                    ObjectName::from(id_parts),
                                    AttachedToken(next_token),
                                ));
                            }
                            _ => {
                                return self
                                    .expected("an identifier or a '*' after '.'", next_token);
                            }
                        }
                    }
                }
            }
            Token::Mul => {
                return Ok(Expr::Wildcard(AttachedToken(next_token)));
            }
            // Handle parenthesized wildcard: (*)
            Token::LParen => {
                let [maybe_mul, maybe_rparen] = self.peek_tokens_ref();
                if maybe_mul.token == Token::Mul && maybe_rparen.token == Token::RParen {
                    let mul_token = self.next_token(); // consume Mul
                    self.next_token(); // consume RParen
                    return Ok(Expr::Wildcard(AttachedToken(mul_token)));
                }
            }
            _ => (),
        };

        self.index = index;
        self.parse_expr()
    }

    /// Parse a new expression.
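    ///
    /// Example (a minimal sketch using the generic dialect):
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let expr = Parser::new(&dialect)
    ///   .try_with_sql("1 + 2 * 3")?
    ///   .parse_expr()?;
    /// assert_eq!(expr.to_string(), "1 + 2 * 3");
    /// # Ok(())
    /// # }
    /// ```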
    pub fn parse_expr(&mut self) -> Result<Expr, ParserError> {
        self.parse_subexpr(self.dialect.prec_unknown())
    }

    /// Parse an expression with an optional alias and order-by options.
    pub fn parse_expr_with_alias_and_order_by(
        &mut self,
    ) -> Result<ExprWithAliasAndOrderBy, ParserError> {
        let expr = self.parse_expr()?;

        fn validator(explicit: bool, kw: &Keyword, _parser: &mut Parser) -> bool {
            explicit || !&[Keyword::ASC, Keyword::DESC, Keyword::GROUP].contains(kw)
        }
        let alias = self.parse_optional_alias_inner(None, validator)?;
        let order_by = OrderByOptions {
            asc: self.parse_asc_desc(),
            nulls_first: None,
        };
        Ok(ExprWithAliasAndOrderBy {
            expr: ExprWithAlias { expr, alias },
            order_by,
        })
    }

    /// Parse tokens until the precedence changes.
    pub fn parse_subexpr(&mut self, precedence: u8) -> Result<Expr, ParserError> {
        let _guard = self.recursion_counter.try_decrease()?;
        debug!("parsing expr");
        let mut expr = self.parse_prefix()?;

        expr = self.parse_compound_expr(expr, vec![])?;

        debug!("prefix: {expr:?}");
        loop {
            let next_precedence = self.get_next_precedence()?;
            debug!("next precedence: {next_precedence:?}");

            if precedence >= next_precedence {
                break;
            }

            // The period operator is handled exclusively by the
            // compound field access parsing.
            if Token::Period == self.peek_token_ref().token {
                break;
            }

            expr = self.parse_infix(expr, next_precedence)?;
        }
        Ok(expr)
    }

    /// Parse `ASSERT` statement.
    pub fn parse_assert(&mut self) -> Result<Statement, ParserError> {
        let condition = self.parse_expr()?;
        let message = if self.parse_keyword(Keyword::AS) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        Ok(Statement::Assert { condition, message })
    }

    /// Parse `SAVEPOINT` statement.
    pub fn parse_savepoint(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_identifier()?;
        Ok(Statement::Savepoint { name })
    }

    /// Parse `RELEASE` statement.
    pub fn parse_release(&mut self) -> Result<Statement, ParserError> {
        let _ = self.parse_keyword(Keyword::SAVEPOINT);
        let name = self.parse_identifier()?;

        Ok(Statement::ReleaseSavepoint { name })
    }

    /// Parse `LISTEN` statement.
    pub fn parse_listen(&mut self) -> Result<Statement, ParserError> {
        let channel = self.parse_identifier()?;
        Ok(Statement::LISTEN { channel })
    }

    /// Parse `UNLISTEN` statement.
    pub fn parse_unlisten(&mut self) -> Result<Statement, ParserError> {
        let channel = if self.consume_token(&Token::Mul) {
            Ident::new(Expr::Wildcard(AttachedToken::empty()).to_string())
        } else {
            match self.parse_identifier() {
                Ok(expr) => expr,
                _ => {
                    self.prev_token();
                    return self.expected("wildcard or identifier", self.peek_token());
                }
            }
        };
        Ok(Statement::UNLISTEN { channel })
    }

    /// Parse `NOTIFY` statement.
    pub fn parse_notify(&mut self) -> Result<Statement, ParserError> {
        let channel = self.parse_identifier()?;
        let payload = if self.consume_token(&Token::Comma) {
            Some(self.parse_literal_string()?)
        } else {
            None
        };
        Ok(Statement::NOTIFY { channel, payload })
    }

    /// Parses a `RENAME TABLE` statement. See [Statement::RenameTable]
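    ///
    /// For example (MySQL syntax):
    /// ```sql
    /// RENAME TABLE old_name TO new_name, old_name2 TO new_name2;
    /// ```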
    pub fn parse_rename(&mut self) -> Result<Statement, ParserError> {
        if self.peek_keyword(Keyword::TABLE) {
            self.expect_keyword(Keyword::TABLE)?;
            let rename_tables = self.parse_comma_separated(|parser| {
                let old_name = parser.parse_object_name(false)?;
                parser.expect_keyword(Keyword::TO)?;
                let new_name = parser.parse_object_name(false)?;

                Ok(RenameTable { old_name, new_name })
            })?;
            Ok(rename_tables.into())
        } else {
            self.expected("KEYWORD `TABLE` after RENAME", self.peek_token())
        }
    }

    /// Tries to parse an expression by matching the specified word to known keywords that have a special meaning in the dialect.
1447    /// Returns `None` if no match is found.
1448    fn parse_expr_prefix_by_reserved_word(
1449        &mut self,
1450        w: &Word,
1451        w_span: Span,
1452    ) -> Result<Option<Expr>, ParserError> {
1453        match w.keyword {
1454            Keyword::TRUE | Keyword::FALSE if self.dialect.supports_boolean_literals() => {
1455                self.prev_token();
1456                Ok(Some(Expr::Value(self.parse_value()?)))
1457            }
1458            Keyword::NULL => {
1459                self.prev_token();
1460                Ok(Some(Expr::Value(self.parse_value()?)))
1461            }
1462            Keyword::CURRENT_CATALOG
1463            | Keyword::CURRENT_USER
1464            | Keyword::SESSION_USER
1465            | Keyword::USER
1466            if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
1467                {
1468                    Ok(Some(Expr::Function(Function {
1469                        name: ObjectName::from(vec![w.to_ident(w_span)]),
1470                        uses_odbc_syntax: false,
1471                        parameters: FunctionArguments::None,
1472                        args: FunctionArguments::None,
1473                        null_treatment: None,
1474                        filter: None,
1475                        over: None,
1476                        within_group: vec![],
1477                    })))
1478                }
1479            Keyword::CURRENT_TIMESTAMP
1480            | Keyword::CURRENT_TIME
1481            | Keyword::CURRENT_DATE
1482            | Keyword::LOCALTIME
1483            | Keyword::LOCALTIMESTAMP => {
1484                Ok(Some(self.parse_time_functions(ObjectName::from(vec![w.to_ident(w_span)]))?))
1485            }
1486            Keyword::CASE => Ok(Some(self.parse_case_expr()?)),
1487            Keyword::CONVERT => Ok(Some(self.parse_convert_expr(false)?)),
1488            Keyword::TRY_CONVERT if self.dialect.supports_try_convert() => Ok(Some(self.parse_convert_expr(true)?)),
1489            Keyword::CAST => Ok(Some(self.parse_cast_expr(CastKind::Cast)?)),
1490            Keyword::TRY_CAST => Ok(Some(self.parse_cast_expr(CastKind::TryCast)?)),
1491            Keyword::SAFE_CAST => Ok(Some(self.parse_cast_expr(CastKind::SafeCast)?)),
1492            Keyword::EXISTS
1493            // Databricks has a function named `exists`, so only treat EXISTS as a predicate when followed by a subquery.
1494            if !dialect_of!(self is DatabricksDialect)
1495                || matches!(
1496                        self.peek_nth_token_ref(1).token,
1497                        Token::Word(Word {
1498                            keyword: Keyword::SELECT | Keyword::WITH,
1499                            ..
1500                        })
1501                    ) =>
1502                {
1503                    Ok(Some(self.parse_exists_expr(false)?))
1504                }
1505            Keyword::EXTRACT => Ok(Some(self.parse_extract_expr()?)),
1506            Keyword::CEIL => Ok(Some(self.parse_ceil_floor_expr(true)?)),
1507            Keyword::FLOOR => Ok(Some(self.parse_ceil_floor_expr(false)?)),
1508            Keyword::POSITION if self.peek_token_ref().token == Token::LParen => {
1509                Ok(Some(self.parse_position_expr(w.to_ident(w_span))?))
1510            }
1511            Keyword::SUBSTR | Keyword::SUBSTRING => {
1512                self.prev_token();
1513                Ok(Some(self.parse_substring()?))
1514            }
1515            Keyword::OVERLAY => Ok(Some(self.parse_overlay_expr()?)),
1516            Keyword::TRIM => Ok(Some(self.parse_trim_expr()?)),
1517            Keyword::INTERVAL => Ok(Some(self.parse_interval()?)),
1518            // Treat ARRAY[1,2,3] as an array [1,2,3], otherwise try as subquery or a function call
1519            Keyword::ARRAY if *self.peek_token_ref() == Token::LBracket => {
1520                self.expect_token(&Token::LBracket)?;
1521                Ok(Some(self.parse_array_expr(true)?))
1522            }
1523            Keyword::ARRAY
1524            if self.peek_token() == Token::LParen
1525                && !dialect_of!(self is ClickHouseDialect | DatabricksDialect) =>
1526                {
1527                    self.expect_token(&Token::LParen)?;
1528                    let query = self.parse_query()?;
1529                    self.expect_token(&Token::RParen)?;
1530                    Ok(Some(Expr::Function(Function {
1531                        name: ObjectName::from(vec![w.to_ident(w_span)]),
1532                        uses_odbc_syntax: false,
1533                        parameters: FunctionArguments::None,
1534                        args: FunctionArguments::Subquery(query),
1535                        filter: None,
1536                        null_treatment: None,
1537                        over: None,
1538                        within_group: vec![],
1539                    })))
1540                }
1541            Keyword::NOT => Ok(Some(self.parse_not()?)),
1542            Keyword::MATCH if self.dialect.supports_match_against() => {
1543                Ok(Some(self.parse_match_against()?))
1544            }
1545            Keyword::STRUCT if self.dialect.supports_struct_literal() => {
1546                let struct_expr = self.parse_struct_literal()?;
1547                Ok(Some(struct_expr))
1548            }
1549            Keyword::PRIOR if matches!(self.state, ParserState::ConnectBy) => {
1550                let expr = self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?;
1551                Ok(Some(Expr::Prior(Box::new(expr))))
1552            }
1553            Keyword::MAP if *self.peek_token_ref() == Token::LBrace && self.dialect.support_map_literal_syntax() => {
1554                Ok(Some(self.parse_duckdb_map_literal()?))
1555            }
1556            Keyword::LAMBDA if self.dialect.supports_lambda_functions() => {
1557                Ok(Some(self.parse_lambda_expr()?))
1558            }
1559            _ if self.dialect.supports_geometric_types() => match w.keyword {
1560                Keyword::CIRCLE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Circle)?)),
1561                Keyword::BOX => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricBox)?)),
1562                Keyword::PATH => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricPath)?)),
1563                Keyword::LINE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Line)?)),
1564                Keyword::LSEG => Ok(Some(self.parse_geometric_type(GeometricTypeKind::LineSegment)?)),
1565                Keyword::POINT => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Point)?)),
1566                Keyword::POLYGON => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Polygon)?)),
1567                _ => Ok(None),
1568            },
1569            _ => Ok(None),
1570        }
1571    }
1572
1573    /// Tries to parse an expression by a word that is not known to have a special meaning in the dialect.
1574    fn parse_expr_prefix_by_unreserved_word(
1575        &mut self,
1576        w: &Word,
1577        w_span: Span,
1578    ) -> Result<Expr, ParserError> {
1579        match self.peek_token().token {
1580            Token::LParen if !self.peek_outer_join_operator() => {
1581                let id_parts = vec![w.to_ident(w_span)];
1582                self.parse_function(ObjectName::from(id_parts))
1583            }
1584            // string introducer https://dev.mysql.com/doc/refman/8.0/en/charset-introducer.html
1585            Token::SingleQuotedString(_)
1586            | Token::DoubleQuotedString(_)
1587            | Token::HexStringLiteral(_)
1588                if w.value.starts_with('_') =>
1589            {
1590                Ok(Expr::Prefixed {
1591                    prefix: w.to_ident(w_span),
1592                    value: self.parse_introduced_string_expr()?.into(),
1593                })
1594            }
1606            Token::Arrow if self.dialect.supports_lambda_functions() => {
1607                self.expect_token(&Token::Arrow)?;
1608                Ok(Expr::Lambda(LambdaFunction {
1609                    params: OneOrManyWithParens::One(w.to_ident(w_span)),
1610                    body: Box::new(self.parse_expr()?),
1611                    syntax: LambdaSyntax::Arrow,
1612                }))
1613            }
1614            _ => Ok(Expr::Identifier(w.to_ident(w_span))),
1615        }
1616    }
1617
1618    /// Parse an expression prefix.
1619    pub fn parse_prefix(&mut self) -> Result<Expr, ParserError> {
1620        // allow the dialect to override prefix parsing
1621        if let Some(prefix) = self.dialect.parse_prefix(self) {
1622            return prefix;
1623        }
1624
1625        // PostgreSQL allows any string literal to be preceded by a type name, indicating that the
1626        // string literal represents a literal of that type. Some examples:
1627        //
1628        //      DATE '2020-05-20'
1629        //      TIMESTAMP WITH TIME ZONE '2020-05-20 7:43:54'
1630        //      BOOL 'true'
1631        //
1632        // The first two are standard SQL, while the latter is a PostgreSQL extension. Complicating
1633        // matters is the fact that INTERVAL string literals may optionally be followed by special
1634        // keywords, e.g.:
1635        //
1636        //      INTERVAL '7' DAY
1637        //
1638        // Note also that naively `SELECT date` looks like a syntax error because the `date` type
1639        // name is not followed by a string literal, but in fact in PostgreSQL it is a valid
1640        // expression that should parse as the column name "date".
1641        let loc = self.peek_token_ref().span.start;
1642        let opt_expr = self.maybe_parse(|parser| {
1643            match parser.parse_data_type()? {
1644                DataType::Interval { .. } => parser.parse_interval(),
1645                // PostgreSQL allows almost any identifier to be used as custom data type name,
1646                // and we support that in `parse_data_type()`. But unlike Postgres we don't
1647                // have a list of globally reserved keywords (since they vary across dialects),
1648                // so given `NOT 'a' LIKE 'b'`, we'd accept `NOT` as a possible custom data type
1649                // name, resulting in `NOT 'a'` being recognized as a `TypedString` instead of
1650                // a unary negation `NOT ('a' LIKE 'b')`. To solve this, we don't accept the
1651                // `type 'string'` syntax for custom data types at all.
1652                DataType::Custom(..) => parser_err!("dummy", loc),
1653                // MySQL supports using the `BINARY` keyword as a cast to binary type.
1654                DataType::Binary(..) if parser.dialect.supports_binary_kw_as_cast() => {
1655                    Ok(Expr::Cast {
1656                        kind: CastKind::Cast,
1657                        expr: Box::new(parser.parse_expr()?),
1658                        data_type: DataType::Binary(None),
1659                        array: false,
1660                        format: None,
1661                    })
1662                }
1663                data_type => Ok(Expr::TypedString(TypedString {
1664                    data_type,
1665                    value: parser.parse_value()?,
1666                    uses_odbc_syntax: false,
1667                })),
1668            }
1669        })?;
1670
1671        if let Some(expr) = opt_expr {
1672            return Ok(expr);
1673        }
1674
1675        // Cache some dialect properties to avoid lifetime issues with the
1676        // next_token reference.
1677
1678        let dialect = self.dialect;
1679
1680        self.advance_token();
1681        let next_token_index = self.get_current_index();
1682        let next_token = self.get_current_token();
1683        let span = next_token.span;
1684        let expr = match &next_token.token {
1685            Token::Word(w) => {
1686                // The word we consumed may fall into one of two cases: it has a special meaning, or not.
1687                // For example, in Snowflake, the word `interval` may have two meanings depending on the context:
1688                // `SELECT CURRENT_DATE() + INTERVAL '1 DAY', MAX(interval) FROM tbl;`
1689                //                          ^^^^^^^^^^^^^^^^      ^^^^^^^^
1690                //                         interval expression   identifier
1691                //
1692                // We first try to parse the word and following tokens as a special expression, and if that fails,
1693                // we rollback and try to parse it as an identifier.
1694                let w = w.clone();
1695                match self.try_parse(|parser| parser.parse_expr_prefix_by_reserved_word(&w, span)) {
1696                    // This word indicated an expression prefix and parsing was successful
1697                    Ok(Some(expr)) => Ok(expr),
1698
1699                    // No expression prefix associated with this word
1700                    Ok(None) => Ok(self.parse_expr_prefix_by_unreserved_word(&w, span)?),
1701
1702                    // If parsing of the word as a special expression failed, we are facing two options:
1703                    // 1. The statement is malformed, e.g. `SELECT INTERVAL '1 DAI'` (`DAI` instead of `DAY`)
1704                    // 2. The word is used as an identifier, e.g. `SELECT MAX(interval) FROM tbl`
1705                    // We first try to parse the word as an identifier and if that fails
1706                    // we rollback and return the parsing error we got from trying to parse a
1707                    // special expression (to maintain backwards compatibility of parsing errors).
1708                    Err(e) => {
1709                        if !self.dialect.is_reserved_for_identifier(w.keyword) {
1710                            if let Ok(Some(expr)) = self.maybe_parse(|parser| {
1711                                parser.parse_expr_prefix_by_unreserved_word(&w, span)
1712                            }) {
1713                                return Ok(expr);
1714                            }
1715                        }
1716                        return Err(e);
1717                    }
1718                }
1719            } // End of Token::Word
1720            // array `[1, 2, 3]`
1721            Token::LBracket => self.parse_array_expr(false),
1722            tok @ Token::Minus | tok @ Token::Plus => {
1723                let op = if *tok == Token::Plus {
1724                    UnaryOperator::Plus
1725                } else {
1726                    UnaryOperator::Minus
1727                };
1728                Ok(Expr::UnaryOp {
1729                    op,
1730                    expr: Box::new(
1731                        self.parse_subexpr(self.dialect.prec_value(Precedence::MulDivModOp))?,
1732                    ),
1733                })
1734            }
1735            Token::ExclamationMark if dialect.supports_bang_not_operator() => Ok(Expr::UnaryOp {
1736                op: UnaryOperator::BangNot,
1737                expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
1738            }),
1739            tok @ Token::DoubleExclamationMark
1740            | tok @ Token::PGSquareRoot
1741            | tok @ Token::PGCubeRoot
1742            | tok @ Token::AtSign
1743                if dialect_is!(dialect is PostgreSqlDialect) =>
1744            {
1745                let op = match tok {
1746                    Token::DoubleExclamationMark => UnaryOperator::PGPrefixFactorial,
1747                    Token::PGSquareRoot => UnaryOperator::PGSquareRoot,
1748                    Token::PGCubeRoot => UnaryOperator::PGCubeRoot,
1749                    Token::AtSign => UnaryOperator::PGAbs,
1750                    _ => {
1751                        return Err(ParserError::ParserError(
1752                            "Internal parser error: unexpected unary operator token".to_string(),
1753                        ))
1754                    }
1755                };
1756                Ok(Expr::UnaryOp {
1757                    op,
1758                    expr: Box::new(
1759                        self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1760                    ),
1761                })
1762            }
1763            Token::Tilde => Ok(Expr::UnaryOp {
1764                op: UnaryOperator::BitwiseNot,
1765                expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?),
1766            }),
1767            tok @ Token::Sharp
1768            | tok @ Token::AtDashAt
1769            | tok @ Token::AtAt
1770            | tok @ Token::QuestionMarkDash
1771            | tok @ Token::QuestionPipe
1772                if self.dialect.supports_geometric_types() =>
1773            {
1774                let op = match tok {
1775                    Token::Sharp => UnaryOperator::Hash,
1776                    Token::AtDashAt => UnaryOperator::AtDashAt,
1777                    Token::AtAt => UnaryOperator::DoubleAt,
1778                    Token::QuestionMarkDash => UnaryOperator::QuestionDash,
1779                    Token::QuestionPipe => UnaryOperator::QuestionPipe,
1780                    _ => {
1781                        return Err(ParserError::ParserError(format!(
1782                            "Unexpected token in unary operator parsing: {tok:?}"
1783                        )))
1784                    }
1785                };
1786                Ok(Expr::UnaryOp {
1787                    op,
1788                    expr: Box::new(
1789                        self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1790                    ),
1791                })
1792            }
1793            Token::EscapedStringLiteral(_) if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) =>
1794            {
1795                self.prev_token();
1796                Ok(Expr::Value(self.parse_value()?))
1797            }
1798            Token::UnicodeStringLiteral(_) => {
1799                self.prev_token();
1800                Ok(Expr::Value(self.parse_value()?))
1801            }
1802            Token::Number(_, _)
1803            | Token::SingleQuotedString(_)
1804            | Token::DoubleQuotedString(_)
1805            | Token::TripleSingleQuotedString(_)
1806            | Token::TripleDoubleQuotedString(_)
1807            | Token::DollarQuotedString(_)
1808            | Token::SingleQuotedByteStringLiteral(_)
1809            | Token::DoubleQuotedByteStringLiteral(_)
1810            | Token::TripleSingleQuotedByteStringLiteral(_)
1811            | Token::TripleDoubleQuotedByteStringLiteral(_)
1812            | Token::SingleQuotedRawStringLiteral(_)
1813            | Token::DoubleQuotedRawStringLiteral(_)
1814            | Token::TripleSingleQuotedRawStringLiteral(_)
1815            | Token::TripleDoubleQuotedRawStringLiteral(_)
1816            | Token::NationalStringLiteral(_)
1817            | Token::QuoteDelimitedStringLiteral(_)
1818            | Token::NationalQuoteDelimitedStringLiteral(_)
1819            | Token::HexStringLiteral(_) => {
1820                self.prev_token();
1821                Ok(Expr::Value(self.parse_value()?))
1822            }
1823            Token::LParen => {
1824                let expr =
1825                    if let Some(expr) = self.try_parse_expr_sub_query()? {
1826                        expr
1827                    } else if let Some(lambda) = self.try_parse_lambda()? {
1828                        return Ok(lambda);
1829                    } else {
1830                        // Parentheses in expressions switch to "normal" parsing state.
1831                        // This matters for dialects (SQLite, DuckDB) where `NOT NULL` can
1832                        // be an alias for `IS NOT NULL`. In column definitions like:
1833                        //
1834                        //   CREATE TABLE t (c INT DEFAULT (42 NOT NULL) NOT NULL)
1835                        //
1836                        // The `(42 NOT NULL)` is an expression with parens, so it parses
1837                        // as `IsNotNull(42)`. The trailing `NOT NULL` is outside those
1838                        // expression parens (the outer parens are CREATE TABLE syntax),
1839                        // so it remains a column constraint.
1840                        let exprs = self.with_state(ParserState::Normal, |p| {
1841                            p.parse_comma_separated(Parser::parse_expr)
1842                        })?;
1843                        match exprs.len() {
1844                            0 => return Err(ParserError::ParserError(
1845                                "Internal parser error: parse_comma_separated returned empty list"
1846                                    .to_string(),
1847                            )),
1848                            1 => Expr::Nested(Box::new(exprs.into_iter().next().unwrap())),
1849                            _ => Expr::Tuple(exprs),
1850                        }
1851                    };
1852                self.expect_token(&Token::RParen)?;
1853                Ok(expr)
1854            }
1855            Token::Placeholder(_) | Token::Colon | Token::AtSign => {
1856                self.prev_token();
1857                Ok(Expr::Value(self.parse_value()?))
1858            }
1859            Token::LBrace => {
1860                self.prev_token();
1861                self.parse_lbrace_expr()
1862            }
1863            _ => self.expected_at("an expression", next_token_index),
1864        }?;
1865
1866        if !self.in_column_definition_state() && self.parse_keyword(Keyword::COLLATE) {
1867            Ok(Expr::Collate {
1868                expr: Box::new(expr),
1869                collation: self.parse_object_name(false)?,
1870            })
1871        } else {
1872            Ok(expr)
1873        }
1874    }
1875
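    /// Parses the typed string value that follows a geometric type keyword
    /// (e.g. `POINT`, `CIRCLE`), e.g. the `'(1, 2)'` in the PostgreSQL-style
    /// literal `POINT '(1, 2)'` (illustrative).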
1876    fn parse_geometric_type(&mut self, kind: GeometricTypeKind) -> Result<Expr, ParserError> {
1877        Ok(Expr::TypedString(TypedString {
1878            data_type: DataType::GeometricType(kind),
1879            value: self.parse_value()?,
1880            uses_odbc_syntax: false,
1881        }))
1882    }
1883
1884    /// Try to parse an [Expr::CompoundFieldAccess] like `a.b.c` or `a.b[1].c`.
1885    /// If all the fields are `Expr::Identifier`s, return an [Expr::CompoundIdentifier] instead.
1886    /// If only the root exists, return the root.
1887    /// Parses compound expressions which may be delimited by period
1888    /// or bracket notation.
1889    /// For example: `a.b.c`, `a.b[1]`.
1890    pub fn parse_compound_expr(
1891        &mut self,
1892        root: Expr,
1893        mut chain: Vec<AccessExpr>,
1894    ) -> Result<Expr, ParserError> {
1895        let mut ending_wildcard: Option<TokenWithSpan> = None;
1896        loop {
1897            if self.consume_token(&Token::Period) {
1898                let next_token = self.peek_token_ref();
1899                match &next_token.token {
1900                    Token::Mul => {
1901                        // Postgres explicitly allows funcnm(tablenm.*) and the
1902                        // function array_agg traverses this control flow
1903                        if dialect_of!(self is PostgreSqlDialect) {
1904                            ending_wildcard = Some(self.next_token());
1905                        } else {
1906                            // Put back the consumed `.` tokens before exiting.
1907                            // If this expression is being parsed in the
1908                            // context of a projection, then the `.*` could imply
1909                            // a wildcard expansion. For example:
1910                            // `SELECT STRUCT('foo').* FROM T`
1911                            self.prev_token(); // .
1912                        }
1913
1914                        break;
1915                    }
1916                    Token::SingleQuotedString(s) => {
1917                        let expr =
1918                            Expr::Identifier(Ident::with_quote_and_span('\'', next_token.span, s));
1919                        chain.push(AccessExpr::Dot(expr));
1920                        self.advance_token(); // The consumed string
1921                    }
1922                    // Fallback to parsing an arbitrary expression, but restrict to expression
1923                    // types that are valid after the dot operator. This ensures that e.g.
1924                    // `T.interval` is parsed as a compound identifier, not as an interval
1925                    // expression.
1926                    _ => {
1927                        let expr = self.maybe_parse(|parser| {
1928                            let expr = parser
1929                                .parse_subexpr(parser.dialect.prec_value(Precedence::Period))?;
1930                            match &expr {
1931                                Expr::CompoundFieldAccess { .. }
1932                                | Expr::CompoundIdentifier(_)
1933                                | Expr::Identifier(_)
1934                                | Expr::Value(_)
1935                                | Expr::Function(_) => Ok(expr),
1936                                _ => parser.expected("an identifier or value", parser.peek_token()),
1937                            }
1938                        })?;
1939
1940                        match expr {
1941                            // If we get back a compound field access or identifier,
1942                            // we flatten the nested expression.
1943                            // For example if the current root is `foo`
1944                            // and we get back a compound identifier expression `bar.baz`
1945                            // The full expression should be `foo.bar.baz` (i.e.
1946                            // a root with an access chain with 2 entries) and not
1947                            // `foo.(bar.baz)` (i.e. a root with an access chain with
1948                            // 1 entry).
1949                            Some(Expr::CompoundFieldAccess { root, access_chain }) => {
1950                                chain.push(AccessExpr::Dot(*root));
1951                                chain.extend(access_chain);
1952                            }
1953                            Some(Expr::CompoundIdentifier(parts)) => chain.extend(
1954                                parts.into_iter().map(Expr::Identifier).map(AccessExpr::Dot),
1955                            ),
1956                            Some(expr) => {
1957                                chain.push(AccessExpr::Dot(expr));
1958                            }
1959                            // If the expression is not a valid suffix, fall back to
1960                            // parsing as an identifier. This handles cases like `T.interval`
1961                            // where `interval` is a keyword but should be treated as an identifier.
1962                            None => {
1963                                chain.push(AccessExpr::Dot(Expr::Identifier(
1964                                    self.parse_identifier()?,
1965                                )));
1966                            }
1967                        }
1968                    }
1969                }
1970            } else if !self.dialect.supports_partiql()
1971                && self.peek_token_ref().token == Token::LBracket
1972            {
1973                self.parse_multi_dim_subscript(&mut chain)?;
1974            } else {
1975                break;
1976            }
1977        }
1978
1979        let tok_index = self.get_current_index();
1980        if let Some(wildcard_token) = ending_wildcard {
1981            if !Self::is_all_ident(&root, &chain) {
1982                return self.expected("an identifier or a '*' after '.'", self.peek_token());
1983            };
1984            Ok(Expr::QualifiedWildcard(
1985                ObjectName::from(Self::exprs_to_idents(root, chain)?),
1986                AttachedToken(wildcard_token),
1987            ))
1988        } else if self.maybe_parse_outer_join_operator() {
1989            if !Self::is_all_ident(&root, &chain) {
1990                return self.expected_at("column identifier before (+)", tok_index);
1991            };
1992            let expr = if chain.is_empty() {
1993                root
1994            } else {
1995                Expr::CompoundIdentifier(Self::exprs_to_idents(root, chain)?)
1996            };
1997            Ok(Expr::OuterJoin(expr.into()))
1998        } else {
1999            Self::build_compound_expr(root, chain)
2000        }
2001    }
2002
2003    /// Combines a root expression and access chain to form
2004    /// a compound expression, which may be an [Expr::CompoundFieldAccess]
2005    /// or another special-cased expression like [Expr::CompoundIdentifier] or
2006    /// [Expr::OuterJoin].
2007    fn build_compound_expr(
2008        root: Expr,
2009        mut access_chain: Vec<AccessExpr>,
2010    ) -> Result<Expr, ParserError> {
2011        if access_chain.is_empty() {
2012            return Ok(root);
2013        }
2014
2015        if Self::is_all_ident(&root, &access_chain) {
2016            return Ok(Expr::CompoundIdentifier(Self::exprs_to_idents(
2017                root,
2018                access_chain,
2019            )?));
2020        }
2021
2022        // Flatten qualified function calls.
2023        // For example, the expression `a.b.c.foo(1,2,3)` should
2024        // represent a function called `a.b.c.foo`, rather than
2025        // a composite expression.
2026        if matches!(root, Expr::Identifier(_))
2027            && matches!(
2028                access_chain.last(),
2029                Some(AccessExpr::Dot(Expr::Function(_)))
2030            )
2031            && access_chain
2032                .iter()
2033                .rev()
2034                .skip(1) // All except the Function
2035                .all(|access| matches!(access, AccessExpr::Dot(Expr::Identifier(_))))
2036        {
2037            let Some(AccessExpr::Dot(Expr::Function(mut func))) = access_chain.pop() else {
2038                return parser_err!("expected function expression", root.span().start);
2039            };
2040
2041            let compound_func_name = [root]
2042                .into_iter()
2043                .chain(access_chain.into_iter().flat_map(|access| match access {
2044                    AccessExpr::Dot(expr) => Some(expr),
2045                    _ => None,
2046                }))
2047                .flat_map(|expr| match expr {
2048                    Expr::Identifier(ident) => Some(ident),
2049                    _ => None,
2050                })
2051                .map(ObjectNamePart::Identifier)
2052                .chain(func.name.0)
2053                .collect::<Vec<_>>();
2054            func.name = ObjectName(compound_func_name);
2055
2056            return Ok(Expr::Function(func));
2057        }
2058
2059        // Flatten qualified outer join expressions.
2060        // For example, the expression `T.foo(+)` should
2061        // represent an outer join on the column name `T.foo`
2062        // rather than a composite expression.
2063        if access_chain.len() == 1
2064            && matches!(
2065                access_chain.last(),
2066                Some(AccessExpr::Dot(Expr::OuterJoin(_)))
2067            )
2068        {
2069            let Some(AccessExpr::Dot(Expr::OuterJoin(inner_expr))) = access_chain.pop() else {
2070                return parser_err!("expected (+) expression", root.span().start);
2071            };
2072
2073            if !Self::is_all_ident(&root, &[]) {
2074                return parser_err!("column identifier before (+)", root.span().start);
2075            };
2076
2077            let token_start = root.span().start;
2078            let mut idents = Self::exprs_to_idents(root, vec![])?;
2079            match *inner_expr {
2080                Expr::CompoundIdentifier(suffix) => idents.extend(suffix),
2081                Expr::Identifier(suffix) => idents.push(suffix),
2082                _ => {
2083                    return parser_err!("column identifier before (+)", token_start);
2084                }
2085            }
2086
2087            return Ok(Expr::OuterJoin(Expr::CompoundIdentifier(idents).into()));
2088        }
2089
2090        Ok(Expr::CompoundFieldAccess {
2091            root: Box::new(root),
2092            access_chain,
2093        })
2094    }
2095
2096    fn keyword_to_modifier(k: Keyword) -> Option<ContextModifier> {
2097        match k {
2098            Keyword::LOCAL => Some(ContextModifier::Local),
2099            Keyword::GLOBAL => Some(ContextModifier::Global),
2100            Keyword::SESSION => Some(ContextModifier::Session),
2101            _ => None,
2102        }
2103    }
2104
2105    /// Check if the root is an identifier and all fields are identifiers.
2106    fn is_all_ident(root: &Expr, fields: &[AccessExpr]) -> bool {
2107        if !matches!(root, Expr::Identifier(_)) {
2108            return false;
2109        }
2110        fields
2111            .iter()
2112            .all(|x| matches!(x, AccessExpr::Dot(Expr::Identifier(_))))
2113    }
2114
2115    /// Convert a root and a list of fields to a list of identifiers.
2116    fn exprs_to_idents(root: Expr, fields: Vec<AccessExpr>) -> Result<Vec<Ident>, ParserError> {
2117        let mut idents = vec![];
2118        if let Expr::Identifier(root) = root {
2119            idents.push(root);
2120            for x in fields {
2121                if let AccessExpr::Dot(Expr::Identifier(ident)) = x {
2122                    idents.push(ident);
2123                } else {
2124                    return parser_err!(
2125                        format!("Expected identifier, found: {}", x),
2126                        x.span().start
2127                    );
2128                }
2129            }
2130            Ok(idents)
2131        } else {
2132            parser_err!(
2133                format!("Expected identifier, found: {}", root),
2134                root.span().start
2135            )
2136        }
2137    }
2138
2139    /// Returns true if the next tokens indicate the outer join operator `(+)`.
2140    fn peek_outer_join_operator(&mut self) -> bool {
2141        if !self.dialect.supports_outer_join_operator() {
2142            return false;
2143        }
2144
2145        let [maybe_lparen, maybe_plus, maybe_rparen] = self.peek_tokens_ref();
2146        Token::LParen == maybe_lparen.token
2147            && Token::Plus == maybe_plus.token
2148            && Token::RParen == maybe_rparen.token
2149    }
2150
2151    /// If the next tokens indicate the outer join operator `(+)`, consume
2152    /// them and return true.
2153    fn maybe_parse_outer_join_operator(&mut self) -> bool {
2154        self.dialect.supports_outer_join_operator()
2155            && self.consume_tokens(&[Token::LParen, Token::Plus, Token::RParen])
2156    }
2157
2158    /// Parse utility options in the form of `(option1, option2 arg2, option3 arg3, ...)`
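    /// For example, an options list such as the following (as used by e.g. `EXPLAIN`; illustrative):
    ///
    /// ```sql
    /// (ANALYZE, VERBOSE true, FORMAT JSON)
    /// ```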
2159    pub fn parse_utility_options(&mut self) -> Result<Vec<UtilityOption>, ParserError> {
2160        self.expect_token(&Token::LParen)?;
2161        let options = self.parse_comma_separated(Self::parse_utility_option)?;
2162        self.expect_token(&Token::RParen)?;
2163
2164        Ok(options)
2165    }
2166
2167    fn parse_utility_option(&mut self) -> Result<UtilityOption, ParserError> {
2168        let name = self.parse_identifier()?;
2169
2170        let next_token = self.peek_token();
2171        if next_token == Token::Comma || next_token == Token::RParen {
2172            return Ok(UtilityOption { name, arg: None });
2173        }
2174        let arg = self.parse_expr()?;
2175
2176        Ok(UtilityOption {
2177            name,
2178            arg: Some(arg),
2179        })
2180    }
2181
2182    fn try_parse_expr_sub_query(&mut self) -> Result<Option<Expr>, ParserError> {
2183        if !self.peek_sub_query() {
2184            return Ok(None);
2185        }
2186
2187        Ok(Some(Expr::Subquery(self.parse_query()?)))
2188    }
2189
2190    fn try_parse_lambda(&mut self) -> Result<Option<Expr>, ParserError> {
2191        if !self.dialect.supports_lambda_functions() {
2192            return Ok(None);
2193        }
2194        self.maybe_parse(|p| {
2195            let params = p.parse_comma_separated(|p| p.parse_identifier())?;
2196            p.expect_token(&Token::RParen)?;
2197            p.expect_token(&Token::Arrow)?;
2198            let expr = p.parse_expr()?;
2199            Ok(Expr::Lambda(LambdaFunction {
2200                params: OneOrManyWithParens::Many(params),
2201                body: Box::new(expr),
2202                syntax: LambdaSyntax::Arrow,
2203            }))
2204        })
2205    }
2206
2207    /// Parses a lambda expression using the `LAMBDA` keyword syntax.
2208    ///
2209    /// Syntax: `LAMBDA <params> : <expr>`
2210    ///
2211    /// Examples:
2212    /// - `LAMBDA x : x + 1`
2213    /// - `LAMBDA x, i : x > i`
2214    ///
2215    /// See <https://duckdb.org/docs/stable/sql/functions/lambda>
2216    fn parse_lambda_expr(&mut self) -> Result<Expr, ParserError> {
2217        // Parse the parameters: either a single identifier or comma-separated identifiers
2218        let params = if self.consume_token(&Token::LParen) {
2219            // Parenthesized parameters: (x, y)
2220            let params = self.parse_comma_separated(|p| p.parse_identifier())?;
2221            self.expect_token(&Token::RParen)?;
2222            OneOrManyWithParens::Many(params)
2223        } else {
2224            // Unparenthesized parameters: x or x, y
2225            let params = self.parse_comma_separated(|p| p.parse_identifier())?;
2226            if params.len() == 1 {
2227                OneOrManyWithParens::One(params.into_iter().next().unwrap())
2228            } else {
2229                OneOrManyWithParens::Many(params)
2230            }
2231        };
2232        // Expect the colon separator
2233        self.expect_token(&Token::Colon)?;
2234        // Parse the body expression
2235        let body = self.parse_expr()?;
2236        Ok(Expr::Lambda(LambdaFunction {
2237            params,
2238            body: Box::new(body),
2239            syntax: LambdaSyntax::LambdaKeyword,
2240        }))
2241    }
2242
2243    /// Tries to parse the body of an [ODBC escaping sequence]
2244    /// i.e. without the enclosing braces
2245    /// Currently implemented:
2246    /// - Scalar function calls
2247    /// - Date, time, and timestamp literals
2248    /// See <https://learn.microsoft.com/en-us/sql/odbc/reference/develop-app/escape-sequences-in-odbc?view=sql-server-2017>
2249    fn maybe_parse_odbc_body(&mut self) -> Result<Option<Expr>, ParserError> {
2250        // Attempt 1: Try to parse it as a function.
2251        if let Some(expr) = self.maybe_parse_odbc_fn_body()? {
2252            return Ok(Some(expr));
2253        }
2254        // Attempt 2: Try to parse it as a Date, Time or Timestamp Literal
2255        self.maybe_parse_odbc_body_datetime()
2256    }
2257
2258    /// Tries to parse the body of an [ODBC Date, Time, and Timestamp Literals] escape sequence.
2259    ///
2260    /// ```sql
2261    /// {d '2025-07-17'}
2262    /// {t '14:12:01'}
2263    /// {ts '2025-07-17 14:12:01'}
2264    /// ```
2265    ///
2266    /// [ODBC Date, Time, and Timestamp Literals]:
2267    /// https://learn.microsoft.com/en-us/sql/odbc/reference/develop-app/date-time-and-timestamp-literals?view=sql-server-2017
2268    fn maybe_parse_odbc_body_datetime(&mut self) -> Result<Option<Expr>, ParserError> {
2269        self.maybe_parse(|p| {
2270            let token = p.next_token().clone();
2271            let word_string = token.token.to_string();
2272            let data_type = match word_string.as_str() {
2273                "t" => DataType::Time(None, TimezoneInfo::None),
2274                "d" => DataType::Date,
2275                "ts" => DataType::Timestamp(None, TimezoneInfo::None),
2276                _ => return p.expected("ODBC datetime keyword (t, d, or ts)", token),
2277            };
2278            let value = p.parse_value()?;
2279            Ok(Expr::TypedString(TypedString {
2280                data_type,
2281                value,
2282                uses_odbc_syntax: true,
2283            }))
2284        })
2285    }
2286
2287    /// Tries to parse the body of an [ODBC function] call.
2288    /// i.e. without the enclosing braces
2289    ///
2290    /// ```sql
2291    /// fn myfunc(1,2,3)
2292    /// ```
2293    ///
2294    /// [ODBC function]: https://learn.microsoft.com/en-us/sql/odbc/reference/develop-app/scalar-function-calls?view=sql-server-2017
2295    fn maybe_parse_odbc_fn_body(&mut self) -> Result<Option<Expr>, ParserError> {
2296        self.maybe_parse(|p| {
2297            p.expect_keyword(Keyword::FN)?;
2298            let fn_name = p.parse_object_name(false)?;
2299            let mut fn_call = p.parse_function_call(fn_name)?;
2300            fn_call.uses_odbc_syntax = true;
2301            Ok(Expr::Function(fn_call))
2302        })
2303    }
2304
2305    /// Parse a function call expression named by `name` and return it as an `Expr`.
2306    pub fn parse_function(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2307        self.parse_function_call(name).map(Expr::Function)
2308    }
2309
2310    fn parse_function_call(&mut self, name: ObjectName) -> Result<Function, ParserError> {
2311        self.expect_token(&Token::LParen)?;
2312
2313        // Snowflake permits a subquery to be passed as an argument without
2314        // an enclosing set of parens if it's the only argument.
2315        if self.dialect.supports_subquery_as_function_arg() && self.peek_sub_query() {
2316            let subquery = self.parse_query()?;
2317            self.expect_token(&Token::RParen)?;
2318            return Ok(Function {
2319                name,
2320                uses_odbc_syntax: false,
2321                parameters: FunctionArguments::None,
2322                args: FunctionArguments::Subquery(subquery),
2323                filter: None,
2324                null_treatment: None,
2325                over: None,
2326                within_group: vec![],
2327            });
2328        }
2329
2330        let mut args = self.parse_function_argument_list()?;
2331        let mut parameters = FunctionArguments::None;
2332        // ClickHouse aggregations support parametric functions like `HISTOGRAM(0.5, 0.6)(x, y)`
2333        // where `(0.5, 0.6)` are the parameters to the function.
2334        if dialect_of!(self is ClickHouseDialect | GenericDialect)
2335            && self.consume_token(&Token::LParen)
2336        {
2337            parameters = FunctionArguments::List(args);
2338            args = self.parse_function_argument_list()?;
2339        }
2340
2341        let within_group = if self.parse_keywords(&[Keyword::WITHIN, Keyword::GROUP]) {
2342            self.expect_token(&Token::LParen)?;
2343            self.expect_keywords(&[Keyword::ORDER, Keyword::BY])?;
2344            let order_by = self.parse_comma_separated(Parser::parse_order_by_expr)?;
2345            self.expect_token(&Token::RParen)?;
2346            order_by
2347        } else {
2348            vec![]
2349        };
2350
2351        let filter = if self.dialect.supports_filter_during_aggregation()
2352            && self.parse_keyword(Keyword::FILTER)
2353            && self.consume_token(&Token::LParen)
2354            && self.parse_keyword(Keyword::WHERE)
2355        {
2356            let filter = Some(Box::new(self.parse_expr()?));
2357            self.expect_token(&Token::RParen)?;
2358            filter
2359        } else {
2360            None
2361        };
2362
2363        // Syntax for null treatment shows up either in the args list
2364        // or after the function call, but not both.
2365        let null_treatment = if args
2366            .clauses
2367            .iter()
2368            .all(|clause| !matches!(clause, FunctionArgumentClause::IgnoreOrRespectNulls(_)))
2369        {
2370            self.parse_null_treatment()?
2371        } else {
2372            None
2373        };
2374
2375        let over = if self.parse_keyword(Keyword::OVER) {
2376            if self.consume_token(&Token::LParen) {
2377                let window_spec = self.parse_window_spec()?;
2378                Some(WindowType::WindowSpec(window_spec))
2379            } else {
2380                Some(WindowType::NamedWindow(self.parse_identifier()?))
2381            }
2382        } else {
2383            None
2384        };
2385
2386        Ok(Function {
2387            name,
2388            uses_odbc_syntax: false,
2389            parameters,
2390            args: FunctionArguments::List(args),
2391            null_treatment,
2392            filter,
2393            over,
2394            within_group,
2395        })
2396    }
2397
2398    /// Optionally parses a null treatment clause.
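    /// For example, the `IGNORE NULLS` in a call such as (illustrative):
    ///
    /// ```sql
    /// LAST_VALUE(x) IGNORE NULLS OVER (ORDER BY y)
    /// ```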
2399    fn parse_null_treatment(&mut self) -> Result<Option<NullTreatment>, ParserError> {
2400        match self.parse_one_of_keywords(&[Keyword::RESPECT, Keyword::IGNORE]) {
2401            Some(keyword) => {
2402                self.expect_keyword_is(Keyword::NULLS)?;
2403
2404                Ok(match keyword {
2405                    Keyword::RESPECT => Some(NullTreatment::RespectNulls),
2406                    Keyword::IGNORE => Some(NullTreatment::IgnoreNulls),
2407                    _ => None,
2408                })
2409            }
2410            None => Ok(None),
2411        }
2412    }
2413
2414    /// Parse time-related function `name` possibly followed by `(...)` arguments.
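    /// For example, both forms are accepted (illustrative):
    ///
    /// ```sql
    /// CURRENT_TIMESTAMP
    /// CURRENT_TIMESTAMP(6)
    /// ```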
2415    pub fn parse_time_functions(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2416        let args = if self.consume_token(&Token::LParen) {
2417            FunctionArguments::List(self.parse_function_argument_list()?)
2418        } else {
2419            FunctionArguments::None
2420        };
2421        Ok(Expr::Function(Function {
2422            name,
2423            uses_odbc_syntax: false,
2424            parameters: FunctionArguments::None,
2425            args,
2426            filter: None,
2427            over: None,
2428            null_treatment: None,
2429            within_group: vec![],
2430        }))
2431    }
2432
2433    /// Parse window frame `UNITS` clause: `ROWS`, `RANGE`, or `GROUPS`.
2434    pub fn parse_window_frame_units(&mut self) -> Result<WindowFrameUnits, ParserError> {
2435        let next_token = self.next_token();
2436        match &next_token.token {
2437            Token::Word(w) => match w.keyword {
2438                Keyword::ROWS => Ok(WindowFrameUnits::Rows),
2439                Keyword::RANGE => Ok(WindowFrameUnits::Range),
2440                Keyword::GROUPS => Ok(WindowFrameUnits::Groups),
2441                _ => self.expected("ROWS, RANGE, GROUPS", next_token)?,
2442            },
2443            _ => self.expected("ROWS, RANGE, GROUPS", next_token),
2444        }
2445    }
2446
2447    /// Parse a `WINDOW` frame definition (units and bounds).
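    /// For example (illustrative):
    ///
    /// ```sql
    /// ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW
    /// RANGE UNBOUNDED PRECEDING
    /// ```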
2448    pub fn parse_window_frame(&mut self) -> Result<WindowFrame, ParserError> {
2449        let units = self.parse_window_frame_units()?;
2450        let (start_bound, end_bound) = if self.parse_keyword(Keyword::BETWEEN) {
2451            let start_bound = self.parse_window_frame_bound()?;
2452            self.expect_keyword_is(Keyword::AND)?;
2453            let end_bound = Some(self.parse_window_frame_bound()?);
2454            (start_bound, end_bound)
2455        } else {
2456            (self.parse_window_frame_bound()?, None)
2457        };
2458        Ok(WindowFrame {
2459            units,
2460            start_bound,
2461            end_bound,
2462        })
2463    }
2464
2465    /// Parse a window frame bound: `CURRENT ROW` or `<n> PRECEDING|FOLLOWING`.
2466    pub fn parse_window_frame_bound(&mut self) -> Result<WindowFrameBound, ParserError> {
2467        if self.parse_keywords(&[Keyword::CURRENT, Keyword::ROW]) {
2468            Ok(WindowFrameBound::CurrentRow)
2469        } else {
2470            let rows = if self.parse_keyword(Keyword::UNBOUNDED) {
2471                None
2472            } else {
2473                Some(Box::new(match self.peek_token().token {
2474                    Token::SingleQuotedString(_) => self.parse_interval()?,
2475                    _ => self.parse_expr()?,
2476                }))
2477            };
2478            if self.parse_keyword(Keyword::PRECEDING) {
2479                Ok(WindowFrameBound::Preceding(rows))
2480            } else if self.parse_keyword(Keyword::FOLLOWING) {
2481                Ok(WindowFrameBound::Following(rows))
2482            } else {
2483                self.expected("PRECEDING or FOLLOWING", self.peek_token())
2484            }
2485        }
2486    }
2487
2488    /// Parse a group by expr. Group by expr can be one of group sets, roll up, cube, or simple expr.
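    /// For example, any one of these group by elements (illustrative):
    ///
    /// ```sql
    /// GROUPING SETS ((a), (b), ())
    /// ROLLUP (a, b)
    /// CUBE (a, b)
    /// ()
    /// a + b
    /// ```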
2489    fn parse_group_by_expr(&mut self) -> Result<Expr, ParserError> {
2490        if self.dialect.supports_group_by_expr() {
2491            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
2492                self.expect_token(&Token::LParen)?;
2493                let result = self.parse_comma_separated(|p| p.parse_tuple(false, true))?;
2494                self.expect_token(&Token::RParen)?;
2495                Ok(Expr::GroupingSets(result))
2496            } else if self.parse_keyword(Keyword::CUBE) {
2497                self.expect_token(&Token::LParen)?;
2498                let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2499                self.expect_token(&Token::RParen)?;
2500                Ok(Expr::Cube(result))
2501            } else if self.parse_keyword(Keyword::ROLLUP) {
2502                self.expect_token(&Token::LParen)?;
2503                let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2504                self.expect_token(&Token::RParen)?;
2505                Ok(Expr::Rollup(result))
2506            } else if self.consume_tokens(&[Token::LParen, Token::RParen]) {
2507                // PostgreSQL allows using an empty tuple as a group by expression,
2508                // e.g. `GROUP BY (), name`. See the GROUP BY clause section in
2509                // [PostgreSQL](https://www.postgresql.org/docs/16/sql-select.html)
2510                Ok(Expr::Tuple(vec![]))
2511            } else {
2512                self.parse_expr()
2513            }
2514        } else {
2515            // TODO parse rollup for other dialects
2516            self.parse_expr()
2517        }
2518    }
2519
2520    /// Parse a tuple with `(` and `)`.
2521    /// If `lift_singleton` is true, a bare expression without parentheses is accepted and lifted into a tuple of length 1; otherwise parentheses are required.
2522    /// If `allow_empty` is true, then an empty tuple is allowed.
2523    fn parse_tuple(
2524        &mut self,
2525        lift_singleton: bool,
2526        allow_empty: bool,
2527    ) -> Result<Vec<Expr>, ParserError> {
2528        if lift_singleton {
2529            if self.consume_token(&Token::LParen) {
2530                let result = if allow_empty && self.consume_token(&Token::RParen) {
2531                    vec![]
2532                } else {
2533                    let result = self.parse_comma_separated(Parser::parse_expr)?;
2534                    self.expect_token(&Token::RParen)?;
2535                    result
2536                };
2537                Ok(result)
2538            } else {
2539                Ok(vec![self.parse_expr()?])
2540            }
2541        } else {
2542            self.expect_token(&Token::LParen)?;
2543            let result = if allow_empty && self.consume_token(&Token::RParen) {
2544                vec![]
2545            } else {
2546                let result = self.parse_comma_separated(Parser::parse_expr)?;
2547                self.expect_token(&Token::RParen)?;
2548                result
2549            };
2550            Ok(result)
2551        }
2552    }
2553
2554    /// Parse a `CASE` expression and return an [`Expr::Case`].
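    /// Handles both the searched and the simple (operand) forms, e.g. (illustrative):
    ///
    /// ```sql
    /// CASE WHEN a > 0 THEN 'positive' ELSE 'non-positive' END
    /// CASE a WHEN 1 THEN 'one' WHEN 2 THEN 'two' ELSE 'many' END
    /// ```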
2555    pub fn parse_case_expr(&mut self) -> Result<Expr, ParserError> {
2556        let case_token = AttachedToken(self.get_current_token().clone());
2557        let mut operand = None;
2558        if !self.parse_keyword(Keyword::WHEN) {
2559            operand = Some(Box::new(self.parse_expr()?));
2560            self.expect_keyword_is(Keyword::WHEN)?;
2561        }
2562        let mut conditions = vec![];
2563        loop {
2564            let condition = self.parse_expr()?;
2565            self.expect_keyword_is(Keyword::THEN)?;
2566            let result = self.parse_expr()?;
2567            conditions.push(CaseWhen { condition, result });
2568            if !self.parse_keyword(Keyword::WHEN) {
2569                break;
2570            }
2571        }
2572        let else_result = if self.parse_keyword(Keyword::ELSE) {
2573            Some(Box::new(self.parse_expr()?))
2574        } else {
2575            None
2576        };
2577        let end_token = AttachedToken(self.expect_keyword(Keyword::END)?);
2578        Ok(Expr::Case {
2579            case_token,
2580            end_token,
2581            operand,
2582            conditions,
2583            else_result,
2584        })
2585    }
2586
2587    /// Parse an optional `FORMAT` clause for `CAST` expressions.
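    /// For example, the `FORMAT ...` part of a BigQuery-style cast (illustrative):
    ///
    /// ```sql
    /// CAST('2023-01-02' AS DATE FORMAT 'YYYY-MM-DD')
    /// CAST(ts AS STRING FORMAT 'YYYY-MM-DD' AT TIME ZONE 'UTC')
    /// ```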
2588    pub fn parse_optional_cast_format(&mut self) -> Result<Option<CastFormat>, ParserError> {
2589        if self.parse_keyword(Keyword::FORMAT) {
2590            let value = self.parse_value()?.value;
2591            match self.parse_optional_time_zone()? {
2592                Some(tz) => Ok(Some(CastFormat::ValueAtTimeZone(value, tz))),
2593                None => Ok(Some(CastFormat::Value(value))),
2594            }
2595        } else {
2596            Ok(None)
2597        }
2598    }
2599
2600    /// Parse an optional `AT TIME ZONE` clause.
2601    pub fn parse_optional_time_zone(&mut self) -> Result<Option<Value>, ParserError> {
2602        if self.parse_keywords(&[Keyword::AT, Keyword::TIME, Keyword::ZONE]) {
2603            self.parse_value().map(|v| Some(v.value))
2604        } else {
2605            Ok(None)
2606        }
2607    }
2608
2609    /// Parse an MSSQL-style `CONVERT` function, where the target type precedes the value.
2610    fn parse_mssql_convert(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2611        self.expect_token(&Token::LParen)?;
2612        let data_type = self.parse_data_type()?;
2613        self.expect_token(&Token::Comma)?;
2614        let expr = self.parse_expr()?;
2615        let styles = if self.consume_token(&Token::Comma) {
2616            self.parse_comma_separated(Parser::parse_expr)?
2617        } else {
2618            Default::default()
2619        };
2620        self.expect_token(&Token::RParen)?;
2621        Ok(Expr::Convert {
2622            is_try,
2623            expr: Box::new(expr),
2624            data_type: Some(data_type),
2625            charset: None,
2626            target_before_value: true,
2627            styles,
2628        })
2629    }
2630
2631    /// Parse a SQL CONVERT function:
2632    ///  - `CONVERT('héhé' USING utf8mb4)` (MySQL)
2633    ///  - `CONVERT('héhé', CHAR CHARACTER SET utf8mb4)` (MySQL)
2634    ///  - `CONVERT(DECIMAL(10, 5), 42)` (MSSQL) - the type comes first
2635    pub fn parse_convert_expr(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2636        if self.dialect.convert_type_before_value() {
2637            return self.parse_mssql_convert(is_try);
2638        }
2639        self.expect_token(&Token::LParen)?;
2640        let expr = self.parse_expr()?;
2641        if self.parse_keyword(Keyword::USING) {
2642            let charset = self.parse_object_name(false)?;
2643            self.expect_token(&Token::RParen)?;
2644            return Ok(Expr::Convert {
2645                is_try,
2646                expr: Box::new(expr),
2647                data_type: None,
2648                charset: Some(charset),
2649                target_before_value: false,
2650                styles: vec![],
2651            });
2652        }
2653        self.expect_token(&Token::Comma)?;
2654        let data_type = self.parse_data_type()?;
2655        let charset = if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
2656            Some(self.parse_object_name(false)?)
2657        } else {
2658            None
2659        };
2660        self.expect_token(&Token::RParen)?;
2661        Ok(Expr::Convert {
2662            is_try,
2663            expr: Box::new(expr),
2664            data_type: Some(data_type),
2665            charset,
2666            target_before_value: false,
2667            styles: vec![],
2668        })
2669    }
2670
2671    /// Parse a SQL CAST function e.g. `CAST(expr AS FLOAT)`
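    ///
    /// A hedged sketch of how this can be exercised through the public API (assumes
    /// `GenericDialect`; not taken from the original docs):
    ///
    /// ```
    /// # use sqlparser::ast::Expr;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// // Illustrative only: parse a plain CAST and check the resulting AST variant.
    /// let expr = Parser::new(&GenericDialect {})
    ///     .try_with_sql("CAST(id AS BIGINT)")
    ///     .unwrap()
    ///     .parse_expr()
    ///     .unwrap();
    /// assert!(matches!(expr, Expr::Cast { .. }));
    /// ```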
2672    pub fn parse_cast_expr(&mut self, kind: CastKind) -> Result<Expr, ParserError> {
2673        self.expect_token(&Token::LParen)?;
2674        let expr = self.parse_expr()?;
2675        self.expect_keyword_is(Keyword::AS)?;
2676        let data_type = self.parse_data_type()?;
2677        let array = self.parse_keyword(Keyword::ARRAY);
2678        let format = self.parse_optional_cast_format()?;
2679        self.expect_token(&Token::RParen)?;
2680        Ok(Expr::Cast {
2681            kind,
2682            expr: Box::new(expr),
2683            data_type,
2684            array,
2685            format,
2686        })
2687    }
2688
2689    /// Parse a SQL EXISTS expression e.g. `WHERE EXISTS(SELECT ...)`.
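    ///
    /// Example sketch (assumes `GenericDialect`; illustrative rather than normative):
    ///
    /// ```
    /// # use sqlparser::ast::Expr;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// // Illustrative only: EXISTS with a subquery parses into `Expr::Exists`.
    /// let expr = Parser::new(&GenericDialect {})
    ///     .try_with_sql("EXISTS (SELECT 1)")
    ///     .unwrap()
    ///     .parse_expr()
    ///     .unwrap();
    /// assert!(matches!(expr, Expr::Exists { negated: false, .. }));
    /// ```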
2690    pub fn parse_exists_expr(&mut self, negated: bool) -> Result<Expr, ParserError> {
2691        self.expect_token(&Token::LParen)?;
2692        let exists_node = Expr::Exists {
2693            negated,
2694            subquery: self.parse_query()?,
2695        };
2696        self.expect_token(&Token::RParen)?;
2697        Ok(exists_node)
2698    }
2699
2700    /// Parse a SQL `EXTRACT` expression e.g. `EXTRACT(YEAR FROM date)`.
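    ///
    /// Illustrative sketch, assuming `GenericDialect` (not from the original docs):
    ///
    /// ```
    /// # use sqlparser::ast::Expr;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// // Illustrative only: the standard `FROM` form of EXTRACT.
    /// let expr = Parser::new(&GenericDialect {})
    ///     .try_with_sql("EXTRACT(YEAR FROM order_date)")
    ///     .unwrap()
    ///     .parse_expr()
    ///     .unwrap();
    /// assert!(matches!(expr, Expr::Extract { .. }));
    /// ```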
2701    pub fn parse_extract_expr(&mut self) -> Result<Expr, ParserError> {
2702        self.expect_token(&Token::LParen)?;
2703        let field = self.parse_date_time_field()?;
2704
2705        let syntax = if self.parse_keyword(Keyword::FROM) {
2706            ExtractSyntax::From
2707        } else if self.dialect.supports_extract_comma_syntax() && self.consume_token(&Token::Comma)
2708        {
2709            ExtractSyntax::Comma
2710        } else {
2711            return Err(ParserError::ParserError(
2712                "Expected 'FROM' or ','".to_string(),
2713            ));
2714        };
2715
2716        let expr = self.parse_expr()?;
2717        self.expect_token(&Token::RParen)?;
2718        Ok(Expr::Extract {
2719            field,
2720            expr: Box::new(expr),
2721            syntax,
2722        })
2723    }
2724
2725    /// Parse a `CEIL` or `FLOOR` expression.
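    ///
    /// Hedged example (assumes `GenericDialect` routes `CEIL(... TO ...)` here; illustrative only):
    ///
    /// ```
    /// # use sqlparser::ast::Expr;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// // Illustrative only: CEIL with a date/time field qualifier.
    /// let expr = Parser::new(&GenericDialect {})
    ///     .try_with_sql("CEIL(sold_at TO DAY)")
    ///     .unwrap()
    ///     .parse_expr()
    ///     .unwrap();
    /// assert!(matches!(expr, Expr::Ceil { .. }));
    /// ```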
2726    pub fn parse_ceil_floor_expr(&mut self, is_ceil: bool) -> Result<Expr, ParserError> {
2727        self.expect_token(&Token::LParen)?;
2728        let expr = self.parse_expr()?;
2729        // Parse `CEIL/FLOOR(expr)`
2730        let field = if self.parse_keyword(Keyword::TO) {
2731            // Parse `CEIL/FLOOR(expr TO DateTimeField)`
2732            CeilFloorKind::DateTimeField(self.parse_date_time_field()?)
2733        } else if self.consume_token(&Token::Comma) {
2734            // Parse `CEIL/FLOOR(expr, scale)`
2735            match self.parse_value()?.value {
2736                Value::Number(n, s) => CeilFloorKind::Scale(Value::Number(n, s)),
2737                _ => {
2738                    return Err(ParserError::ParserError(
2739                        "Scale field can only be of number type".to_string(),
2740                    ))
2741                }
2742            }
2743        } else {
2744            CeilFloorKind::DateTimeField(DateTimeField::NoDateTime)
2745        };
2746        self.expect_token(&Token::RParen)?;
2747        if is_ceil {
2748            Ok(Expr::Ceil {
2749                expr: Box::new(expr),
2750                field,
2751            })
2752        } else {
2753            Ok(Expr::Floor {
2754                expr: Box::new(expr),
2755                field,
2756            })
2757        }
2758    }
2759
2760    /// Parse a `POSITION` expression.
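    ///
    /// Illustrative sketch of the special `IN` syntax (assumes `GenericDialect`):
    ///
    /// ```
    /// # use sqlparser::ast::Expr;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// // Illustrative only: POSITION with the special `IN` keyword.
    /// let expr = Parser::new(&GenericDialect {})
    ///     .try_with_sql("POSITION('@' IN email)")
    ///     .unwrap()
    ///     .parse_expr()
    ///     .unwrap();
    /// assert!(matches!(expr, Expr::Position { .. }));
    /// ```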
2761    pub fn parse_position_expr(&mut self, ident: Ident) -> Result<Expr, ParserError> {
2762        let between_prec = self.dialect.prec_value(Precedence::Between);
2763        let position_expr = self.maybe_parse(|p| {
2764            // Parse e.g. `POSITION('@' IN field)`
2765            p.expect_token(&Token::LParen)?;
2766
2767            // Parse the subexpr till the IN keyword
2768            let expr = p.parse_subexpr(between_prec)?;
2769            p.expect_keyword_is(Keyword::IN)?;
2770            let from = p.parse_expr()?;
2771            p.expect_token(&Token::RParen)?;
2772            Ok(Expr::Position {
2773                expr: Box::new(expr),
2774                r#in: Box::new(from),
2775            })
2776        })?;
2777        match position_expr {
2778            Some(expr) => Ok(expr),
2779            // Snowflake supports `position` as an ordinary function call
2780            // without the special `IN` syntax.
2781            None => self.parse_function(ObjectName::from(vec![ident])),
2782        }
2783    }
2784
2785    /// Parse `SUBSTRING`/`SUBSTR` expressions: `SUBSTRING(expr FROM start FOR length)` or `SUBSTR(expr, start, length)`.
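    ///
    /// A minimal, hedged example of the `FROM`/`FOR` form (assumes `GenericDialect`):
    ///
    /// ```
    /// # use sqlparser::ast::Expr;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// // Illustrative only: SUBSTRING using the keyword (non-comma) syntax.
    /// let expr = Parser::new(&GenericDialect {})
    ///     .try_with_sql("SUBSTRING('SQL Tutorial' FROM 1 FOR 3)")
    ///     .unwrap()
    ///     .parse_expr()
    ///     .unwrap();
    /// assert!(matches!(expr, Expr::Substring { special: false, .. }));
    /// ```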
2786    pub fn parse_substring(&mut self) -> Result<Expr, ParserError> {
2787        let shorthand = match self.expect_one_of_keywords(&[Keyword::SUBSTR, Keyword::SUBSTRING])? {
2788            Keyword::SUBSTR => true,
2789            Keyword::SUBSTRING => false,
2790            _ => {
2791                self.prev_token();
2792                return self.expected("SUBSTR or SUBSTRING", self.peek_token());
2793            }
2794        };
2795        self.expect_token(&Token::LParen)?;
2796        let expr = self.parse_expr()?;
2797        let mut from_expr = None;
2798        let special = self.consume_token(&Token::Comma);
2799        if special || self.parse_keyword(Keyword::FROM) {
2800            from_expr = Some(self.parse_expr()?);
2801        }
2802
2803        let mut to_expr = None;
2804        if self.parse_keyword(Keyword::FOR) || self.consume_token(&Token::Comma) {
2805            to_expr = Some(self.parse_expr()?);
2806        }
2807        self.expect_token(&Token::RParen)?;
2808
2809        Ok(Expr::Substring {
2810            expr: Box::new(expr),
2811            substring_from: from_expr.map(Box::new),
2812            substring_for: to_expr.map(Box::new),
2813            special,
2814            shorthand,
2815        })
2816    }
2817
2818    /// Parse an OVERLAY expression.
2819    ///
2820    /// See [Expr::Overlay]
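    ///
    /// Illustrative usage sketch (assumes `GenericDialect`; not from the original docs):
    ///
    /// ```
    /// # use sqlparser::ast::Expr;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// // Illustrative only: OVERLAY with a FROM position and no FOR length.
    /// let expr = Parser::new(&GenericDialect {})
    ///     .try_with_sql("OVERLAY('abcdef' PLACING 'cde' FROM 3)")
    ///     .unwrap()
    ///     .parse_expr()
    ///     .unwrap();
    /// assert!(matches!(expr, Expr::Overlay { overlay_for: None, .. }));
    /// ```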
2821    pub fn parse_overlay_expr(&mut self) -> Result<Expr, ParserError> {
2822        // Parse e.g. `OVERLAY(expr PLACING expr FROM 1 [FOR 3])`
2823        self.expect_token(&Token::LParen)?;
2824        let expr = self.parse_expr()?;
2825        self.expect_keyword_is(Keyword::PLACING)?;
2826        let what_expr = self.parse_expr()?;
2827        self.expect_keyword_is(Keyword::FROM)?;
2828        let from_expr = self.parse_expr()?;
2829        let mut for_expr = None;
2830        if self.parse_keyword(Keyword::FOR) {
2831            for_expr = Some(self.parse_expr()?);
2832        }
2833        self.expect_token(&Token::RParen)?;
2834
2835        Ok(Expr::Overlay {
2836            expr: Box::new(expr),
2837            overlay_what: Box::new(what_expr),
2838            overlay_from: Box::new(from_expr),
2839            overlay_for: for_expr.map(Box::new),
2840        })
2841    }
2842
2843    /// ```sql
2844    /// TRIM ([BOTH | LEADING | TRAILING] ['text' FROM] 'text')
2845    /// TRIM ('text')
2846    /// TRIM(<expr> [, characters]) -- DuckDB, Snowflake, and BigQuery only
2847    /// ```
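    ///
    /// Hedged example of the `BOTH ... FROM` form (assumes `GenericDialect`):
    ///
    /// ```
    /// # use sqlparser::ast::Expr;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// // Illustrative only: trims 'x' characters from both ends of the string.
    /// let expr = Parser::new(&GenericDialect {})
    ///     .try_with_sql("TRIM(BOTH 'x' FROM 'xxfooxx')")
    ///     .unwrap()
    ///     .parse_expr()
    ///     .unwrap();
    /// assert!(matches!(expr, Expr::Trim { trim_where: Some(_), .. }));
    /// ```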
2848    pub fn parse_trim_expr(&mut self) -> Result<Expr, ParserError> {
2849        self.expect_token(&Token::LParen)?;
2850        let mut trim_where = None;
2851        if let Token::Word(word) = self.peek_token().token {
2852            if [Keyword::BOTH, Keyword::LEADING, Keyword::TRAILING].contains(&word.keyword) {
2853                trim_where = Some(self.parse_trim_where()?);
2854            }
2855        }
2856        let expr = self.parse_expr()?;
2857        if self.parse_keyword(Keyword::FROM) {
2858            let trim_what = Box::new(expr);
2859            let expr = self.parse_expr()?;
2860            self.expect_token(&Token::RParen)?;
2861            Ok(Expr::Trim {
2862                expr: Box::new(expr),
2863                trim_where,
2864                trim_what: Some(trim_what),
2865                trim_characters: None,
2866            })
2867        } else if self.consume_token(&Token::Comma)
2868            && dialect_of!(self is DuckDbDialect | SnowflakeDialect | BigQueryDialect | GenericDialect)
2869        {
2870            let characters = self.parse_comma_separated(Parser::parse_expr)?;
2871            self.expect_token(&Token::RParen)?;
2872            Ok(Expr::Trim {
2873                expr: Box::new(expr),
2874                trim_where: None,
2875                trim_what: None,
2876                trim_characters: Some(characters),
2877            })
2878        } else {
2879            self.expect_token(&Token::RParen)?;
2880            Ok(Expr::Trim {
2881                expr: Box::new(expr),
2882                trim_where,
2883                trim_what: None,
2884                trim_characters: None,
2885            })
2886        }
2887    }
2888
2889    /// Parse the `WHERE` field for a `TRIM` expression.
2890    ///
2891    /// See [TrimWhereField]
2892    pub fn parse_trim_where(&mut self) -> Result<TrimWhereField, ParserError> {
2893        let next_token = self.next_token();
2894        match &next_token.token {
2895            Token::Word(w) => match w.keyword {
2896                Keyword::BOTH => Ok(TrimWhereField::Both),
2897                Keyword::LEADING => Ok(TrimWhereField::Leading),
2898                Keyword::TRAILING => Ok(TrimWhereField::Trailing),
2899                _ => self.expected("trim_where field", next_token)?,
2900            },
2901            _ => self.expected("trim_where field", next_token),
2902        }
2903    }
2904
2905    /// Parses an array expression `[ex1, ex2, ..]`
2906    /// If `named` is `true`, the array came from an expression like `ARRAY[ex1, ex2]`.
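    ///
    /// Illustrative sketch using the `ARRAY[...]` form (assumes `PostgreSqlDialect`,
    /// which accepts this syntax; not taken from the original docs):
    ///
    /// ```
    /// # use sqlparser::ast::Expr;
    /// # use sqlparser::dialect::PostgreSqlDialect;
    /// # use sqlparser::parser::Parser;
    /// // Illustrative only: a named array literal.
    /// let expr = Parser::new(&PostgreSqlDialect {})
    ///     .try_with_sql("ARRAY[1, 2, 3]")
    ///     .unwrap()
    ///     .parse_expr()
    ///     .unwrap();
    /// assert!(matches!(expr, Expr::Array(_)));
    /// ```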
2907    pub fn parse_array_expr(&mut self, named: bool) -> Result<Expr, ParserError> {
2908        let exprs = self.parse_comma_separated0(Parser::parse_expr, Token::RBracket)?;
2909        self.expect_token(&Token::RBracket)?;
2910        Ok(Expr::Array(Array { elem: exprs, named }))
2911    }
2912
2913    /// Parse the `ON OVERFLOW` clause for `LISTAGG`.
2914    ///
2915    /// See [`ListAggOnOverflow`]
2916    pub fn parse_listagg_on_overflow(&mut self) -> Result<Option<ListAggOnOverflow>, ParserError> {
2917        if self.parse_keywords(&[Keyword::ON, Keyword::OVERFLOW]) {
2918            if self.parse_keyword(Keyword::ERROR) {
2919                Ok(Some(ListAggOnOverflow::Error))
2920            } else {
2921                self.expect_keyword_is(Keyword::TRUNCATE)?;
2922                let filler = match self.peek_token().token {
2923                    Token::Word(w)
2924                        if w.keyword == Keyword::WITH || w.keyword == Keyword::WITHOUT =>
2925                    {
2926                        None
2927                    }
2928                    Token::SingleQuotedString(_)
2929                    | Token::EscapedStringLiteral(_)
2930                    | Token::UnicodeStringLiteral(_)
2931                    | Token::NationalStringLiteral(_)
2932                    | Token::QuoteDelimitedStringLiteral(_)
2933                    | Token::NationalQuoteDelimitedStringLiteral(_)
2934                    | Token::HexStringLiteral(_) => Some(Box::new(self.parse_expr()?)),
2935                    _ => self.expected(
2936                        "either filler, WITH, or WITHOUT in LISTAGG",
2937                        self.peek_token(),
2938                    )?,
2939                };
2940                let with_count = self.parse_keyword(Keyword::WITH);
2941                if !with_count && !self.parse_keyword(Keyword::WITHOUT) {
2942                    self.expected("either WITH or WITHOUT in LISTAGG", self.peek_token())?;
2943                }
2944                self.expect_keyword_is(Keyword::COUNT)?;
2945                Ok(Some(ListAggOnOverflow::Truncate { filler, with_count }))
2946            }
2947        } else {
2948            Ok(None)
2949        }
2950    }
2951
2952    /// Parse a date/time field for `EXTRACT`, interval qualifiers, and ceil/floor operations.
2953    ///
2954    /// `EXTRACT` supports a wider set of date/time fields than interval qualifiers,
2955    /// so this function may need to be split in two.
2956    ///
2957    /// See [`DateTimeField`]
2958    pub fn parse_date_time_field(&mut self) -> Result<DateTimeField, ParserError> {
2959        let next_token = self.next_token();
2960        match &next_token.token {
2961            Token::Word(w) => match w.keyword {
2962                Keyword::YEAR => Ok(DateTimeField::Year),
2963                Keyword::YEARS => Ok(DateTimeField::Years),
2964                Keyword::MONTH => Ok(DateTimeField::Month),
2965                Keyword::MONTHS => Ok(DateTimeField::Months),
2966                Keyword::WEEK => {
2967                    let week_day = if dialect_of!(self is BigQueryDialect | GenericDialect)
2968                        && self.consume_token(&Token::LParen)
2969                    {
2970                        let week_day = self.parse_identifier()?;
2971                        self.expect_token(&Token::RParen)?;
2972                        Some(week_day)
2973                    } else {
2974                        None
2975                    };
2976                    Ok(DateTimeField::Week(week_day))
2977                }
2978                Keyword::WEEKS => Ok(DateTimeField::Weeks),
2979                Keyword::DAY => Ok(DateTimeField::Day),
2980                Keyword::DAYOFWEEK => Ok(DateTimeField::DayOfWeek),
2981                Keyword::DAYOFYEAR => Ok(DateTimeField::DayOfYear),
2982                Keyword::DAYS => Ok(DateTimeField::Days),
2983                Keyword::DATE => Ok(DateTimeField::Date),
2984                Keyword::DATETIME => Ok(DateTimeField::Datetime),
2985                Keyword::HOUR => Ok(DateTimeField::Hour),
2986                Keyword::HOURS => Ok(DateTimeField::Hours),
2987                Keyword::MINUTE => Ok(DateTimeField::Minute),
2988                Keyword::MINUTES => Ok(DateTimeField::Minutes),
2989                Keyword::SECOND => Ok(DateTimeField::Second),
2990                Keyword::SECONDS => Ok(DateTimeField::Seconds),
2991                Keyword::CENTURY => Ok(DateTimeField::Century),
2992                Keyword::DECADE => Ok(DateTimeField::Decade),
2993                Keyword::DOY => Ok(DateTimeField::Doy),
2994                Keyword::DOW => Ok(DateTimeField::Dow),
2995                Keyword::EPOCH => Ok(DateTimeField::Epoch),
2996                Keyword::ISODOW => Ok(DateTimeField::Isodow),
2997                Keyword::ISOYEAR => Ok(DateTimeField::Isoyear),
2998                Keyword::ISOWEEK => Ok(DateTimeField::IsoWeek),
2999                Keyword::JULIAN => Ok(DateTimeField::Julian),
3000                Keyword::MICROSECOND => Ok(DateTimeField::Microsecond),
3001                Keyword::MICROSECONDS => Ok(DateTimeField::Microseconds),
3002                Keyword::MILLENIUM => Ok(DateTimeField::Millenium),
3003                Keyword::MILLENNIUM => Ok(DateTimeField::Millennium),
3004                Keyword::MILLISECOND => Ok(DateTimeField::Millisecond),
3005                Keyword::MILLISECONDS => Ok(DateTimeField::Milliseconds),
3006                Keyword::NANOSECOND => Ok(DateTimeField::Nanosecond),
3007                Keyword::NANOSECONDS => Ok(DateTimeField::Nanoseconds),
3008                Keyword::QUARTER => Ok(DateTimeField::Quarter),
3009                Keyword::TIME => Ok(DateTimeField::Time),
3010                Keyword::TIMEZONE => Ok(DateTimeField::Timezone),
3011                Keyword::TIMEZONE_ABBR => Ok(DateTimeField::TimezoneAbbr),
3012                Keyword::TIMEZONE_HOUR => Ok(DateTimeField::TimezoneHour),
3013                Keyword::TIMEZONE_MINUTE => Ok(DateTimeField::TimezoneMinute),
3014                Keyword::TIMEZONE_REGION => Ok(DateTimeField::TimezoneRegion),
3015                _ if self.dialect.allow_extract_custom() => {
3016                    self.prev_token();
3017                    let custom = self.parse_identifier()?;
3018                    Ok(DateTimeField::Custom(custom))
3019                }
3020                _ => self.expected("date/time field", next_token),
3021            },
3022            Token::SingleQuotedString(_) if self.dialect.allow_extract_single_quotes() => {
3023                self.prev_token();
3024                let custom = self.parse_identifier()?;
3025                Ok(DateTimeField::Custom(custom))
3026            }
3027            _ => self.expected("date/time field", next_token),
3028        }
3029    }
3030
3031    /// Parse a `NOT` expression.
3032    ///
3033    /// Represented in the AST as `Expr::UnaryOp` with `UnaryOperator::Not`.
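    ///
    /// Minimal hedged example (assumes `GenericDialect`):
    ///
    /// ```
    /// # use sqlparser::ast::{Expr, UnaryOperator};
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// // Illustrative only: plain NOT over an identifier.
    /// let expr = Parser::new(&GenericDialect {})
    ///     .try_with_sql("NOT is_active")
    ///     .unwrap()
    ///     .parse_expr()
    ///     .unwrap();
    /// assert!(matches!(expr, Expr::UnaryOp { op: UnaryOperator::Not, .. }));
    /// ```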
3034    pub fn parse_not(&mut self) -> Result<Expr, ParserError> {
3035        match self.peek_token().token {
3036            Token::Word(w) => match w.keyword {
3037                Keyword::EXISTS => {
3038                    let negated = true;
3039                    let _ = self.parse_keyword(Keyword::EXISTS);
3040                    self.parse_exists_expr(negated)
3041                }
3042                _ => Ok(Expr::UnaryOp {
3043                    op: UnaryOperator::Not,
3044                    expr: Box::new(
3045                        self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?,
3046                    ),
3047                }),
3048            },
3049            _ => Ok(Expr::UnaryOp {
3050                op: UnaryOperator::Not,
3051                expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
3052            }),
3053        }
3054    }
3055
3056    /// Parse expression types that start with a left brace '{'.
3057    /// Examples:
3058    /// ```sql
3059    /// -- Dictionary expr.
3060    /// {'key1': 'value1', 'key2': 'value2'}
3061    ///
3062    /// -- Function call using the ODBC syntax.
3063    /// { fn CONCAT('foo', 'bar') }
3064    /// ```
3065    fn parse_lbrace_expr(&mut self) -> Result<Expr, ParserError> {
3066        let token = self.expect_token(&Token::LBrace)?;
3067
3068        if let Some(fn_expr) = self.maybe_parse_odbc_body()? {
3069            self.expect_token(&Token::RBrace)?;
3070            return Ok(fn_expr);
3071        }
3072
3073        if self.dialect.supports_dictionary_syntax() {
3074            self.prev_token(); // Put back the '{'
3075            return self.parse_dictionary();
3076        }
3077
3078        self.expected("an expression", token)
3079    }
3080
3081    /// Parses fulltext expressions [`sqlparser::ast::Expr::MatchAgainst`]
3082    ///
3083    /// # Errors
3084    /// This method will raise an error if the column list is empty or with invalid identifiers,
3085    /// the match expression is not a literal string, or if the search modifier is not valid.
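    ///
    /// Hedged example sketch (assumes `MySqlDialect`, where the `MATCH ... AGAINST`
    /// syntax originates; illustrative rather than normative):
    ///
    /// ```
    /// # use sqlparser::ast::Expr;
    /// # use sqlparser::dialect::MySqlDialect;
    /// # use sqlparser::parser::Parser;
    /// // Illustrative only: a fulltext search in natural language mode.
    /// let expr = Parser::new(&MySqlDialect {})
    ///     .try_with_sql("MATCH (title, body) AGAINST ('database' IN NATURAL LANGUAGE MODE)")
    ///     .unwrap()
    ///     .parse_expr()
    ///     .unwrap();
    /// assert!(matches!(expr, Expr::MatchAgainst { .. }));
    /// ```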
3086    pub fn parse_match_against(&mut self) -> Result<Expr, ParserError> {
3087        let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
3088
3089        self.expect_keyword_is(Keyword::AGAINST)?;
3090
3091        self.expect_token(&Token::LParen)?;
3092
3093        // MySQL is quite permissive about the value, so we don't attempt to validate it fully at the syntax level.
3094        let match_value = self.parse_value()?.value;
3095
3096        let in_natural_language_mode_keywords = &[
3097            Keyword::IN,
3098            Keyword::NATURAL,
3099            Keyword::LANGUAGE,
3100            Keyword::MODE,
3101        ];
3102
3103        let with_query_expansion_keywords = &[Keyword::WITH, Keyword::QUERY, Keyword::EXPANSION];
3104
3105        let in_boolean_mode_keywords = &[Keyword::IN, Keyword::BOOLEAN, Keyword::MODE];
3106
3107        let opt_search_modifier = if self.parse_keywords(in_natural_language_mode_keywords) {
3108            if self.parse_keywords(with_query_expansion_keywords) {
3109                Some(SearchModifier::InNaturalLanguageModeWithQueryExpansion)
3110            } else {
3111                Some(SearchModifier::InNaturalLanguageMode)
3112            }
3113        } else if self.parse_keywords(in_boolean_mode_keywords) {
3114            Some(SearchModifier::InBooleanMode)
3115        } else if self.parse_keywords(with_query_expansion_keywords) {
3116            Some(SearchModifier::WithQueryExpansion)
3117        } else {
3118            None
3119        };
3120
3121        self.expect_token(&Token::RParen)?;
3122
3123        Ok(Expr::MatchAgainst {
3124            columns,
3125            match_value,
3126            opt_search_modifier,
3127        })
3128    }
3129
3130    /// Parse an `INTERVAL` expression.
3131    ///
3132    /// Some syntactically valid intervals:
3133    ///
3134    /// ```sql
3135    ///   1. INTERVAL '1' DAY
3136    ///   2. INTERVAL '1-1' YEAR TO MONTH
3137    ///   3. INTERVAL '1' SECOND
3138    ///   4. INTERVAL '1:1:1.1' HOUR (5) TO SECOND (5)
3139    ///   5. INTERVAL '1.1' SECOND (2, 2)
3140    ///   6. INTERVAL '1:1' HOUR (5) TO MINUTE (5)
3141    ///   7. (MySQL & BigQuery only): INTERVAL 1 DAY
3142    /// ```
3143    ///
3144    /// Note that we do not currently attempt to parse the quoted value.
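    ///
    /// Illustrative usage sketch (assumes `GenericDialect`; not from the original docs):
    ///
    /// ```
    /// # use sqlparser::ast::Expr;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// // Illustrative only: a quoted value with a DAY qualifier.
    /// let expr = Parser::new(&GenericDialect {})
    ///     .try_with_sql("INTERVAL '1' DAY")
    ///     .unwrap()
    ///     .parse_expr()
    ///     .unwrap();
    /// assert!(matches!(expr, Expr::Interval(_)));
    /// ```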
3145    pub fn parse_interval(&mut self) -> Result<Expr, ParserError> {
3146        // The SQL standard allows an optional sign before the value string, but
3147        // it is not clear if any implementations support that syntax, so we
3148        // don't currently try to parse it. (The sign can instead be included
3149        // inside the value string.)
3150
3151        // to match the different flavours of INTERVAL syntax, we only allow expressions
3152        // if the dialect requires an interval qualifier,
3153        // see https://github.com/sqlparser-rs/sqlparser-rs/pull/1398 for more details
3154        let value = if self.dialect.require_interval_qualifier() {
3155            // parse a whole expression so `INTERVAL 1 + 1 DAY` is valid
3156            self.parse_expr()?
3157        } else {
3158            // parse a prefix expression so `INTERVAL 1 DAY` is valid, but `INTERVAL 1 + 1 DAY` is not
3159            // this also means that `INTERVAL '5 days' > INTERVAL '1 day'` is treated properly
3160            self.parse_prefix()?
3161        };
3162
3163        // Following the string literal is a qualifier which indicates the units
3164        // of the duration specified in the string literal.
3165        //
3166        // Note that PostgreSQL allows omitting the qualifier, so we provide
3167        // this more general implementation.
3168        let leading_field = if self.next_token_is_temporal_unit() {
3169            Some(self.parse_date_time_field()?)
3170        } else if self.dialect.require_interval_qualifier() {
3171            return parser_err!(
3172                "INTERVAL requires a unit after the literal value",
3173                self.peek_token().span.start
3174            );
3175        } else {
3176            None
3177        };
3178
3179        let (leading_precision, last_field, fsec_precision) =
3180            if leading_field == Some(DateTimeField::Second) {
3181                // SQL mandates special syntax for `SECOND TO SECOND` literals.
3182                // Instead of
3183                //     `SECOND [(<leading precision>)] TO SECOND[(<fractional seconds precision>)]`
3184                // one must use the special format:
3185                //     `SECOND [( <leading precision> [ , <fractional seconds precision>] )]`
3186                let last_field = None;
3187                let (leading_precision, fsec_precision) = self.parse_optional_precision_scale()?;
3188                (leading_precision, last_field, fsec_precision)
3189            } else {
3190                let leading_precision = self.parse_optional_precision()?;
3191                if self.parse_keyword(Keyword::TO) {
3192                    let last_field = Some(self.parse_date_time_field()?);
3193                    let fsec_precision = if last_field == Some(DateTimeField::Second) {
3194                        self.parse_optional_precision()?
3195                    } else {
3196                        None
3197                    };
3198                    (leading_precision, last_field, fsec_precision)
3199                } else {
3200                    (leading_precision, None, None)
3201                }
3202            };
3203
3204        Ok(Expr::Interval(Interval {
3205            value: Box::new(value),
3206            leading_field,
3207            leading_precision,
3208            last_field,
3209            fractional_seconds_precision: fsec_precision,
3210        }))
3211    }
3212
3213    /// Peek at the next token and determine if it is a temporal unit
3214    /// like `second`.
3215    pub fn next_token_is_temporal_unit(&mut self) -> bool {
3216        if let Token::Word(word) = self.peek_token().token {
3217            matches!(
3218                word.keyword,
3219                Keyword::YEAR
3220                    | Keyword::YEARS
3221                    | Keyword::MONTH
3222                    | Keyword::MONTHS
3223                    | Keyword::WEEK
3224                    | Keyword::WEEKS
3225                    | Keyword::DAY
3226                    | Keyword::DAYS
3227                    | Keyword::HOUR
3228                    | Keyword::HOURS
3229                    | Keyword::MINUTE
3230                    | Keyword::MINUTES
3231                    | Keyword::SECOND
3232                    | Keyword::SECONDS
3233                    | Keyword::CENTURY
3234                    | Keyword::DECADE
3235                    | Keyword::DOW
3236                    | Keyword::DOY
3237                    | Keyword::EPOCH
3238                    | Keyword::ISODOW
3239                    | Keyword::ISOYEAR
3240                    | Keyword::JULIAN
3241                    | Keyword::MICROSECOND
3242                    | Keyword::MICROSECONDS
3243                    | Keyword::MILLENIUM
3244                    | Keyword::MILLENNIUM
3245                    | Keyword::MILLISECOND
3246                    | Keyword::MILLISECONDS
3247                    | Keyword::NANOSECOND
3248                    | Keyword::NANOSECONDS
3249                    | Keyword::QUARTER
3250                    | Keyword::TIMEZONE
3251                    | Keyword::TIMEZONE_HOUR
3252                    | Keyword::TIMEZONE_MINUTE
3253            )
3254        } else {
3255            false
3256        }
3257    }
3258
3259    /// Syntax
3260    /// ```sql
3261    /// -- typed
3262    /// STRUCT<[field_name] field_type, ...>( expr1 [, ... ])
3263    /// -- typeless
3264    /// STRUCT( expr1 [AS field_name] [, ... ])
3265    /// ```
3266    fn parse_struct_literal(&mut self) -> Result<Expr, ParserError> {
3267        // Parse the field definitions if they exist: `<[field_name] field_type, ...>`
3268        self.prev_token();
3269        let (fields, trailing_bracket) =
3270            self.parse_struct_type_def(Self::parse_struct_field_def)?;
3271        if trailing_bracket.0 {
3272            return parser_err!(
3273                "unmatched > in STRUCT literal",
3274                self.peek_token().span.start
3275            );
3276        }
3277
3278        // Parse the struct values `(expr1 [, ... ])`
3279        self.expect_token(&Token::LParen)?;
3280        let values = self
3281            .parse_comma_separated(|parser| parser.parse_struct_field_expr(!fields.is_empty()))?;
3282        self.expect_token(&Token::RParen)?;
3283
3284        Ok(Expr::Struct { values, fields })
3285    }
3286
3287    /// Parse an expression value for a struct literal
3288    /// Syntax
3289    /// ```sql
3290    /// expr [AS name]
3291    /// ```
3292    ///
3293    /// For BigQuery [1], the `typed_syntax` parameter is set to `true` if the expression
3294    /// is to be parsed as a field expression declared using typed
3295    /// struct syntax [2], and `false` if using typeless struct syntax [3].
3296    ///
3297    /// [1]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#constructing_a_struct
3298    /// [2]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#typed_struct_syntax
3299    /// [3]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#typeless_struct_syntax
3300    fn parse_struct_field_expr(&mut self, typed_syntax: bool) -> Result<Expr, ParserError> {
3301        let expr = self.parse_expr()?;
3302        if self.parse_keyword(Keyword::AS) {
3303            if typed_syntax {
3304                return parser_err!("Typed syntax does not allow AS", {
3305                    self.prev_token();
3306                    self.peek_token().span.start
3307                });
3308            }
3309            let field_name = self.parse_identifier()?;
3310            Ok(Expr::Named {
3311                expr: expr.into(),
3312                name: field_name,
3313            })
3314        } else {
3315            Ok(expr)
3316        }
3317    }
3318
3319    /// Parse a Struct type definition as a sequence of field-value pairs.
3320    /// The syntax of the struct elements differs by dialect, so it is customised
3321    /// by the `elem_parser` argument.
3322    ///
3323    /// Syntax
3324    /// ```sql
3325    /// Hive:
3326    /// STRUCT<field_name: field_type>
3327    ///
3328    /// BigQuery:
3329    /// STRUCT<[field_name] field_type>
3330    /// ```
3331    fn parse_struct_type_def<F>(
3332        &mut self,
3333        mut elem_parser: F,
3334    ) -> Result<(Vec<StructField>, MatchedTrailingBracket), ParserError>
3335    where
3336        F: FnMut(&mut Parser<'a>) -> Result<(StructField, MatchedTrailingBracket), ParserError>,
3337    {
3338        self.expect_keyword_is(Keyword::STRUCT)?;
3339
3340        // Nothing to do if we have no type information.
3341        if Token::Lt != self.peek_token() {
3342            return Ok((Default::default(), false.into()));
3343        }
3344        self.next_token();
3345
3346        let mut field_defs = vec![];
3347        let trailing_bracket = loop {
3348            let (def, trailing_bracket) = elem_parser(self)?;
3349            field_defs.push(def);
3350            // Field definitions end once a trailing `>>` has been matched or no comma follows.
3351            if trailing_bracket.0 || !self.consume_token(&Token::Comma) {
3352                break trailing_bracket;
3353            }
3354        };
3355
3356        Ok((
3357            field_defs,
3358            self.expect_closing_angle_bracket(trailing_bracket)?,
3359        ))
3360    }
3361
3362    /// Duckdb Struct Data Type <https://duckdb.org/docs/sql/data_types/struct.html#retrieving-from-structs>
3363    fn parse_duckdb_struct_type_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3364        self.expect_keyword_is(Keyword::STRUCT)?;
3365        self.expect_token(&Token::LParen)?;
3366        let struct_body = self.parse_comma_separated(|parser| {
3367            let field_name = parser.parse_identifier()?;
3368            let field_type = parser.parse_data_type()?;
3369
3370            Ok(StructField {
3371                field_name: Some(field_name),
3372                field_type,
3373                options: None,
3374            })
3375        });
3376        self.expect_token(&Token::RParen)?;
3377        struct_body
3378    }
3379
3380    /// Parse a field definition in a [struct] or [tuple].
3381    /// Syntax:
3382    ///
3383    /// ```sql
3384    /// [field_name] field_type
3385    /// ```
3386    ///
3387    /// [struct]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#declaring_a_struct_type
3388    /// [tuple]: https://clickhouse.com/docs/en/sql-reference/data-types/tuple
3389    fn parse_struct_field_def(
3390        &mut self,
3391    ) -> Result<(StructField, MatchedTrailingBracket), ParserError> {
3392        // Look beyond the next item to infer whether both field name
3393        // and type are specified.
3394        let is_anonymous_field = !matches!(
3395            (self.peek_nth_token(0).token, self.peek_nth_token(1).token),
3396            (Token::Word(_), Token::Word(_))
3397        );
3398
3399        let field_name = if is_anonymous_field {
3400            None
3401        } else {
3402            Some(self.parse_identifier()?)
3403        };
3404
3405        let (field_type, trailing_bracket) = self.parse_data_type_helper()?;
3406
3407        let options = self.maybe_parse_options(Keyword::OPTIONS)?;
3408        Ok((
3409            StructField {
3410                field_name,
3411                field_type,
3412                options,
3413            },
3414            trailing_bracket,
3415        ))
3416    }
3417
3418    /// DuckDB specific: Parse a Union type definition as a sequence of field-value pairs.
3419    ///
3420    /// Syntax:
3421    ///
3422    /// ```sql
3423    /// UNION(field_name field_type[,...])
3424    /// ```
3425    ///
3426    /// [1]: https://duckdb.org/docs/sql/data_types/union.html
3427    fn parse_union_type_def(&mut self) -> Result<Vec<UnionField>, ParserError> {
3428        self.expect_keyword_is(Keyword::UNION)?;
3429
3430        self.expect_token(&Token::LParen)?;
3431
3432        let fields = self.parse_comma_separated(|p| {
3433            Ok(UnionField {
3434                field_name: p.parse_identifier()?,
3435                field_type: p.parse_data_type()?,
3436            })
3437        })?;
3438
3439        self.expect_token(&Token::RParen)?;
3440
3441        Ok(fields)
3442    }
3443
3444    /// DuckDB and ClickHouse specific: Parse a duckdb [dictionary] or a clickhouse [map] setting
3445    ///
3446    /// Syntax:
3447    ///
3448    /// ```sql
3449    /// {'field_name': expr1[, ... ]}
3450    /// ```
3451    ///
3452    /// [dictionary]: https://duckdb.org/docs/sql/data_types/struct#creating-structs
3453    /// [map]: https://clickhouse.com/docs/operations/settings/settings#additional_table_filters
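    ///
    /// Hedged example (assumes `DuckDbDialect`, which enables the dictionary syntax;
    /// illustrative only):
    ///
    /// ```
    /// # use sqlparser::ast::Expr;
    /// # use sqlparser::dialect::DuckDbDialect;
    /// # use sqlparser::parser::Parser;
    /// // Illustrative only: a two-entry dictionary literal.
    /// let expr = Parser::new(&DuckDbDialect {})
    ///     .try_with_sql("{'a': 1, 'b': 2}")
    ///     .unwrap()
    ///     .parse_expr()
    ///     .unwrap();
    /// assert!(matches!(expr, Expr::Dictionary(_)));
    /// ```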
3454    fn parse_dictionary(&mut self) -> Result<Expr, ParserError> {
3455        self.expect_token(&Token::LBrace)?;
3456
3457        let fields = self.parse_comma_separated0(Self::parse_dictionary_field, Token::RBrace)?;
3458
3459        self.expect_token(&Token::RBrace)?;
3460
3461        Ok(Expr::Dictionary(fields))
3462    }
3463
3464    /// Parse a field for a duckdb [dictionary] or a clickhouse [map] setting
3465    ///
3466    /// Syntax
3467    ///
3468    /// ```sql
3469    /// 'name': expr
3470    /// ```
3471    ///
3472    /// [dictionary]: https://duckdb.org/docs/sql/data_types/struct#creating-structs
3473    /// [map]: https://clickhouse.com/docs/operations/settings/settings#additional_table_filters
3474    fn parse_dictionary_field(&mut self) -> Result<DictionaryField, ParserError> {
3475        let key = self.parse_identifier()?;
3476
3477        self.expect_token(&Token::Colon)?;
3478
3479        let expr = self.parse_expr()?;
3480
3481        Ok(DictionaryField {
3482            key,
3483            value: Box::new(expr),
3484        })
3485    }
3486
3487    /// DuckDB specific: Parse a duckdb [map]
3488    ///
3489    /// Syntax:
3490    ///
3491    /// ```sql
3492    /// Map {key1: value1[, ... ]}
3493    /// ```
3494    ///
3495    /// [map]: https://duckdb.org/docs/sql/data_types/map.html#creating-maps
3496    fn parse_duckdb_map_literal(&mut self) -> Result<Expr, ParserError> {
3497        self.expect_token(&Token::LBrace)?;
3498        let fields = self.parse_comma_separated0(Self::parse_duckdb_map_field, Token::RBrace)?;
3499        self.expect_token(&Token::RBrace)?;
3500        Ok(Expr::Map(Map { entries: fields }))
3501    }
3502
3503    /// Parse a field for a duckdb [map]
3504    ///
3505    /// Syntax
3506    ///
3507    /// ```sql
3508    /// key: value
3509    /// ```
3510    ///
3511    /// [map]: https://duckdb.org/docs/sql/data_types/map.html#creating-maps
3512    fn parse_duckdb_map_field(&mut self) -> Result<MapEntry, ParserError> {
3513        // Stop before `:` so it can act as a key/value separator
3514        let key = self.parse_subexpr(self.dialect.prec_value(Precedence::Colon))?;
3515
3516        self.expect_token(&Token::Colon)?;
3517
3518        let value = self.parse_expr()?;
3519
3520        Ok(MapEntry {
3521            key: Box::new(key),
3522            value: Box::new(value),
3523        })
3524    }
3525
3526    /// Parse clickhouse [map]
3527    ///
3528    /// Syntax
3529    ///
3530    /// ```sql
3531    /// Map(key_data_type, value_data_type)
3532    /// ```
3533    ///
3534    /// [map]: https://clickhouse.com/docs/en/sql-reference/data-types/map
3535    fn parse_click_house_map_def(&mut self) -> Result<(DataType, DataType), ParserError> {
3536        self.expect_keyword_is(Keyword::MAP)?;
3537        self.expect_token(&Token::LParen)?;
3538        let key_data_type = self.parse_data_type()?;
3539        self.expect_token(&Token::Comma)?;
3540        let value_data_type = self.parse_data_type()?;
3541        self.expect_token(&Token::RParen)?;
3542
3543        Ok((key_data_type, value_data_type))
3544    }
3545
3546    /// Parse clickhouse [tuple]
3547    ///
3548    /// Syntax
3549    ///
3550    /// ```sql
3551    /// Tuple([field_name] field_type, ...)
3552    /// ```
3553    ///
3554    /// [tuple]: https://clickhouse.com/docs/en/sql-reference/data-types/tuple
3555    fn parse_click_house_tuple_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3556        self.expect_keyword_is(Keyword::TUPLE)?;
3557        self.expect_token(&Token::LParen)?;
3558        let mut field_defs = vec![];
3559        loop {
3560            let (def, _) = self.parse_struct_field_def()?;
3561            field_defs.push(def);
3562            if !self.consume_token(&Token::Comma) {
3563                break;
3564            }
3565        }
3566        self.expect_token(&Token::RParen)?;
3567
3568        Ok(field_defs)
3569    }
3570
3571    /// For nested types that use the angle bracket syntax, this matches either
3572    /// `>`, `>>` or nothing depending on which variant is expected (specified by the previously
3573    /// matched `trailing_bracket` argument). It returns whether there is a trailing `>`
3574    /// still left to be matched (i.e. whether `>>` was matched).
3575    fn expect_closing_angle_bracket(
3576        &mut self,
3577        trailing_bracket: MatchedTrailingBracket,
3578    ) -> Result<MatchedTrailingBracket, ParserError> {
3579        let trailing_bracket = if !trailing_bracket.0 {
3580            match self.peek_token().token {
3581                Token::Gt => {
3582                    self.next_token();
3583                    false.into()
3584                }
3585                Token::ShiftRight => {
3586                    self.next_token();
3587                    true.into()
3588                }
3589                _ => return self.expected(">", self.peek_token()),
3590            }
3591        } else {
3592            false.into()
3593        };
3594
3595        Ok(trailing_bracket)
3596    }
3597
3598    /// Parse an operator following an expression
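    ///
    /// Illustrative sketch of the precedence-driven result (assumes `GenericDialect`;
    /// `parse_infix` itself is invoked by the expression parser rather than called directly):
    ///
    /// ```
    /// # use sqlparser::ast::{BinaryOperator, Expr};
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// // Illustrative only: `*` binds tighter than `+`, so the root operator is the addition.
    /// let expr = Parser::new(&GenericDialect {})
    ///     .try_with_sql("1 + 2 * 3")
    ///     .unwrap()
    ///     .parse_expr()
    ///     .unwrap();
    /// assert!(matches!(expr, Expr::BinaryOp { op: BinaryOperator::Plus, .. }));
    /// ```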
3599    pub fn parse_infix(&mut self, expr: Expr, precedence: u8) -> Result<Expr, ParserError> {
3600        // allow the dialect to override infix parsing
3601        if let Some(infix) = self.dialect.parse_infix(self, &expr, precedence) {
3602            return infix;
3603        }
3604
3605        let dialect = self.dialect;
3606
3607        self.advance_token();
3608        let tok = self.get_current_token();
3609        debug!("infix: {tok:?}");
3610        let tok_index = self.get_current_index();
3611        let span = tok.span;
3612        let regular_binary_operator = match &tok.token {
3613            Token::Spaceship => Some(BinaryOperator::Spaceship),
3614            Token::DoubleEq => Some(BinaryOperator::Eq),
3615            Token::Assignment => Some(BinaryOperator::Assignment),
3616            Token::Eq => Some(BinaryOperator::Eq),
3617            Token::Neq => Some(BinaryOperator::NotEq),
3618            Token::Gt => Some(BinaryOperator::Gt),
3619            Token::GtEq => Some(BinaryOperator::GtEq),
3620            Token::Lt => Some(BinaryOperator::Lt),
3621            Token::LtEq => Some(BinaryOperator::LtEq),
3622            Token::Plus => Some(BinaryOperator::Plus),
3623            Token::Minus => Some(BinaryOperator::Minus),
3624            Token::Mul => Some(BinaryOperator::Multiply),
3625            Token::Mod => Some(BinaryOperator::Modulo),
3626            Token::StringConcat => Some(BinaryOperator::StringConcat),
3627            Token::Pipe => Some(BinaryOperator::BitwiseOr),
3628            Token::Caret => {
3629                // In PostgreSQL, ^ stands for the exponentiation operation,
3630                // and # stands for XOR. See https://www.postgresql.org/docs/current/functions-math.html
3631                if dialect_is!(dialect is PostgreSqlDialect) {
3632                    Some(BinaryOperator::PGExp)
3633                } else {
3634                    Some(BinaryOperator::BitwiseXor)
3635                }
3636            }
3637            Token::Ampersand => Some(BinaryOperator::BitwiseAnd),
3638            Token::Div => Some(BinaryOperator::Divide),
3639            Token::DuckIntDiv if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
3640                Some(BinaryOperator::DuckIntegerDivide)
3641            }
3642            Token::ShiftLeft if dialect.supports_bitwise_shift_operators() => {
3643                Some(BinaryOperator::PGBitwiseShiftLeft)
3644            }
3645            Token::ShiftRight if dialect.supports_bitwise_shift_operators() => {
3646                Some(BinaryOperator::PGBitwiseShiftRight)
3647            }
3648            Token::Sharp if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3649                Some(BinaryOperator::PGBitwiseXor)
3650            }
3651            Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3652                Some(BinaryOperator::PGOverlap)
3653            }
3654            Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3655                Some(BinaryOperator::PGOverlap)
3656            }
3657            Token::Overlap if dialect.supports_double_ampersand_operator() => {
3658                Some(BinaryOperator::And)
3659            }
3660            Token::CaretAt if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3661                Some(BinaryOperator::PGStartsWith)
3662            }
3663            Token::Tilde => Some(BinaryOperator::PGRegexMatch),
3664            Token::TildeAsterisk => Some(BinaryOperator::PGRegexIMatch),
3665            Token::ExclamationMarkTilde => Some(BinaryOperator::PGRegexNotMatch),
3666            Token::ExclamationMarkTildeAsterisk => Some(BinaryOperator::PGRegexNotIMatch),
3667            Token::DoubleTilde => Some(BinaryOperator::PGLikeMatch),
3668            Token::DoubleTildeAsterisk => Some(BinaryOperator::PGILikeMatch),
3669            Token::ExclamationMarkDoubleTilde => Some(BinaryOperator::PGNotLikeMatch),
3670            Token::ExclamationMarkDoubleTildeAsterisk => Some(BinaryOperator::PGNotILikeMatch),
3671            Token::Arrow => Some(BinaryOperator::Arrow),
3672            Token::LongArrow => Some(BinaryOperator::LongArrow),
3673            Token::HashArrow => Some(BinaryOperator::HashArrow),
3674            Token::HashLongArrow => Some(BinaryOperator::HashLongArrow),
3675            Token::AtArrow => Some(BinaryOperator::AtArrow),
3676            Token::ArrowAt => Some(BinaryOperator::ArrowAt),
3677            Token::HashMinus => Some(BinaryOperator::HashMinus),
3678            Token::AtQuestion => Some(BinaryOperator::AtQuestion),
3679            Token::AtAt => Some(BinaryOperator::AtAt),
3680            Token::Question => Some(BinaryOperator::Question),
3681            Token::QuestionAnd => Some(BinaryOperator::QuestionAnd),
3682            Token::QuestionPipe => Some(BinaryOperator::QuestionPipe),
3683            Token::CustomBinaryOperator(s) => Some(BinaryOperator::Custom(s.clone())),
3684            Token::DoubleSharp if self.dialect.supports_geometric_types() => {
3685                Some(BinaryOperator::DoubleHash)
3686            }
3687
3688            Token::AmpersandLeftAngleBracket if self.dialect.supports_geometric_types() => {
3689                Some(BinaryOperator::AndLt)
3690            }
3691            Token::AmpersandRightAngleBracket if self.dialect.supports_geometric_types() => {
3692                Some(BinaryOperator::AndGt)
3693            }
3694            Token::QuestionMarkDash if self.dialect.supports_geometric_types() => {
3695                Some(BinaryOperator::QuestionDash)
3696            }
3697            Token::AmpersandLeftAngleBracketVerticalBar
3698                if self.dialect.supports_geometric_types() =>
3699            {
3700                Some(BinaryOperator::AndLtPipe)
3701            }
3702            Token::VerticalBarAmpersandRightAngleBracket
3703                if self.dialect.supports_geometric_types() =>
3704            {
3705                Some(BinaryOperator::PipeAndGt)
3706            }
3707            Token::TwoWayArrow if self.dialect.supports_geometric_types() => {
3708                Some(BinaryOperator::LtDashGt)
3709            }
3710            Token::LeftAngleBracketCaret if self.dialect.supports_geometric_types() => {
3711                Some(BinaryOperator::LtCaret)
3712            }
3713            Token::RightAngleBracketCaret if self.dialect.supports_geometric_types() => {
3714                Some(BinaryOperator::GtCaret)
3715            }
3716            Token::QuestionMarkSharp if self.dialect.supports_geometric_types() => {
3717                Some(BinaryOperator::QuestionHash)
3718            }
3719            Token::QuestionMarkDoubleVerticalBar if self.dialect.supports_geometric_types() => {
3720                Some(BinaryOperator::QuestionDoublePipe)
3721            }
3722            Token::QuestionMarkDashVerticalBar if self.dialect.supports_geometric_types() => {
3723                Some(BinaryOperator::QuestionDashPipe)
3724            }
3725            Token::TildeEqual if self.dialect.supports_geometric_types() => {
3726                Some(BinaryOperator::TildeEq)
3727            }
3728            Token::ShiftLeftVerticalBar if self.dialect.supports_geometric_types() => {
3729                Some(BinaryOperator::LtLtPipe)
3730            }
3731            Token::VerticalBarShiftRight if self.dialect.supports_geometric_types() => {
3732                Some(BinaryOperator::PipeGtGt)
3733            }
3734            Token::AtSign if self.dialect.supports_geometric_types() => Some(BinaryOperator::At),
3735
3736            Token::Word(w) => match w.keyword {
3737                Keyword::AND => Some(BinaryOperator::And),
3738                Keyword::OR => Some(BinaryOperator::Or),
3739                Keyword::XOR => Some(BinaryOperator::Xor),
3740                Keyword::OVERLAPS => Some(BinaryOperator::Overlaps),
3741                Keyword::OPERATOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3742                    self.expect_token(&Token::LParen)?;
3743                    // There are special rules for operator names in
3744                    // PostgreSQL, so we cannot use `parse_object_name`
3745                    // or similar here.
3746                    // See https://www.postgresql.org/docs/current/sql-createoperator.html
3747                    let mut idents = vec![];
3748                    loop {
3749                        self.advance_token();
3750                        idents.push(self.get_current_token().to_string());
3751                        if !self.consume_token(&Token::Period) {
3752                            break;
3753                        }
3754                    }
3755                    self.expect_token(&Token::RParen)?;
3756                    Some(BinaryOperator::PGCustomBinaryOperator(idents))
3757                }
3758                _ => None,
3759            },
3760            _ => None,
3761        };
3762
3763        let tok = self.token_at(tok_index);
3764        if let Some(op) = regular_binary_operator {
3765            if let Some(keyword) =
3766                self.parse_one_of_keywords(&[Keyword::ANY, Keyword::ALL, Keyword::SOME])
3767            {
3768                self.expect_token(&Token::LParen)?;
3769                let right = if self.peek_sub_query() {
3770                    // We have a subquery ahead (SELECT/WITH ...), so we need to rewind and
3771                    // use the parenthesis for parsing the subquery as an expression.
3772                    self.prev_token(); // LParen
3773                    self.parse_subexpr(precedence)?
3774                } else {
3775                    // Non-subquery expression
3776                    let right = self.parse_subexpr(precedence)?;
3777                    self.expect_token(&Token::RParen)?;
3778                    right
3779                };
3780
3781                if !matches!(
3782                    op,
3783                    BinaryOperator::Gt
3784                        | BinaryOperator::Lt
3785                        | BinaryOperator::GtEq
3786                        | BinaryOperator::LtEq
3787                        | BinaryOperator::Eq
3788                        | BinaryOperator::NotEq
3789                        | BinaryOperator::PGRegexMatch
3790                        | BinaryOperator::PGRegexIMatch
3791                        | BinaryOperator::PGRegexNotMatch
3792                        | BinaryOperator::PGRegexNotIMatch
3793                        | BinaryOperator::PGLikeMatch
3794                        | BinaryOperator::PGILikeMatch
3795                        | BinaryOperator::PGNotLikeMatch
3796                        | BinaryOperator::PGNotILikeMatch
3797                ) {
3798                    return parser_err!(
3799                        format!(
3800                        "Expected one of [=, >, <, =>, =<, !=, ~, ~*, !~, !~*, ~~, ~~*, !~~, !~~*] as comparison operator, found: {op}"
3801                    ),
3802                        span.start
3803                    );
3804                };
3805
3806                Ok(match keyword {
3807                    Keyword::ALL => Expr::AllOp {
3808                        left: Box::new(expr),
3809                        compare_op: op,
3810                        right: Box::new(right),
3811                    },
3812                    Keyword::ANY | Keyword::SOME => Expr::AnyOp {
3813                        left: Box::new(expr),
3814                        compare_op: op,
3815                        right: Box::new(right),
3816                        is_some: keyword == Keyword::SOME,
3817                    },
3818                    unexpected_keyword => return Err(ParserError::ParserError(
3819                        format!("Internal parser error: expected any of {{ALL, ANY, SOME}}, got {unexpected_keyword:?}"),
3820                    )),
3821                })
3822            } else {
3823                Ok(Expr::BinaryOp {
3824                    left: Box::new(expr),
3825                    op,
3826                    right: Box::new(self.parse_subexpr(precedence)?),
3827                })
3828            }
3829        } else if let Token::Word(w) = &tok.token {
3830            match w.keyword {
3831                Keyword::IS => {
3832                    if self.parse_keyword(Keyword::NULL) {
3833                        Ok(Expr::IsNull(Box::new(expr)))
3834                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
3835                        Ok(Expr::IsNotNull(Box::new(expr)))
3836                    } else if self.parse_keywords(&[Keyword::TRUE]) {
3837                        Ok(Expr::IsTrue(Box::new(expr)))
3838                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::TRUE]) {
3839                        Ok(Expr::IsNotTrue(Box::new(expr)))
3840                    } else if self.parse_keywords(&[Keyword::FALSE]) {
3841                        Ok(Expr::IsFalse(Box::new(expr)))
3842                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::FALSE]) {
3843                        Ok(Expr::IsNotFalse(Box::new(expr)))
3844                    } else if self.parse_keywords(&[Keyword::UNKNOWN]) {
3845                        Ok(Expr::IsUnknown(Box::new(expr)))
3846                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::UNKNOWN]) {
3847                        Ok(Expr::IsNotUnknown(Box::new(expr)))
3848                    } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::FROM]) {
3849                        let expr2 = self.parse_expr()?;
3850                        Ok(Expr::IsDistinctFrom(Box::new(expr), Box::new(expr2)))
3851                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::DISTINCT, Keyword::FROM])
3852                    {
3853                        let expr2 = self.parse_expr()?;
3854                        Ok(Expr::IsNotDistinctFrom(Box::new(expr), Box::new(expr2)))
3855                    } else if let Ok(is_normalized) = self.parse_unicode_is_normalized(expr) {
3856                        Ok(is_normalized)
3857                    } else {
3858                        self.expected(
3859                            "[NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS",
3860                            self.peek_token(),
3861                        )
3862                    }
3863                }
3864                Keyword::AT => {
3865                    self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
3866                    Ok(Expr::AtTimeZone {
3867                        timestamp: Box::new(expr),
3868                        time_zone: Box::new(self.parse_subexpr(precedence)?),
3869                    })
3870                }
3871                Keyword::NOT
3872                | Keyword::IN
3873                | Keyword::BETWEEN
3874                | Keyword::LIKE
3875                | Keyword::ILIKE
3876                | Keyword::SIMILAR
3877                | Keyword::REGEXP
3878                | Keyword::RLIKE => {
3879                    self.prev_token();
3880                    let negated = self.parse_keyword(Keyword::NOT);
3881                    let regexp = self.parse_keyword(Keyword::REGEXP);
3882                    let rlike = self.parse_keyword(Keyword::RLIKE);
3883                    let null = if !self.in_column_definition_state() {
3884                        self.parse_keyword(Keyword::NULL)
3885                    } else {
3886                        false
3887                    };
3888                    if regexp || rlike {
3889                        Ok(Expr::RLike {
3890                            negated,
3891                            expr: Box::new(expr),
3892                            pattern: Box::new(
3893                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3894                            ),
3895                            regexp,
3896                        })
3897                    } else if negated && null {
3898                        Ok(Expr::IsNotNull(Box::new(expr)))
3899                    } else if self.parse_keyword(Keyword::IN) {
3900                        self.parse_in(expr, negated)
3901                    } else if self.parse_keyword(Keyword::BETWEEN) {
3902                        self.parse_between(expr, negated)
3903                    } else if self.parse_keyword(Keyword::LIKE) {
3904                        Ok(Expr::Like {
3905                            negated,
3906                            any: self.parse_keyword(Keyword::ANY),
3907                            expr: Box::new(expr),
3908                            pattern: Box::new(
3909                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3910                            ),
3911                            escape_char: self.parse_escape_char()?,
3912                        })
3913                    } else if self.parse_keyword(Keyword::ILIKE) {
3914                        Ok(Expr::ILike {
3915                            negated,
3916                            any: self.parse_keyword(Keyword::ANY),
3917                            expr: Box::new(expr),
3918                            pattern: Box::new(
3919                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3920                            ),
3921                            escape_char: self.parse_escape_char()?,
3922                        })
3923                    } else if self.parse_keywords(&[Keyword::SIMILAR, Keyword::TO]) {
3924                        Ok(Expr::SimilarTo {
3925                            negated,
3926                            expr: Box::new(expr),
3927                            pattern: Box::new(
3928                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3929                            ),
3930                            escape_char: self.parse_escape_char()?,
3931                        })
3932                    } else {
3933                        self.expected("IN or BETWEEN after NOT", self.peek_token())
3934                    }
3935                }
3936                Keyword::NOTNULL if dialect.supports_notnull_operator() => {
3937                    Ok(Expr::IsNotNull(Box::new(expr)))
3938                }
3939                Keyword::MEMBER => {
3940                    if self.parse_keyword(Keyword::OF) {
3941                        self.expect_token(&Token::LParen)?;
3942                        let array = self.parse_expr()?;
3943                        self.expect_token(&Token::RParen)?;
3944                        Ok(Expr::MemberOf(MemberOf {
3945                            value: Box::new(expr),
3946                            array: Box::new(array),
3947                        }))
3948                    } else {
3949                        self.expected("OF after MEMBER", self.peek_token())
3950                    }
3951                }
3952                // Can only happen if `get_next_precedence` got out of sync with this function
3953                _ => parser_err!(
3954                    format!("No infix parser for token {:?}", tok.token),
3955                    tok.span.start
3956                ),
3957            }
3958        } else if Token::DoubleColon == *tok {
3959            Ok(Expr::Cast {
3960                kind: CastKind::DoubleColon,
3961                expr: Box::new(expr),
3962                data_type: self.parse_data_type()?,
3963                array: false,
3964                format: None,
3965            })
3966        } else if Token::ExclamationMark == *tok && self.dialect.supports_factorial_operator() {
3967            Ok(Expr::UnaryOp {
3968                op: UnaryOperator::PGPostfixFactorial,
3969                expr: Box::new(expr),
3970            })
3971        } else if Token::LBracket == *tok && self.dialect.supports_partiql()
3972            || (Token::Colon == *tok)
3973        {
3974            self.prev_token();
3975            self.parse_json_access(expr)
3976        } else {
3977            // Can only happen if `get_next_precedence` got out of sync with this function
3978            parser_err!(
3979                format!("No infix parser for token {:?}", tok.token),
3980                tok.span.start
3981            )
3982        }
3983    }
3984
3985    /// Parse the `ESCAPE CHAR` portion of `LIKE`, `ILIKE`, and `SIMILAR TO`
3986    pub fn parse_escape_char(&mut self) -> Result<Option<Value>, ParserError> {
3987        if self.parse_keyword(Keyword::ESCAPE) {
3988            Ok(Some(self.parse_value()?.into()))
3989        } else {
3990            Ok(None)
3991        }
3992    }
3993
3994    /// Parses an array subscript like
3995    /// * `[:]`
3996    /// * `[l]`
3997    /// * `[l:]`
3998    /// * `[:u]`
3999    /// * `[l:u]`
4000    /// * `[l:u:s]`
4001    ///
4002    /// Parser is right after `[`
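    ///
    /// Example (an illustrative sketch; assumes a dialect such as PostgreSQL
    /// that accepts slice subscripts):
    ///
    /// ```rust
    /// # use sqlparser::dialect::PostgreSqlDialect;
    /// # use sqlparser::parser::Parser;
    /// // `arr[2:4]` is a slice with a lower and an upper bound.
    /// let stmts = Parser::parse_sql(&PostgreSqlDialect {}, "SELECT arr[2:4] FROM t");
    /// assert!(stmts.is_ok());
    /// ```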
4003    fn parse_subscript_inner(&mut self) -> Result<Subscript, ParserError> {
4004        // at either `<lower>:(rest)` or `:(rest)]`
4005        let lower_bound = if self.consume_token(&Token::Colon) {
4006            None
4007        } else {
4008            // parse expr until we hit a colon (or any token with lower precedence)
4009            Some(self.parse_subexpr(self.dialect.prec_value(Precedence::Colon))?)
4010        };
4011
4012        // check for end
4013        if self.consume_token(&Token::RBracket) {
4014            if let Some(lower_bound) = lower_bound {
4015                return Ok(Subscript::Index { index: lower_bound });
4016            };
4017            return Ok(Subscript::Slice {
4018                lower_bound,
4019                upper_bound: None,
4020                stride: None,
4021            });
4022        }
4023
4024        // consume the `:`
4025        if lower_bound.is_some() {
4026            self.expect_token(&Token::Colon)?;
4027        }
4028
4029        // we are now at either `]` or `<upper>(rest)]`
4030        let upper_bound = if self.consume_token(&Token::RBracket) {
4031            return Ok(Subscript::Slice {
4032                lower_bound,
4033                upper_bound: None,
4034                stride: None,
4035            });
4036        } else {
4037            // parse expr until we hit a colon (or any token with lower precedence)
4038            Some(self.parse_subexpr(self.dialect.prec_value(Precedence::Colon))?)
4039        };
4040
4041        // check for end
4042        if self.consume_token(&Token::RBracket) {
4043            return Ok(Subscript::Slice {
4044                lower_bound,
4045                upper_bound,
4046                stride: None,
4047            });
4048        }
4049
4050        // we are now at `:]` or `:stride]`
4051        self.expect_token(&Token::Colon)?;
4052        let stride = if self.consume_token(&Token::RBracket) {
4053            None
4054        } else {
4055            Some(self.parse_expr()?)
4056        };
4057
4058        if stride.is_some() {
4059            self.expect_token(&Token::RBracket)?;
4060        }
4061
4062        Ok(Subscript::Slice {
4063            lower_bound,
4064            upper_bound,
4065            stride,
4066        })
4067    }
4068
4069    /// Parse a multi-dimensional array access like `[1:3][1][1]`
4070    pub fn parse_multi_dim_subscript(
4071        &mut self,
4072        chain: &mut Vec<AccessExpr>,
4073    ) -> Result<(), ParserError> {
4074        while self.consume_token(&Token::LBracket) {
4075            self.parse_subscript(chain)?;
4076        }
4077        Ok(())
4078    }
4079
4080    /// Parses an array subscript like `[1:3]`
4081    ///
4082    /// Parser is right after `[`
4083    fn parse_subscript(&mut self, chain: &mut Vec<AccessExpr>) -> Result<(), ParserError> {
4084        let subscript = self.parse_subscript_inner()?;
4085        chain.push(AccessExpr::Subscript(subscript));
4086        Ok(())
4087    }
4088
4089    fn parse_json_path_object_key(&mut self) -> Result<JsonPathElem, ParserError> {
4090        let token = self.next_token();
4091        match token.token {
4092            Token::Word(Word {
4093                value,
4094                // path segments in SF dot notation can be unquoted or double-quoted
4095                quote_style: quote_style @ (Some('"') | None),
4096                // some experimentation suggests that snowflake permits
4097                // any keyword here unquoted.
4098                keyword: _,
4099            }) => Ok(JsonPathElem::Dot {
4100                key: value,
4101                quoted: quote_style.is_some(),
4102            }),
4103
4104            // This token should never be generated on snowflake or generic
4105            // dialects, but we handle it just in case this is used on future
4106            // dialects.
4107            Token::DoubleQuotedString(key) => Ok(JsonPathElem::Dot { key, quoted: true }),
4108
4109            _ => self.expected("variant object key name", token),
4110        }
4111    }
4112
4113    fn parse_json_access(&mut self, expr: Expr) -> Result<Expr, ParserError> {
4114        let path = self.parse_json_path()?;
4115        Ok(Expr::JsonAccess {
4116            value: Box::new(expr),
4117            path,
4118        })
4119    }
4120
4121    fn parse_json_path(&mut self) -> Result<JsonPath, ParserError> {
4122        let mut path = Vec::new();
4123        loop {
4124            match self.next_token().token {
4125                Token::Colon if path.is_empty() => {
4126                    path.push(self.parse_json_path_object_key()?);
4127                }
4128                Token::Period if !path.is_empty() => {
4129                    path.push(self.parse_json_path_object_key()?);
4130                }
4131                Token::LBracket => {
4132                    let key = self.parse_expr()?;
4133                    self.expect_token(&Token::RBracket)?;
4134
4135                    path.push(JsonPathElem::Bracket { key });
4136                }
4137                _ => {
4138                    self.prev_token();
4139                    break;
4140                }
4141            };
4142        }
4143
4144        debug_assert!(!path.is_empty());
4145        Ok(JsonPath { path })
4146    }
4147
4148    /// Parses the parens following the `[ NOT ] IN` operator.
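    ///
    /// Example (illustrative; assumes the `GenericDialect`):
    ///
    /// ```rust
    /// # use sqlparser::ast::Expr;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// // The `IN (...)` list form produces an `Expr::InList`.
    /// let mut parser = Parser::new(&dialect).try_with_sql("x IN (1, 2, 3)").unwrap();
    /// let expr = parser.parse_expr().unwrap();
    /// assert!(matches!(expr, Expr::InList { negated: false, .. }));
    /// ```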
4149    pub fn parse_in(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
4150        // BigQuery allows `IN UNNEST(array_expression)`
4151        // https://cloud.google.com/bigquery/docs/reference/standard-sql/operators#in_operators
4152        if self.parse_keyword(Keyword::UNNEST) {
4153            self.expect_token(&Token::LParen)?;
4154            let array_expr = self.parse_expr()?;
4155            self.expect_token(&Token::RParen)?;
4156            return Ok(Expr::InUnnest {
4157                expr: Box::new(expr),
4158                array_expr: Box::new(array_expr),
4159                negated,
4160            });
4161        }
4162        self.expect_token(&Token::LParen)?;
4163        let in_op = match self.maybe_parse(|p| p.parse_query())? {
4164            Some(subquery) => Expr::InSubquery {
4165                expr: Box::new(expr),
4166                subquery,
4167                negated,
4168            },
4169            None => Expr::InList {
4170                expr: Box::new(expr),
4171                list: if self.dialect.supports_in_empty_list() {
4172                    self.parse_comma_separated0(Parser::parse_expr, Token::RParen)?
4173                } else {
4174                    self.parse_comma_separated(Parser::parse_expr)?
4175                },
4176                negated,
4177            },
4178        };
4179        self.expect_token(&Token::RParen)?;
4180        Ok(in_op)
4181    }
4182
4183    /// Parses `BETWEEN <low> AND <high>`, assuming the `BETWEEN` keyword was already consumed.
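    ///
    /// Example (illustrative; assumes the `GenericDialect`):
    ///
    /// ```rust
    /// # use sqlparser::ast::Expr;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("5 BETWEEN 1 AND 10").unwrap();
    /// // `parse_expr` consumes `BETWEEN` as an infix operator and delegates here.
    /// let expr = parser.parse_expr().unwrap();
    /// assert!(matches!(expr, Expr::Between { negated: false, .. }));
    /// ```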
4184    pub fn parse_between(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
4185        // Stop parsing subexpressions for <low> and <high> on tokens with
4186        // precedence lower than that of `BETWEEN`, such as `AND`, `IS`, etc.
4187        let low = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
4188        self.expect_keyword_is(Keyword::AND)?;
4189        let high = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
4190        Ok(Expr::Between {
4191            expr: Box::new(expr),
4192            negated,
4193            low: Box::new(low),
4194            high: Box::new(high),
4195        })
4196    }
4197
4198    /// Parse a PostgreSQL casting style which is in the form of `expr::datatype`.
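    ///
    /// Example (a minimal sketch; the expression is parsed first and the data
    /// type that follows is then consumed by this method, as if the `::` had
    /// already been consumed):
    ///
    /// ```rust
    /// # use sqlparser::ast::Expr;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("1 BIGINT").unwrap();
    /// let expr = parser.parse_expr().unwrap();
    /// let cast = parser.parse_pg_cast(expr).unwrap();
    /// assert!(matches!(cast, Expr::Cast { .. }));
    /// ```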
4199    pub fn parse_pg_cast(&mut self, expr: Expr) -> Result<Expr, ParserError> {
4200        Ok(Expr::Cast {
4201            kind: CastKind::DoubleColon,
4202            expr: Box::new(expr),
4203            data_type: self.parse_data_type()?,
4204            array: false,
4205            format: None,
4206        })
4207    }
4208
4209    /// Get the precedence of the next token
4210    pub fn get_next_precedence(&self) -> Result<u8, ParserError> {
4211        self.dialect.get_next_precedence_default(self)
4212    }
4213
4214    /// Return the token at the given index, or EOF if the index is beyond
4215    /// the length of the current set of tokens.
4216    pub fn token_at(&self, index: usize) -> &TokenWithSpan {
4217        self.tokens.get(index).unwrap_or(&EOF_TOKEN)
4218    }
4219
4220    /// Return the first non-whitespace token that has not yet been processed
4221    /// or Token::EOF
4222    ///
4223    /// See [`Self::peek_token_ref`] to avoid the copy.
4224    pub fn peek_token(&self) -> TokenWithSpan {
4225        self.peek_nth_token(0)
4226    }
4227
4228    /// Return a reference to the first non-whitespace token that has not yet
4229    /// been processed or Token::EOF
4230    pub fn peek_token_ref(&self) -> &TokenWithSpan {
4231        self.peek_nth_token_ref(0)
4232    }
4233
4234    /// Returns the `N` next non-whitespace tokens that have not yet been
4235    /// processed.
4236    ///
4237    /// Example:
4238    /// ```rust
4239    /// # use sqlparser::dialect::GenericDialect;
4240    /// # use sqlparser::parser::Parser;
4241    /// # use sqlparser::keywords::Keyword;
4242    /// # use sqlparser::tokenizer::{Token, Word};
4243    /// let dialect = GenericDialect {};
4244    /// let mut parser = Parser::new(&dialect).try_with_sql("ORDER BY foo, bar").unwrap();
4245    ///
4246    /// // Note that Rust infers the number of tokens to peek based on the
4247    /// // length of the slice pattern!
4248    /// assert!(matches!(
4249    ///     parser.peek_tokens(),
4250    ///     [
4251    ///         Token::Word(Word { keyword: Keyword::ORDER, .. }),
4252    ///         Token::Word(Word { keyword: Keyword::BY, .. }),
4253    ///     ]
4254    /// ));
4255    /// ```
4256    pub fn peek_tokens<const N: usize>(&self) -> [Token; N] {
4257        self.peek_tokens_with_location()
4258            .map(|with_loc| with_loc.token)
4259    }
4260
4261    /// Returns the `N` next non-whitespace tokens with locations that have not
4262    /// yet been processed.
4263    ///
4264    /// See [`Self::peek_token`] for an example.
4265    pub fn peek_tokens_with_location<const N: usize>(&self) -> [TokenWithSpan; N] {
4266        let mut index = self.index;
4267        core::array::from_fn(|_| loop {
4268            let token = self.tokens.get(index);
4269            index += 1;
4270            if let Some(TokenWithSpan {
4271                token: Token::Whitespace(_),
4272                span: _,
4273            }) = token
4274            {
4275                continue;
4276            }
4277            break token.cloned().unwrap_or(TokenWithSpan {
4278                token: Token::EOF,
4279                span: Span::empty(),
4280            });
4281        })
4282    }
4283
4284    /// Returns references to the `N` next non-whitespace tokens
4285    /// that have not yet been processed.
4286    ///
4287    /// See [`Self::peek_tokens`] for an example.
4288    pub fn peek_tokens_ref<const N: usize>(&self) -> [&TokenWithSpan; N] {
4289        let mut index = self.index;
4290        core::array::from_fn(|_| loop {
4291            let token = self.tokens.get(index);
4292            index += 1;
4293            if let Some(TokenWithSpan {
4294                token: Token::Whitespace(_),
4295                span: _,
4296            }) = token
4297            {
4298                continue;
4299            }
4300            break token.unwrap_or(&EOF_TOKEN);
4301        })
4302    }
4303
4304    /// Return nth non-whitespace token that has not yet been processed
4305    pub fn peek_nth_token(&self, n: usize) -> TokenWithSpan {
4306        self.peek_nth_token_ref(n).clone()
4307    }
4308
4309    /// Return nth non-whitespace token that has not yet been processed
4310    pub fn peek_nth_token_ref(&self, mut n: usize) -> &TokenWithSpan {
4311        let mut index = self.index;
4312        loop {
4313            index += 1;
4314            match self.tokens.get(index - 1) {
4315                Some(TokenWithSpan {
4316                    token: Token::Whitespace(_),
4317                    span: _,
4318                }) => continue,
4319                non_whitespace => {
4320                    if n == 0 {
4321                        return non_whitespace.unwrap_or(&EOF_TOKEN);
4322                    }
4323                    n -= 1;
4324                }
4325            }
4326        }
4327    }
4328
4329    /// Return the first token, possibly whitespace, that has not yet been processed
4330    /// (or a `Token::EOF` token if the end of the input has been reached).
4331    pub fn peek_token_no_skip(&self) -> TokenWithSpan {
4332        self.peek_nth_token_no_skip(0)
4333    }
4334
4335    /// Return nth token, possibly whitespace, that has not yet been processed.
4336    pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithSpan {
4337        self.tokens
4338            .get(self.index + n)
4339            .cloned()
4340            .unwrap_or(TokenWithSpan {
4341                token: Token::EOF,
4342                span: Span::empty(),
4343            })
4344    }
4345
4346    /// Return nth token, possibly whitespace, that has not yet been processed.
4347    fn peek_nth_token_no_skip_ref(&self, n: usize) -> &TokenWithSpan {
4348        self.tokens.get(self.index + n).unwrap_or(&EOF_TOKEN)
4349    }
4350
4351    /// Return true if the next tokens exactly match `expected`
4352    ///
4353    /// Does not advance the current token.
4354    fn peek_keywords(&mut self, expected: &[Keyword]) -> bool {
4355        let index = self.index;
4356        let matched = self.parse_keywords(expected);
4357        self.index = index;
4358        matched
4359    }
4360
4361    /// Advances to the next non-whitespace token and returns a copy.
4362    ///
4363    /// Please use [`Self::advance_token`] and [`Self::get_current_token`] to
4364    /// avoid the copy.
4365    pub fn next_token(&mut self) -> TokenWithSpan {
4366        self.advance_token();
4367        self.get_current_token().clone()
4368    }
4369
4370    /// Returns the index of the current token
4371    ///
4372    /// This can be used with APIs that expect an index, such as
4373    /// [`Self::token_at`]
4374    pub fn get_current_index(&self) -> usize {
4375        self.index.saturating_sub(1)
4376    }
4377
4378    /// Return the next unprocessed token, possibly whitespace.
4379    pub fn next_token_no_skip(&mut self) -> Option<&TokenWithSpan> {
4380        self.index += 1;
4381        self.tokens.get(self.index - 1)
4382    }
4383
4384    /// Advances the current token to the next non-whitespace token
4385    ///
4386    /// See [`Self::get_current_token`] to get the current token after advancing
4387    pub fn advance_token(&mut self) {
4388        loop {
4389            self.index += 1;
4390            match self.tokens.get(self.index - 1) {
4391                Some(TokenWithSpan {
4392                    token: Token::Whitespace(_),
4393                    span: _,
4394                }) => continue,
4395                _ => break,
4396            }
4397        }
4398    }
4399
4400    /// Returns a reference to the current token
4401    ///
4402    /// Does not advance the current token.
4403    pub fn get_current_token(&self) -> &TokenWithSpan {
4404        self.token_at(self.index.saturating_sub(1))
4405    }
4406
4407    /// Returns a reference to the previous token
4408    ///
4409    /// Does not advance the current token.
4410    pub fn get_previous_token(&self) -> &TokenWithSpan {
4411        self.token_at(self.index.saturating_sub(2))
4412    }
4413
4414    /// Returns a reference to the next token
4415    ///
4416    /// Does not advance the current token.
4417    pub fn get_next_token(&self) -> &TokenWithSpan {
4418        self.token_at(self.index)
4419    }
4420
4421    /// Seek back to the last non-whitespace token.
4422    ///
4423    /// Must be called after `next_token()`, otherwise it might panic. It is OK
4424    /// to call it after `next_token()` has indicated an EOF.
4425    ///
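    ///
    /// Example (illustrative round-trip with [`Self::next_token`]):
    ///
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("SELECT 1").unwrap();
    /// let first = parser.next_token();
    /// parser.prev_token(); // step back to before `SELECT`
    /// assert_eq!(parser.next_token().token, first.token);
    /// ```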
4426    // TODO rename to backup_token and deprecate prev_token?
4427    pub fn prev_token(&mut self) {
4428        loop {
4429            assert!(self.index > 0);
4430            self.index -= 1;
4431            if let Some(TokenWithSpan {
4432                token: Token::Whitespace(_),
4433                span: _,
4434            }) = self.tokens.get(self.index)
4435            {
4436                continue;
4437            }
4438            return;
4439        }
4440    }
4441
4442    /// Report `found` was encountered instead of `expected`
4443    pub fn expected<T>(&self, expected: &str, found: TokenWithSpan) -> Result<T, ParserError> {
4444        parser_err!(
4445            format!("Expected: {expected}, found: {found}"),
4446            found.span.start
4447        )
4448    }
4449
4450    /// Report `found` was encountered instead of `expected`
4451    pub fn expected_ref<T>(&self, expected: &str, found: &TokenWithSpan) -> Result<T, ParserError> {
4452        parser_err!(
4453            format!("Expected: {expected}, found: {found}"),
4454            found.span.start
4455        )
4456    }
4457
4458    /// Report that the token at `index` was found instead of `expected`.
4459    pub fn expected_at<T>(&self, expected: &str, index: usize) -> Result<T, ParserError> {
4460        let found = self.tokens.get(index).unwrap_or(&EOF_TOKEN);
4461        parser_err!(
4462            format!("Expected: {expected}, found: {found}"),
4463            found.span.start
4464        )
4465    }
4466
4467    /// If the current token is the `expected` keyword, consume it and return
4468    /// true. Otherwise, no tokens are consumed and false is returned.
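    ///
    /// Example (illustrative; assumes the `GenericDialect`):
    ///
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::keywords::Keyword;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("ORDER BY foo").unwrap();
    /// assert!(parser.parse_keyword(Keyword::ORDER)); // consumes `ORDER`
    /// assert!(!parser.parse_keyword(Keyword::FROM)); // `BY` is next, nothing consumed
    /// assert!(parser.parse_keyword(Keyword::BY));
    /// ```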
4469    #[must_use]
4470    pub fn parse_keyword(&mut self, expected: Keyword) -> bool {
4471        if self.peek_keyword(expected) {
4472            self.advance_token();
4473            true
4474        } else {
4475            false
4476        }
4477    }
4478
4479    #[must_use]
4480    /// Check if the current token is the expected keyword without consuming it.
4481    ///
4482    /// Returns true if the current token matches the expected keyword.
4483    pub fn peek_keyword(&self, expected: Keyword) -> bool {
4484        matches!(&self.peek_token_ref().token, Token::Word(w) if expected == w.keyword)
4485    }
4486
4487    /// If the current token is the `expected` keyword followed by the
4488    /// specified tokens, consume them all and return true.
4489    /// Otherwise, no tokens are consumed and false is returned.
4490    ///
4491    /// Note that if `tokens` is long, this function is not efficient: it
4492    /// re-scans the stream with `peek_nth_token` for each element of
4493    /// `tokens` on every call.
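    ///
    /// Example (illustrative; assumes the `GenericDialect`):
    ///
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::keywords::Keyword;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::tokenizer::Token;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("ARRAY[1, 2]").unwrap();
    /// // Matches only when `ARRAY` is immediately followed by `[`; both are consumed.
    /// assert!(parser.parse_keyword_with_tokens(Keyword::ARRAY, &[Token::LBracket]));
    /// ```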
4494    pub fn parse_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
4495        self.keyword_with_tokens(expected, tokens, true)
4496    }
4497
4498    /// Peeks to see if the current token is the `expected` keyword followed by specified tokens
4499    /// without consuming them.
4500    ///
4501    /// See [Self::parse_keyword_with_tokens] for details.
4502    pub(crate) fn peek_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
4503        self.keyword_with_tokens(expected, tokens, false)
4504    }
4505
4506    fn keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token], consume: bool) -> bool {
4507        match &self.peek_token_ref().token {
4508            Token::Word(w) if expected == w.keyword => {
4509                for (idx, token) in tokens.iter().enumerate() {
4510                    if self.peek_nth_token_ref(idx + 1).token != *token {
4511                        return false;
4512                    }
4513                }
4514
4515                if consume {
4516                    for _ in 0..(tokens.len() + 1) {
4517                        self.advance_token();
4518                    }
4519                }
4520
4521                true
4522            }
4523            _ => false,
4524        }
4525    }
4526
4527    /// If the current and subsequent tokens exactly match the `keywords`
4528    /// sequence, consume them and return true. Otherwise, no tokens are
4529    /// consumed and false is returned.
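    ///
    /// Example (illustrative; assumes the `GenericDialect`):
    ///
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::keywords::Keyword;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("GROUP BY x").unwrap();
    /// // The full sequence must match; otherwise the position is left unchanged.
    /// assert!(!parser.parse_keywords(&[Keyword::ORDER, Keyword::BY]));
    /// assert!(parser.parse_keywords(&[Keyword::GROUP, Keyword::BY]));
    /// ```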
4530    #[must_use]
4531    pub fn parse_keywords(&mut self, keywords: &[Keyword]) -> bool {
4532        self.parse_keywords_indexed(keywords).is_some()
4533    }
4534
4535    /// Just like [Self::parse_keywords], but - upon success - returns the
4536    /// token index of the first keyword.
4537    #[must_use]
4538    fn parse_keywords_indexed(&mut self, keywords: &[Keyword]) -> Option<usize> {
4539        let start_index = self.index;
4540        let mut first_keyword_index = None;
4541        for &keyword in keywords {
4542            if !self.parse_keyword(keyword) {
4543                self.index = start_index;
4544                return None;
4545            }
4546            if first_keyword_index.is_none() {
4547                first_keyword_index = Some(self.index.saturating_sub(1));
4548            }
4549        }
4550        first_keyword_index
4551    }
4552
4553    /// If the current token is one of the given `keywords`, returns the keyword
4554    /// that matches, without consuming the token. Otherwise, returns [`None`].
4555    #[must_use]
4556    pub fn peek_one_of_keywords(&self, keywords: &[Keyword]) -> Option<Keyword> {
4557        for keyword in keywords {
4558            if self.peek_keyword(*keyword) {
4559                return Some(*keyword);
4560            }
4561        }
4562        None
4563    }
4564
4565    /// If the current token is one of the given `keywords`, consume the token
4566    /// and return the keyword that matches. Otherwise, no tokens are consumed
4567    /// and [`None`] is returned.
4568    #[must_use]
4569    pub fn parse_one_of_keywords(&mut self, keywords: &[Keyword]) -> Option<Keyword> {
4570        match &self.peek_token_ref().token {
4571            Token::Word(w) => {
4572                keywords
4573                    .iter()
4574                    .find(|keyword| **keyword == w.keyword)
4575                    .map(|keyword| {
4576                        self.advance_token();
4577                        *keyword
4578                    })
4579            }
4580            _ => None,
4581        }
4582    }
4583
4584    /// If the current token is one of the expected keywords, consume the token
4585    /// and return the keyword that matches. Otherwise, return an error.
4586    pub fn expect_one_of_keywords(&mut self, keywords: &[Keyword]) -> Result<Keyword, ParserError> {
4587        if let Some(keyword) = self.parse_one_of_keywords(keywords) {
4588            Ok(keyword)
4589        } else {
4590            let keywords: Vec<String> = keywords.iter().map(|x| format!("{x:?}")).collect();
4591            self.expected_ref(
4592                &format!("one of {}", keywords.join(" or ")),
4593                self.peek_token_ref(),
4594            )
4595        }
4596    }
4597
4598    /// If the current token is the `expected` keyword, consume the token.
4599    /// Otherwise, return an error.
4600    ///
4601    // todo deprecate in favor of expect_keyword_is
4602    pub fn expect_keyword(&mut self, expected: Keyword) -> Result<TokenWithSpan, ParserError> {
4603        if self.parse_keyword(expected) {
4604            Ok(self.get_current_token().clone())
4605        } else {
4606            self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4607        }
4608    }
4609
4610    /// If the current token is the `expected` keyword, consume the token.
4611    /// Otherwise, return an error.
4612    ///
4613    /// This differs from expect_keyword only in that the matched keyword
4614    /// token is not returned.
4615    pub fn expect_keyword_is(&mut self, expected: Keyword) -> Result<(), ParserError> {
4616        if self.parse_keyword(expected) {
4617            Ok(())
4618        } else {
4619            self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4620        }
4621    }
4622
4623    /// If the current and subsequent tokens exactly match the `keywords`
4624    /// sequence, consume them and return `Ok(())`. Otherwise, return an error.
4625    pub fn expect_keywords(&mut self, expected: &[Keyword]) -> Result<(), ParserError> {
4626        for &kw in expected {
4627            self.expect_keyword_is(kw)?;
4628        }
4629        Ok(())
4630    }
4631
4632    /// Consume the next token if it matches the expected token, otherwise return false
4633    ///
4634    /// See [Self::advance_token] to consume the token unconditionally
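    ///
    /// Example (illustrative; assumes the `GenericDialect`):
    ///
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::tokenizer::Token;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("( 1 )").unwrap();
    /// assert!(parser.consume_token(&Token::LParen));
    /// // The next token is the number literal, so no comma is consumed.
    /// assert!(!parser.consume_token(&Token::Comma));
    /// ```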
4635    #[must_use]
4636    pub fn consume_token(&mut self, expected: &Token) -> bool {
4637        if self.peek_token_ref() == expected {
4638            self.advance_token();
4639            true
4640        } else {
4641            false
4642        }
4643    }
4644
4645    /// If the current and subsequent tokens exactly match the `tokens`
4646    /// sequence, consume them and return true. Otherwise, no tokens are
4647    /// consumed and false is returned.
4648    #[must_use]
4649    pub fn consume_tokens(&mut self, tokens: &[Token]) -> bool {
4650        let index = self.index;
4651        for token in tokens {
4652            if !self.consume_token(token) {
4653                self.index = index;
4654                return false;
4655            }
4656        }
4657        true
4658    }
4659
4660    /// Bail out if the current token is not the expected token, or consume it if it is
4661    pub fn expect_token(&mut self, expected: &Token) -> Result<TokenWithSpan, ParserError> {
4662        if self.peek_token_ref() == expected {
4663            Ok(self.next_token())
4664        } else {
4665            self.expected_ref(&expected.to_string(), self.peek_token_ref())
4666        }
4667    }
4668
4669    fn parse<T: FromStr>(s: String, loc: Location) -> Result<T, ParserError>
4670    where
4671        <T as FromStr>::Err: Display,
4672    {
4673        s.parse::<T>().map_err(|e| {
4674            ParserError::ParserError(format!(
4675                "Could not parse '{s}' as {}: {e}{loc}",
4676                core::any::type_name::<T>()
4677            ))
4678        })
4679    }
4680
4681    /// Parse a comma-separated list of 1+ SelectItem
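    ///
    /// Example (an illustrative sketch; assumes the BigQuery dialect, which
    /// permits a trailing comma in the projection):
    ///
    /// ```rust
    /// # use sqlparser::dialect::BigQueryDialect;
    /// # use sqlparser::parser::Parser;
    /// let stmts = Parser::parse_sql(&BigQueryDialect {}, "SELECT 1, 2, FROM t");
    /// assert!(stmts.is_ok());
    /// ```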
4682    pub fn parse_projection(&mut self) -> Result<Vec<SelectItem>, ParserError> {
4683        // BigQuery and Snowflake allow trailing commas, but only in project lists
4684        // e.g. `SELECT 1, 2, FROM t`
4685        // https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#trailing_commas
4686        // https://docs.snowflake.com/en/release-notes/2024/8_11#select-supports-trailing-commas
4687
4688        let trailing_commas =
4689            self.options.trailing_commas | self.dialect.supports_projection_trailing_commas();
4690
4691        self.parse_comma_separated_with_trailing_commas(
4692            |p| p.parse_select_item(),
4693            trailing_commas,
4694            Self::is_reserved_for_column_alias,
4695        )
4696    }
4697
4698    /// Parse a list of actions for `GRANT` statements.
4699    pub fn parse_actions_list(&mut self) -> Result<Vec<Action>, ParserError> {
4700        let mut values = vec![];
4701        loop {
4702            values.push(self.parse_grant_permission()?);
4703            if !self.consume_token(&Token::Comma) {
4704                break;
4705            } else if self.options.trailing_commas {
4706                match self.peek_token().token {
4707                    Token::Word(kw) if kw.keyword == Keyword::ON => {
4708                        break;
4709                    }
4710                    Token::RParen
4711                    | Token::SemiColon
4712                    | Token::EOF
4713                    | Token::RBracket
4714                    | Token::RBrace => break,
4715                    _ => continue,
4716                }
4717            }
4718        }
4719        Ok(values)
4720    }
4721
4722    /// Parse a list of [TableWithJoins]
4723    fn parse_table_with_joins(&mut self) -> Result<Vec<TableWithJoins>, ParserError> {
4724        let trailing_commas = self.dialect.supports_from_trailing_commas();
4725
4726        self.parse_comma_separated_with_trailing_commas(
4727            Parser::parse_table_and_joins,
4728            trailing_commas,
4729            |kw, parser| !parser.dialect.is_table_factor(kw, parser),
4730        )
4731    }
4732
4733    /// Parse the comma of a comma-separated syntax element.
4734    /// `R` is a predicate that should return true if the next
4735    /// keyword is a reserved keyword.
4736    /// Allows for control over trailing commas.
4737    ///
4738    /// Returns true if the end of the list has been reached (i.e. there is no next element).
4739    fn is_parse_comma_separated_end_with_trailing_commas<R>(
4740        &mut self,
4741        trailing_commas: bool,
4742        is_reserved_keyword: &R,
4743    ) -> bool
4744    where
4745        R: Fn(&Keyword, &mut Parser) -> bool,
4746    {
4747        if !self.consume_token(&Token::Comma) {
4748            true
4749        } else if trailing_commas {
4750            let token = self.next_token().token;
4751            let is_end = match token {
4752                Token::Word(ref kw) if is_reserved_keyword(&kw.keyword, self) => true,
4753                Token::RParen | Token::SemiColon | Token::EOF | Token::RBracket | Token::RBrace => {
4754                    true
4755                }
4756                _ => false,
4757            };
4758            self.prev_token();
4759
4760            is_end
4761        } else {
4762            false
4763        }
4764    }
4765
4766    /// Parse the comma of a comma-separated syntax element.
4767    /// Returns true if the end of the list has been reached (i.e. there is no next element).
4768    fn is_parse_comma_separated_end(&mut self) -> bool {
4769        self.is_parse_comma_separated_end_with_trailing_commas(
4770            self.options.trailing_commas,
4771            &Self::is_reserved_for_column_alias,
4772        )
4773    }
4774
4775    /// Parse a comma-separated list of 1+ items accepted by `F`
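    ///
    /// Example (illustrative; assumes the `GenericDialect`):
    ///
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("1, 2, 3").unwrap();
    /// let exprs = parser.parse_comma_separated(Parser::parse_expr).unwrap();
    /// assert_eq!(exprs.len(), 3);
    /// ```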
4776    pub fn parse_comma_separated<T, F>(&mut self, f: F) -> Result<Vec<T>, ParserError>
4777    where
4778        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4779    {
4780        self.parse_comma_separated_with_trailing_commas(
4781            f,
4782            self.options.trailing_commas,
4783            Self::is_reserved_for_column_alias,
4784        )
4785    }
4786
4787    /// Parse a comma-separated list of 1+ items accepted by `F`.
4788    /// `R` is a predicate that should return true if the next
4789    /// keyword is a reserved keyword.
4790    /// Allows for control over trailing commas.
4791    fn parse_comma_separated_with_trailing_commas<T, F, R>(
4792        &mut self,
4793        mut f: F,
4794        trailing_commas: bool,
4795        is_reserved_keyword: R,
4796    ) -> Result<Vec<T>, ParserError>
4797    where
4798        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4799        R: Fn(&Keyword, &mut Parser) -> bool,
4800    {
4801        let mut values = vec![];
4802        loop {
4803            values.push(f(self)?);
4804            if self.is_parse_comma_separated_end_with_trailing_commas(
4805                trailing_commas,
4806                &is_reserved_keyword,
4807            ) {
4808                break;
4809            }
4810        }
4811        Ok(values)
4812    }
4813
4814    /// Parse a period-separated list of 1+ items accepted by `F`
4815    fn parse_period_separated<T, F>(&mut self, mut f: F) -> Result<Vec<T>, ParserError>
4816    where
4817        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4818    {
4819        let mut values = vec![];
4820        loop {
4821            values.push(f(self)?);
4822            if !self.consume_token(&Token::Period) {
4823                break;
4824            }
4825        }
4826        Ok(values)
4827    }
4828
4829    /// Parse a keyword-separated list of 1+ items accepted by `F`
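    ///
    /// Example (illustrative; assumes the `GenericDialect`):
    ///
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::keywords::Keyword;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("foo AND bar AND baz").unwrap();
    /// let parts = parser
    ///     .parse_keyword_separated(Keyword::AND, |p| p.parse_identifier())
    ///     .unwrap();
    /// assert_eq!(parts.len(), 3);
    /// ```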
4830    pub fn parse_keyword_separated<T, F>(
4831        &mut self,
4832        keyword: Keyword,
4833        mut f: F,
4834    ) -> Result<Vec<T>, ParserError>
4835    where
4836        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4837    {
4838        let mut values = vec![];
4839        loop {
4840            values.push(f(self)?);
4841            if !self.parse_keyword(keyword) {
4842                break;
4843            }
4844        }
4845        Ok(values)
4846    }
4847
4848    /// Parse an expression enclosed in parentheses.
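    ///
    /// Example (illustrative; assumes the `GenericDialect`):
    ///
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::tokenizer::Token;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("(42)").unwrap();
    /// let expr = parser.parse_parenthesized(|p| p.parse_expr()).unwrap();
    /// // Both parentheses were consumed around the parsed expression.
    /// assert_eq!(parser.peek_token().token, Token::EOF);
    /// # let _ = expr;
    /// ```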
4849    pub fn parse_parenthesized<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
4850    where
4851        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4852    {
4853        self.expect_token(&Token::LParen)?;
4854        let res = f(self)?;
4855        self.expect_token(&Token::RParen)?;
4856        Ok(res)
4857    }
4858
4859    /// Parse a comma-separated list of 0+ items accepted by `F`
4860    /// * `end_token` - expected end token for the closure (e.g. [Token::RParen], [Token::RBrace] ...)
4861    pub fn parse_comma_separated0<T, F>(
4862        &mut self,
4863        f: F,
4864        end_token: Token,
4865    ) -> Result<Vec<T>, ParserError>
4866    where
4867        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4868    {
4869        if self.peek_token().token == end_token {
4870            return Ok(vec![]);
4871        }
4872
4873        if self.options.trailing_commas && self.peek_tokens() == [Token::Comma, end_token] {
4874            let _ = self.consume_token(&Token::Comma);
4875            return Ok(vec![]);
4876        }
4877
4878        self.parse_comma_separated(f)
4879    }
4880
4881    /// Parses 0 or more statements, each followed by a semicolon.
4882    /// If the next token is any of `terminal_keywords` then no more
4883    /// statements will be parsed.
4884    pub(crate) fn parse_statement_list(
4885        &mut self,
4886        terminal_keywords: &[Keyword],
4887    ) -> Result<Vec<Statement>, ParserError> {
4888        let mut values = vec![];
4889        loop {
4890            match &self.peek_nth_token_ref(0).token {
4891                Token::EOF => break,
4892                Token::Word(w) => {
4893                    if w.quote_style.is_none() && terminal_keywords.contains(&w.keyword) {
4894                        break;
4895                    }
4896                }
4897                _ => {}
4898            }
4899
4900            values.push(self.parse_statement()?);
4901            self.expect_token(&Token::SemiColon)?;
4902        }
4903        Ok(values)
4904    }
4905
4906    /// Default implementation of a predicate that returns true if
4907    /// the specified keyword is reserved and cannot be used as a column alias.
4908    /// See [Dialect::is_column_alias].
4909    fn is_reserved_for_column_alias(kw: &Keyword, parser: &mut Parser) -> bool {
4910        !parser.dialect.is_column_alias(kw, parser)
4911    }
4912
4913    /// Run a parser method `f`, reverting back to the current position if unsuccessful.
4914    /// Returns `ParserError::RecursionLimitExceeded` if `f` returns a `RecursionLimitExceeded`.
4915    /// Returns `Ok(None)` if `f` returns any other error.
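    ///
    /// Example (illustrative; assumes the `GenericDialect`):
    ///
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql(") AS x").unwrap();
    /// // `)` cannot start an expression, so the closure fails, the position is
    /// // restored, and the error is swallowed as `Ok(None)`.
    /// assert!(parser.maybe_parse(|p| p.parse_expr()).unwrap().is_none());
    /// ```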
4916    pub fn maybe_parse<T, F>(&mut self, f: F) -> Result<Option<T>, ParserError>
4917    where
4918        F: FnMut(&mut Parser) -> Result<T, ParserError>,
4919    {
4920        match self.try_parse(f) {
4921            Ok(t) => Ok(Some(t)),
4922            Err(ParserError::RecursionLimitExceeded) => Err(ParserError::RecursionLimitExceeded),
4923            _ => Ok(None),
4924        }
4925    }
4926
4927    /// Run a parser method `f`, reverting back to the current position if unsuccessful.
4928    pub fn try_parse<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
4929    where
4930        F: FnMut(&mut Parser) -> Result<T, ParserError>,
4931    {
4932        let index = self.index;
4933        match f(self) {
4934            Ok(t) => Ok(t),
4935            Err(e) => {
4936                // Restore the original position before propagating the error
4937                self.index = index;
4938                Err(e)
4939            }
4940        }
4941    }
4942
4943    /// Parse either `ALL`, `DISTINCT` or `DISTINCT ON (...)`. Returns [`None`] if neither is
4944    /// present, and results in a [`ParserError`] if both `ALL` and `DISTINCT` are found.
4945    pub fn parse_all_or_distinct(&mut self) -> Result<Option<Distinct>, ParserError> {
4946        let loc = self.peek_token().span.start;
4947        let distinct = match self.parse_one_of_keywords(&[Keyword::ALL, Keyword::DISTINCT]) {
4948            Some(Keyword::ALL) => {
4949                if self.peek_keyword(Keyword::DISTINCT) {
4950                    return parser_err!("Cannot specify ALL then DISTINCT".to_string(), loc);
4951                }
4952                Some(Distinct::All)
4953            }
4954            Some(Keyword::DISTINCT) => {
4955                if self.peek_keyword(Keyword::ALL) {
4956                    return parser_err!("Cannot specify DISTINCT then ALL".to_string(), loc);
4957                }
4958                Some(Distinct::Distinct)
4959            }
4960            None => return Ok(None),
4961            _ => return parser_err!("ALL or DISTINCT", loc),
4962        };
4963
4964        let Some(Distinct::Distinct) = distinct else {
4965            return Ok(distinct);
4966        };
4967        if !self.parse_keyword(Keyword::ON) {
4968            return Ok(Some(Distinct::Distinct));
4969        }
4970
4971        self.expect_token(&Token::LParen)?;
4972        let col_names = if self.consume_token(&Token::RParen) {
4973            self.prev_token();
4974            Vec::new()
4975        } else {
4976            self.parse_comma_separated(Parser::parse_expr)?
4977        };
4978        self.expect_token(&Token::RParen)?;
4979        Ok(Some(Distinct::On(col_names)))
4980    }
4981
4982    /// Parse a SQL CREATE statement
4983    pub fn parse_create(&mut self) -> Result<Statement, ParserError> {
4984        let or_replace = self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]);
4985        let or_alter = self.parse_keywords(&[Keyword::OR, Keyword::ALTER]);
4986        let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
4987        let global = self.parse_one_of_keywords(&[Keyword::GLOBAL]).is_some();
4988        let transient = self.parse_one_of_keywords(&[Keyword::TRANSIENT]).is_some();
4989        let global: Option<bool> = if global {
4990            Some(true)
4991        } else if local {
4992            Some(false)
4993        } else {
4994            None
4995        };
4996        let temporary = self
4997            .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
4998            .is_some();
4999        let persistent = dialect_of!(self is DuckDbDialect)
5000            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
5001        let create_view_params = self.parse_create_view_params()?;
5002        if self.parse_keyword(Keyword::TABLE) {
5003            self.parse_create_table(or_replace, temporary, global, transient)
5004                .map(Into::into)
5005        } else if self.peek_keyword(Keyword::MATERIALIZED)
5006            || self.peek_keyword(Keyword::VIEW)
5007            || self.peek_keywords(&[Keyword::SECURE, Keyword::MATERIALIZED, Keyword::VIEW])
5008            || self.peek_keywords(&[Keyword::SECURE, Keyword::VIEW])
5009        {
5010            self.parse_create_view(or_alter, or_replace, temporary, create_view_params)
5011                .map(Into::into)
5012        } else if self.parse_keyword(Keyword::POLICY) {
5013            self.parse_create_policy().map(Into::into)
5014        } else if self.parse_keyword(Keyword::EXTERNAL) {
5015            self.parse_create_external_table(or_replace).map(Into::into)
5016        } else if self.parse_keyword(Keyword::FUNCTION) {
5017            self.parse_create_function(or_alter, or_replace, temporary)
5018        } else if self.parse_keyword(Keyword::DOMAIN) {
5019            self.parse_create_domain().map(Into::into)
5020        } else if self.parse_keyword(Keyword::TRIGGER) {
5021            self.parse_create_trigger(temporary, or_alter, or_replace, false)
5022                .map(Into::into)
5023        } else if self.parse_keywords(&[Keyword::CONSTRAINT, Keyword::TRIGGER]) {
5024            self.parse_create_trigger(temporary, or_alter, or_replace, true)
5025                .map(Into::into)
5026        } else if self.parse_keyword(Keyword::MACRO) {
5027            self.parse_create_macro(or_replace, temporary)
5028        } else if self.parse_keyword(Keyword::SECRET) {
5029            self.parse_create_secret(or_replace, temporary, persistent)
5030        } else if self.parse_keyword(Keyword::USER) {
5031            self.parse_create_user(or_replace).map(Into::into)
5032        } else if or_replace {
5033            self.expected(
5034                "[EXTERNAL] TABLE or [MATERIALIZED] VIEW or FUNCTION after CREATE OR REPLACE",
5035                self.peek_token(),
5036            )
5037        } else if self.parse_keyword(Keyword::EXTENSION) {
5038            self.parse_create_extension().map(Into::into)
5039        } else if self.parse_keyword(Keyword::INDEX) {
5040            self.parse_create_index(false).map(Into::into)
5041        } else if self.parse_keywords(&[Keyword::UNIQUE, Keyword::INDEX]) {
5042            self.parse_create_index(true).map(Into::into)
5043        } else if self.parse_keyword(Keyword::VIRTUAL) {
5044            self.parse_create_virtual_table()
5045        } else if self.parse_keyword(Keyword::SCHEMA) {
5046            self.parse_create_schema()
5047        } else if self.parse_keyword(Keyword::DATABASE) {
5048            self.parse_create_database()
5049        } else if self.parse_keyword(Keyword::ROLE) {
5050            self.parse_create_role().map(Into::into)
5051        } else if self.parse_keyword(Keyword::SEQUENCE) {
5052            self.parse_create_sequence(temporary)
5053        } else if self.parse_keyword(Keyword::TYPE) {
5054            self.parse_create_type()
5055        } else if self.parse_keyword(Keyword::PROCEDURE) {
5056            self.parse_create_procedure(or_alter)
5057        } else if self.parse_keyword(Keyword::CONNECTOR) {
5058            self.parse_create_connector().map(Into::into)
5059        } else if self.parse_keyword(Keyword::OPERATOR) {
5060            // Check if this is CREATE OPERATOR FAMILY or CREATE OPERATOR CLASS
5061            if self.parse_keyword(Keyword::FAMILY) {
5062                self.parse_create_operator_family().map(Into::into)
5063            } else if self.parse_keyword(Keyword::CLASS) {
5064                self.parse_create_operator_class().map(Into::into)
5065            } else {
5066                self.parse_create_operator().map(Into::into)
5067            }
5068        } else if self.parse_keyword(Keyword::SERVER) {
5069            self.parse_pg_create_server()
5070        } else {
5071            self.expected("an object type after CREATE", self.peek_token())
5072        }
5073    }
5074
5075    fn parse_create_user(&mut self, or_replace: bool) -> Result<CreateUser, ParserError> {
5076        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5077        let name = self.parse_identifier()?;
5078        let options = self
5079            .parse_key_value_options(false, &[Keyword::WITH, Keyword::TAG])?
5080            .options;
5081        let with_tags = self.parse_keyword(Keyword::WITH);
5082        let tags = if self.parse_keyword(Keyword::TAG) {
5083            self.parse_key_value_options(true, &[])?.options
5084        } else {
5085            vec![]
5086        };
5087        Ok(CreateUser {
5088            or_replace,
5089            if_not_exists,
5090            name,
5091            options: KeyValueOptions {
5092                options,
5093                delimiter: KeyValueOptionsDelimiter::Space,
5094            },
5095            with_tags,
5096            tags: KeyValueOptions {
5097                options: tags,
5098                delimiter: KeyValueOptionsDelimiter::Comma,
5099            },
5100        })
5101    }
5102
5103    /// See [DuckDB Docs](https://duckdb.org/docs/sql/statements/create_secret.html) for more details.
5104    pub fn parse_create_secret(
5105        &mut self,
5106        or_replace: bool,
5107        temporary: bool,
5108        persistent: bool,
5109    ) -> Result<Statement, ParserError> {
5110        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5111
5112        let mut storage_specifier = None;
5113        let mut name = None;
5114        if self.peek_token() != Token::LParen {
5115            if self.parse_keyword(Keyword::IN) {
5116                storage_specifier = self.parse_identifier().ok()
5117            } else {
5118                name = self.parse_identifier().ok();
5119            }
5120
5121            // Storage specifier may follow the name
5122            if storage_specifier.is_none()
5123                && self.peek_token() != Token::LParen
5124                && self.parse_keyword(Keyword::IN)
5125            {
5126                storage_specifier = self.parse_identifier().ok();
5127            }
5128        }
5129
5130        self.expect_token(&Token::LParen)?;
5131        self.expect_keyword_is(Keyword::TYPE)?;
5132        let secret_type = self.parse_identifier()?;
5133
5134        let mut options = Vec::new();
5135        if self.consume_token(&Token::Comma) {
5136            options.append(&mut self.parse_comma_separated(|p| {
5137                let key = p.parse_identifier()?;
5138                let value = p.parse_identifier()?;
5139                Ok(SecretOption { key, value })
5140            })?);
5141        }
5142        self.expect_token(&Token::RParen)?;
5143
5144        let temp = match (temporary, persistent) {
5145            (true, false) => Some(true),
5146            (false, true) => Some(false),
5147            (false, false) => None,
5148            _ => self.expected("TEMPORARY or PERSISTENT", self.peek_token())?,
5149        };
5150
5151        Ok(Statement::CreateSecret {
5152            or_replace,
5153            temporary: temp,
5154            if_not_exists,
5155            name,
5156            storage_specifier,
5157            secret_type,
5158            options,
5159        })
5160    }
5161
5162    /// Parse a CACHE TABLE statement
5163    pub fn parse_cache_table(&mut self) -> Result<Statement, ParserError> {
5164        let (mut table_flag, mut options, mut has_as, mut query) = (None, vec![], false, None);
5165        if self.parse_keyword(Keyword::TABLE) {
5166            let table_name = self.parse_object_name(false)?;
5167            if self.peek_token().token != Token::EOF {
5168                if let Token::Word(word) = self.peek_token().token {
5169                    if word.keyword == Keyword::OPTIONS {
5170                        options = self.parse_options(Keyword::OPTIONS)?
5171                    }
5172                };
5173
5174                if self.peek_token().token != Token::EOF {
5175                    let (a, q) = self.parse_as_query()?;
5176                    has_as = a;
5177                    query = Some(q);
5178                }
5179
5180                Ok(Statement::Cache {
5181                    table_flag,
5182                    table_name,
5183                    has_as,
5184                    options,
5185                    query,
5186                })
5187            } else {
5188                Ok(Statement::Cache {
5189                    table_flag,
5190                    table_name,
5191                    has_as,
5192                    options,
5193                    query,
5194                })
5195            }
5196        } else {
5197            table_flag = Some(self.parse_object_name(false)?);
5198            if self.parse_keyword(Keyword::TABLE) {
5199                let table_name = self.parse_object_name(false)?;
5200                if self.peek_token() != Token::EOF {
5201                    if let Token::Word(word) = self.peek_token().token {
5202                        if word.keyword == Keyword::OPTIONS {
5203                            options = self.parse_options(Keyword::OPTIONS)?
5204                        }
5205                    };
5206
5207                    if self.peek_token() != Token::EOF {
5208                        let (a, q) = self.parse_as_query()?;
5209                        has_as = a;
5210                        query = Some(q);
5211                    }
5212
5213                    Ok(Statement::Cache {
5214                        table_flag,
5215                        table_name,
5216                        has_as,
5217                        options,
5218                        query,
5219                    })
5220                } else {
5221                    Ok(Statement::Cache {
5222                        table_flag,
5223                        table_name,
5224                        has_as,
5225                        options,
5226                        query,
5227                    })
5228                }
5229            } else {
5230                if self.peek_token() == Token::EOF {
5231                    self.prev_token();
5232                }
5233                self.expected("a `TABLE` keyword", self.peek_token())
5234            }
5235        }
5236    }
5237
5238    /// Parse an optional 'AS' before a query, such as `WITH XXX AS SELECT XXX` or `CACHE TABLE AS SELECT XXX`
5239    pub fn parse_as_query(&mut self) -> Result<(bool, Box<Query>), ParserError> {
5240        match self.peek_token().token {
5241            Token::Word(word) => match word.keyword {
5242                Keyword::AS => {
5243                    self.next_token();
5244                    Ok((true, self.parse_query()?))
5245                }
5246                _ => Ok((false, self.parse_query()?)),
5247            },
5248            _ => self.expected("a QUERY statement", self.peek_token()),
5249        }
5250    }
5251
5252    /// Parse an UNCACHE TABLE statement
5253    pub fn parse_uncache_table(&mut self) -> Result<Statement, ParserError> {
5254        self.expect_keyword_is(Keyword::TABLE)?;
5255        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5256        let table_name = self.parse_object_name(false)?;
5257        Ok(Statement::UNCache {
5258            table_name,
5259            if_exists,
5260        })
5261    }
5262
5263    /// SQLite-specific `CREATE VIRTUAL TABLE`
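    ///
    /// Example (an illustrative sketch; assumes the SQLite dialect):
    ///
    /// ```rust
    /// # use sqlparser::dialect::SQLiteDialect;
    /// # use sqlparser::parser::Parser;
    /// let sql = "CREATE VIRTUAL TABLE t USING fts5(a, b)";
    /// assert!(Parser::parse_sql(&SQLiteDialect {}, sql).is_ok());
    /// ```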
5264    pub fn parse_create_virtual_table(&mut self) -> Result<Statement, ParserError> {
5265        self.expect_keyword_is(Keyword::TABLE)?;
5266        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5267        let table_name = self.parse_object_name(false)?;
5268        self.expect_keyword_is(Keyword::USING)?;
5269        let module_name = self.parse_identifier()?;
5270        // SQLite docs note that module "arguments syntax is sufficiently
5271        // general that the arguments can be made to appear as column
5272        // definitions in a traditional CREATE TABLE statement", but
5273        // we don't implement that.
5274        let module_args = self.parse_parenthesized_column_list(Optional, false)?;
5275        Ok(Statement::CreateVirtualTable {
5276            name: table_name,
5277            if_not_exists,
5278            module_name,
5279            module_args,
5280        })
5281    }
5282
5283    /// Parse a `CREATE SCHEMA` statement.
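    ///
    /// Example (an illustrative sketch; assumes the `GenericDialect`):
    ///
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let sql = "CREATE SCHEMA IF NOT EXISTS myschema";
    /// assert!(Parser::parse_sql(&GenericDialect {}, sql).is_ok());
    /// ```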
5284    pub fn parse_create_schema(&mut self) -> Result<Statement, ParserError> {
5285        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5286
5287        let schema_name = self.parse_schema_name()?;
5288
5289        let default_collate_spec = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
5290            Some(self.parse_expr()?)
5291        } else {
5292            None
5293        };
5294
5295        let with = if self.peek_keyword(Keyword::WITH) {
5296            Some(self.parse_options(Keyword::WITH)?)
5297        } else {
5298            None
5299        };
5300
5301        let options = if self.peek_keyword(Keyword::OPTIONS) {
5302            Some(self.parse_options(Keyword::OPTIONS)?)
5303        } else {
5304            None
5305        };
5306
5307        let clone = if self.parse_keyword(Keyword::CLONE) {
5308            Some(self.parse_object_name(false)?)
5309        } else {
5310            None
5311        };
5312
5313        Ok(Statement::CreateSchema {
5314            schema_name,
5315            if_not_exists,
5316            with,
5317            options,
5318            default_collate_spec,
5319            clone,
5320        })
5321    }
5322
5323    fn parse_schema_name(&mut self) -> Result<SchemaName, ParserError> {
5324        if self.parse_keyword(Keyword::AUTHORIZATION) {
5325            Ok(SchemaName::UnnamedAuthorization(self.parse_identifier()?))
5326        } else {
5327            let name = self.parse_object_name(false)?;
5328
5329            if self.parse_keyword(Keyword::AUTHORIZATION) {
5330                Ok(SchemaName::NamedAuthorization(
5331                    name,
5332                    self.parse_identifier()?,
5333                ))
5334            } else {
5335                Ok(SchemaName::Simple(name))
5336            }
5337        }
5338    }
5339
5340    /// Parse a `CREATE DATABASE` statement.
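    ///
    /// Illustrative examples covering the Hive-style location clauses and the
    /// MySQL-style character set / collation defaults parsed below (names are hypothetical):
    ///
    /// ```sql
    /// CREATE DATABASE IF NOT EXISTS analytics LOCATION '/warehouse/analytics'
    /// CREATE DATABASE app DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci
    /// ```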
5341    pub fn parse_create_database(&mut self) -> Result<Statement, ParserError> {
5342        let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5343        let db_name = self.parse_object_name(false)?;
5344        let mut location = None;
5345        let mut managed_location = None;
5346        loop {
5347            match self.parse_one_of_keywords(&[Keyword::LOCATION, Keyword::MANAGEDLOCATION]) {
5348                Some(Keyword::LOCATION) => location = Some(self.parse_literal_string()?),
5349                Some(Keyword::MANAGEDLOCATION) => {
5350                    managed_location = Some(self.parse_literal_string()?)
5351                }
5352                _ => break,
5353            }
5354        }
5355        let clone = if self.parse_keyword(Keyword::CLONE) {
5356            Some(self.parse_object_name(false)?)
5357        } else {
5358            None
5359        };
5360
5361        // Parse MySQL-style [DEFAULT] CHARACTER SET and [DEFAULT] COLLATE options
5362        //
5363        // Note: The docs only mention `CHARACTER SET`, but `CHARSET` is also supported.
5364        // Furthermore, MySQL will only accept one character set, raising an error if there is more
5365        // than one, but will accept multiple collations and use the last one.
5366        //
5367        // <https://dev.mysql.com/doc/refman/8.4/en/create-database.html>
5368        let mut default_charset = None;
5369        let mut default_collation = None;
5370        loop {
5371            let has_default = self.parse_keyword(Keyword::DEFAULT);
5372            if default_charset.is_none() && self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET])
5373                || self.parse_keyword(Keyword::CHARSET)
5374            {
5375                let _ = self.consume_token(&Token::Eq);
5376                default_charset = Some(self.parse_identifier()?.value);
5377            } else if self.parse_keyword(Keyword::COLLATE) {
5378                let _ = self.consume_token(&Token::Eq);
5379                default_collation = Some(self.parse_identifier()?.value);
5380            } else if has_default {
5381                // DEFAULT keyword not followed by CHARACTER SET, CHARSET, or COLLATE
5382                self.prev_token();
5383                break;
5384            } else {
5385                break;
5386            }
5387        }
5388
5389        Ok(Statement::CreateDatabase {
5390            db_name,
5391            if_not_exists: ine,
5392            location,
5393            managed_location,
5394            or_replace: false,
5395            transient: false,
5396            clone,
5397            data_retention_time_in_days: None,
5398            max_data_extension_time_in_days: None,
5399            external_volume: None,
5400            catalog: None,
5401            replace_invalid_characters: None,
5402            default_ddl_collation: None,
5403            storage_serialization_policy: None,
5404            comment: None,
5405            default_charset,
5406            default_collation,
5407            catalog_sync: None,
5408            catalog_sync_namespace_mode: None,
5409            catalog_sync_namespace_flatten_delimiter: None,
5410            with_tags: None,
5411            with_contacts: None,
5412        })
5413    }
5414
5415    /// Parse an optional `USING` clause for `CREATE FUNCTION`.
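    ///
    /// For example, a Hive-style clause (illustrative; the URI is hypothetical):
    ///
    /// ```sql
    /// USING JAR 'hdfs:///udfs/my_udf.jar'
    /// ```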
5416    pub fn parse_optional_create_function_using(
5417        &mut self,
5418    ) -> Result<Option<CreateFunctionUsing>, ParserError> {
5419        if !self.parse_keyword(Keyword::USING) {
5420            return Ok(None);
5421        };
5422        let keyword =
5423            self.expect_one_of_keywords(&[Keyword::JAR, Keyword::FILE, Keyword::ARCHIVE])?;
5424
5425        let uri = self.parse_literal_string()?;
5426
5427        match keyword {
5428            Keyword::JAR => Ok(Some(CreateFunctionUsing::Jar(uri))),
5429            Keyword::FILE => Ok(Some(CreateFunctionUsing::File(uri))),
5430            Keyword::ARCHIVE => Ok(Some(CreateFunctionUsing::Archive(uri))),
5431            _ => self.expected(
5432                "JAR, FILE or ARCHIVE, got {:?}",
5433                TokenWithSpan::wrap(Token::make_keyword(format!("{keyword:?}").as_str())),
5434            ),
5435        }
5436    }
5437
5438    /// Parse a `CREATE FUNCTION` statement.
5439    pub fn parse_create_function(
5440        &mut self,
5441        or_alter: bool,
5442        or_replace: bool,
5443        temporary: bool,
5444    ) -> Result<Statement, ParserError> {
5445        if dialect_of!(self is HiveDialect) {
5446            self.parse_hive_create_function(or_replace, temporary)
5447                .map(Into::into)
5448        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
5449            self.parse_postgres_create_function(or_replace, temporary)
5450                .map(Into::into)
5451        } else if dialect_of!(self is DuckDbDialect) {
5452            self.parse_create_macro(or_replace, temporary)
5453        } else if dialect_of!(self is BigQueryDialect) {
5454            self.parse_bigquery_create_function(or_replace, temporary)
5455                .map(Into::into)
5456        } else if dialect_of!(self is MsSqlDialect) {
5457            self.parse_mssql_create_function(or_alter, or_replace, temporary)
5458                .map(Into::into)
5459        } else {
5460            self.prev_token();
5461            self.expected("an object type after CREATE", self.peek_token())
5462        }
5463    }
5464
5465    /// Parse `CREATE FUNCTION` for [PostgreSQL]
5466    ///
5467    /// [PostgreSQL]: https://www.postgresql.org/docs/15/sql-createfunction.html
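    ///
    /// An illustrative statement covering several of the clauses parsed below
    /// (function and argument names are hypothetical):
    ///
    /// ```sql
    /// CREATE FUNCTION add(a INTEGER, b INTEGER)
    ///     RETURNS INTEGER
    ///     LANGUAGE SQL
    ///     IMMUTABLE
    ///     RETURN a + b
    /// ```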
5468    fn parse_postgres_create_function(
5469        &mut self,
5470        or_replace: bool,
5471        temporary: bool,
5472    ) -> Result<CreateFunction, ParserError> {
5473        let name = self.parse_object_name(false)?;
5474
5475        self.expect_token(&Token::LParen)?;
5476        let args = if Token::RParen != self.peek_token_ref().token {
5477            self.parse_comma_separated(Parser::parse_function_arg)?
5478        } else {
5479            vec![]
5480        };
5481        self.expect_token(&Token::RParen)?;
5482
5483        let return_type = if self.parse_keyword(Keyword::RETURNS) {
5484            Some(self.parse_data_type()?)
5485        } else {
5486            None
5487        };
5488
5489        #[derive(Default)]
5490        struct Body {
5491            language: Option<Ident>,
5492            behavior: Option<FunctionBehavior>,
5493            function_body: Option<CreateFunctionBody>,
5494            called_on_null: Option<FunctionCalledOnNull>,
5495            parallel: Option<FunctionParallel>,
5496            security: Option<FunctionSecurity>,
5497        }
5498        let mut body = Body::default();
5499        let mut set_params: Vec<FunctionDefinitionSetParam> = Vec::new();
5500        loop {
5501            fn ensure_not_set<T>(field: &Option<T>, name: &str) -> Result<(), ParserError> {
5502                if field.is_some() {
5503                    return Err(ParserError::ParserError(format!(
5504                        "{name} specified more than once",
5505                    )));
5506                }
5507                Ok(())
5508            }
5509            if self.parse_keyword(Keyword::AS) {
5510                ensure_not_set(&body.function_body, "AS")?;
5511                body.function_body = Some(self.parse_create_function_body_string()?);
5512            } else if self.parse_keyword(Keyword::LANGUAGE) {
5513                ensure_not_set(&body.language, "LANGUAGE")?;
5514                body.language = Some(self.parse_identifier()?);
5515            } else if self.parse_keyword(Keyword::IMMUTABLE) {
5516                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
5517                body.behavior = Some(FunctionBehavior::Immutable);
5518            } else if self.parse_keyword(Keyword::STABLE) {
5519                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
5520                body.behavior = Some(FunctionBehavior::Stable);
5521            } else if self.parse_keyword(Keyword::VOLATILE) {
5522                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
5523                body.behavior = Some(FunctionBehavior::Volatile);
5524            } else if self.parse_keywords(&[
5525                Keyword::CALLED,
5526                Keyword::ON,
5527                Keyword::NULL,
5528                Keyword::INPUT,
5529            ]) {
5530                ensure_not_set(
5531                    &body.called_on_null,
5532                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
5533                )?;
5534                body.called_on_null = Some(FunctionCalledOnNull::CalledOnNullInput);
5535            } else if self.parse_keywords(&[
5536                Keyword::RETURNS,
5537                Keyword::NULL,
5538                Keyword::ON,
5539                Keyword::NULL,
5540                Keyword::INPUT,
5541            ]) {
5542                ensure_not_set(
5543                    &body.called_on_null,
5544                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
5545                )?;
5546                body.called_on_null = Some(FunctionCalledOnNull::ReturnsNullOnNullInput);
5547            } else if self.parse_keyword(Keyword::STRICT) {
5548                ensure_not_set(
5549                    &body.called_on_null,
5550                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
5551                )?;
5552                body.called_on_null = Some(FunctionCalledOnNull::Strict);
5553            } else if self.parse_keyword(Keyword::PARALLEL) {
5554                ensure_not_set(&body.parallel, "PARALLEL { UNSAFE | RESTRICTED | SAFE }")?;
5555                if self.parse_keyword(Keyword::UNSAFE) {
5556                    body.parallel = Some(FunctionParallel::Unsafe);
5557                } else if self.parse_keyword(Keyword::RESTRICTED) {
5558                    body.parallel = Some(FunctionParallel::Restricted);
5559                } else if self.parse_keyword(Keyword::SAFE) {
5560                    body.parallel = Some(FunctionParallel::Safe);
5561                } else {
5562                    return self.expected("one of UNSAFE | RESTRICTED | SAFE", self.peek_token());
5563                }
5564            } else if self.parse_keyword(Keyword::SECURITY) {
5565                ensure_not_set(&body.security, "SECURITY { DEFINER | INVOKER }")?;
5566                if self.parse_keyword(Keyword::DEFINER) {
5567                    body.security = Some(FunctionSecurity::Definer);
5568                } else if self.parse_keyword(Keyword::INVOKER) {
5569                    body.security = Some(FunctionSecurity::Invoker);
5570                } else {
5571                    return self.expected("DEFINER or INVOKER", self.peek_token());
5572                }
5573            } else if self.parse_keyword(Keyword::SET) {
5574                let name = self.parse_identifier()?;
5575                let value = if self.parse_keywords(&[Keyword::FROM, Keyword::CURRENT]) {
5576                    FunctionSetValue::FromCurrent
5577                } else {
5578                    if !self.consume_token(&Token::Eq) && !self.parse_keyword(Keyword::TO) {
5579                        return self.expected("= or TO", self.peek_token());
5580                    }
5581                    let values = self.parse_comma_separated(Parser::parse_expr)?;
5582                    FunctionSetValue::Values(values)
5583                };
5584                set_params.push(FunctionDefinitionSetParam { name, value });
5585            } else if self.parse_keyword(Keyword::RETURN) {
5586                ensure_not_set(&body.function_body, "RETURN")?;
5587                body.function_body = Some(CreateFunctionBody::Return(self.parse_expr()?));
5588            } else {
5589                break;
5590            }
5591        }
5592
5593        Ok(CreateFunction {
5594            or_alter: false,
5595            or_replace,
5596            temporary,
5597            name,
5598            args: Some(args),
5599            return_type,
5600            behavior: body.behavior,
5601            called_on_null: body.called_on_null,
5602            parallel: body.parallel,
5603            security: body.security,
5604            set_params,
5605            language: body.language,
5606            function_body: body.function_body,
5607            if_not_exists: false,
5608            using: None,
5609            determinism_specifier: None,
5610            options: None,
5611            remote_connection: None,
5612        })
5613    }
5614
5615    /// Parse `CREATE FUNCTION` for [Hive]
5616    ///
5617    /// [Hive]: https://cwiki.apache.org/confluence/display/hive/languagemanual+ddl#LanguageManualDDL-Create/Drop/ReloadFunction
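    ///
    /// An illustrative example (class name and URI are hypothetical):
    ///
    /// ```sql
    /// CREATE TEMPORARY FUNCTION mydb.my_udf AS 'com.example.MyUDF' USING JAR 'hdfs:///udfs/my_udf.jar'
    /// ```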
5618    fn parse_hive_create_function(
5619        &mut self,
5620        or_replace: bool,
5621        temporary: bool,
5622    ) -> Result<CreateFunction, ParserError> {
5623        let name = self.parse_object_name(false)?;
5624        self.expect_keyword_is(Keyword::AS)?;
5625
5626        let body = self.parse_create_function_body_string()?;
5627        let using = self.parse_optional_create_function_using()?;
5628
5629        Ok(CreateFunction {
5630            or_alter: false,
5631            or_replace,
5632            temporary,
5633            name,
5634            function_body: Some(body),
5635            using,
5636            if_not_exists: false,
5637            args: None,
5638            return_type: None,
5639            behavior: None,
5640            called_on_null: None,
5641            parallel: None,
5642            security: None,
5643            set_params: vec![],
5644            language: None,
5645            determinism_specifier: None,
5646            options: None,
5647            remote_connection: None,
5648        })
5649    }
5650
5651    /// Parse `CREATE FUNCTION` for [BigQuery]
5652    ///
5653    /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_function_statement
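    ///
    /// An illustrative example (project, dataset, and function names are hypothetical):
    ///
    /// ```sql
    /// CREATE FUNCTION project.dataset.add_one(x INT64)
    ///     RETURNS INT64
    ///     AS (x + 1)
    /// ```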
5654    fn parse_bigquery_create_function(
5655        &mut self,
5656        or_replace: bool,
5657        temporary: bool,
5658    ) -> Result<CreateFunction, ParserError> {
5659        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5660        let (name, args) = self.parse_create_function_name_and_params()?;
5661
5662        let return_type = if self.parse_keyword(Keyword::RETURNS) {
5663            Some(self.parse_data_type()?)
5664        } else {
5665            None
5666        };
5667
5668        let determinism_specifier = if self.parse_keyword(Keyword::DETERMINISTIC) {
5669            Some(FunctionDeterminismSpecifier::Deterministic)
5670        } else if self.parse_keywords(&[Keyword::NOT, Keyword::DETERMINISTIC]) {
5671            Some(FunctionDeterminismSpecifier::NotDeterministic)
5672        } else {
5673            None
5674        };
5675
5676        let language = if self.parse_keyword(Keyword::LANGUAGE) {
5677            Some(self.parse_identifier()?)
5678        } else {
5679            None
5680        };
5681
5682        let remote_connection =
5683            if self.parse_keywords(&[Keyword::REMOTE, Keyword::WITH, Keyword::CONNECTION]) {
5684                Some(self.parse_object_name(false)?)
5685            } else {
5686                None
5687            };
5688
5689        // `OPTIONS` may come before or after the function body but
5690        // may be specified at most once.
5691        let mut options = self.maybe_parse_options(Keyword::OPTIONS)?;
5692
5693        let function_body = if remote_connection.is_none() {
5694            self.expect_keyword_is(Keyword::AS)?;
5695            let expr = self.parse_expr()?;
5696            if options.is_none() {
5697                options = self.maybe_parse_options(Keyword::OPTIONS)?;
5698                Some(CreateFunctionBody::AsBeforeOptions {
5699                    body: expr,
5700                    link_symbol: None,
5701                })
5702            } else {
5703                Some(CreateFunctionBody::AsAfterOptions(expr))
5704            }
5705        } else {
5706            None
5707        };
5708
5709        Ok(CreateFunction {
5710            or_alter: false,
5711            or_replace,
5712            temporary,
5713            if_not_exists,
5714            name,
5715            args: Some(args),
5716            return_type,
5717            function_body,
5718            language,
5719            determinism_specifier,
5720            options,
5721            remote_connection,
5722            using: None,
5723            behavior: None,
5724            called_on_null: None,
5725            parallel: None,
5726            security: None,
5727            set_params: vec![],
5728        })
5729    }
5730
5731    /// Parse `CREATE FUNCTION` for [MsSql]
5732    ///
5733    /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/statements/create-function-transact-sql
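    ///
    /// An illustrative scalar function (names are hypothetical):
    ///
    /// ```sql
    /// CREATE FUNCTION dbo.add_one(@x INT)
    /// RETURNS INT
    /// AS
    /// BEGIN
    ///     RETURN @x + 1
    /// END
    /// ```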
5734    fn parse_mssql_create_function(
5735        &mut self,
5736        or_alter: bool,
5737        or_replace: bool,
5738        temporary: bool,
5739    ) -> Result<CreateFunction, ParserError> {
5740        let (name, args) = self.parse_create_function_name_and_params()?;
5741
5742        self.expect_keyword(Keyword::RETURNS)?;
5743
5744        let return_table = self.maybe_parse(|p| {
5745            let return_table_name = p.parse_identifier()?;
5746
5747            p.expect_keyword_is(Keyword::TABLE)?;
5748            p.prev_token();
5749
5750            let table_column_defs = match p.parse_data_type()? {
5751                DataType::Table(Some(table_column_defs)) if !table_column_defs.is_empty() => {
5752                    table_column_defs
5753                }
5754                _ => parser_err!(
5755                    "Expected table column definitions after TABLE keyword",
5756                    p.peek_token().span.start
5757                )?,
5758            };
5759
5760            Ok(DataType::NamedTable {
5761                name: ObjectName(vec![ObjectNamePart::Identifier(return_table_name)]),
5762                columns: table_column_defs,
5763            })
5764        })?;
5765
5766        let return_type = if return_table.is_some() {
5767            return_table
5768        } else {
5769            Some(self.parse_data_type()?)
5770        };
5771
5772        let _ = self.parse_keyword(Keyword::AS);
5773
5774        let function_body = if self.peek_keyword(Keyword::BEGIN) {
5775            let begin_token = self.expect_keyword(Keyword::BEGIN)?;
5776            let statements = self.parse_statement_list(&[Keyword::END])?;
5777            let end_token = self.expect_keyword(Keyword::END)?;
5778
5779            Some(CreateFunctionBody::AsBeginEnd(BeginEndStatements {
5780                begin_token: AttachedToken(begin_token),
5781                statements,
5782                end_token: AttachedToken(end_token),
5783            }))
5784        } else if self.parse_keyword(Keyword::RETURN) {
5785            if self.peek_token() == Token::LParen {
5786                Some(CreateFunctionBody::AsReturnExpr(self.parse_expr()?))
5787            } else if self.peek_keyword(Keyword::SELECT) {
5788                let select = self.parse_select()?;
5789                Some(CreateFunctionBody::AsReturnSelect(select))
5790            } else {
5791                parser_err!(
5792                    "Expected a subquery (or bare SELECT statement) after RETURN",
5793                    self.peek_token().span.start
5794                )?
5795            }
5796        } else {
5797            parser_err!("Unparsable function body", self.peek_token().span.start)?
5798        };
5799
5800        Ok(CreateFunction {
5801            or_alter,
5802            or_replace,
5803            temporary,
5804            if_not_exists: false,
5805            name,
5806            args: Some(args),
5807            return_type,
5808            function_body,
5809            language: None,
5810            determinism_specifier: None,
5811            options: None,
5812            remote_connection: None,
5813            using: None,
5814            behavior: None,
5815            called_on_null: None,
5816            parallel: None,
5817            security: None,
5818            set_params: vec![],
5819        })
5820    }
5821
5822    fn parse_create_function_name_and_params(
5823        &mut self,
5824    ) -> Result<(ObjectName, Vec<OperateFunctionArg>), ParserError> {
5825        let name = self.parse_object_name(false)?;
5826        let parse_function_param =
5827            |parser: &mut Parser| -> Result<OperateFunctionArg, ParserError> {
5828                let name = parser.parse_identifier()?;
5829                let data_type = parser.parse_data_type()?;
5830                let default_expr = if parser.consume_token(&Token::Eq) {
5831                    Some(parser.parse_expr()?)
5832                } else {
5833                    None
5834                };
5835
5836                Ok(OperateFunctionArg {
5837                    mode: None,
5838                    name: Some(name),
5839                    data_type,
5840                    default_expr,
5841                })
5842            };
5843        self.expect_token(&Token::LParen)?;
5844        let args = self.parse_comma_separated0(parse_function_param, Token::RParen)?;
5845        self.expect_token(&Token::RParen)?;
5846        Ok((name, args))
5847    }
5848
5849    fn parse_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
5850        let mode = if self.parse_keyword(Keyword::IN) {
5851            Some(ArgMode::In)
5852        } else if self.parse_keyword(Keyword::OUT) {
5853            Some(ArgMode::Out)
5854        } else if self.parse_keyword(Keyword::INOUT) {
5855            Some(ArgMode::InOut)
5856        } else {
5857            None
5858        };
5859
5860        // parse: [ argname ] argtype
5861        let mut name = None;
5862        let mut data_type = self.parse_data_type()?;
5863
5864        // To check whether the first token is a name or a type, we try to
5865        // parse the next token as a type as well: if that succeeds, the first
5866        // token was the argument name rather than a type.
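        // For example, in `(val INTEGER)` the first token is a name, while in
        // `(INTEGER)` it is already the type.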
5867        let data_type_idx = self.get_current_index();
5868
5869        // DEFAULT will be parsed as `DataType::Custom`, which is undesirable in this context
5870        fn parse_data_type_no_default(parser: &mut Parser) -> Result<DataType, ParserError> {
5871            if parser.peek_keyword(Keyword::DEFAULT) {
5872                // This dummy error is ignored in `maybe_parse`
5873                parser_err!(
5874                    "The DEFAULT keyword is not a type",
5875                    parser.peek_token().span.start
5876                )
5877            } else {
5878                parser.parse_data_type()
5879            }
5880        }
5881
5882        if let Some(next_data_type) = self.maybe_parse(parse_data_type_no_default)? {
5883            let token = self.token_at(data_type_idx);
5884
5885            // We ensure that the token is a `Word` token, and not other special tokens.
5886            if !matches!(token.token, Token::Word(_)) {
5887                return self.expected("a name or type", token.clone());
5888            }
5889
5890            name = Some(Ident::new(token.to_string()));
5891            data_type = next_data_type;
5892        }
5893
5894        let default_expr = if self.parse_keyword(Keyword::DEFAULT) || self.consume_token(&Token::Eq)
5895        {
5896            Some(self.parse_expr()?)
5897        } else {
5898            None
5899        };
5900        Ok(OperateFunctionArg {
5901            mode,
5902            name,
5903            data_type,
5904            default_expr,
5905        })
5906    }
5907
5908    /// Parse a `DROP TRIGGER` statement, such as:
5909    ///
5910    /// ```sql
5911    /// DROP TRIGGER [ IF EXISTS ] name ON table_name [ CASCADE | RESTRICT ]
5912    /// ```
5913    pub fn parse_drop_trigger(&mut self) -> Result<DropTrigger, ParserError> {
5914        if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
5915        {
5916            self.prev_token();
5917            return self.expected("an object type after DROP", self.peek_token());
5918        }
5919        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5920        let trigger_name = self.parse_object_name(false)?;
5921        let table_name = if self.parse_keyword(Keyword::ON) {
5922            Some(self.parse_object_name(false)?)
5923        } else {
5924            None
5925        };
5926        let option = match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
5927            Some(Keyword::CASCADE) => Some(ReferentialAction::Cascade),
5928            Some(Keyword::RESTRICT) => Some(ReferentialAction::Restrict),
5929            Some(unexpected_keyword) => return Err(ParserError::ParserError(
5930                format!("Internal parser error: expected any of {{CASCADE, RESTRICT}}, got {unexpected_keyword:?}"),
5931            )),
5932            None => None,
5933        };
5934        Ok(DropTrigger {
5935            if_exists,
5936            trigger_name,
5937            table_name,
5938            option,
5939        })
5940    }
5941
5942    /// Parse a `CREATE TRIGGER` statement.
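    ///
    /// An illustrative PostgreSQL-style example (trigger, table, and function
    /// names are hypothetical):
    ///
    /// ```sql
    /// CREATE TRIGGER check_update
    ///     BEFORE UPDATE ON accounts
    ///     FOR EACH ROW
    ///     EXECUTE FUNCTION check_account_update()
    /// ```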
5943    pub fn parse_create_trigger(
5944        &mut self,
5945        temporary: bool,
5946        or_alter: bool,
5947        or_replace: bool,
5948        is_constraint: bool,
5949    ) -> Result<CreateTrigger, ParserError> {
5950        if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
5951        {
5952            self.prev_token();
5953            return self.expected("an object type after CREATE", self.peek_token());
5954        }
5955
5956        let name = self.parse_object_name(false)?;
5957        let period = self.maybe_parse(|parser| parser.parse_trigger_period())?;
5958
5959        let events = self.parse_keyword_separated(Keyword::OR, Parser::parse_trigger_event)?;
5960        self.expect_keyword_is(Keyword::ON)?;
5961        let table_name = self.parse_object_name(false)?;
5962
5963        let referenced_table_name = if self.parse_keyword(Keyword::FROM) {
5964            self.parse_object_name(true).ok()
5965        } else {
5966            None
5967        };
5968
5969        let characteristics = self.parse_constraint_characteristics()?;
5970
5971        let mut referencing = vec![];
5972        if self.parse_keyword(Keyword::REFERENCING) {
5973            while let Some(refer) = self.parse_trigger_referencing()? {
5974                referencing.push(refer);
5975            }
5976        }
5977
5978        let trigger_object = if self.parse_keyword(Keyword::FOR) {
5979            let include_each = self.parse_keyword(Keyword::EACH);
5980            let trigger_object =
5981                match self.expect_one_of_keywords(&[Keyword::ROW, Keyword::STATEMENT])? {
5982                    Keyword::ROW => TriggerObject::Row,
5983                    Keyword::STATEMENT => TriggerObject::Statement,
5984                    unexpected_keyword => return Err(ParserError::ParserError(
5985                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in ROW/STATEMENT"),
5986                    )),
5987                };
5988
5989            Some(if include_each {
5990                TriggerObjectKind::ForEach(trigger_object)
5991            } else {
5992                TriggerObjectKind::For(trigger_object)
5993            })
5994        } else {
5995            let _ = self.parse_keyword(Keyword::FOR);
5996
5997            None
5998        };
5999
6000        let condition = self
6001            .parse_keyword(Keyword::WHEN)
6002            .then(|| self.parse_expr())
6003            .transpose()?;
6004
6005        let mut exec_body = None;
6006        let mut statements = None;
6007        if self.parse_keyword(Keyword::EXECUTE) {
6008            exec_body = Some(self.parse_trigger_exec_body()?);
6009        } else {
6010            statements = Some(self.parse_conditional_statements(&[Keyword::END])?);
6011        }
6012
6013        Ok(CreateTrigger {
6014            or_alter,
6015            temporary,
6016            or_replace,
6017            is_constraint,
6018            name,
6019            period,
6020            period_before_table: true,
6021            events,
6022            table_name,
6023            referenced_table_name,
6024            referencing,
6025            trigger_object,
6026            condition,
6027            exec_body,
6028            statements_as: false,
6029            statements,
6030            characteristics,
6031        })
6032    }
6033
6034    /// Parse the period part of a trigger (`BEFORE`, `AFTER`, etc.).
6035    pub fn parse_trigger_period(&mut self) -> Result<TriggerPeriod, ParserError> {
6036        Ok(
6037            match self.expect_one_of_keywords(&[
6038                Keyword::FOR,
6039                Keyword::BEFORE,
6040                Keyword::AFTER,
6041                Keyword::INSTEAD,
6042            ])? {
6043                Keyword::FOR => TriggerPeriod::For,
6044                Keyword::BEFORE => TriggerPeriod::Before,
6045                Keyword::AFTER => TriggerPeriod::After,
6046                Keyword::INSTEAD => self
6047                    .expect_keyword_is(Keyword::OF)
6048                    .map(|_| TriggerPeriod::InsteadOf)?,
6049                unexpected_keyword => return Err(ParserError::ParserError(
6050                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger period"),
6051                )),
6052            },
6053        )
6054    }
6055
6056    /// Parse the event part of a trigger (`INSERT`, `UPDATE`, etc.).
6057    pub fn parse_trigger_event(&mut self) -> Result<TriggerEvent, ParserError> {
6058        Ok(
6059            match self.expect_one_of_keywords(&[
6060                Keyword::INSERT,
6061                Keyword::UPDATE,
6062                Keyword::DELETE,
6063                Keyword::TRUNCATE,
6064            ])? {
6065                Keyword::INSERT => TriggerEvent::Insert,
6066                Keyword::UPDATE => {
6067                    if self.parse_keyword(Keyword::OF) {
6068                        let cols = self.parse_comma_separated(Parser::parse_identifier)?;
6069                        TriggerEvent::Update(cols)
6070                    } else {
6071                        TriggerEvent::Update(vec![])
6072                    }
6073                }
6074                Keyword::DELETE => TriggerEvent::Delete,
6075                Keyword::TRUNCATE => TriggerEvent::Truncate,
6076                unexpected_keyword => return Err(ParserError::ParserError(
6077                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger event"),
6078                )),
6079            },
6080        )
6081    }
6082
6083    /// Parse the `REFERENCING` clause of a trigger.
6084    pub fn parse_trigger_referencing(&mut self) -> Result<Option<TriggerReferencing>, ParserError> {
6085        let refer_type = match self.parse_one_of_keywords(&[Keyword::OLD, Keyword::NEW]) {
6086            Some(Keyword::OLD) if self.parse_keyword(Keyword::TABLE) => {
6087                TriggerReferencingType::OldTable
6088            }
6089            Some(Keyword::NEW) if self.parse_keyword(Keyword::TABLE) => {
6090                TriggerReferencingType::NewTable
6091            }
6092            _ => {
6093                return Ok(None);
6094            }
6095        };
6096
6097        let is_as = self.parse_keyword(Keyword::AS);
6098        let transition_relation_name = self.parse_object_name(false)?;
6099        Ok(Some(TriggerReferencing {
6100            refer_type,
6101            is_as,
6102            transition_relation_name,
6103        }))
6104    }
6105
6106    /// Parse the execution body of a trigger (`FUNCTION` or `PROCEDURE`).
6107    pub fn parse_trigger_exec_body(&mut self) -> Result<TriggerExecBody, ParserError> {
6108        Ok(TriggerExecBody {
6109            exec_type: match self
6110                .expect_one_of_keywords(&[Keyword::FUNCTION, Keyword::PROCEDURE])?
6111            {
6112                Keyword::FUNCTION => TriggerExecBodyType::Function,
6113                Keyword::PROCEDURE => TriggerExecBodyType::Procedure,
6114                unexpected_keyword => return Err(ParserError::ParserError(
6115                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger exec body"),
6116                )),
6117            },
6118            func_desc: self.parse_function_desc()?,
6119        })
6120    }
6121
6122    /// Parse a `CREATE MACRO` statement.
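    ///
    /// Illustrative DuckDB-style examples (macro and table names are hypothetical):
    ///
    /// ```sql
    /// CREATE MACRO add(a, b) AS a + b
    /// CREATE MACRO recent_events() AS TABLE SELECT * FROM events LIMIT 10
    /// ```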
6123    pub fn parse_create_macro(
6124        &mut self,
6125        or_replace: bool,
6126        temporary: bool,
6127    ) -> Result<Statement, ParserError> {
6128        if dialect_of!(self is DuckDbDialect | GenericDialect) {
6129            let name = self.parse_object_name(false)?;
6130            self.expect_token(&Token::LParen)?;
6131            let args = if self.consume_token(&Token::RParen) {
6132                self.prev_token();
6133                None
6134            } else {
6135                Some(self.parse_comma_separated(Parser::parse_macro_arg)?)
6136            };
6137
6138            self.expect_token(&Token::RParen)?;
6139            self.expect_keyword_is(Keyword::AS)?;
6140
6141            Ok(Statement::CreateMacro {
6142                or_replace,
6143                temporary,
6144                name,
6145                args,
6146                definition: if self.parse_keyword(Keyword::TABLE) {
6147                    MacroDefinition::Table(self.parse_query()?)
6148                } else {
6149                    MacroDefinition::Expr(self.parse_expr()?)
6150                },
6151            })
6152        } else {
6153            self.prev_token();
6154            self.expected("an object type after CREATE", self.peek_token())
6155        }
6156    }
6157
6158    fn parse_macro_arg(&mut self) -> Result<MacroArg, ParserError> {
6159        let name = self.parse_identifier()?;
6160
6161        let default_expr =
6162            if self.consume_token(&Token::Assignment) || self.consume_token(&Token::RArrow) {
6163                Some(self.parse_expr()?)
6164            } else {
6165                None
6166            };
6167        Ok(MacroArg { name, default_expr })
6168    }
6169
6170    /// Parse a `CREATE EXTERNAL TABLE` statement.
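    ///
    /// An illustrative Hive-style example (table name and location are hypothetical):
    ///
    /// ```sql
    /// CREATE EXTERNAL TABLE IF NOT EXISTS events (id INT, name STRING)
    /// STORED AS PARQUET
    /// LOCATION 's3://bucket/events'
    /// ```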
6171    pub fn parse_create_external_table(
6172        &mut self,
6173        or_replace: bool,
6174    ) -> Result<CreateTable, ParserError> {
6175        self.expect_keyword_is(Keyword::TABLE)?;
6176        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6177        let table_name = self.parse_object_name(false)?;
6178        let (columns, constraints) = self.parse_columns()?;
6179
6180        let hive_distribution = self.parse_hive_distribution()?;
6181        let hive_formats = self.parse_hive_formats()?;
6182
6183        let file_format = if let Some(ref hf) = hive_formats {
6184            if let Some(ref ff) = hf.storage {
6185                match ff {
6186                    HiveIOFormat::FileFormat { format } => Some(*format),
6187                    _ => None,
6188                }
6189            } else {
6190                None
6191            }
6192        } else {
6193            None
6194        };
6195        let location = hive_formats.as_ref().and_then(|hf| hf.location.clone());
6196        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
6197        let table_options = if !table_properties.is_empty() {
6198            CreateTableOptions::TableProperties(table_properties)
6199        } else {
6200            CreateTableOptions::None
6201        };
6202        Ok(CreateTableBuilder::new(table_name)
6203            .columns(columns)
6204            .constraints(constraints)
6205            .hive_distribution(hive_distribution)
6206            .hive_formats(hive_formats)
6207            .table_options(table_options)
6208            .or_replace(or_replace)
6209            .if_not_exists(if_not_exists)
6210            .external(true)
6211            .file_format(file_format)
6212            .location(location)
6213            .build())
6214    }
6215
6216    /// Parse a file format for external tables.
6217    pub fn parse_file_format(&mut self) -> Result<FileFormat, ParserError> {
6218        let next_token = self.next_token();
6219        match &next_token.token {
6220            Token::Word(w) => match w.keyword {
6221                Keyword::AVRO => Ok(FileFormat::AVRO),
6222                Keyword::JSONFILE => Ok(FileFormat::JSONFILE),
6223                Keyword::ORC => Ok(FileFormat::ORC),
6224                Keyword::PARQUET => Ok(FileFormat::PARQUET),
6225                Keyword::RCFILE => Ok(FileFormat::RCFILE),
6226                Keyword::SEQUENCEFILE => Ok(FileFormat::SEQUENCEFILE),
6227                Keyword::TEXTFILE => Ok(FileFormat::TEXTFILE),
6228                _ => self.expected("fileformat", next_token),
6229            },
6230            _ => self.expected("fileformat", next_token),
6231        }
6232    }
6233
6234    fn parse_analyze_format_kind(&mut self) -> Result<AnalyzeFormatKind, ParserError> {
6235        if self.consume_token(&Token::Eq) {
6236            Ok(AnalyzeFormatKind::Assignment(self.parse_analyze_format()?))
6237        } else {
6238            Ok(AnalyzeFormatKind::Keyword(self.parse_analyze_format()?))
6239        }
6240    }
6241
6242    /// Parse an `ANALYZE FORMAT`.
6243    pub fn parse_analyze_format(&mut self) -> Result<AnalyzeFormat, ParserError> {
6244        let next_token = self.next_token();
6245        match &next_token.token {
6246            Token::Word(w) => match w.keyword {
6247                Keyword::TEXT => Ok(AnalyzeFormat::TEXT),
6248                Keyword::GRAPHVIZ => Ok(AnalyzeFormat::GRAPHVIZ),
6249                Keyword::JSON => Ok(AnalyzeFormat::JSON),
6250                Keyword::TREE => Ok(AnalyzeFormat::TREE),
6251                _ => self.expected("TEXT, GRAPHVIZ, JSON or TREE", next_token),
6252            },
6253            _ => self.expected("TEXT, GRAPHVIZ, JSON or TREE", next_token),
6254        }
6255    }
6256
6257    /// Parse a `CREATE VIEW` statement.
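    ///
    /// An illustrative example (view and table names are hypothetical):
    ///
    /// ```sql
    /// CREATE MATERIALIZED VIEW IF NOT EXISTS recent_orders AS
    ///     SELECT * FROM orders WHERE created_at > '2024-01-01'
    /// ```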
6258    pub fn parse_create_view(
6259        &mut self,
6260        or_alter: bool,
6261        or_replace: bool,
6262        temporary: bool,
6263        create_view_params: Option<CreateViewParams>,
6264    ) -> Result<CreateView, ParserError> {
6265        let secure = self.parse_keyword(Keyword::SECURE);
6266        let materialized = self.parse_keyword(Keyword::MATERIALIZED);
6267        self.expect_keyword_is(Keyword::VIEW)?;
6268        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
6269        // Tries to parse IF NOT EXISTS either before or after the view name.
6270        // The name appearing before IF NOT EXISTS is supported by Snowflake but undocumented.
6271        let if_not_exists_first =
6272            self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6273        let name = self.parse_object_name(allow_unquoted_hyphen)?;
6274        let name_before_not_exists = !if_not_exists_first
6275            && self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6276        let if_not_exists = if_not_exists_first || name_before_not_exists;
6277        // Many dialects support `OR ALTER` right after `CREATE`, but we don't (yet).
6278        // ANSI SQL and Postgres support RECURSIVE here, but we don't support it either.
6279        let columns = self.parse_view_columns()?;
6280        let mut options = CreateTableOptions::None;
6281        let with_options = self.parse_options(Keyword::WITH)?;
6282        if !with_options.is_empty() {
6283            options = CreateTableOptions::With(with_options);
6284        }
6285
6286        let cluster_by = if self.parse_keyword(Keyword::CLUSTER) {
6287            self.expect_keyword_is(Keyword::BY)?;
6288            self.parse_parenthesized_column_list(Optional, false)?
6289        } else {
6290            vec![]
6291        };
6292
6293        if dialect_of!(self is BigQueryDialect | GenericDialect) {
6294            if let Some(opts) = self.maybe_parse_options(Keyword::OPTIONS)? {
6295                if !opts.is_empty() {
6296                    options = CreateTableOptions::Options(opts);
6297                }
6298            };
6299        }
6300
6301        let to = if dialect_of!(self is ClickHouseDialect | GenericDialect)
6302            && self.parse_keyword(Keyword::TO)
6303        {
6304            Some(self.parse_object_name(false)?)
6305        } else {
6306            None
6307        };
6308
6309        let comment = if self.dialect.supports_create_view_comment_syntax()
6310            && self.parse_keyword(Keyword::COMMENT)
6311        {
6312            self.expect_token(&Token::Eq)?;
6313            Some(self.parse_comment_value()?)
6314        } else {
6315            None
6316        };
6317
6318        self.expect_keyword_is(Keyword::AS)?;
6319        let query = self.parse_query()?;
6320        // Optional `WITH [ CASCADED | LOCAL ] CHECK OPTION` is widely supported here.
6321
6322        let with_no_schema_binding = dialect_of!(self is RedshiftSqlDialect | GenericDialect)
6323            && self.parse_keywords(&[
6324                Keyword::WITH,
6325                Keyword::NO,
6326                Keyword::SCHEMA,
6327                Keyword::BINDING,
6328            ]);
6329
6330        Ok(CreateView {
6331            or_alter,
6332            name,
6333            columns,
6334            query,
6335            materialized,
6336            secure,
6337            or_replace,
6338            options,
6339            cluster_by,
6340            comment,
6341            with_no_schema_binding,
6342            if_not_exists,
6343            temporary,
6344            to,
6345            params: create_view_params,
6346            name_before_not_exists,
6347        })
6348    }
6349
6350    /// Parse optional parameters for the `CREATE VIEW` statement supported by [MySQL].
6351    ///
6352    /// [MySQL]: https://dev.mysql.com/doc/refman/9.1/en/create-view.html
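    ///
    /// An illustrative example of the parameters handled here (the definer is hypothetical):
    ///
    /// ```sql
    /// CREATE ALGORITHM = MERGE DEFINER = `admin`@`localhost` SQL SECURITY DEFINER
    ///     VIEW v AS SELECT 1
    /// ```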
6353    fn parse_create_view_params(&mut self) -> Result<Option<CreateViewParams>, ParserError> {
6354        let algorithm = if self.parse_keyword(Keyword::ALGORITHM) {
6355            self.expect_token(&Token::Eq)?;
6356            Some(
6357                match self.expect_one_of_keywords(&[
6358                    Keyword::UNDEFINED,
6359                    Keyword::MERGE,
6360                    Keyword::TEMPTABLE,
6361                ])? {
6362                    Keyword::UNDEFINED => CreateViewAlgorithm::Undefined,
6363                    Keyword::MERGE => CreateViewAlgorithm::Merge,
6364                    Keyword::TEMPTABLE => CreateViewAlgorithm::TempTable,
6365                    _ => {
6366                        self.prev_token();
6367                        let found = self.next_token();
6368                        return self
6369                            .expected("UNDEFINED or MERGE or TEMPTABLE after ALGORITHM =", found);
6370                    }
6371                },
6372            )
6373        } else {
6374            None
6375        };
6376        let definer = if self.parse_keyword(Keyword::DEFINER) {
6377            self.expect_token(&Token::Eq)?;
6378            Some(self.parse_grantee_name()?)
6379        } else {
6380            None
6381        };
6382        let security = if self.parse_keywords(&[Keyword::SQL, Keyword::SECURITY]) {
6383            Some(
6384                match self.expect_one_of_keywords(&[Keyword::DEFINER, Keyword::INVOKER])? {
6385                    Keyword::DEFINER => CreateViewSecurity::Definer,
6386                    Keyword::INVOKER => CreateViewSecurity::Invoker,
6387                    _ => {
6388                        self.prev_token();
6389                        let found = self.next_token();
6390                        return self.expected("DEFINER or INVOKER after SQL SECURITY", found);
6391                    }
6392                },
6393            )
6394        } else {
6395            None
6396        };
6397        if algorithm.is_some() || definer.is_some() || security.is_some() {
6398            Ok(Some(CreateViewParams {
6399                algorithm,
6400                definer,
6401                security,
6402            }))
6403        } else {
6404            Ok(None)
6405        }
6406    }
6407
6408    /// Parse a `CREATE ROLE` statement.
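    ///
    /// An illustrative PostgreSQL-style example (role name and password are hypothetical):
    ///
    /// ```sql
    /// CREATE ROLE IF NOT EXISTS reporting WITH LOGIN PASSWORD 'secret' CONNECTION LIMIT 10
    /// ```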
6409    pub fn parse_create_role(&mut self) -> Result<CreateRole, ParserError> {
6410        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6411        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
6412
6413        let _ = self.parse_keyword(Keyword::WITH); // [ WITH ]
6414
6415        let optional_keywords = if dialect_of!(self is MsSqlDialect) {
6416            vec![Keyword::AUTHORIZATION]
6417        } else if dialect_of!(self is PostgreSqlDialect) {
6418            vec![
6419                Keyword::LOGIN,
6420                Keyword::NOLOGIN,
6421                Keyword::INHERIT,
6422                Keyword::NOINHERIT,
6423                Keyword::BYPASSRLS,
6424                Keyword::NOBYPASSRLS,
6425                Keyword::PASSWORD,
6426                Keyword::CREATEDB,
6427                Keyword::NOCREATEDB,
6428                Keyword::CREATEROLE,
6429                Keyword::NOCREATEROLE,
6430                Keyword::SUPERUSER,
6431                Keyword::NOSUPERUSER,
6432                Keyword::REPLICATION,
6433                Keyword::NOREPLICATION,
6434                Keyword::CONNECTION,
6435                Keyword::VALID,
6436                Keyword::IN,
6437                Keyword::ROLE,
6438                Keyword::ADMIN,
6439                Keyword::USER,
6440            ]
6441        } else {
6442            vec![]
6443        };
6444
6445        // MSSQL
6446        let mut authorization_owner = None;
6447        // Postgres
6448        let mut login = None;
6449        let mut inherit = None;
6450        let mut bypassrls = None;
6451        let mut password = None;
6452        let mut create_db = None;
6453        let mut create_role = None;
6454        let mut superuser = None;
6455        let mut replication = None;
6456        let mut connection_limit = None;
6457        let mut valid_until = None;
6458        let mut in_role = vec![];
6459        let mut in_group = vec![];
6460        let mut role = vec![];
6461        let mut user = vec![];
6462        let mut admin = vec![];
6463
6464        while let Some(keyword) = self.parse_one_of_keywords(&optional_keywords) {
6465            let loc = self
6466                .tokens
6467                .get(self.index - 1)
6468                .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
6469            match keyword {
6470                Keyword::AUTHORIZATION => {
6471                    if authorization_owner.is_some() {
6472                        parser_err!("Found multiple AUTHORIZATION", loc)
6473                    } else {
6474                        authorization_owner = Some(self.parse_object_name(false)?);
6475                        Ok(())
6476                    }
6477                }
6478                Keyword::LOGIN | Keyword::NOLOGIN => {
6479                    if login.is_some() {
6480                        parser_err!("Found multiple LOGIN or NOLOGIN", loc)
6481                    } else {
6482                        login = Some(keyword == Keyword::LOGIN);
6483                        Ok(())
6484                    }
6485                }
6486                Keyword::INHERIT | Keyword::NOINHERIT => {
6487                    if inherit.is_some() {
6488                        parser_err!("Found multiple INHERIT or NOINHERIT", loc)
6489                    } else {
6490                        inherit = Some(keyword == Keyword::INHERIT);
6491                        Ok(())
6492                    }
6493                }
6494                Keyword::BYPASSRLS | Keyword::NOBYPASSRLS => {
6495                    if bypassrls.is_some() {
6496                        parser_err!("Found multiple BYPASSRLS or NOBYPASSRLS", loc)
6497                    } else {
6498                        bypassrls = Some(keyword == Keyword::BYPASSRLS);
6499                        Ok(())
6500                    }
6501                }
6502                Keyword::CREATEDB | Keyword::NOCREATEDB => {
6503                    if create_db.is_some() {
6504                        parser_err!("Found multiple CREATEDB or NOCREATEDB", loc)
6505                    } else {
6506                        create_db = Some(keyword == Keyword::CREATEDB);
6507                        Ok(())
6508                    }
6509                }
6510                Keyword::CREATEROLE | Keyword::NOCREATEROLE => {
6511                    if create_role.is_some() {
6512                        parser_err!("Found multiple CREATEROLE or NOCREATEROLE", loc)
6513                    } else {
6514                        create_role = Some(keyword == Keyword::CREATEROLE);
6515                        Ok(())
6516                    }
6517                }
6518                Keyword::SUPERUSER | Keyword::NOSUPERUSER => {
6519                    if superuser.is_some() {
6520                        parser_err!("Found multiple SUPERUSER or NOSUPERUSER", loc)
6521                    } else {
6522                        superuser = Some(keyword == Keyword::SUPERUSER);
6523                        Ok(())
6524                    }
6525                }
6526                Keyword::REPLICATION | Keyword::NOREPLICATION => {
6527                    if replication.is_some() {
6528                        parser_err!("Found multiple REPLICATION or NOREPLICATION", loc)
6529                    } else {
6530                        replication = Some(keyword == Keyword::REPLICATION);
6531                        Ok(())
6532                    }
6533                }
6534                Keyword::PASSWORD => {
6535                    if password.is_some() {
6536                        parser_err!("Found multiple PASSWORD", loc)
6537                    } else {
6538                        password = if self.parse_keyword(Keyword::NULL) {
6539                            Some(Password::NullPassword)
6540                        } else {
6541                            Some(Password::Password(Expr::Value(self.parse_value()?)))
6542                        };
6543                        Ok(())
6544                    }
6545                }
6546                Keyword::CONNECTION => {
6547                    self.expect_keyword_is(Keyword::LIMIT)?;
6548                    if connection_limit.is_some() {
6549                        parser_err!("Found multiple CONNECTION LIMIT", loc)
6550                    } else {
6551                        connection_limit = Some(Expr::Value(self.parse_number_value()?));
6552                        Ok(())
6553                    }
6554                }
6555                Keyword::VALID => {
6556                    self.expect_keyword_is(Keyword::UNTIL)?;
6557                    if valid_until.is_some() {
6558                        parser_err!("Found multiple VALID UNTIL", loc)
6559                    } else {
6560                        valid_until = Some(Expr::Value(self.parse_value()?));
6561                        Ok(())
6562                    }
6563                }
6564                Keyword::IN => {
6565                    if self.parse_keyword(Keyword::ROLE) {
6566                        if !in_role.is_empty() {
6567                            parser_err!("Found multiple IN ROLE", loc)
6568                        } else {
6569                            in_role = self.parse_comma_separated(|p| p.parse_identifier())?;
6570                            Ok(())
6571                        }
6572                    } else if self.parse_keyword(Keyword::GROUP) {
6573                        if !in_group.is_empty() {
6574                            parser_err!("Found multiple IN GROUP", loc)
6575                        } else {
6576                            in_group = self.parse_comma_separated(|p| p.parse_identifier())?;
6577                            Ok(())
6578                        }
6579                    } else {
6580                        self.expected("ROLE or GROUP after IN", self.peek_token())
6581                    }
6582                }
6583                Keyword::ROLE => {
6584                    if !role.is_empty() {
6585                        parser_err!("Found multiple ROLE", loc)
6586                    } else {
6587                        role = self.parse_comma_separated(|p| p.parse_identifier())?;
6588                        Ok(())
6589                    }
6590                }
6591                Keyword::USER => {
6592                    if !user.is_empty() {
6593                        parser_err!("Found multiple USER", loc)
6594                    } else {
6595                        user = self.parse_comma_separated(|p| p.parse_identifier())?;
6596                        Ok(())
6597                    }
6598                }
6599                Keyword::ADMIN => {
6600                    if !admin.is_empty() {
6601                        parser_err!("Found multiple ADMIN", loc)
6602                    } else {
6603                        admin = self.parse_comma_separated(|p| p.parse_identifier())?;
6604                        Ok(())
6605                    }
6606                }
6607                _ => break,
6608            }?
6609        }
6610
6611        Ok(CreateRole {
6612            names,
6613            if_not_exists,
6614            login,
6615            inherit,
6616            bypassrls,
6617            password,
6618            create_db,
6619            create_role,
6620            replication,
6621            superuser,
6622            connection_limit,
6623            valid_until,
6624            in_role,
6625            in_group,
6626            role,
6627            user,
6628            admin,
6629            authorization_owner,
6630        })
6631    }
6632
6633    /// Parse an `OWNER` clause.
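    ///
    /// Parses the owner that follows `OWNER TO`: one of `CURRENT_USER`,
    /// `CURRENT_ROLE`, `SESSION_USER`, or a plain identifier such as a
    /// (hypothetical) `new_owner`.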
6634    pub fn parse_owner(&mut self) -> Result<Owner, ParserError> {
6635        let owner = match self.parse_one_of_keywords(&[Keyword::CURRENT_USER, Keyword::CURRENT_ROLE, Keyword::SESSION_USER]) {
6636            Some(Keyword::CURRENT_USER) => Owner::CurrentUser,
6637            Some(Keyword::CURRENT_ROLE) => Owner::CurrentRole,
6638            Some(Keyword::SESSION_USER) => Owner::SessionUser,
6639            Some(unexpected_keyword) => return Err(ParserError::ParserError(
6640                format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in owner"),
6641            )),
6642            None => {
6643                match self.parse_identifier() {
6644                    Ok(ident) => Owner::Ident(ident),
6645                    Err(e) => {
6646                        return Err(ParserError::ParserError(format!("Expected: CURRENT_USER, CURRENT_ROLE, SESSION_USER or identifier after OWNER TO. {e}")))
6647                    }
6648                }
6649            }
6650        };
6651        Ok(owner)
6652    }
6653
6654    /// Parses a [Statement::CreateDomain] statement.
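    ///
    /// Illustrative example of a statement this method is meant to accept
    /// (an editorial sketch, not from the upstream docs; identifiers are hypothetical):
    ///
    /// ```sql
    /// CREATE DOMAIN positive_int AS INTEGER DEFAULT 1 CHECK (VALUE > 0)
    /// ```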
6655    fn parse_create_domain(&mut self) -> Result<CreateDomain, ParserError> {
6656        let name = self.parse_object_name(false)?;
6657        self.expect_keyword_is(Keyword::AS)?;
6658        let data_type = self.parse_data_type()?;
6659        let collation = if self.parse_keyword(Keyword::COLLATE) {
6660            Some(self.parse_identifier()?)
6661        } else {
6662            None
6663        };
6664        let default = if self.parse_keyword(Keyword::DEFAULT) {
6665            Some(self.parse_expr()?)
6666        } else {
6667            None
6668        };
6669        let mut constraints = Vec::new();
6670        while let Some(constraint) = self.parse_optional_table_constraint()? {
6671            constraints.push(constraint);
6672        }
6673
6674        Ok(CreateDomain {
6675            name,
6676            data_type,
6677            collation,
6678            default,
6679            constraints,
6680        })
6681    }
6682
6683    /// ```sql
6684    ///     CREATE POLICY name ON table_name [ AS { PERMISSIVE | RESTRICTIVE } ]
6685    ///     [ FOR { ALL | SELECT | INSERT | UPDATE | DELETE } ]
6686    ///     [ TO { role_name | PUBLIC | CURRENT_USER | CURRENT_ROLE | SESSION_USER } [, ...] ]
6687    ///     [ USING ( using_expression ) ]
6688    ///     [ WITH CHECK ( with_check_expression ) ]
6689    /// ```
6690    ///
6691    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createpolicy.html)
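    ///
    /// Illustrative example (editorial sketch; the policy and table names are hypothetical):
    ///
    /// ```sql
    /// CREATE POLICY account_policy ON accounts AS PERMISSIVE FOR SELECT TO CURRENT_USER USING (owner = CURRENT_USER)
    /// ```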
6692    pub fn parse_create_policy(&mut self) -> Result<CreatePolicy, ParserError> {
6693        let name = self.parse_identifier()?;
6694        self.expect_keyword_is(Keyword::ON)?;
6695        let table_name = self.parse_object_name(false)?;
6696
6697        let policy_type = if self.parse_keyword(Keyword::AS) {
6698            let keyword =
6699                self.expect_one_of_keywords(&[Keyword::PERMISSIVE, Keyword::RESTRICTIVE])?;
6700            Some(match keyword {
6701                Keyword::PERMISSIVE => CreatePolicyType::Permissive,
6702                Keyword::RESTRICTIVE => CreatePolicyType::Restrictive,
6703                unexpected_keyword => return Err(ParserError::ParserError(
6704                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in policy type"),
6705                )),
6706            })
6707        } else {
6708            None
6709        };
6710
6711        let command = if self.parse_keyword(Keyword::FOR) {
6712            let keyword = self.expect_one_of_keywords(&[
6713                Keyword::ALL,
6714                Keyword::SELECT,
6715                Keyword::INSERT,
6716                Keyword::UPDATE,
6717                Keyword::DELETE,
6718            ])?;
6719            Some(match keyword {
6720                Keyword::ALL => CreatePolicyCommand::All,
6721                Keyword::SELECT => CreatePolicyCommand::Select,
6722                Keyword::INSERT => CreatePolicyCommand::Insert,
6723                Keyword::UPDATE => CreatePolicyCommand::Update,
6724                Keyword::DELETE => CreatePolicyCommand::Delete,
6725                unexpected_keyword => return Err(ParserError::ParserError(
6726                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in policy command"),
6727                )),
6728            })
6729        } else {
6730            None
6731        };
6732
6733        let to = if self.parse_keyword(Keyword::TO) {
6734            Some(self.parse_comma_separated(|p| p.parse_owner())?)
6735        } else {
6736            None
6737        };
6738
6739        let using = if self.parse_keyword(Keyword::USING) {
6740            self.expect_token(&Token::LParen)?;
6741            let expr = self.parse_expr()?;
6742            self.expect_token(&Token::RParen)?;
6743            Some(expr)
6744        } else {
6745            None
6746        };
6747
6748        let with_check = if self.parse_keywords(&[Keyword::WITH, Keyword::CHECK]) {
6749            self.expect_token(&Token::LParen)?;
6750            let expr = self.parse_expr()?;
6751            self.expect_token(&Token::RParen)?;
6752            Some(expr)
6753        } else {
6754            None
6755        };
6756
6757        Ok(CreatePolicy {
6758            name,
6759            table_name,
6760            policy_type,
6761            command,
6762            to,
6763            using,
6764            with_check,
6765        })
6766    }
6767
6768    /// ```sql
6769    /// CREATE CONNECTOR [IF NOT EXISTS] connector_name
6770    /// [TYPE datasource_type]
6771    /// [URL datasource_url]
6772    /// [COMMENT connector_comment]
6773    /// [WITH DCPROPERTIES(property_name=property_value, ...)]
6774    /// ```
6775    ///
6776    /// [Hive Documentation](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362034#LanguageManualDDL-CreateDataConnectorCreateConnector)
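    ///
    /// Illustrative example (editorial sketch; the connector name and URL are hypothetical):
    ///
    /// ```sql
    /// CREATE CONNECTOR IF NOT EXISTS mysql_conn TYPE 'mysql' URL 'jdbc:mysql://localhost:3306' COMMENT 'test connector'
    /// ```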
6777    pub fn parse_create_connector(&mut self) -> Result<CreateConnector, ParserError> {
6778        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6779        let name = self.parse_identifier()?;
6780
6781        let connector_type = if self.parse_keyword(Keyword::TYPE) {
6782            Some(self.parse_literal_string()?)
6783        } else {
6784            None
6785        };
6786
6787        let url = if self.parse_keyword(Keyword::URL) {
6788            Some(self.parse_literal_string()?)
6789        } else {
6790            None
6791        };
6792
6793        let comment = self.parse_optional_inline_comment()?;
6794
6795        let with_dcproperties =
6796            match self.parse_options_with_keywords(&[Keyword::WITH, Keyword::DCPROPERTIES])? {
6797                properties if !properties.is_empty() => Some(properties),
6798                _ => None,
6799            };
6800
6801        Ok(CreateConnector {
6802            name,
6803            if_not_exists,
6804            connector_type,
6805            url,
6806            comment,
6807            with_dcproperties,
6808        })
6809    }
6810
6811    /// Parse an operator name, which can contain special characters like +, -, <, >, =
6812    /// that are tokenized as operator tokens rather than identifiers.
6813    /// This is used for PostgreSQL CREATE OPERATOR statements.
6814    ///
6815    /// Examples: `+`, `myschema.+`, `pg_catalog.<=`
6816    fn parse_operator_name(&mut self) -> Result<ObjectName, ParserError> {
6817        let mut parts = vec![];
6818        loop {
6819            parts.push(ObjectNamePart::Identifier(Ident::new(
6820                self.next_token().to_string(),
6821            )));
6822            if !self.consume_token(&Token::Period) {
6823                break;
6824            }
6825        }
6826        Ok(ObjectName(parts))
6827    }
6828
6829    /// Parse a [Statement::CreateOperator]
6830    ///
6831    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createoperator.html)
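    ///
    /// Illustrative example (editorial sketch; `myschema.int_add` is a hypothetical support function):
    ///
    /// ```sql
    /// CREATE OPERATOR myschema.+ (LEFTARG = INTEGER, RIGHTARG = INTEGER, FUNCTION = myschema.int_add, COMMUTATOR = +)
    /// ```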
6832    pub fn parse_create_operator(&mut self) -> Result<CreateOperator, ParserError> {
6833        let name = self.parse_operator_name()?;
6834        self.expect_token(&Token::LParen)?;
6835
6836        let mut function: Option<ObjectName> = None;
6837        let mut is_procedure = false;
6838        let mut left_arg: Option<DataType> = None;
6839        let mut right_arg: Option<DataType> = None;
6840        let mut options: Vec<OperatorOption> = Vec::new();
6841
6842        loop {
6843            let keyword = self.expect_one_of_keywords(&[
6844                Keyword::FUNCTION,
6845                Keyword::PROCEDURE,
6846                Keyword::LEFTARG,
6847                Keyword::RIGHTARG,
6848                Keyword::COMMUTATOR,
6849                Keyword::NEGATOR,
6850                Keyword::RESTRICT,
6851                Keyword::JOIN,
6852                Keyword::HASHES,
6853                Keyword::MERGES,
6854            ])?;
6855
6856            match keyword {
6857                Keyword::HASHES if !options.iter().any(|o| matches!(o, OperatorOption::Hashes)) => {
6858                    options.push(OperatorOption::Hashes);
6859                }
6860                Keyword::MERGES if !options.iter().any(|o| matches!(o, OperatorOption::Merges)) => {
6861                    options.push(OperatorOption::Merges);
6862                }
6863                Keyword::FUNCTION | Keyword::PROCEDURE if function.is_none() => {
6864                    self.expect_token(&Token::Eq)?;
6865                    function = Some(self.parse_object_name(false)?);
6866                    is_procedure = keyword == Keyword::PROCEDURE;
6867                }
6868                Keyword::LEFTARG if left_arg.is_none() => {
6869                    self.expect_token(&Token::Eq)?;
6870                    left_arg = Some(self.parse_data_type()?);
6871                }
6872                Keyword::RIGHTARG if right_arg.is_none() => {
6873                    self.expect_token(&Token::Eq)?;
6874                    right_arg = Some(self.parse_data_type()?);
6875                }
6876                Keyword::COMMUTATOR
6877                    if !options
6878                        .iter()
6879                        .any(|o| matches!(o, OperatorOption::Commutator(_))) =>
6880                {
6881                    self.expect_token(&Token::Eq)?;
6882                    if self.parse_keyword(Keyword::OPERATOR) {
6883                        self.expect_token(&Token::LParen)?;
6884                        let op = self.parse_operator_name()?;
6885                        self.expect_token(&Token::RParen)?;
6886                        options.push(OperatorOption::Commutator(op));
6887                    } else {
6888                        options.push(OperatorOption::Commutator(self.parse_operator_name()?));
6889                    }
6890                }
6891                Keyword::NEGATOR
6892                    if !options
6893                        .iter()
6894                        .any(|o| matches!(o, OperatorOption::Negator(_))) =>
6895                {
6896                    self.expect_token(&Token::Eq)?;
6897                    if self.parse_keyword(Keyword::OPERATOR) {
6898                        self.expect_token(&Token::LParen)?;
6899                        let op = self.parse_operator_name()?;
6900                        self.expect_token(&Token::RParen)?;
6901                        options.push(OperatorOption::Negator(op));
6902                    } else {
6903                        options.push(OperatorOption::Negator(self.parse_operator_name()?));
6904                    }
6905                }
6906                Keyword::RESTRICT
6907                    if !options
6908                        .iter()
6909                        .any(|o| matches!(o, OperatorOption::Restrict(_))) =>
6910                {
6911                    self.expect_token(&Token::Eq)?;
6912                    options.push(OperatorOption::Restrict(Some(
6913                        self.parse_object_name(false)?,
6914                    )));
6915                }
6916                Keyword::JOIN if !options.iter().any(|o| matches!(o, OperatorOption::Join(_))) => {
6917                    self.expect_token(&Token::Eq)?;
6918                    options.push(OperatorOption::Join(Some(self.parse_object_name(false)?)));
6919                }
6920                _ => {
6921                    return Err(ParserError::ParserError(format!(
6922                        "Duplicate or unexpected keyword {:?} in CREATE OPERATOR",
6923                        keyword
6924                    )))
6925                }
6926            }
6927
6928            if !self.consume_token(&Token::Comma) {
6929                break;
6930            }
6931        }
6932
6933        // Expect closing parenthesis
6934        self.expect_token(&Token::RParen)?;
6935
6936        // FUNCTION is required
6937        let function = function.ok_or_else(|| {
6938            ParserError::ParserError("CREATE OPERATOR requires FUNCTION parameter".to_string())
6939        })?;
6940
6941        Ok(CreateOperator {
6942            name,
6943            function,
6944            is_procedure,
6945            left_arg,
6946            right_arg,
6947            options,
6948        })
6949    }
6950
6951    /// Parse a [Statement::CreateOperatorFamily]
6952    ///
6953    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createopfamily.html)
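    ///
    /// Illustrative example (editorial sketch; the family name is hypothetical):
    ///
    /// ```sql
    /// CREATE OPERATOR FAMILY my_ops USING btree
    /// ```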
6954    pub fn parse_create_operator_family(&mut self) -> Result<CreateOperatorFamily, ParserError> {
6955        let name = self.parse_object_name(false)?;
6956        self.expect_keyword(Keyword::USING)?;
6957        let using = self.parse_identifier()?;
6958
6959        Ok(CreateOperatorFamily { name, using })
6960    }
6961
6962    /// Parse a [Statement::CreateOperatorClass]
6963    ///
6964    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createopclass.html)
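    ///
    /// Illustrative example, loosely modeled on the PostgreSQL documentation
    /// (editorial sketch, not from the upstream doc comment):
    ///
    /// ```sql
    /// CREATE OPERATOR CLASS int4_ops DEFAULT FOR TYPE INT4 USING btree AS
    ///     OPERATOR 1 <,
    ///     OPERATOR 3 =,
    ///     FUNCTION 1 btint4cmp(INT4, INT4)
    /// ```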
6965    pub fn parse_create_operator_class(&mut self) -> Result<CreateOperatorClass, ParserError> {
6966        let name = self.parse_object_name(false)?;
6967        let default = self.parse_keyword(Keyword::DEFAULT);
6968        self.expect_keywords(&[Keyword::FOR, Keyword::TYPE])?;
6969        let for_type = self.parse_data_type()?;
6970        self.expect_keyword(Keyword::USING)?;
6971        let using = self.parse_identifier()?;
6972
6973        let family = if self.parse_keyword(Keyword::FAMILY) {
6974            Some(self.parse_object_name(false)?)
6975        } else {
6976            None
6977        };
6978
6979        self.expect_keyword(Keyword::AS)?;
6980
6981        let mut items = vec![];
6982        loop {
6983            if self.parse_keyword(Keyword::OPERATOR) {
6984                let strategy_number = self.parse_literal_uint()?;
6985                let operator_name = self.parse_operator_name()?;
6986
6987                // Optional operator argument types
6988                let op_types = if self.consume_token(&Token::LParen) {
6989                    let left = self.parse_data_type()?;
6990                    self.expect_token(&Token::Comma)?;
6991                    let right = self.parse_data_type()?;
6992                    self.expect_token(&Token::RParen)?;
6993                    Some(OperatorArgTypes { left, right })
6994                } else {
6995                    None
6996                };
6997
6998                // Optional purpose
6999                let purpose = if self.parse_keyword(Keyword::FOR) {
7000                    if self.parse_keyword(Keyword::SEARCH) {
7001                        Some(OperatorPurpose::ForSearch)
7002                    } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
7003                        let sort_family = self.parse_object_name(false)?;
7004                        Some(OperatorPurpose::ForOrderBy { sort_family })
7005                    } else {
7006                        return self.expected("SEARCH or ORDER BY after FOR", self.peek_token());
7007                    }
7008                } else {
7009                    None
7010                };
7011
7012                items.push(OperatorClassItem::Operator {
7013                    strategy_number,
7014                    operator_name,
7015                    op_types,
7016                    purpose,
7017                });
7018            } else if self.parse_keyword(Keyword::FUNCTION) {
7019                let support_number = self.parse_literal_uint()?;
7020
7021                // Optional operator types
7022                let op_types = if self.consume_token(&Token::LParen) {
7023                    if self.consume_token(&Token::RParen) {
7024                        Some(vec![])
7025                    } else {
7026                        let mut types = vec![];
7027                        loop {
7028                            types.push(self.parse_data_type()?);
7029                            if !self.consume_token(&Token::Comma) {
7030                                break;
7031                            }
7032                        }
7033                        self.expect_token(&Token::RParen)?;
7034                        Some(types)
7035                    }
7036                } else {
7037                    None
7038                };
7039
7040                let function_name = self.parse_object_name(false)?;
7041
7042                // Function argument types
7043                let argument_types = if self.consume_token(&Token::LParen) {
7044                    let mut types = vec![];
7045                    loop {
7046                        if self.peek_token() == Token::RParen {
7047                            break;
7048                        }
7049                        types.push(self.parse_data_type()?);
7050                        if !self.consume_token(&Token::Comma) {
7051                            break;
7052                        }
7053                    }
7054                    self.expect_token(&Token::RParen)?;
7055                    types
7056                } else {
7057                    vec![]
7058                };
7059
7060                items.push(OperatorClassItem::Function {
7061                    support_number,
7062                    op_types,
7063                    function_name,
7064                    argument_types,
7065                });
7066            } else if self.parse_keyword(Keyword::STORAGE) {
7067                let storage_type = self.parse_data_type()?;
7068                items.push(OperatorClassItem::Storage { storage_type });
7069            } else {
7070                break;
7071            }
7072
7073            // Check for comma separator
7074            if !self.consume_token(&Token::Comma) {
7075                break;
7076            }
7077        }
7078
7079        Ok(CreateOperatorClass {
7080            name,
7081            default,
7082            for_type,
7083            using,
7084            family,
7085            items,
7086        })
7087    }
7088
7089    /// Parse a `DROP` statement.
7090    pub fn parse_drop(&mut self) -> Result<Statement, ParserError> {
7091        // MySQL, DuckDB and the generic dialect support `TEMPORARY`
7092        let temporary = dialect_of!(self is MySqlDialect | GenericDialect | DuckDbDialect)
7093            && self.parse_keyword(Keyword::TEMPORARY);
7094        let persistent = dialect_of!(self is DuckDbDialect)
7095            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
7096
7097        let object_type = if self.parse_keyword(Keyword::TABLE) {
7098            ObjectType::Table
7099        } else if self.parse_keyword(Keyword::VIEW) {
7100            ObjectType::View
7101        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEW]) {
7102            ObjectType::MaterializedView
7103        } else if self.parse_keyword(Keyword::INDEX) {
7104            ObjectType::Index
7105        } else if self.parse_keyword(Keyword::ROLE) {
7106            ObjectType::Role
7107        } else if self.parse_keyword(Keyword::SCHEMA) {
7108            ObjectType::Schema
7109        } else if self.parse_keyword(Keyword::DATABASE) {
7110            ObjectType::Database
7111        } else if self.parse_keyword(Keyword::SEQUENCE) {
7112            ObjectType::Sequence
7113        } else if self.parse_keyword(Keyword::STAGE) {
7114            ObjectType::Stage
7115        } else if self.parse_keyword(Keyword::TYPE) {
7116            ObjectType::Type
7117        } else if self.parse_keyword(Keyword::USER) {
7118            ObjectType::User
7119        } else if self.parse_keyword(Keyword::STREAM) {
7120            ObjectType::Stream
7121        } else if self.parse_keyword(Keyword::FUNCTION) {
7122            return self.parse_drop_function().map(Into::into);
7123        } else if self.parse_keyword(Keyword::POLICY) {
7124            return self.parse_drop_policy().map(Into::into);
7125        } else if self.parse_keyword(Keyword::CONNECTOR) {
7126            return self.parse_drop_connector();
7127        } else if self.parse_keyword(Keyword::DOMAIN) {
7128            return self.parse_drop_domain().map(Into::into);
7129        } else if self.parse_keyword(Keyword::PROCEDURE) {
7130            return self.parse_drop_procedure();
7131        } else if self.parse_keyword(Keyword::SECRET) {
7132            return self.parse_drop_secret(temporary, persistent);
7133        } else if self.parse_keyword(Keyword::TRIGGER) {
7134            return self.parse_drop_trigger().map(Into::into);
7135        } else if self.parse_keyword(Keyword::EXTENSION) {
7136            return self.parse_drop_extension();
7137        } else if self.parse_keyword(Keyword::OPERATOR) {
7138            // Check if this is DROP OPERATOR FAMILY or DROP OPERATOR CLASS
7139            return if self.parse_keyword(Keyword::FAMILY) {
7140                self.parse_drop_operator_family()
7141            } else if self.parse_keyword(Keyword::CLASS) {
7142                self.parse_drop_operator_class()
7143            } else {
7144                self.parse_drop_operator()
7145            };
7146        } else {
7147            return self.expected(
7148                "CONNECTOR, DATABASE, EXTENSION, FUNCTION, INDEX, OPERATOR, POLICY, PROCEDURE, ROLE, SCHEMA, SECRET, SEQUENCE, STAGE, TABLE, TRIGGER, TYPE, VIEW, MATERIALIZED VIEW or USER after DROP",
7149                self.peek_token(),
7150            );
7151        };
7152        // Many dialects support the non-standard `IF EXISTS` clause and allow
7153        // specifying multiple objects to delete in a single statement
7154        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7155        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
7156
7157        let loc = self.peek_token().span.start;
7158        let cascade = self.parse_keyword(Keyword::CASCADE);
7159        let restrict = self.parse_keyword(Keyword::RESTRICT);
7160        let purge = self.parse_keyword(Keyword::PURGE);
7161        if cascade && restrict {
7162            return parser_err!("Cannot specify both CASCADE and RESTRICT in DROP", loc);
7163        }
7164        if object_type == ObjectType::Role && (cascade || restrict || purge) {
7165            return parser_err!(
7166                "Cannot specify CASCADE, RESTRICT, or PURGE in DROP ROLE",
7167                loc
7168            );
7169        }
7170        let table = if self.parse_keyword(Keyword::ON) {
7171            Some(self.parse_object_name(false)?)
7172        } else {
7173            None
7174        };
7175        Ok(Statement::Drop {
7176            object_type,
7177            if_exists,
7178            names,
7179            cascade,
7180            restrict,
7181            purge,
7182            temporary,
7183            table,
7184        })
7185    }
7186
7187    fn parse_optional_drop_behavior(&mut self) -> Option<DropBehavior> {
7188        match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
7189            Some(Keyword::CASCADE) => Some(DropBehavior::Cascade),
7190            Some(Keyword::RESTRICT) => Some(DropBehavior::Restrict),
7191            _ => None,
7192        }
7193    }
7194
7195    /// ```sql
7196    /// DROP FUNCTION [ IF EXISTS ] name [ ( [ [ argmode ] [ argname ] argtype [, ...] ] ) ] [, ...]
7197    /// [ CASCADE | RESTRICT ]
7198    /// ```
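    ///
    /// Illustrative example (editorial sketch; the function name is hypothetical):
    ///
    /// ```sql
    /// DROP FUNCTION IF EXISTS area(INTEGER) CASCADE
    /// ```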
7199    fn parse_drop_function(&mut self) -> Result<DropFunction, ParserError> {
7200        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7201        let func_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
7202        let drop_behavior = self.parse_optional_drop_behavior();
7203        Ok(DropFunction {
7204            if_exists,
7205            func_desc,
7206            drop_behavior,
7207        })
7208    }
7209
7210    /// ```sql
7211    /// DROP POLICY [ IF EXISTS ] name ON table_name [ CASCADE | RESTRICT ]
7212    /// ```
7213    ///
7214    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-droppolicy.html)
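    ///
    /// Illustrative example (editorial sketch; names are hypothetical):
    ///
    /// ```sql
    /// DROP POLICY IF EXISTS account_policy ON accounts RESTRICT
    /// ```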
7215    fn parse_drop_policy(&mut self) -> Result<DropPolicy, ParserError> {
7216        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7217        let name = self.parse_identifier()?;
7218        self.expect_keyword_is(Keyword::ON)?;
7219        let table_name = self.parse_object_name(false)?;
7220        let drop_behavior = self.parse_optional_drop_behavior();
7221        Ok(DropPolicy {
7222            if_exists,
7223            name,
7224            table_name,
7225            drop_behavior,
7226        })
7227    }
7228    /// ```sql
7229    /// DROP CONNECTOR [IF EXISTS] name
7230    /// ```
7231    ///
7232    /// See [Hive](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362034#LanguageManualDDL-DropConnector)
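    ///
    /// Illustrative example (editorial sketch; the connector name is hypothetical):
    ///
    /// ```sql
    /// DROP CONNECTOR IF EXISTS mysql_conn
    /// ```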
7233    fn parse_drop_connector(&mut self) -> Result<Statement, ParserError> {
7234        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7235        let name = self.parse_identifier()?;
7236        Ok(Statement::DropConnector { if_exists, name })
7237    }
7238
7239    /// ```sql
7240    /// DROP DOMAIN [ IF EXISTS ] name [ CASCADE | RESTRICT ]
7241    /// ```
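    ///
    /// Illustrative example (editorial sketch; the domain name is hypothetical):
    ///
    /// ```sql
    /// DROP DOMAIN IF EXISTS positive_int CASCADE
    /// ```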
7242    fn parse_drop_domain(&mut self) -> Result<DropDomain, ParserError> {
7243        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7244        let name = self.parse_object_name(false)?;
7245        let drop_behavior = self.parse_optional_drop_behavior();
7246        Ok(DropDomain {
7247            if_exists,
7248            name,
7249            drop_behavior,
7250        })
7251    }
7252
7253    /// ```sql
7254    /// DROP PROCEDURE [ IF EXISTS ] name [ ( [ [ argmode ] [ argname ] argtype [, ...] ] ) ] [, ...]
7255    /// [ CASCADE | RESTRICT ]
7256    /// ```
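    ///
    /// Illustrative example (editorial sketch; the procedure name is hypothetical):
    ///
    /// ```sql
    /// DROP PROCEDURE IF EXISTS log_event(INTEGER) CASCADE
    /// ```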
7257    fn parse_drop_procedure(&mut self) -> Result<Statement, ParserError> {
7258        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7259        let proc_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
7260        let drop_behavior = self.parse_optional_drop_behavior();
7261        Ok(Statement::DropProcedure {
7262            if_exists,
7263            proc_desc,
7264            drop_behavior,
7265        })
7266    }
7267
7268    fn parse_function_desc(&mut self) -> Result<FunctionDesc, ParserError> {
7269        let name = self.parse_object_name(false)?;
7270
7271        let args = if self.consume_token(&Token::LParen) {
7272            if self.consume_token(&Token::RParen) {
7273                Some(vec![])
7274            } else {
7275                let args = self.parse_comma_separated(Parser::parse_function_arg)?;
7276                self.expect_token(&Token::RParen)?;
7277                Some(args)
7278            }
7279        } else {
7280            None
7281        };
7282
7283        Ok(FunctionDesc { name, args })
7284    }
7285
7286    /// See [DuckDB Docs](https://duckdb.org/docs/sql/statements/create_secret.html) for more details.
7287    fn parse_drop_secret(
7288        &mut self,
7289        temporary: bool,
7290        persistent: bool,
7291    ) -> Result<Statement, ParserError> {
7292        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7293        let name = self.parse_identifier()?;
7294        let storage_specifier = if self.parse_keyword(Keyword::FROM) {
7295            self.parse_identifier().ok()
7296        } else {
7297            None
7298        };
7299        let temp = match (temporary, persistent) {
7300            (true, false) => Some(true),
7301            (false, true) => Some(false),
7302            (false, false) => None,
7303            _ => self.expected("TEMPORARY or PERSISTENT", self.peek_token())?,
7304        };
7305
7306        Ok(Statement::DropSecret {
7307            if_exists,
7308            temporary: temp,
7309            name,
7310            storage_specifier,
7311        })
7312    }
7313
7314    /// Parse a `DECLARE` statement.
7315    ///
7316    /// ```sql
7317    /// DECLARE name [ BINARY ] [ ASENSITIVE | INSENSITIVE ] [ [ NO ] SCROLL ]
7318    ///     CURSOR [ { WITH | WITHOUT } HOLD ] FOR query
7319    /// ```
7320    ///
7321    /// The syntax can vary significantly between warehouses. See the grammar
7322    /// documented on the warehouse-specific parsing functions in such cases.
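    ///
    /// Illustrative example of the cursor form handled here (editorial sketch;
    /// the cursor and table names are hypothetical):
    ///
    /// ```sql
    /// DECLARE film_cursor INSENSITIVE SCROLL CURSOR WITH HOLD FOR SELECT * FROM films
    /// ```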
7323    pub fn parse_declare(&mut self) -> Result<Statement, ParserError> {
7324        if dialect_of!(self is BigQueryDialect) {
7325            return self.parse_big_query_declare();
7326        }
7327        if dialect_of!(self is SnowflakeDialect) {
7328            return self.parse_snowflake_declare();
7329        }
7330        if dialect_of!(self is MsSqlDialect) {
7331            return self.parse_mssql_declare();
7332        }
7333
7334        let name = self.parse_identifier()?;
7335
7336        let binary = Some(self.parse_keyword(Keyword::BINARY));
7337        let sensitive = if self.parse_keyword(Keyword::INSENSITIVE) {
7338            Some(true)
7339        } else if self.parse_keyword(Keyword::ASENSITIVE) {
7340            Some(false)
7341        } else {
7342            None
7343        };
7344        let scroll = if self.parse_keyword(Keyword::SCROLL) {
7345            Some(true)
7346        } else if self.parse_keywords(&[Keyword::NO, Keyword::SCROLL]) {
7347            Some(false)
7348        } else {
7349            None
7350        };
7351
7352        self.expect_keyword_is(Keyword::CURSOR)?;
7353        let declare_type = Some(DeclareType::Cursor);
7354
7355        let hold = match self.parse_one_of_keywords(&[Keyword::WITH, Keyword::WITHOUT]) {
7356            Some(keyword) => {
7357                self.expect_keyword_is(Keyword::HOLD)?;
7358
7359                match keyword {
7360                    Keyword::WITH => Some(true),
7361                    Keyword::WITHOUT => Some(false),
7362                    unexpected_keyword => return Err(ParserError::ParserError(
7363                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in cursor hold"),
7364                    )),
7365                }
7366            }
7367            None => None,
7368        };
7369
7370        self.expect_keyword_is(Keyword::FOR)?;
7371
7372        let query = Some(self.parse_query()?);
7373
7374        Ok(Statement::Declare {
7375            stmts: vec![Declare {
7376                names: vec![name],
7377                data_type: None,
7378                assignment: None,
7379                declare_type,
7380                binary,
7381                sensitive,
7382                scroll,
7383                hold,
7384                for_query: query,
7385            }],
7386        })
7387    }
7388
7389    /// Parse a [BigQuery] `DECLARE` statement.
7390    ///
7391    /// Syntax:
7392    /// ```text
7393    /// DECLARE variable_name[, ...] [{ <variable_type> | <DEFAULT expression> }];
7394    /// ```
7395    /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#declare
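    ///
    /// Illustrative example (editorial sketch; variable names are hypothetical):
    ///
    /// ```sql
    /// DECLARE x, y INT64 DEFAULT 0
    /// ```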
7396    pub fn parse_big_query_declare(&mut self) -> Result<Statement, ParserError> {
7397        let names = self.parse_comma_separated(Parser::parse_identifier)?;
7398
7399        let data_type = match self.peek_token().token {
7400            Token::Word(w) if w.keyword == Keyword::DEFAULT => None,
7401            _ => Some(self.parse_data_type()?),
7402        };
7403
7404        let expr = if data_type.is_some() {
7405            if self.parse_keyword(Keyword::DEFAULT) {
7406                Some(self.parse_expr()?)
7407            } else {
7408                None
7409            }
7410        } else {
7411            // If no variable type - default expression must be specified, per BQ docs.
7412            // i.e `DECLARE foo;` is invalid.
7413            self.expect_keyword_is(Keyword::DEFAULT)?;
7414            Some(self.parse_expr()?)
7415        };
7416
7417        Ok(Statement::Declare {
7418            stmts: vec![Declare {
7419                names,
7420                data_type,
7421                assignment: expr.map(|expr| DeclareAssignment::Default(Box::new(expr))),
7422                declare_type: None,
7423                binary: None,
7424                sensitive: None,
7425                scroll: None,
7426                hold: None,
7427                for_query: None,
7428            }],
7429        })
7430    }
7431
7432    /// Parse a [Snowflake] `DECLARE` statement.
7433    ///
7434    /// Syntax:
7435    /// ```text
7436    /// DECLARE
7437    ///   [{ <variable_declaration>
7438    ///      | <cursor_declaration>
7439    ///      | <resultset_declaration>
7440    ///      | <exception_declaration> }; ... ]
7441    ///
7442    /// <variable_declaration>
7443    /// <variable_name> [<type>] [ { DEFAULT | := } <expression>]
7444    ///
7445    /// <cursor_declaration>
7446    /// <cursor_name> CURSOR FOR <query>
7447    ///
7448    /// <resultset_declaration>
7449    /// <resultset_name> RESULTSET [ { DEFAULT | := } ( <query> ) ] ;
7450    ///
7451    /// <exception_declaration>
7452    /// <exception_name> EXCEPTION [ ( <exception_number> , '<exception_message>' ) ] ;
7453    /// ```
7454    ///
7455    /// [Snowflake]: https://docs.snowflake.com/en/sql-reference/snowflake-scripting/declare
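    ///
    /// Illustrative example combining a variable and a cursor declaration
    /// (editorial sketch; identifiers are hypothetical):
    ///
    /// ```sql
    /// DECLARE
    ///   profit NUMBER(38, 2) DEFAULT 0.0;
    ///   c1 CURSOR FOR SELECT price FROM invoices;
    /// ```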
7456    pub fn parse_snowflake_declare(&mut self) -> Result<Statement, ParserError> {
7457        let mut stmts = vec![];
7458        loop {
7459            let name = self.parse_identifier()?;
7460            let (declare_type, for_query, assigned_expr, data_type) =
7461                if self.parse_keyword(Keyword::CURSOR) {
7462                    self.expect_keyword_is(Keyword::FOR)?;
7463                    match self.peek_token().token {
7464                        Token::Word(w) if w.keyword == Keyword::SELECT => (
7465                            Some(DeclareType::Cursor),
7466                            Some(self.parse_query()?),
7467                            None,
7468                            None,
7469                        ),
7470                        _ => (
7471                            Some(DeclareType::Cursor),
7472                            None,
7473                            Some(DeclareAssignment::For(Box::new(self.parse_expr()?))),
7474                            None,
7475                        ),
7476                    }
7477                } else if self.parse_keyword(Keyword::RESULTSET) {
7478                    let assigned_expr = if self.peek_token().token != Token::SemiColon {
7479                        self.parse_snowflake_variable_declaration_expression()?
7480                    } else {
7481                        // Nothing more to do. The statement has no further parameters.
7482                        None
7483                    };
7484
7485                    (Some(DeclareType::ResultSet), None, assigned_expr, None)
7486                } else if self.parse_keyword(Keyword::EXCEPTION) {
7487                    let assigned_expr = if self.peek_token().token == Token::LParen {
7488                        Some(DeclareAssignment::Expr(Box::new(self.parse_expr()?)))
7489                    } else {
7490                        // Nothing more to do. The statement has no further parameters.
7491                        None
7492                    };
7493
7494                    (Some(DeclareType::Exception), None, assigned_expr, None)
7495                } else {
7496                    // Without an explicit keyword, the only valid option is variable declaration.
7497                    let (assigned_expr, data_type) = if let Some(assigned_expr) =
7498                        self.parse_snowflake_variable_declaration_expression()?
7499                    {
7500                        (Some(assigned_expr), None)
7501                    } else if let Token::Word(_) = self.peek_token().token {
7502                        let data_type = self.parse_data_type()?;
7503                        (
7504                            self.parse_snowflake_variable_declaration_expression()?,
7505                            Some(data_type),
7506                        )
7507                    } else {
7508                        (None, None)
7509                    };
7510                    (None, None, assigned_expr, data_type)
7511                };
7512            let stmt = Declare {
7513                names: vec![name],
7514                data_type,
7515                assignment: assigned_expr,
7516                declare_type,
7517                binary: None,
7518                sensitive: None,
7519                scroll: None,
7520                hold: None,
7521                for_query,
7522            };
7523
7524            stmts.push(stmt);
7525            if self.consume_token(&Token::SemiColon) {
7526                match self.peek_token().token {
7527                    Token::Word(w)
7528                        if ALL_KEYWORDS
7529                            .binary_search(&w.value.to_uppercase().as_str())
7530                            .is_err() =>
7531                    {
7532                        // Not a keyword - start of a new declaration.
7533                        continue;
7534                    }
7535                    _ => {
7536                        // Put back the semicolon, this is the end of the DECLARE statement.
7537                        self.prev_token();
7538                    }
7539                }
7540            }
7541
7542            break;
7543        }
7544
7545        Ok(Statement::Declare { stmts })
7546    }
7547
7548    /// Parse a [MsSql] `DECLARE` statement.
7549    ///
7550    /// Syntax:
7551    /// ```text
7552    /// DECLARE
7553    /// {
7554    ///   { @local_variable [AS] data_type [ = value ] }
7555    ///   | { @cursor_variable_name CURSOR [ FOR ] }
7556    /// } [ ,...n ]
7557    /// ```
7558    /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/language-elements/declare-local-variable-transact-sql?view=sql-server-ver16
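    ///
    /// Illustrative example declaring a variable and a cursor variable
    /// (editorial sketch; identifiers are hypothetical):
    ///
    /// ```sql
    /// DECLARE @search VARCHAR(30) = 'Comp%', @my_cursor CURSOR
    /// ```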
7559    pub fn parse_mssql_declare(&mut self) -> Result<Statement, ParserError> {
7560        let stmts = self.parse_comma_separated(Parser::parse_mssql_declare_stmt)?;
7561
7562        Ok(Statement::Declare { stmts })
7563    }
7564
7565    /// Parse the body of a [MsSql] `DECLARE` statement.
7566    ///
7567    /// Syntax:
7568    /// ```text
7569    /// {
7570    ///   { @local_variable [AS] data_type [ = value ] }
7571    ///   | { @cursor_variable_name CURSOR [ FOR ] }
7572    /// } [ ,...n ]
7573    /// ```
7574    /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/language-elements/declare-local-variable-transact-sql?view=sql-server-ver16
7575    pub fn parse_mssql_declare_stmt(&mut self) -> Result<Declare, ParserError> {
7576        let name = {
7577            let ident = self.parse_identifier()?;
7578            if !ident.value.starts_with('@')
7579                && !matches!(
7580                    self.peek_token().token,
7581                    Token::Word(w) if w.keyword == Keyword::CURSOR
7582                )
7583            {
7584                Err(ParserError::TokenizerError(
7585                    "Invalid MsSql variable declaration.".to_string(),
7586                ))
7587            } else {
7588                Ok(ident)
7589            }
7590        }?;
7591
7592        let (declare_type, data_type) = match self.peek_token().token {
7593            Token::Word(w) => match w.keyword {
7594                Keyword::CURSOR => {
7595                    self.next_token();
7596                    (Some(DeclareType::Cursor), None)
7597                }
7598                Keyword::AS => {
7599                    self.next_token();
7600                    (None, Some(self.parse_data_type()?))
7601                }
7602                _ => (None, Some(self.parse_data_type()?)),
7603            },
7604            _ => (None, Some(self.parse_data_type()?)),
7605        };
7606
7607        let (for_query, assignment) = if self.peek_keyword(Keyword::FOR) {
7608            self.next_token();
7609            let query = Some(self.parse_query()?);
7610            (query, None)
7611        } else {
7612            let assignment = self.parse_mssql_variable_declaration_expression()?;
7613            (None, assignment)
7614        };
7615
7616        Ok(Declare {
7617            names: vec![name],
7618            data_type,
7619            assignment,
7620            declare_type,
7621            binary: None,
7622            sensitive: None,
7623            scroll: None,
7624            hold: None,
7625            for_query,
7626        })
7627    }
7628
7629    /// Parses the assigned expression in a variable declaration.
7630    ///
7631    /// Syntax:
7632    /// ```text
7633    /// [ { DEFAULT | := } <expression>]
7634    /// ```
7635    /// <https://docs.snowflake.com/en/sql-reference/snowflake-scripting/declare#variable-declaration-syntax>
7636    pub fn parse_snowflake_variable_declaration_expression(
7637        &mut self,
7638    ) -> Result<Option<DeclareAssignment>, ParserError> {
7639        Ok(match self.peek_token().token {
7640            Token::Word(w) if w.keyword == Keyword::DEFAULT => {
7641                self.next_token(); // Skip `DEFAULT`
7642                Some(DeclareAssignment::Default(Box::new(self.parse_expr()?)))
7643            }
7644            Token::Assignment => {
7645                self.next_token(); // Skip `:=`
7646                Some(DeclareAssignment::DuckAssignment(Box::new(
7647                    self.parse_expr()?,
7648                )))
7649            }
7650            _ => None,
7651        })
7652    }
7653
7654    /// Parses the assigned expression in a variable declaration.
7655    ///
7656    /// Syntax:
7657    /// ```text
7658    /// [ = <expression>]
7659    /// ```
7660    pub fn parse_mssql_variable_declaration_expression(
7661        &mut self,
7662    ) -> Result<Option<DeclareAssignment>, ParserError> {
7663        Ok(match self.peek_token().token {
7664            Token::Eq => {
7665                self.next_token(); // Skip `=`
7666                Some(DeclareAssignment::MsSqlAssignment(Box::new(
7667                    self.parse_expr()?,
7668                )))
7669            }
7670            _ => None,
7671        })
7672    }
7673
7674    /// Parse `FETCH [direction] { FROM | IN } cursor INTO target;` statement.
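    ///
    /// Illustrative example (editorial sketch; the cursor name is hypothetical):
    ///
    /// ```sql
    /// FETCH FORWARD 5 FROM my_cursor
    /// ```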
7675    pub fn parse_fetch_statement(&mut self) -> Result<Statement, ParserError> {
7676        let direction = if self.parse_keyword(Keyword::NEXT) {
7677            FetchDirection::Next
7678        } else if self.parse_keyword(Keyword::PRIOR) {
7679            FetchDirection::Prior
7680        } else if self.parse_keyword(Keyword::FIRST) {
7681            FetchDirection::First
7682        } else if self.parse_keyword(Keyword::LAST) {
7683            FetchDirection::Last
7684        } else if self.parse_keyword(Keyword::ABSOLUTE) {
7685            FetchDirection::Absolute {
7686                limit: self.parse_number_value()?.value,
7687            }
7688        } else if self.parse_keyword(Keyword::RELATIVE) {
7689            FetchDirection::Relative {
7690                limit: self.parse_number_value()?.value,
7691            }
7692        } else if self.parse_keyword(Keyword::FORWARD) {
7693            if self.parse_keyword(Keyword::ALL) {
7694                FetchDirection::ForwardAll
7695            } else {
7696                FetchDirection::Forward {
7697                    // TODO: Support optional
7698                    limit: Some(self.parse_number_value()?.value),
7699                }
7700            }
7701        } else if self.parse_keyword(Keyword::BACKWARD) {
7702            if self.parse_keyword(Keyword::ALL) {
7703                FetchDirection::BackwardAll
7704            } else {
7705                FetchDirection::Backward {
7706                    // TODO: Support optional
7707                    limit: Some(self.parse_number_value()?.value),
7708                }
7709            }
7710        } else if self.parse_keyword(Keyword::ALL) {
7711            FetchDirection::All
7712        } else {
7713            FetchDirection::Count {
7714                limit: self.parse_number_value()?.value,
7715            }
7716        };
7717
7718        let position = if self.peek_keyword(Keyword::FROM) {
7719            self.expect_keyword(Keyword::FROM)?;
7720            FetchPosition::From
7721        } else if self.peek_keyword(Keyword::IN) {
7722            self.expect_keyword(Keyword::IN)?;
7723            FetchPosition::In
7724        } else {
7725            return parser_err!("Expected FROM or IN", self.peek_token().span.start);
7726        };
7727
7728        let name = self.parse_identifier()?;
7729
7730        let into = if self.parse_keyword(Keyword::INTO) {
7731            Some(self.parse_object_name(false)?)
7732        } else {
7733            None
7734        };
7735
7736        Ok(Statement::Fetch {
7737            name,
7738            direction,
7739            position,
7740            into,
7741        })
7742    }
7743
7744    /// Parse a `DISCARD` statement.
7745    pub fn parse_discard(&mut self) -> Result<Statement, ParserError> {
7746        let object_type = if self.parse_keyword(Keyword::ALL) {
7747            DiscardObject::ALL
7748        } else if self.parse_keyword(Keyword::PLANS) {
7749            DiscardObject::PLANS
7750        } else if self.parse_keyword(Keyword::SEQUENCES) {
7751            DiscardObject::SEQUENCES
7752        } else if self.parse_keyword(Keyword::TEMP) || self.parse_keyword(Keyword::TEMPORARY) {
7753            DiscardObject::TEMP
7754        } else {
7755            return self.expected(
7756                "ALL, PLANS, SEQUENCES, TEMP or TEMPORARY after DISCARD",
7757                self.peek_token(),
7758            );
7759        };
7760        Ok(Statement::Discard { object_type })
7761    }
7762
7763    /// Parse a `CREATE INDEX` statement.
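    ///
    /// Illustrative example (editorial sketch; index, table and column names are hypothetical):
    ///
    /// ```sql
    /// CREATE UNIQUE INDEX IF NOT EXISTS title_idx ON films (title) INCLUDE (director) WHERE length > 90
    /// ```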
7764    pub fn parse_create_index(&mut self, unique: bool) -> Result<CreateIndex, ParserError> {
7765        let concurrently = self.parse_keyword(Keyword::CONCURRENTLY);
7766        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7767
7768        let mut using = None;
7769
7770        let index_name = if if_not_exists || !self.parse_keyword(Keyword::ON) {
7771            let index_name = self.parse_object_name(false)?;
7772            // MySQL allows `USING index_type` either before or after `ON table_name`
7773            using = self.parse_optional_using_then_index_type()?;
7774            self.expect_keyword_is(Keyword::ON)?;
7775            Some(index_name)
7776        } else {
7777            None
7778        };
7779
7780        let table_name = self.parse_object_name(false)?;
7781
7782        // MySQL allows having two `USING` clauses.
7783        // In that case, the second clause overwrites the first.
7784        using = self.parse_optional_using_then_index_type()?.or(using);
7785
7786        let columns = self.parse_parenthesized_index_column_list()?;
7787
7788        let include = if self.parse_keyword(Keyword::INCLUDE) {
7789            self.expect_token(&Token::LParen)?;
7790            let columns = self.parse_comma_separated(|p| p.parse_identifier())?;
7791            self.expect_token(&Token::RParen)?;
7792            columns
7793        } else {
7794            vec![]
7795        };
7796
7797        let nulls_distinct = if self.parse_keyword(Keyword::NULLS) {
7798            let not = self.parse_keyword(Keyword::NOT);
7799            self.expect_keyword_is(Keyword::DISTINCT)?;
7800            Some(!not)
7801        } else {
7802            None
7803        };
7804
7805        let with = if self.dialect.supports_create_index_with_clause()
7806            && self.parse_keyword(Keyword::WITH)
7807        {
7808            self.expect_token(&Token::LParen)?;
7809            let with_params = self.parse_comma_separated(Parser::parse_expr)?;
7810            self.expect_token(&Token::RParen)?;
7811            with_params
7812        } else {
7813            Vec::new()
7814        };
7815
7816        let predicate = if self.parse_keyword(Keyword::WHERE) {
7817            Some(self.parse_expr()?)
7818        } else {
7819            None
7820        };
7821
7822        // MySQL options (including the modern style of `USING` after the column list instead of
7823        // before, which is deprecated) shouldn't conflict with other preceding options (e.g. `WITH
7824        // PARSER` won't be caught by the above `WITH` clause parsing because MySQL doesn't set that
7825        // support flag). This is probably invalid syntax for other dialects, but it is simpler to
7826        // parse it anyway (as we do inside `ALTER TABLE` and `CREATE TABLE` parsing).
7827        let index_options = self.parse_index_options()?;
7828
7829        // MySQL allows `ALGORITHM` and `LOCK` options. Unlike in `ALTER TABLE`, they need not be comma separated.
7830        let mut alter_options = Vec::new();
7831        while self
7832            .peek_one_of_keywords(&[Keyword::ALGORITHM, Keyword::LOCK])
7833            .is_some()
7834        {
7835            alter_options.push(self.parse_alter_table_operation()?)
7836        }
7837
7838        Ok(CreateIndex {
7839            name: index_name,
7840            table_name,
7841            using,
7842            columns,
7843            unique,
7844            concurrently,
7845            if_not_exists,
7846            include,
7847            nulls_distinct,
7848            with,
7849            predicate,
7850            index_options,
7851            alter_options,
7852        })
7853    }
7854
7855    /// Parse a `CREATE EXTENSION` statement.
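    ///
    /// Illustrative example (editorial sketch; `hstore` is used only as an example extension name):
    ///
    /// ```sql
    /// CREATE EXTENSION IF NOT EXISTS hstore WITH SCHEMA public CASCADE
    /// ```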
7856    pub fn parse_create_extension(&mut self) -> Result<CreateExtension, ParserError> {
7857        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7858        let name = self.parse_identifier()?;
7859
7860        let (schema, version, cascade) = if self.parse_keyword(Keyword::WITH) {
7861            let schema = if self.parse_keyword(Keyword::SCHEMA) {
7862                Some(self.parse_identifier()?)
7863            } else {
7864                None
7865            };
7866
7867            let version = if self.parse_keyword(Keyword::VERSION) {
7868                Some(self.parse_identifier()?)
7869            } else {
7870                None
7871            };
7872
7873            let cascade = self.parse_keyword(Keyword::CASCADE);
7874
7875            (schema, version, cascade)
7876        } else {
7877            (None, None, false)
7878        };
7879
7880        Ok(CreateExtension {
7881            name,
7882            if_not_exists,
7883            schema,
7884            version,
7885            cascade,
7886        })
7887    }
7888
7889    /// Parse a PostgreSQL-specific [Statement::DropExtension] statement.
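    ///
    /// Illustrative example (editorial sketch; extension names are examples only):
    ///
    /// ```sql
    /// DROP EXTENSION IF EXISTS hstore, ltree CASCADE
    /// ```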
7890    pub fn parse_drop_extension(&mut self) -> Result<Statement, ParserError> {
7891        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7892        let names = self.parse_comma_separated(|p| p.parse_identifier())?;
7893        let cascade_or_restrict =
7894            self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]);
7895        Ok(Statement::DropExtension(DropExtension {
7896            names,
7897            if_exists,
7898            cascade_or_restrict: cascade_or_restrict
7899                .map(|k| match k {
7900                    Keyword::CASCADE => Ok(ReferentialAction::Cascade),
7901                    Keyword::RESTRICT => Ok(ReferentialAction::Restrict),
7902                    _ => self.expected("CASCADE or RESTRICT", self.peek_token()),
7903                })
7904                .transpose()?,
7905        }))
7906    }
7907
7908    /// Parse a [Statement::DropOperator] statement.
7909    ///
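    /// Illustrative example, modeled on the PostgreSQL documentation
    /// (editorial sketch, not from the upstream doc comment):
    ///
    /// ```sql
    /// DROP OPERATOR IF EXISTS ^ (INTEGER, INTEGER) CASCADE
    /// ```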
7910    pub fn parse_drop_operator(&mut self) -> Result<Statement, ParserError> {
7911        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7912        let operators = self.parse_comma_separated(|p| p.parse_drop_operator_signature())?;
7913        let drop_behavior = self.parse_optional_drop_behavior();
7914        Ok(Statement::DropOperator(DropOperator {
7915            if_exists,
7916            operators,
7917            drop_behavior,
7918        }))
7919    }
7920
7921    /// Parse an operator signature for a [Statement::DropOperator]
7922    /// Format: `name ( { left_type | NONE } , right_type )`
7923    fn parse_drop_operator_signature(&mut self) -> Result<DropOperatorSignature, ParserError> {
7924        let name = self.parse_operator_name()?;
7925        self.expect_token(&Token::LParen)?;
7926
7927        // Parse left operand type (or NONE for prefix operators)
7928        let left_type = if self.parse_keyword(Keyword::NONE) {
7929            None
7930        } else {
7931            Some(self.parse_data_type()?)
7932        };
7933
7934        self.expect_token(&Token::Comma)?;
7935
7936        // Parse right operand type (always required)
7937        let right_type = self.parse_data_type()?;
7938
7939        self.expect_token(&Token::RParen)?;
7940
7941        Ok(DropOperatorSignature {
7942            name,
7943            left_type,
7944            right_type,
7945        })
7946    }
7947
7948    /// Parse a [Statement::DropOperatorFamily]
7949    ///
7950    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-dropopfamily.html)
7951    pub fn parse_drop_operator_family(&mut self) -> Result<Statement, ParserError> {
7952        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7953        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
7954        self.expect_keyword(Keyword::USING)?;
7955        let using = self.parse_identifier()?;
7956        let drop_behavior = self.parse_optional_drop_behavior();
7957        Ok(Statement::DropOperatorFamily(DropOperatorFamily {
7958            if_exists,
7959            names,
7960            using,
7961            drop_behavior,
7962        }))
7963    }
7964
7965    /// Parse a [Statement::DropOperatorClass]
7966    ///
7967    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-dropopclass.html)
7968    pub fn parse_drop_operator_class(&mut self) -> Result<Statement, ParserError> {
7969        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7970        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
7971        self.expect_keyword(Keyword::USING)?;
7972        let using = self.parse_identifier()?;
7973        let drop_behavior = self.parse_optional_drop_behavior();
7974        Ok(Statement::DropOperatorClass(DropOperatorClass {
7975            if_exists,
7976            names,
7977            using,
7978            drop_behavior,
7979        }))
7980    }
7981
7982    /// Parse Hive distribution style.
7983    ///
7984    /// TODO: Support parsing for `SKEWED` distribution style.
7985    pub fn parse_hive_distribution(&mut self) -> Result<HiveDistributionStyle, ParserError> {
7986        if self.parse_keywords(&[Keyword::PARTITIONED, Keyword::BY]) {
7987            self.expect_token(&Token::LParen)?;
7988            let columns = self.parse_comma_separated(Parser::parse_column_def)?;
7989            self.expect_token(&Token::RParen)?;
7990            Ok(HiveDistributionStyle::PARTITIONED { columns })
7991        } else {
7992            Ok(HiveDistributionStyle::NONE)
7993        }
7994    }
7995
7996    /// Parse Hive formats.
7997    pub fn parse_hive_formats(&mut self) -> Result<Option<HiveFormat>, ParserError> {
7998        let mut hive_format: Option<HiveFormat> = None;
7999        loop {
8000            match self.parse_one_of_keywords(&[
8001                Keyword::ROW,
8002                Keyword::STORED,
8003                Keyword::LOCATION,
8004                Keyword::WITH,
8005            ]) {
8006                Some(Keyword::ROW) => {
8007                    hive_format
8008                        .get_or_insert_with(HiveFormat::default)
8009                        .row_format = Some(self.parse_row_format()?);
8010                }
8011                Some(Keyword::STORED) => {
8012                    self.expect_keyword_is(Keyword::AS)?;
8013                    if self.parse_keyword(Keyword::INPUTFORMAT) {
8014                        let input_format = self.parse_expr()?;
8015                        self.expect_keyword_is(Keyword::OUTPUTFORMAT)?;
8016                        let output_format = self.parse_expr()?;
8017                        hive_format.get_or_insert_with(HiveFormat::default).storage =
8018                            Some(HiveIOFormat::IOF {
8019                                input_format,
8020                                output_format,
8021                            });
8022                    } else {
8023                        let format = self.parse_file_format()?;
8024                        hive_format.get_or_insert_with(HiveFormat::default).storage =
8025                            Some(HiveIOFormat::FileFormat { format });
8026                    }
8027                }
8028                Some(Keyword::LOCATION) => {
8029                    hive_format.get_or_insert_with(HiveFormat::default).location =
8030                        Some(self.parse_literal_string()?);
8031                }
8032                Some(Keyword::WITH) => {
8033                    self.prev_token();
8034                    let properties = self
8035                        .parse_options_with_keywords(&[Keyword::WITH, Keyword::SERDEPROPERTIES])?;
8036                    if !properties.is_empty() {
8037                        hive_format
8038                            .get_or_insert_with(HiveFormat::default)
8039                            .serde_properties = Some(properties);
8040                    } else {
8041                        break;
8042                    }
8043                }
8044                None => break,
8045                _ => break,
8046            }
8047        }
8048
8049        Ok(hive_format)
8050    }
8051
8052    /// Parse Hive row format.
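    ///
    /// Handles both the `SERDE '<class>'` and the `DELIMITED ...` forms. A short,
    /// non-normative sketch (SQL text is illustrative):
    ///
    /// ```rust,ignore
    /// use sqlparser::dialect::HiveDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "CREATE TABLE t (id INT) ROW FORMAT DELIMITED FIELDS TERMINATED BY ','";
    /// let statements = Parser::parse_sql(&HiveDialect {}, sql).unwrap();
    /// ```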
8053    pub fn parse_row_format(&mut self) -> Result<HiveRowFormat, ParserError> {
8054        self.expect_keyword_is(Keyword::FORMAT)?;
8055        match self.parse_one_of_keywords(&[Keyword::SERDE, Keyword::DELIMITED]) {
8056            Some(Keyword::SERDE) => {
8057                let class = self.parse_literal_string()?;
8058                Ok(HiveRowFormat::SERDE { class })
8059            }
8060            _ => {
8061                let mut row_delimiters = vec![];
8062
8063                loop {
8064                    match self.parse_one_of_keywords(&[
8065                        Keyword::FIELDS,
8066                        Keyword::COLLECTION,
8067                        Keyword::MAP,
8068                        Keyword::LINES,
8069                        Keyword::NULL,
8070                    ]) {
8071                        Some(Keyword::FIELDS) => {
8072                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
8073                                row_delimiters.push(HiveRowDelimiter {
8074                                    delimiter: HiveDelimiter::FieldsTerminatedBy,
8075                                    char: self.parse_identifier()?,
8076                                });
8077
8078                                if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) {
8079                                    row_delimiters.push(HiveRowDelimiter {
8080                                        delimiter: HiveDelimiter::FieldsEscapedBy,
8081                                        char: self.parse_identifier()?,
8082                                    });
8083                                }
8084                            } else {
8085                                break;
8086                            }
8087                        }
8088                        Some(Keyword::COLLECTION) => {
8089                            if self.parse_keywords(&[
8090                                Keyword::ITEMS,
8091                                Keyword::TERMINATED,
8092                                Keyword::BY,
8093                            ]) {
8094                                row_delimiters.push(HiveRowDelimiter {
8095                                    delimiter: HiveDelimiter::CollectionItemsTerminatedBy,
8096                                    char: self.parse_identifier()?,
8097                                });
8098                            } else {
8099                                break;
8100                            }
8101                        }
8102                        Some(Keyword::MAP) => {
8103                            if self.parse_keywords(&[
8104                                Keyword::KEYS,
8105                                Keyword::TERMINATED,
8106                                Keyword::BY,
8107                            ]) {
8108                                row_delimiters.push(HiveRowDelimiter {
8109                                    delimiter: HiveDelimiter::MapKeysTerminatedBy,
8110                                    char: self.parse_identifier()?,
8111                                });
8112                            } else {
8113                                break;
8114                            }
8115                        }
8116                        Some(Keyword::LINES) => {
8117                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
8118                                row_delimiters.push(HiveRowDelimiter {
8119                                    delimiter: HiveDelimiter::LinesTerminatedBy,
8120                                    char: self.parse_identifier()?,
8121                                });
8122                            } else {
8123                                break;
8124                            }
8125                        }
8126                        Some(Keyword::NULL) => {
8127                            if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) {
8128                                row_delimiters.push(HiveRowDelimiter {
8129                                    delimiter: HiveDelimiter::NullDefinedAs,
8130                                    char: self.parse_identifier()?,
8131                                });
8132                            } else {
8133                                break;
8134                            }
8135                        }
8136                        _ => {
8137                            break;
8138                        }
8139                    }
8140                }
8141
8142                Ok(HiveRowFormat::DELIMITED {
8143                    delimiters: row_delimiters,
8144                })
8145            }
8146        }
8147    }
8148
8149    fn parse_optional_on_cluster(&mut self) -> Result<Option<Ident>, ParserError> {
8150        if self.parse_keywords(&[Keyword::ON, Keyword::CLUSTER]) {
8151            Ok(Some(self.parse_identifier()?))
8152        } else {
8153            Ok(None)
8154        }
8155    }
8156
8157    /// Parse `CREATE TABLE` statement.
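    ///
    /// The `CREATE [OR REPLACE] [TEMPORARY] TABLE` keywords are consumed by the caller,
    /// which passes the corresponding flags in. A minimal end-to-end sketch through the
    /// public API (SQL text and dialect are illustrative):
    ///
    /// ```rust,ignore
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "CREATE TABLE IF NOT EXISTS users (id INT PRIMARY KEY, name TEXT NOT NULL)";
    /// let statements = Parser::parse_sql(&GenericDialect {}, sql).unwrap();
    /// assert_eq!(statements.len(), 1);
    /// ```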
8158    pub fn parse_create_table(
8159        &mut self,
8160        or_replace: bool,
8161        temporary: bool,
8162        global: Option<bool>,
8163        transient: bool,
8164    ) -> Result<CreateTable, ParserError> {
8165        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
8166        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
8167        let table_name = self.parse_object_name(allow_unquoted_hyphen)?;
8168
8169        // PostgreSQL PARTITION OF for child partition tables
8170        // Note: This is a PostgreSQL-specific feature, but the dialect check was intentionally
8171        // removed to allow GenericDialect and other dialects to parse this syntax. This enables
8172        // multi-dialect SQL tools to work with PostgreSQL-specific DDL statements.
8173        //
8174        // PARTITION OF can be combined with other table definition clauses in the AST,
8175        // though PostgreSQL itself prohibits PARTITION OF with AS SELECT or LIKE clauses.
8176        // The parser accepts these combinations for flexibility; semantic validation
8177        // is left to downstream tools.
8178        // Child partitions can have their own constraints and indexes.
8179        let partition_of = if self.parse_keywords(&[Keyword::PARTITION, Keyword::OF]) {
8180            Some(self.parse_object_name(allow_unquoted_hyphen)?)
8181        } else {
8182            None
8183        };
8184
8185        // ClickHouse supports the `ON CLUSTER 'cluster'` syntax for DDL statements
8186        let on_cluster = self.parse_optional_on_cluster()?;
8187
8188        let like = self.maybe_parse_create_table_like(allow_unquoted_hyphen)?;
8189
8190        let clone = if self.parse_keyword(Keyword::CLONE) {
8191            self.parse_object_name(allow_unquoted_hyphen).ok()
8192        } else {
8193            None
8194        };
8195
8196        // parse optional column list (schema)
8197        let (columns, constraints) = self.parse_columns()?;
8198        let comment_after_column_def =
8199            if dialect_of!(self is HiveDialect) && self.parse_keyword(Keyword::COMMENT) {
8200                let next_token = self.next_token();
8201                match next_token.token {
8202                    Token::SingleQuotedString(str) => Some(CommentDef::WithoutEq(str)),
8203                    _ => self.expected("comment", next_token)?,
8204                }
8205            } else {
8206                None
8207            };
8208
8209        // PostgreSQL PARTITION OF: partition bound specification
8210        let for_values = if partition_of.is_some() {
8211            if self.peek_keyword(Keyword::FOR) || self.peek_keyword(Keyword::DEFAULT) {
8212                Some(self.parse_partition_for_values()?)
8213            } else {
8214                return self.expected(
8215                    "FOR VALUES or DEFAULT after PARTITION OF",
8216                    self.peek_token(),
8217                );
8218            }
8219        } else {
8220            None
8221        };
8222
8223        // SQLite supports `WITHOUT ROWID` at the end of `CREATE TABLE`
8224        let without_rowid = self.parse_keywords(&[Keyword::WITHOUT, Keyword::ROWID]);
8225
8226        let hive_distribution = self.parse_hive_distribution()?;
8227        let clustered_by = self.parse_optional_clustered_by()?;
8228        let hive_formats = self.parse_hive_formats()?;
8229
8230        let create_table_config = self.parse_optional_create_table_config()?;
8231
8232        // ClickHouse supports `PRIMARY KEY` before `ORDER BY`
8233        // https://clickhouse.com/docs/en/sql-reference/statements/create/table#primary-key
8234        let primary_key = if dialect_of!(self is ClickHouseDialect | GenericDialect)
8235            && self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY])
8236        {
8237            Some(Box::new(self.parse_expr()?))
8238        } else {
8239            None
8240        };
8241
8242        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
8243            if self.consume_token(&Token::LParen) {
8244                let columns = if self.peek_token() != Token::RParen {
8245                    self.parse_comma_separated(|p| p.parse_expr())?
8246                } else {
8247                    vec![]
8248                };
8249                self.expect_token(&Token::RParen)?;
8250                Some(OneOrManyWithParens::Many(columns))
8251            } else {
8252                Some(OneOrManyWithParens::One(self.parse_expr()?))
8253            }
8254        } else {
8255            None
8256        };
8257
8258        let on_commit = if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT]) {
8259            Some(self.parse_create_table_on_commit()?)
8260        } else {
8261            None
8262        };
8263
8264        let strict = self.parse_keyword(Keyword::STRICT);
8265
8266        // Parse optional `AS ( query )`
8267        let query = if self.parse_keyword(Keyword::AS) {
8268            Some(self.parse_query()?)
8269        } else if self.dialect.supports_create_table_select() && self.parse_keyword(Keyword::SELECT)
8270        {
8271            // rewind to before the SELECT keyword so `parse_query` sees it
8272            self.prev_token();
8273            Some(self.parse_query()?)
8274        } else {
8275            None
8276        };
8277
8278        Ok(CreateTableBuilder::new(table_name)
8279            .temporary(temporary)
8280            .columns(columns)
8281            .constraints(constraints)
8282            .or_replace(or_replace)
8283            .if_not_exists(if_not_exists)
8284            .transient(transient)
8285            .hive_distribution(hive_distribution)
8286            .hive_formats(hive_formats)
8287            .global(global)
8288            .query(query)
8289            .without_rowid(without_rowid)
8290            .like(like)
8291            .clone_clause(clone)
8292            .comment_after_column_def(comment_after_column_def)
8293            .order_by(order_by)
8294            .on_commit(on_commit)
8295            .on_cluster(on_cluster)
8296            .clustered_by(clustered_by)
8297            .partition_by(create_table_config.partition_by)
8298            .cluster_by(create_table_config.cluster_by)
8299            .inherits(create_table_config.inherits)
8300            .partition_of(partition_of)
8301            .for_values(for_values)
8302            .table_options(create_table_config.table_options)
8303            .primary_key(primary_key)
8304            .strict(strict)
8305            .build())
8306    }
8307
8308    fn maybe_parse_create_table_like(
8309        &mut self,
8310        allow_unquoted_hyphen: bool,
8311    ) -> Result<Option<CreateTableLikeKind>, ParserError> {
8312        let like = if self.dialect.supports_create_table_like_parenthesized()
8313            && self.consume_token(&Token::LParen)
8314        {
8315            if self.parse_keyword(Keyword::LIKE) {
8316                let name = self.parse_object_name(allow_unquoted_hyphen)?;
8317                let defaults = if self.parse_keywords(&[Keyword::INCLUDING, Keyword::DEFAULTS]) {
8318                    Some(CreateTableLikeDefaults::Including)
8319                } else if self.parse_keywords(&[Keyword::EXCLUDING, Keyword::DEFAULTS]) {
8320                    Some(CreateTableLikeDefaults::Excluding)
8321                } else {
8322                    None
8323                };
8324                self.expect_token(&Token::RParen)?;
8325                Some(CreateTableLikeKind::Parenthesized(CreateTableLike {
8326                    name,
8327                    defaults,
8328                }))
8329            } else {
8330                // Roll back the '('; it's probably the start of the column list
8331                self.prev_token();
8332                None
8333            }
8334        } else if self.parse_keyword(Keyword::LIKE) || self.parse_keyword(Keyword::ILIKE) {
8335            let name = self.parse_object_name(allow_unquoted_hyphen)?;
8336            Some(CreateTableLikeKind::Plain(CreateTableLike {
8337                name,
8338                defaults: None,
8339            }))
8340        } else {
8341            None
8342        };
8343        Ok(like)
8344    }
8345
8346    pub(crate) fn parse_create_table_on_commit(&mut self) -> Result<OnCommit, ParserError> {
8347        if self.parse_keywords(&[Keyword::DELETE, Keyword::ROWS]) {
8348            Ok(OnCommit::DeleteRows)
8349        } else if self.parse_keywords(&[Keyword::PRESERVE, Keyword::ROWS]) {
8350            Ok(OnCommit::PreserveRows)
8351        } else if self.parse_keywords(&[Keyword::DROP]) {
8352            Ok(OnCommit::Drop)
8353        } else {
8354            parser_err!(
8355                "Expecting DELETE ROWS, PRESERVE ROWS or DROP",
8356                self.peek_token()
8357            )
8358        }
8359    }
8360
8361    /// Parse [ForValues] of a `PARTITION OF` clause.
8362    ///
8363    /// Parses: `FOR VALUES partition_bound_spec | DEFAULT`
8364    ///
8365    /// [PostgreSQL](https://www.postgresql.org/docs/current/sql-createtable.html)
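    ///
    /// A hedged sketch of one of the bound forms this accepts (SQL text is illustrative):
    ///
    /// ```rust,ignore
    /// use sqlparser::dialect::PostgreSqlDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// // Range bound; `FOR VALUES IN (...)`, `FOR VALUES WITH (MODULUS n, REMAINDER r)`
    /// // and a bare `DEFAULT` are parsed by the same method.
    /// let sql = "CREATE TABLE sales_2024 PARTITION OF sales FOR VALUES FROM (1) TO (100)";
    /// let statements = Parser::parse_sql(&PostgreSqlDialect {}, sql).unwrap();
    /// ```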
8366    fn parse_partition_for_values(&mut self) -> Result<ForValues, ParserError> {
8367        if self.parse_keyword(Keyword::DEFAULT) {
8368            return Ok(ForValues::Default);
8369        }
8370
8371        self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;
8372
8373        if self.parse_keyword(Keyword::IN) {
8374            // FOR VALUES IN (expr, ...)
8375            self.expect_token(&Token::LParen)?;
8376            if self.peek_token() == Token::RParen {
8377                return self.expected("at least one value", self.peek_token());
8378            }
8379            let values = self.parse_comma_separated(Parser::parse_expr)?;
8380            self.expect_token(&Token::RParen)?;
8381            Ok(ForValues::In(values))
8382        } else if self.parse_keyword(Keyword::FROM) {
8383            // FOR VALUES FROM (...) TO (...)
8384            self.expect_token(&Token::LParen)?;
8385            if self.peek_token() == Token::RParen {
8386                return self.expected("at least one value", self.peek_token());
8387            }
8388            let from = self.parse_comma_separated(Parser::parse_partition_bound_value)?;
8389            self.expect_token(&Token::RParen)?;
8390            self.expect_keyword(Keyword::TO)?;
8391            self.expect_token(&Token::LParen)?;
8392            if self.peek_token() == Token::RParen {
8393                return self.expected("at least one value", self.peek_token());
8394            }
8395            let to = self.parse_comma_separated(Parser::parse_partition_bound_value)?;
8396            self.expect_token(&Token::RParen)?;
8397            Ok(ForValues::From { from, to })
8398        } else if self.parse_keyword(Keyword::WITH) {
8399            // FOR VALUES WITH (MODULUS n, REMAINDER r)
8400            self.expect_token(&Token::LParen)?;
8401            self.expect_keyword(Keyword::MODULUS)?;
8402            let modulus = self.parse_literal_uint()?;
8403            self.expect_token(&Token::Comma)?;
8404            self.expect_keyword(Keyword::REMAINDER)?;
8405            let remainder = self.parse_literal_uint()?;
8406            self.expect_token(&Token::RParen)?;
8407            Ok(ForValues::With { modulus, remainder })
8408        } else {
8409            self.expected("IN, FROM, or WITH after FOR VALUES", self.peek_token())
8410        }
8411    }
8412
8413    /// Parse a single [PartitionBoundValue].
8414    fn parse_partition_bound_value(&mut self) -> Result<PartitionBoundValue, ParserError> {
8415        if self.parse_keyword(Keyword::MINVALUE) {
8416            Ok(PartitionBoundValue::MinValue)
8417        } else if self.parse_keyword(Keyword::MAXVALUE) {
8418            Ok(PartitionBoundValue::MaxValue)
8419        } else {
8420            Ok(PartitionBoundValue::Expr(self.parse_expr()?))
8421        }
8422    }
8423
8424    /// Parse configuration like inheritance, partitioning, clustering information during the table creation.
8425    ///
8426    /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#syntax_2)
8427    /// [PostgreSQL](https://www.postgresql.org/docs/current/ddl-partitioning.html)
8428    /// [MySql](https://dev.mysql.com/doc/refman/8.4/en/create-table.html)
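    ///
    /// A brief illustrative sketch (BigQuery-flavored; the SQL text is an assumption):
    ///
    /// ```rust,ignore
    /// use sqlparser::dialect::BigQueryDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// // `PARTITION BY`, `CLUSTER BY` and a trailing `OPTIONS (...)` list all end up in
    /// // the returned `CreateTableConfiguration`.
    /// let sql = "CREATE TABLE ds.t (d DATE, x INT64) PARTITION BY d CLUSTER BY x OPTIONS (description = 'demo')";
    /// let statements = Parser::parse_sql(&BigQueryDialect {}, sql).unwrap();
    /// ```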
8429    fn parse_optional_create_table_config(
8430        &mut self,
8431    ) -> Result<CreateTableConfiguration, ParserError> {
8432        let mut table_options = CreateTableOptions::None;
8433
8434        let inherits = if self.parse_keyword(Keyword::INHERITS) {
8435            Some(self.parse_parenthesized_qualified_column_list(IsOptional::Mandatory, false)?)
8436        } else {
8437            None
8438        };
8439
8440        // PostgreSQL supports `WITH ( options )` before `AS`
8441        let with_options = self.parse_options(Keyword::WITH)?;
8442        if !with_options.is_empty() {
8443            table_options = CreateTableOptions::With(with_options)
8444        }
8445
8446        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
8447        if !table_properties.is_empty() {
8448            table_options = CreateTableOptions::TableProperties(table_properties);
8449        }
8450        let partition_by = if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
8451            && self.parse_keywords(&[Keyword::PARTITION, Keyword::BY])
8452        {
8453            Some(Box::new(self.parse_expr()?))
8454        } else {
8455            None
8456        };
8457
8458        let mut cluster_by = None;
8459        if dialect_of!(self is BigQueryDialect | GenericDialect) {
8460            if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
8461                cluster_by = Some(WrappedCollection::NoWrapping(
8462                    self.parse_comma_separated(|p| p.parse_expr())?,
8463                ));
8464            };
8465
8466            if let Token::Word(word) = self.peek_token().token {
8467                if word.keyword == Keyword::OPTIONS {
8468                    table_options =
8469                        CreateTableOptions::Options(self.parse_options(Keyword::OPTIONS)?)
8470                }
8471            };
8472        }
8473
8474        if !dialect_of!(self is HiveDialect) && table_options == CreateTableOptions::None {
8475            let plain_options = self.parse_plain_options()?;
8476            if !plain_options.is_empty() {
8477                table_options = CreateTableOptions::Plain(plain_options)
8478            }
8479        };
8480
8481        Ok(CreateTableConfiguration {
8482            partition_by,
8483            cluster_by,
8484            inherits,
8485            table_options,
8486        })
8487    }
8488
8489    fn parse_plain_option(&mut self) -> Result<Option<SqlOption>, ParserError> {
8490        // Single parameter option
8491        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
8492        if self.parse_keywords(&[Keyword::START, Keyword::TRANSACTION]) {
8493            return Ok(Some(SqlOption::Ident(Ident::new("START TRANSACTION"))));
8494        }
8495
8496        // Custom option
8497        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
8498        if self.parse_keywords(&[Keyword::COMMENT]) {
8499            let has_eq = self.consume_token(&Token::Eq);
8500            let value = self.next_token();
8501
8502            let comment = match (has_eq, value.token) {
8503                (true, Token::SingleQuotedString(s)) => {
8504                    Ok(Some(SqlOption::Comment(CommentDef::WithEq(s))))
8505                }
8506                (false, Token::SingleQuotedString(s)) => {
8507                    Ok(Some(SqlOption::Comment(CommentDef::WithoutEq(s))))
8508                }
8509                (_, token) => {
8510                    self.expected("Token::SingleQuotedString", TokenWithSpan::wrap(token))
8511                }
8512            };
8513            return comment;
8514        }
8515
8516        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
8517        // <https://clickhouse.com/docs/sql-reference/statements/create/table>
8518        if self.parse_keywords(&[Keyword::ENGINE]) {
8519            let _ = self.consume_token(&Token::Eq);
8520            let value = self.next_token();
8521
8522            let engine = match value.token {
8523                Token::Word(w) => {
8524                    let parameters = if self.peek_token() == Token::LParen {
8525                        self.parse_parenthesized_identifiers()?
8526                    } else {
8527                        vec![]
8528                    };
8529
8530                    Ok(Some(SqlOption::NamedParenthesizedList(
8531                        NamedParenthesizedList {
8532                            key: Ident::new("ENGINE"),
8533                            name: Some(Ident::new(w.value)),
8534                            values: parameters,
8535                        },
8536                    )))
8537                }
8538                _ => {
8539                    return self.expected("Token::Word", value)?;
8540                }
8541            };
8542
8543            return engine;
8544        }
8545
8546        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
8547        if self.parse_keywords(&[Keyword::TABLESPACE]) {
8548            let _ = self.consume_token(&Token::Eq);
8549            let value = self.next_token();
8550
8551            let tablespace = match value.token {
8552                Token::Word(Word { value: name, .. }) | Token::SingleQuotedString(name) => {
8553                    let storage = match self.parse_keyword(Keyword::STORAGE) {
8554                        true => {
8555                            let _ = self.consume_token(&Token::Eq);
8556                            let storage_token = self.next_token();
8557                            match &storage_token.token {
8558                                Token::Word(w) => match w.value.to_uppercase().as_str() {
8559                                    "DISK" => Some(StorageType::Disk),
8560                                    "MEMORY" => Some(StorageType::Memory),
8561                                    _ => self
8562                                        .expected("Storage type (DISK or MEMORY)", storage_token)?,
8563                                },
8564                                _ => self.expected("Token::Word", storage_token)?,
8565                            }
8566                        }
8567                        false => None,
8568                    };
8569
8570                    Ok(Some(SqlOption::TableSpace(TablespaceOption {
8571                        name,
8572                        storage,
8573                    })))
8574                }
8575                _ => {
8576                    return self.expected("Token::Word", value)?;
8577                }
8578            };
8579
8580            return tablespace;
8581        }
8582
8583        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
8584        if self.parse_keyword(Keyword::UNION) {
8585            let _ = self.consume_token(&Token::Eq);
8586            let value = self.next_token();
8587
8588            match value.token {
8589                Token::LParen => {
8590                    let tables: Vec<Ident> =
8591                        self.parse_comma_separated0(Parser::parse_identifier, Token::RParen)?;
8592                    self.expect_token(&Token::RParen)?;
8593
8594                    return Ok(Some(SqlOption::NamedParenthesizedList(
8595                        NamedParenthesizedList {
8596                            key: Ident::new("UNION"),
8597                            name: None,
8598                            values: tables,
8599                        },
8600                    )));
8601                }
8602                _ => {
8603                    return self.expected("Token::LParen", value)?;
8604                }
8605            }
8606        }
8607
8608        // Key/Value parameter option
8609        let key = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARSET]) {
8610            Ident::new("DEFAULT CHARSET")
8611        } else if self.parse_keyword(Keyword::CHARSET) {
8612            Ident::new("CHARSET")
8613        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARACTER, Keyword::SET]) {
8614            Ident::new("DEFAULT CHARACTER SET")
8615        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
8616            Ident::new("CHARACTER SET")
8617        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
8618            Ident::new("DEFAULT COLLATE")
8619        } else if self.parse_keyword(Keyword::COLLATE) {
8620            Ident::new("COLLATE")
8621        } else if self.parse_keywords(&[Keyword::DATA, Keyword::DIRECTORY]) {
8622            Ident::new("DATA DIRECTORY")
8623        } else if self.parse_keywords(&[Keyword::INDEX, Keyword::DIRECTORY]) {
8624            Ident::new("INDEX DIRECTORY")
8625        } else if self.parse_keyword(Keyword::KEY_BLOCK_SIZE) {
8626            Ident::new("KEY_BLOCK_SIZE")
8627        } else if self.parse_keyword(Keyword::ROW_FORMAT) {
8628            Ident::new("ROW_FORMAT")
8629        } else if self.parse_keyword(Keyword::PACK_KEYS) {
8630            Ident::new("PACK_KEYS")
8631        } else if self.parse_keyword(Keyword::STATS_AUTO_RECALC) {
8632            Ident::new("STATS_AUTO_RECALC")
8633        } else if self.parse_keyword(Keyword::STATS_PERSISTENT) {
8634            Ident::new("STATS_PERSISTENT")
8635        } else if self.parse_keyword(Keyword::STATS_SAMPLE_PAGES) {
8636            Ident::new("STATS_SAMPLE_PAGES")
8637        } else if self.parse_keyword(Keyword::DELAY_KEY_WRITE) {
8638            Ident::new("DELAY_KEY_WRITE")
8639        } else if self.parse_keyword(Keyword::COMPRESSION) {
8640            Ident::new("COMPRESSION")
8641        } else if self.parse_keyword(Keyword::ENCRYPTION) {
8642            Ident::new("ENCRYPTION")
8643        } else if self.parse_keyword(Keyword::MAX_ROWS) {
8644            Ident::new("MAX_ROWS")
8645        } else if self.parse_keyword(Keyword::MIN_ROWS) {
8646            Ident::new("MIN_ROWS")
8647        } else if self.parse_keyword(Keyword::AUTOEXTEND_SIZE) {
8648            Ident::new("AUTOEXTEND_SIZE")
8649        } else if self.parse_keyword(Keyword::AVG_ROW_LENGTH) {
8650            Ident::new("AVG_ROW_LENGTH")
8651        } else if self.parse_keyword(Keyword::CHECKSUM) {
8652            Ident::new("CHECKSUM")
8653        } else if self.parse_keyword(Keyword::CONNECTION) {
8654            Ident::new("CONNECTION")
8655        } else if self.parse_keyword(Keyword::ENGINE_ATTRIBUTE) {
8656            Ident::new("ENGINE_ATTRIBUTE")
8657        } else if self.parse_keyword(Keyword::PASSWORD) {
8658            Ident::new("PASSWORD")
8659        } else if self.parse_keyword(Keyword::SECONDARY_ENGINE_ATTRIBUTE) {
8660            Ident::new("SECONDARY_ENGINE_ATTRIBUTE")
8661        } else if self.parse_keyword(Keyword::INSERT_METHOD) {
8662            Ident::new("INSERT_METHOD")
8663        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
8664            Ident::new("AUTO_INCREMENT")
8665        } else {
8666            return Ok(None);
8667        };
8668
8669        let _ = self.consume_token(&Token::Eq);
8670
8671        let value = match self
8672            .maybe_parse(|parser| parser.parse_value())?
8673            .map(Expr::Value)
8674        {
8675            Some(expr) => expr,
8676            None => Expr::Identifier(self.parse_identifier()?),
8677        };
8678
8679        Ok(Some(SqlOption::KeyValue { key, value }))
8680    }
8681
8682    /// Parse plain options.
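    ///
    /// Collects options such as `ENGINE=...`, `DEFAULT CHARSET=...` or `COMMENT '...'` until
    /// no further option is recognized. An illustrative sketch (MySQL-flavored, SQL text assumed):
    ///
    /// ```rust,ignore
    /// use sqlparser::dialect::MySqlDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "CREATE TABLE t (id INT) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='demo'";
    /// let statements = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
    /// ```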
8683    pub fn parse_plain_options(&mut self) -> Result<Vec<SqlOption>, ParserError> {
8684        let mut options = Vec::new();
8685
8686        while let Some(option) = self.parse_plain_option()? {
8687            options.push(option);
8688            // Some dialects allow the options to be comma-separated; consuming the comma
8689            // for all dialects shouldn't introduce any ambiguity.
8690            let _ = self.consume_token(&Token::Comma);
8691        }
8692
8693        Ok(options)
8694    }
8695
8696    /// Parse optional inline comment.
8697    pub fn parse_optional_inline_comment(&mut self) -> Result<Option<CommentDef>, ParserError> {
8698        let comment = if self.parse_keyword(Keyword::COMMENT) {
8699            let has_eq = self.consume_token(&Token::Eq);
8700            let comment = self.parse_comment_value()?;
8701            Some(if has_eq {
8702                CommentDef::WithEq(comment)
8703            } else {
8704                CommentDef::WithoutEq(comment)
8705            })
8706        } else {
8707            None
8708        };
8709        Ok(comment)
8710    }
8711
8712    /// Parse comment value.
8713    pub fn parse_comment_value(&mut self) -> Result<String, ParserError> {
8714        let next_token = self.next_token();
8715        let value = match next_token.token {
8716            Token::SingleQuotedString(str) => str,
8717            Token::DollarQuotedString(str) => str.value,
8718            _ => self.expected("string literal", next_token)?,
8719        };
8720        Ok(value)
8721    }
8722
8723    /// Parse optional procedure parameters.
8724    pub fn parse_optional_procedure_parameters(
8725        &mut self,
8726    ) -> Result<Option<Vec<ProcedureParam>>, ParserError> {
8727        let mut params = vec![];
8728        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
8729            return Ok(Some(params));
8730        }
8731        loop {
8732            if let Token::Word(_) = self.peek_token().token {
8733                params.push(self.parse_procedure_param()?)
8734            }
8735            let comma = self.consume_token(&Token::Comma);
8736            if self.consume_token(&Token::RParen) {
8737                // allow a trailing comma, even though it's not standard SQL
8738                break;
8739            } else if !comma {
8740                return self.expected("',' or ')' after parameter definition", self.peek_token());
8741            }
8742        }
8743        Ok(Some(params))
8744    }
8745
8746    /// Parse columns and constraints.
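    ///
    /// Parses the parenthesized list that may mix column definitions and table-level
    /// constraints. A minimal sketch (SQL text and dialect are illustrative):
    ///
    /// ```rust,ignore
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "CREATE TABLE t (id INT PRIMARY KEY, name VARCHAR(100), CONSTRAINT uq_name UNIQUE (name))";
    /// let statements = Parser::parse_sql(&GenericDialect {}, sql).unwrap();
    /// ```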
8747    pub fn parse_columns(&mut self) -> Result<(Vec<ColumnDef>, Vec<TableConstraint>), ParserError> {
8748        let mut columns = vec![];
8749        let mut constraints = vec![];
8750        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
8751            return Ok((columns, constraints));
8752        }
8753
8754        loop {
8755            if let Some(constraint) = self.parse_optional_table_constraint()? {
8756                constraints.push(constraint);
8757            } else if let Token::Word(_) = self.peek_token().token {
8758                columns.push(self.parse_column_def()?);
8759            } else {
8760                return self.expected("column name or constraint definition", self.peek_token());
8761            }
8762
8763            let comma = self.consume_token(&Token::Comma);
8764            let rparen = self.peek_token().token == Token::RParen;
8765
8766            if !comma && !rparen {
8767                return self.expected("',' or ')' after column definition", self.peek_token());
8768            };
8769
8770            if rparen
8771                && (!comma
8772                    || self.dialect.supports_column_definition_trailing_commas()
8773                    || self.options.trailing_commas)
8774            {
8775                let _ = self.consume_token(&Token::RParen);
8776                break;
8777            }
8778        }
8779
8780        Ok((columns, constraints))
8781    }
8782
8783    /// Parse procedure parameter.
8784    pub fn parse_procedure_param(&mut self) -> Result<ProcedureParam, ParserError> {
8785        let mode = if self.parse_keyword(Keyword::IN) {
8786            Some(ArgMode::In)
8787        } else if self.parse_keyword(Keyword::OUT) {
8788            Some(ArgMode::Out)
8789        } else if self.parse_keyword(Keyword::INOUT) {
8790            Some(ArgMode::InOut)
8791        } else {
8792            None
8793        };
8794        let name = self.parse_identifier()?;
8795        let data_type = self.parse_data_type()?;
8796        let default = if self.consume_token(&Token::Eq) {
8797            Some(self.parse_expr()?)
8798        } else {
8799            None
8800        };
8801
8802        Ok(ProcedureParam {
8803            name,
8804            data_type,
8805            mode,
8806            default,
8807        })
8808    }
8809
8810    /// Parse column definition.
8811    pub fn parse_column_def(&mut self) -> Result<ColumnDef, ParserError> {
8812        let col_name = self.parse_identifier()?;
8813        let data_type = if self.is_column_type_sqlite_unspecified() {
8814            DataType::Unspecified
8815        } else {
8816            self.parse_data_type()?
8817        };
8818        let mut options = vec![];
8819        loop {
8820            if self.parse_keyword(Keyword::CONSTRAINT) {
8821                let name = Some(self.parse_identifier()?);
8822                if let Some(option) = self.parse_optional_column_option()? {
8823                    options.push(ColumnOptionDef { name, option });
8824                } else {
8825                    return self.expected(
8826                        "constraint details after CONSTRAINT <name>",
8827                        self.peek_token(),
8828                    );
8829                }
8830            } else if let Some(option) = self.parse_optional_column_option()? {
8831                options.push(ColumnOptionDef { name: None, option });
8832            } else {
8833                break;
8834            };
8835        }
8836        Ok(ColumnDef {
8837            name: col_name,
8838            data_type,
8839            options,
8840        })
8841    }
8842
8843    fn is_column_type_sqlite_unspecified(&mut self) -> bool {
8844        if dialect_of!(self is SQLiteDialect) {
8845            match self.peek_token().token {
8846                Token::Word(word) => matches!(
8847                    word.keyword,
8848                    Keyword::CONSTRAINT
8849                        | Keyword::PRIMARY
8850                        | Keyword::NOT
8851                        | Keyword::UNIQUE
8852                        | Keyword::CHECK
8853                        | Keyword::DEFAULT
8854                        | Keyword::COLLATE
8855                        | Keyword::REFERENCES
8856                        | Keyword::GENERATED
8857                        | Keyword::AS
8858                ),
8859                _ => true, // e.g. comma immediately after column name
8860            }
8861        } else {
8862            false
8863        }
8864    }
8865
8866    /// Parse optional column option.
8867    pub fn parse_optional_column_option(&mut self) -> Result<Option<ColumnOption>, ParserError> {
8868        if let Some(option) = self.dialect.parse_column_option(self)? {
8869            return option;
8870        }
8871
8872        self.with_state(
8873            ColumnDefinition,
8874            |parser| -> Result<Option<ColumnOption>, ParserError> {
8875                parser.parse_optional_column_option_inner()
8876            },
8877        )
8878    }
8879
8880    fn parse_optional_column_option_inner(&mut self) -> Result<Option<ColumnOption>, ParserError> {
8881        if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
8882            Ok(Some(ColumnOption::CharacterSet(
8883                self.parse_object_name(false)?,
8884            )))
8885        } else if self.parse_keywords(&[Keyword::COLLATE]) {
8886            Ok(Some(ColumnOption::Collation(
8887                self.parse_object_name(false)?,
8888            )))
8889        } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
8890            Ok(Some(ColumnOption::NotNull))
8891        } else if self.parse_keywords(&[Keyword::COMMENT]) {
8892            Ok(Some(ColumnOption::Comment(self.parse_comment_value()?)))
8893        } else if self.parse_keyword(Keyword::NULL) {
8894            Ok(Some(ColumnOption::Null))
8895        } else if self.parse_keyword(Keyword::DEFAULT) {
8896            Ok(Some(ColumnOption::Default(self.parse_expr()?)))
8897        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
8898            && self.parse_keyword(Keyword::MATERIALIZED)
8899        {
8900            Ok(Some(ColumnOption::Materialized(self.parse_expr()?)))
8901        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
8902            && self.parse_keyword(Keyword::ALIAS)
8903        {
8904            Ok(Some(ColumnOption::Alias(self.parse_expr()?)))
8905        } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
8906            && self.parse_keyword(Keyword::EPHEMERAL)
8907        {
8908            // The expression is optional for the EPHEMERAL syntax, so we need to check
8909            // if the column definition has remaining tokens before parsing the expression.
8910            if matches!(self.peek_token().token, Token::Comma | Token::RParen) {
8911                Ok(Some(ColumnOption::Ephemeral(None)))
8912            } else {
8913                Ok(Some(ColumnOption::Ephemeral(Some(self.parse_expr()?))))
8914            }
8915        } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
8916            let characteristics = self.parse_constraint_characteristics()?;
8917            Ok(Some(
8918                PrimaryKeyConstraint {
8919                    name: None,
8920                    index_name: None,
8921                    index_type: None,
8922                    columns: vec![],
8923                    index_options: vec![],
8924                    characteristics,
8925                }
8926                .into(),
8927            ))
8928        } else if self.parse_keyword(Keyword::UNIQUE) {
8929            let characteristics = self.parse_constraint_characteristics()?;
8930            Ok(Some(
8931                UniqueConstraint {
8932                    name: None,
8933                    index_name: None,
8934                    index_type_display: KeyOrIndexDisplay::None,
8935                    index_type: None,
8936                    columns: vec![],
8937                    index_options: vec![],
8938                    characteristics,
8939                    nulls_distinct: NullsDistinctOption::None,
8940                }
8941                .into(),
8942            ))
8943        } else if self.parse_keyword(Keyword::REFERENCES) {
8944            let foreign_table = self.parse_object_name(false)?;
8945            // PostgreSQL allows omitting the column list and
8946            // uses the primary key column of the foreign table by default
8947            let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
8948            let mut match_kind = None;
8949            let mut on_delete = None;
8950            let mut on_update = None;
8951            loop {
8952                if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
8953                    match_kind = Some(self.parse_match_kind()?);
8954                } else if on_delete.is_none()
8955                    && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
8956                {
8957                    on_delete = Some(self.parse_referential_action()?);
8958                } else if on_update.is_none()
8959                    && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
8960                {
8961                    on_update = Some(self.parse_referential_action()?);
8962                } else {
8963                    break;
8964                }
8965            }
8966            let characteristics = self.parse_constraint_characteristics()?;
8967
8968            Ok(Some(
8969                ForeignKeyConstraint {
8970                    name: None,       // Column-level constraints don't have names
8971                    index_name: None, // Not applicable for column-level constraints
8972                    columns: vec![],  // Not applicable for column-level constraints
8973                    foreign_table,
8974                    referred_columns,
8975                    on_delete,
8976                    on_update,
8977                    match_kind,
8978                    characteristics,
8979                }
8980                .into(),
8981            ))
8982        } else if self.parse_keyword(Keyword::CHECK) {
8983            self.expect_token(&Token::LParen)?;
8984            // since `CHECK` requires parentheses, we can parse the inner expression in ParserState::Normal
8985            let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
8986            self.expect_token(&Token::RParen)?;
8987
8988            let enforced = if self.parse_keyword(Keyword::ENFORCED) {
8989                Some(true)
8990            } else if self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED]) {
8991                Some(false)
8992            } else {
8993                None
8994            };
8995
8996            Ok(Some(
8997                CheckConstraint {
8998                    name: None, // Column-level check constraints don't have names
8999                    expr: Box::new(expr),
9000                    enforced,
9001                }
9002                .into(),
9003            ))
9004        } else if self.parse_keyword(Keyword::AUTO_INCREMENT)
9005            && dialect_of!(self is MySqlDialect | GenericDialect)
9006        {
9007            // Support AUTO_INCREMENT for MySQL
9008            Ok(Some(ColumnOption::DialectSpecific(vec![
9009                Token::make_keyword("AUTO_INCREMENT"),
9010            ])))
9011        } else if self.parse_keyword(Keyword::AUTOINCREMENT)
9012            && dialect_of!(self is SQLiteDialect |  GenericDialect)
9013        {
9014            // Support AUTOINCREMENT for SQLite
9015            Ok(Some(ColumnOption::DialectSpecific(vec![
9016                Token::make_keyword("AUTOINCREMENT"),
9017            ])))
9018        } else if self.parse_keyword(Keyword::ASC)
9019            && self.dialect.supports_asc_desc_in_column_definition()
9020        {
9021            // Support ASC for SQLite
9022            Ok(Some(ColumnOption::DialectSpecific(vec![
9023                Token::make_keyword("ASC"),
9024            ])))
9025        } else if self.parse_keyword(Keyword::DESC)
9026            && self.dialect.supports_asc_desc_in_column_definition()
9027        {
9028            // Support DESC for SQLite
9029            Ok(Some(ColumnOption::DialectSpecific(vec![
9030                Token::make_keyword("DESC"),
9031            ])))
9032        } else if self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
9033            && dialect_of!(self is MySqlDialect | GenericDialect)
9034        {
9035            let expr = self.parse_expr()?;
9036            Ok(Some(ColumnOption::OnUpdate(expr)))
9037        } else if self.parse_keyword(Keyword::GENERATED) {
9038            self.parse_optional_column_option_generated()
9039        } else if dialect_of!(self is BigQueryDialect | GenericDialect)
9040            && self.parse_keyword(Keyword::OPTIONS)
9041        {
9042            self.prev_token();
9043            Ok(Some(ColumnOption::Options(
9044                self.parse_options(Keyword::OPTIONS)?,
9045            )))
9046        } else if self.parse_keyword(Keyword::AS)
9047            && dialect_of!(self is MySqlDialect | SQLiteDialect | DuckDbDialect | GenericDialect)
9048        {
9049            self.parse_optional_column_option_as()
9050        } else if self.parse_keyword(Keyword::SRID)
9051            && dialect_of!(self is MySqlDialect | GenericDialect)
9052        {
9053            Ok(Some(ColumnOption::Srid(Box::new(self.parse_expr()?))))
9054        } else if self.parse_keyword(Keyword::IDENTITY)
9055            && dialect_of!(self is MsSqlDialect | GenericDialect)
9056        {
9057            let parameters = if self.consume_token(&Token::LParen) {
9058                let seed = self.parse_number()?;
9059                self.expect_token(&Token::Comma)?;
9060                let increment = self.parse_number()?;
9061                self.expect_token(&Token::RParen)?;
9062
9063                Some(IdentityPropertyFormatKind::FunctionCall(
9064                    IdentityParameters { seed, increment },
9065                ))
9066            } else {
9067                None
9068            };
9069            Ok(Some(ColumnOption::Identity(
9070                IdentityPropertyKind::Identity(IdentityProperty {
9071                    parameters,
9072                    order: None,
9073                }),
9074            )))
9075        } else if dialect_of!(self is SQLiteDialect | GenericDialect)
9076            && self.parse_keywords(&[Keyword::ON, Keyword::CONFLICT])
9077        {
9078            // Support ON CONFLICT for SQLite
9079            Ok(Some(ColumnOption::OnConflict(
9080                self.expect_one_of_keywords(&[
9081                    Keyword::ROLLBACK,
9082                    Keyword::ABORT,
9083                    Keyword::FAIL,
9084                    Keyword::IGNORE,
9085                    Keyword::REPLACE,
9086                ])?,
9087            )))
9088        } else if self.parse_keyword(Keyword::INVISIBLE) {
9089            Ok(Some(ColumnOption::Invisible))
9090        } else {
9091            Ok(None)
9092        }
9093    }
9094
9095    pub(crate) fn parse_tag(&mut self) -> Result<Tag, ParserError> {
9096        let name = self.parse_object_name(false)?;
9097        self.expect_token(&Token::Eq)?;
9098        let value = self.parse_literal_string()?;
9099
9100        Ok(Tag::new(name, value))
9101    }
9102
9103    fn parse_optional_column_option_generated(
9104        &mut self,
9105    ) -> Result<Option<ColumnOption>, ParserError> {
9106        if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS, Keyword::IDENTITY]) {
9107            let mut sequence_options = vec![];
9108            if self.expect_token(&Token::LParen).is_ok() {
9109                sequence_options = self.parse_create_sequence_options()?;
9110                self.expect_token(&Token::RParen)?;
9111            }
9112            Ok(Some(ColumnOption::Generated {
9113                generated_as: GeneratedAs::Always,
9114                sequence_options: Some(sequence_options),
9115                generation_expr: None,
9116                generation_expr_mode: None,
9117                generated_keyword: true,
9118            }))
9119        } else if self.parse_keywords(&[
9120            Keyword::BY,
9121            Keyword::DEFAULT,
9122            Keyword::AS,
9123            Keyword::IDENTITY,
9124        ]) {
9125            let mut sequence_options = vec![];
9126            if self.expect_token(&Token::LParen).is_ok() {
9127                sequence_options = self.parse_create_sequence_options()?;
9128                self.expect_token(&Token::RParen)?;
9129            }
9130            Ok(Some(ColumnOption::Generated {
9131                generated_as: GeneratedAs::ByDefault,
9132                sequence_options: Some(sequence_options),
9133                generation_expr: None,
9134                generation_expr_mode: None,
9135                generated_keyword: true,
9136            }))
9137        } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS]) {
9138            if self.expect_token(&Token::LParen).is_ok() {
9139                let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
9140                self.expect_token(&Token::RParen)?;
9141                let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
9142                    Ok((
9143                        GeneratedAs::ExpStored,
9144                        Some(GeneratedExpressionMode::Stored),
9145                    ))
9146                } else if dialect_of!(self is PostgreSqlDialect) {
9147                    // Postgres' `AS IDENTITY` branches are handled above; this branch requires STORED
9148                    self.expected("STORED", self.peek_token())
9149                } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
9150                    Ok((GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual)))
9151                } else {
9152                    Ok((GeneratedAs::Always, None))
9153                }?;
9154
9155                Ok(Some(ColumnOption::Generated {
9156                    generated_as: gen_as,
9157                    sequence_options: None,
9158                    generation_expr: Some(expr),
9159                    generation_expr_mode: expr_mode,
9160                    generated_keyword: true,
9161                }))
9162            } else {
9163                Ok(None)
9164            }
9165        } else {
9166            Ok(None)
9167        }
9168    }
9169
9170    fn parse_optional_column_option_as(&mut self) -> Result<Option<ColumnOption>, ParserError> {
9171        // Some DBs allow `AS (expr)` as a shorthand for `GENERATED ALWAYS AS (expr)`
9172        self.expect_token(&Token::LParen)?;
9173        let expr = self.parse_expr()?;
9174        self.expect_token(&Token::RParen)?;
9175
9176        let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
9177            (
9178                GeneratedAs::ExpStored,
9179                Some(GeneratedExpressionMode::Stored),
9180            )
9181        } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
9182            (GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual))
9183        } else {
9184            (GeneratedAs::Always, None)
9185        };
9186
9187        Ok(Some(ColumnOption::Generated {
9188            generated_as: gen_as,
9189            sequence_options: None,
9190            generation_expr: Some(expr),
9191            generation_expr_mode: expr_mode,
9192            generated_keyword: false,
9193        }))
9194    }
9195
9196    /// Parse optional `CLUSTERED BY` clause for Hive/Generic dialects.
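    ///
    /// An illustrative sketch of the clause shape (Hive-flavored; the SQL text is an assumption):
    ///
    /// ```rust,ignore
    /// use sqlparser::dialect::HiveDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "CREATE TABLE t (id INT) CLUSTERED BY (id) SORTED BY (id ASC) INTO 4 BUCKETS";
    /// let statements = Parser::parse_sql(&HiveDialect {}, sql).unwrap();
    /// ```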
9197    pub fn parse_optional_clustered_by(&mut self) -> Result<Option<ClusteredBy>, ParserError> {
9198        let clustered_by = if dialect_of!(self is HiveDialect|GenericDialect)
9199            && self.parse_keywords(&[Keyword::CLUSTERED, Keyword::BY])
9200        {
9201            let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
9202
9203            let sorted_by = if self.parse_keywords(&[Keyword::SORTED, Keyword::BY]) {
9204                self.expect_token(&Token::LParen)?;
9205                let sorted_by_columns = self.parse_comma_separated(|p| p.parse_order_by_expr())?;
9206                self.expect_token(&Token::RParen)?;
9207                Some(sorted_by_columns)
9208            } else {
9209                None
9210            };
9211
9212            self.expect_keyword_is(Keyword::INTO)?;
9213            let num_buckets = self.parse_number_value()?.value;
9214            self.expect_keyword_is(Keyword::BUCKETS)?;
9215            Some(ClusteredBy {
9216                columns,
9217                sorted_by,
9218                num_buckets,
9219            })
9220        } else {
9221            None
9222        };
9223        Ok(clustered_by)
9224    }
9225
9226    /// Parse a referential action used in foreign key clauses.
9227    ///
9228    /// Recognized forms: `RESTRICT`, `CASCADE`, `SET NULL`, `NO ACTION`, `SET DEFAULT`.
9229    pub fn parse_referential_action(&mut self) -> Result<ReferentialAction, ParserError> {
9230        if self.parse_keyword(Keyword::RESTRICT) {
9231            Ok(ReferentialAction::Restrict)
9232        } else if self.parse_keyword(Keyword::CASCADE) {
9233            Ok(ReferentialAction::Cascade)
9234        } else if self.parse_keywords(&[Keyword::SET, Keyword::NULL]) {
9235            Ok(ReferentialAction::SetNull)
9236        } else if self.parse_keywords(&[Keyword::NO, Keyword::ACTION]) {
9237            Ok(ReferentialAction::NoAction)
9238        } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
9239            Ok(ReferentialAction::SetDefault)
9240        } else {
9241            self.expected(
9242                "one of RESTRICT, CASCADE, SET NULL, NO ACTION or SET DEFAULT",
9243                self.peek_token(),
9244            )
9245        }
9246    }
9247
9248    /// Parse a `MATCH` kind for constraint references: `FULL`, `PARTIAL`, or `SIMPLE`.
9249    pub fn parse_match_kind(&mut self) -> Result<ConstraintReferenceMatchKind, ParserError> {
9250        if self.parse_keyword(Keyword::FULL) {
9251            Ok(ConstraintReferenceMatchKind::Full)
9252        } else if self.parse_keyword(Keyword::PARTIAL) {
9253            Ok(ConstraintReferenceMatchKind::Partial)
9254        } else if self.parse_keyword(Keyword::SIMPLE) {
9255            Ok(ConstraintReferenceMatchKind::Simple)
9256        } else {
9257            self.expected("one of FULL, PARTIAL or SIMPLE", self.peek_token())
9258        }
9259    }
9260
9261    /// Parse optional constraint characteristics such as `DEFERRABLE`, `INITIALLY` and `ENFORCED`.
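    ///
    /// A short sketch of a constraint carrying characteristics (illustrative SQL; PostgreSQL
    /// dialect assumed):
    ///
    /// ```rust,ignore
    /// use sqlparser::dialect::PostgreSqlDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let sql = "CREATE TABLE t (a INT REFERENCES other (id) DEFERRABLE INITIALLY DEFERRED)";
    /// let statements = Parser::parse_sql(&PostgreSqlDialect {}, sql).unwrap();
    /// ```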
9262    pub fn parse_constraint_characteristics(
9263        &mut self,
9264    ) -> Result<Option<ConstraintCharacteristics>, ParserError> {
9265        let mut cc = ConstraintCharacteristics::default();
9266
9267        loop {
9268            if cc.deferrable.is_none() && self.parse_keywords(&[Keyword::NOT, Keyword::DEFERRABLE])
9269            {
9270                cc.deferrable = Some(false);
9271            } else if cc.deferrable.is_none() && self.parse_keyword(Keyword::DEFERRABLE) {
9272                cc.deferrable = Some(true);
9273            } else if cc.initially.is_none() && self.parse_keyword(Keyword::INITIALLY) {
9274                if self.parse_keyword(Keyword::DEFERRED) {
9275                    cc.initially = Some(DeferrableInitial::Deferred);
9276                } else if self.parse_keyword(Keyword::IMMEDIATE) {
9277                    cc.initially = Some(DeferrableInitial::Immediate);
9278                } else {
9279                    self.expected("one of DEFERRED or IMMEDIATE", self.peek_token())?;
9280                }
9281            } else if cc.enforced.is_none() && self.parse_keyword(Keyword::ENFORCED) {
9282                cc.enforced = Some(true);
9283            } else if cc.enforced.is_none()
9284                && self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED])
9285            {
9286                cc.enforced = Some(false);
9287            } else {
9288                break;
9289            }
9290        }
9291
9292        if cc.deferrable.is_some() || cc.initially.is_some() || cc.enforced.is_some() {
9293            Ok(Some(cc))
9294        } else {
9295            Ok(None)
9296        }
9297    }
9298
9299    /// Parse an optional table constraint (e.g. `PRIMARY KEY`, `UNIQUE`, `FOREIGN KEY`, `CHECK`).
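    /// Illustrative constraints this method recognizes (identifiers are placeholders):
    /// ```sql
    /// CONSTRAINT uq_email UNIQUE (email)
    /// PRIMARY KEY (id)
    /// FOREIGN KEY (order_id) REFERENCES orders(id)
    /// CHECK (price > 0)
    /// ```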
9300    pub fn parse_optional_table_constraint(
9301        &mut self,
9302    ) -> Result<Option<TableConstraint>, ParserError> {
9303        let name = if self.parse_keyword(Keyword::CONSTRAINT) {
9304            if self.dialect.supports_constraint_keyword_without_name()
9305                && self
9306                    .peek_one_of_keywords(&[
9307                        Keyword::CHECK,
9308                        Keyword::PRIMARY,
9309                        Keyword::UNIQUE,
9310                        Keyword::FOREIGN,
9311                    ])
9312                    .is_some()
9313            {
9314                None
9315            } else {
9316                Some(self.parse_identifier()?)
9317            }
9318        } else {
9319            None
9320        };
9321
9322        let next_token = self.next_token();
9323        match next_token.token {
9324            Token::Word(w) if w.keyword == Keyword::UNIQUE => {
9325                let index_type_display = self.parse_index_type_display();
9326                if !dialect_of!(self is GenericDialect | MySqlDialect)
9327                    && !index_type_display.is_none()
9328                {
9329                    return self
9330                        .expected("`index_name` or `(column_name [, ...])`", self.peek_token());
9331                }
9332
9333                let nulls_distinct = self.parse_optional_nulls_distinct()?;
9334
9335                // optional index name
9336                let index_name = self.parse_optional_ident()?;
9337                let index_type = self.parse_optional_using_then_index_type()?;
9338
9339                let columns = self.parse_parenthesized_index_column_list()?;
9340                let index_options = self.parse_index_options()?;
9341                let characteristics = self.parse_constraint_characteristics()?;
9342                Ok(Some(
9343                    UniqueConstraint {
9344                        name,
9345                        index_name,
9346                        index_type_display,
9347                        index_type,
9348                        columns,
9349                        index_options,
9350                        characteristics,
9351                        nulls_distinct,
9352                    }
9353                    .into(),
9354                ))
9355            }
9356            Token::Word(w) if w.keyword == Keyword::PRIMARY => {
9357                // `PRIMARY` must always be followed by `KEY`
9358                self.expect_keyword_is(Keyword::KEY)?;
9359
9360                // optional index name
9361                let index_name = self.parse_optional_ident()?;
9362                let index_type = self.parse_optional_using_then_index_type()?;
9363
9364                let columns = self.parse_parenthesized_index_column_list()?;
9365                let index_options = self.parse_index_options()?;
9366                let characteristics = self.parse_constraint_characteristics()?;
9367                Ok(Some(
9368                    PrimaryKeyConstraint {
9369                        name,
9370                        index_name,
9371                        index_type,
9372                        columns,
9373                        index_options,
9374                        characteristics,
9375                    }
9376                    .into(),
9377                ))
9378            }
9379            Token::Word(w) if w.keyword == Keyword::FOREIGN => {
9380                self.expect_keyword_is(Keyword::KEY)?;
9381                let index_name = self.parse_optional_ident()?;
9382                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
9383                self.expect_keyword_is(Keyword::REFERENCES)?;
9384                let foreign_table = self.parse_object_name(false)?;
9385                let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
9386                let mut match_kind = None;
9387                let mut on_delete = None;
9388                let mut on_update = None;
9389                loop {
9390                    if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
9391                        match_kind = Some(self.parse_match_kind()?);
9392                    } else if on_delete.is_none()
9393                        && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
9394                    {
9395                        on_delete = Some(self.parse_referential_action()?);
9396                    } else if on_update.is_none()
9397                        && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
9398                    {
9399                        on_update = Some(self.parse_referential_action()?);
9400                    } else {
9401                        break;
9402                    }
9403                }
9404
9405                let characteristics = self.parse_constraint_characteristics()?;
9406
9407                Ok(Some(
9408                    ForeignKeyConstraint {
9409                        name,
9410                        index_name,
9411                        columns,
9412                        foreign_table,
9413                        referred_columns,
9414                        on_delete,
9415                        on_update,
9416                        match_kind,
9417                        characteristics,
9418                    }
9419                    .into(),
9420                ))
9421            }
9422            Token::Word(w) if w.keyword == Keyword::CHECK => {
9423                self.expect_token(&Token::LParen)?;
9424                let expr = Box::new(self.parse_expr()?);
9425                self.expect_token(&Token::RParen)?;
9426
9427                let enforced = if self.parse_keyword(Keyword::ENFORCED) {
9428                    Some(true)
9429                } else if self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED]) {
9430                    Some(false)
9431                } else {
9432                    None
9433                };
9434
9435                Ok(Some(
9436                    CheckConstraint {
9437                        name,
9438                        expr,
9439                        enforced,
9440                    }
9441                    .into(),
9442                ))
9443            }
9444            Token::Word(w)
9445                if (w.keyword == Keyword::INDEX || w.keyword == Keyword::KEY)
9446                    && dialect_of!(self is GenericDialect | MySqlDialect)
9447                    && name.is_none() =>
9448            {
9449                let display_as_key = w.keyword == Keyword::KEY;
9450
9451                let name = match self.peek_token().token {
9452                    Token::Word(word) if word.keyword == Keyword::USING => None,
9453                    _ => self.parse_optional_ident()?,
9454                };
9455
9456                let index_type = self.parse_optional_using_then_index_type()?;
9457                let columns = self.parse_parenthesized_index_column_list()?;
9458                let index_options = self.parse_index_options()?;
9459
9460                Ok(Some(
9461                    IndexConstraint {
9462                        display_as_key,
9463                        name,
9464                        index_type,
9465                        columns,
9466                        index_options,
9467                    }
9468                    .into(),
9469                ))
9470            }
9471            Token::Word(w)
9472                if (w.keyword == Keyword::FULLTEXT || w.keyword == Keyword::SPATIAL)
9473                    && dialect_of!(self is GenericDialect | MySqlDialect) =>
9474            {
9475                if let Some(name) = name {
9476                    return self.expected(
9477                        "FULLTEXT or SPATIAL option without constraint name",
9478                        TokenWithSpan {
9479                            token: Token::make_keyword(&name.to_string()),
9480                            span: next_token.span,
9481                        },
9482                    );
9483                }
9484
9485                let fulltext = w.keyword == Keyword::FULLTEXT;
9486
9487                let index_type_display = self.parse_index_type_display();
9488
9489                let opt_index_name = self.parse_optional_ident()?;
9490
9491                let columns = self.parse_parenthesized_index_column_list()?;
9492
9493                Ok(Some(
9494                    FullTextOrSpatialConstraint {
9495                        fulltext,
9496                        index_type_display,
9497                        opt_index_name,
9498                        columns,
9499                    }
9500                    .into(),
9501                ))
9502            }
9503            _ => {
9504                if name.is_some() {
9505                    self.expected("PRIMARY, UNIQUE, FOREIGN, or CHECK", next_token)
9506                } else {
9507                    self.prev_token();
9508                    Ok(None)
9509                }
9510            }
9511        }
9512    }
9513
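    /// Parse an optional `NULLS [NOT] DISTINCT` clause, e.g. the one in this
    /// illustrative PostgreSQL-style constraint (identifier is a placeholder):
    /// ```sql
    /// UNIQUE NULLS NOT DISTINCT (email)
    /// ```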
9514    fn parse_optional_nulls_distinct(&mut self) -> Result<NullsDistinctOption, ParserError> {
9515        Ok(if self.parse_keyword(Keyword::NULLS) {
9516            let not = self.parse_keyword(Keyword::NOT);
9517            self.expect_keyword_is(Keyword::DISTINCT)?;
9518            if not {
9519                NullsDistinctOption::NotDistinct
9520            } else {
9521                NullsDistinctOption::Distinct
9522            }
9523        } else {
9524            NullsDistinctOption::None
9525        })
9526    }
9527
9528    /// Optionally parse a parenthesized list of `SqlOption`s introduced by `keyword`.
9529    pub fn maybe_parse_options(
9530        &mut self,
9531        keyword: Keyword,
9532    ) -> Result<Option<Vec<SqlOption>>, ParserError> {
9533        if let Token::Word(word) = self.peek_token().token {
9534            if word.keyword == keyword {
9535                return Ok(Some(self.parse_options(keyword)?));
9536            }
9537        };
9538        Ok(None)
9539    }
9540
9541    /// Parse a parenthesized list of `SqlOption`s following `keyword`, or return an empty vec.
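    /// For example, with `keyword` set to `WITH` (option names and values are
    /// illustrative):
    /// ```sql
    /// WITH (FILLFACTOR = 70, ORIENTATION = COLUMN)
    /// ```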
9542    pub fn parse_options(&mut self, keyword: Keyword) -> Result<Vec<SqlOption>, ParserError> {
9543        if self.parse_keyword(keyword) {
9544            self.expect_token(&Token::LParen)?;
9545            let options = self.parse_comma_separated0(Parser::parse_sql_option, Token::RParen)?;
9546            self.expect_token(&Token::RParen)?;
9547            Ok(options)
9548        } else {
9549            Ok(vec![])
9550        }
9551    }
9552
9553    /// Parse options introduced by the keyword sequence `keywords`, followed by a parenthesized list.
9554    pub fn parse_options_with_keywords(
9555        &mut self,
9556        keywords: &[Keyword],
9557    ) -> Result<Vec<SqlOption>, ParserError> {
9558        if self.parse_keywords(keywords) {
9559            self.expect_token(&Token::LParen)?;
9560            let options = self.parse_comma_separated(Parser::parse_sql_option)?;
9561            self.expect_token(&Token::RParen)?;
9562            Ok(options)
9563        } else {
9564            Ok(vec![])
9565        }
9566    }
9567
9568    /// Parse an index type token (e.g. `BTREE`, `HASH`, or a custom identifier).
9569    pub fn parse_index_type(&mut self) -> Result<IndexType, ParserError> {
9570        Ok(if self.parse_keyword(Keyword::BTREE) {
9571            IndexType::BTree
9572        } else if self.parse_keyword(Keyword::HASH) {
9573            IndexType::Hash
9574        } else if self.parse_keyword(Keyword::GIN) {
9575            IndexType::GIN
9576        } else if self.parse_keyword(Keyword::GIST) {
9577            IndexType::GiST
9578        } else if self.parse_keyword(Keyword::SPGIST) {
9579            IndexType::SPGiST
9580        } else if self.parse_keyword(Keyword::BRIN) {
9581            IndexType::BRIN
9582        } else if self.parse_keyword(Keyword::BLOOM) {
9583            IndexType::Bloom
9584        } else {
9585            IndexType::Custom(self.parse_identifier()?)
9586        })
9587    }
9588
9589    /// Optionally parse the `USING` keyword followed by an [IndexType],
9590    /// returning the parsed `IndexType` if present.
9591    /// Example:
9592    /// ```sql
9593    /// USING BTREE (name, age DESC)
9594    /// ```
9595    pub fn parse_optional_using_then_index_type(
9596        &mut self,
9597    ) -> Result<Option<IndexType>, ParserError> {
9598        if self.parse_keyword(Keyword::USING) {
9599            Ok(Some(self.parse_index_type()?))
9600        } else {
9601            Ok(None)
9602        }
9603    }
9604
9605    /// Parse an optional identifier, returning `Some(Ident)` if present.
9606    /// The identifier is typically a name such as a
9607    /// `window_name` or `index_name`.
9608    pub fn parse_optional_ident(&mut self) -> Result<Option<Ident>, ParserError> {
9609        self.maybe_parse(|parser| parser.parse_identifier())
9610    }
9611
9612    #[must_use]
9613    /// Parse optional `KEY` or `INDEX` display tokens used in index/constraint declarations.
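    /// For example, the `KEY` in this illustrative MySQL-style constraint
    /// (identifiers are placeholders):
    /// ```sql
    /// UNIQUE KEY uk_email (email)
    /// ```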
9614    pub fn parse_index_type_display(&mut self) -> KeyOrIndexDisplay {
9615        if self.parse_keyword(Keyword::KEY) {
9616            KeyOrIndexDisplay::Key
9617        } else if self.parse_keyword(Keyword::INDEX) {
9618            KeyOrIndexDisplay::Index
9619        } else {
9620            KeyOrIndexDisplay::None
9621        }
9622    }
9623
9624    /// Parse an optional index option such as `USING <type>` or `COMMENT <string>`.
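    /// For example, the `USING BTREE` and `COMMENT ...` options in this
    /// illustrative MySQL-style constraint (identifiers are placeholders):
    /// ```sql
    /// UNIQUE KEY uk_email (email) USING BTREE COMMENT 'email lookup'
    /// ```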
9625    pub fn parse_optional_index_option(&mut self) -> Result<Option<IndexOption>, ParserError> {
9626        if let Some(index_type) = self.parse_optional_using_then_index_type()? {
9627            Ok(Some(IndexOption::Using(index_type)))
9628        } else if self.parse_keyword(Keyword::COMMENT) {
9629            let s = self.parse_literal_string()?;
9630            Ok(Some(IndexOption::Comment(s)))
9631        } else {
9632            Ok(None)
9633        }
9634    }
9635
9636    /// Parse zero or more index options and return them as a vector.
9637    pub fn parse_index_options(&mut self) -> Result<Vec<IndexOption>, ParserError> {
9638        let mut options = Vec::new();
9639
9640        loop {
9641            match self.parse_optional_index_option()? {
9642                Some(index_option) => options.push(index_option),
9643                None => return Ok(options),
9644            }
9645        }
9646    }
9647
9648    /// Parse a single `SqlOption` used by various dialect-specific DDL statements.
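    /// For example, each comma-separated item below is one `SqlOption`
    /// (MSSQL-style, names and values are illustrative):
    /// ```sql
    /// WITH (DISTRIBUTION = ROUND_ROBIN, HEAP)
    /// ```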
9649    pub fn parse_sql_option(&mut self) -> Result<SqlOption, ParserError> {
9650        let is_mssql = dialect_of!(self is MsSqlDialect|GenericDialect);
9651
9652        match self.peek_token().token {
9653            Token::Word(w) if w.keyword == Keyword::HEAP && is_mssql => {
9654                Ok(SqlOption::Ident(self.parse_identifier()?))
9655            }
9656            Token::Word(w) if w.keyword == Keyword::PARTITION && is_mssql => {
9657                self.parse_option_partition()
9658            }
9659            Token::Word(w) if w.keyword == Keyword::CLUSTERED && is_mssql => {
9660                self.parse_option_clustered()
9661            }
9662            _ => {
9663                let name = self.parse_identifier()?;
9664                self.expect_token(&Token::Eq)?;
9665                let value = self.parse_expr()?;
9666
9667                Ok(SqlOption::KeyValue { key: name, value })
9668            }
9669        }
9670    }
9671
9672    /// Parse a `CLUSTERED` table option (MSSQL-specific syntax).
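    /// Illustrative forms accepted (column names are placeholders):
    /// ```sql
    /// CLUSTERED COLUMNSTORE INDEX
    /// CLUSTERED COLUMNSTORE INDEX ORDER (order_date, customer_id)
    /// CLUSTERED INDEX (order_date DESC, id ASC)
    /// ```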
9673    pub fn parse_option_clustered(&mut self) -> Result<SqlOption, ParserError> {
9674        if self.parse_keywords(&[
9675            Keyword::CLUSTERED,
9676            Keyword::COLUMNSTORE,
9677            Keyword::INDEX,
9678            Keyword::ORDER,
9679        ]) {
9680            Ok(SqlOption::Clustered(
9681                TableOptionsClustered::ColumnstoreIndexOrder(
9682                    self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
9683                ),
9684            ))
9685        } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::COLUMNSTORE, Keyword::INDEX]) {
9686            Ok(SqlOption::Clustered(
9687                TableOptionsClustered::ColumnstoreIndex,
9688            ))
9689        } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::INDEX]) {
9690            self.expect_token(&Token::LParen)?;
9691
9692            let columns = self.parse_comma_separated(|p| {
9693                let name = p.parse_identifier()?;
9694                let asc = p.parse_asc_desc();
9695
9696                Ok(ClusteredIndex { name, asc })
9697            })?;
9698
9699            self.expect_token(&Token::RParen)?;
9700
9701            Ok(SqlOption::Clustered(TableOptionsClustered::Index(columns)))
9702        } else {
9703            Err(ParserError::ParserError(
9704                "invalid CLUSTERED sequence".to_string(),
9705            ))
9706        }
9707    }
9708
9709    /// Parse a `PARTITION(...) FOR VALUES(...)` table option.
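    /// An illustrative MSSQL-style example (column name and values are placeholders):
    /// ```sql
    /// PARTITION (order_date RANGE RIGHT FOR VALUES ('2024-01-01', '2024-02-01'))
    /// ```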
9710    pub fn parse_option_partition(&mut self) -> Result<SqlOption, ParserError> {
9711        self.expect_keyword_is(Keyword::PARTITION)?;
9712        self.expect_token(&Token::LParen)?;
9713        let column_name = self.parse_identifier()?;
9714
9715        self.expect_keyword_is(Keyword::RANGE)?;
9716        let range_direction = if self.parse_keyword(Keyword::LEFT) {
9717            Some(PartitionRangeDirection::Left)
9718        } else if self.parse_keyword(Keyword::RIGHT) {
9719            Some(PartitionRangeDirection::Right)
9720        } else {
9721            None
9722        };
9723
9724        self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;
9725        self.expect_token(&Token::LParen)?;
9726
9727        let for_values = self.parse_comma_separated(Parser::parse_expr)?;
9728
9729        self.expect_token(&Token::RParen)?;
9730        self.expect_token(&Token::RParen)?;
9731
9732        Ok(SqlOption::Partition {
9733            column_name,
9734            range_direction,
9735            for_values,
9736        })
9737    }
9738
9739    /// Parse a parenthesized list of partition expressions and return a `Partition` value.
9740    pub fn parse_partition(&mut self) -> Result<Partition, ParserError> {
9741        self.expect_token(&Token::LParen)?;
9742        let partitions = self.parse_comma_separated(Parser::parse_expr)?;
9743        self.expect_token(&Token::RParen)?;
9744        Ok(Partition::Partitions(partitions))
9745    }
9746
9747    /// Parse a parenthesized `SELECT` projection, as used by `ALTER TABLE ... ADD PROJECTION`.
9748    pub fn parse_projection_select(&mut self) -> Result<ProjectionSelect, ParserError> {
9749        self.expect_token(&Token::LParen)?;
9750        self.expect_keyword_is(Keyword::SELECT)?;
9751        let projection = self.parse_projection()?;
9752        let group_by = self.parse_optional_group_by()?;
9753        let order_by = self.parse_optional_order_by()?;
9754        self.expect_token(&Token::RParen)?;
9755        Ok(ProjectionSelect {
9756            projection,
9757            group_by,
9758            order_by,
9759        })
9760    }
9761    /// Parse an `ALTER TABLE ... ADD PROJECTION ...` operation.
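    /// For example, everything after `ADD PROJECTION` in this illustrative
    /// ClickHouse-style statement (identifiers are placeholders):
    /// ```sql
    /// ALTER TABLE t ADD PROJECTION IF NOT EXISTS p (SELECT a, b ORDER BY a)
    /// ```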
9762    pub fn parse_alter_table_add_projection(&mut self) -> Result<AlterTableOperation, ParserError> {
9763        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
9764        let name = self.parse_identifier()?;
9765        let query = self.parse_projection_select()?;
9766        Ok(AlterTableOperation::AddProjection {
9767            if_not_exists,
9768            name,
9769            select: query,
9770        })
9771    }
9772
9773    /// Parse a single `ALTER TABLE` operation and return an `AlterTableOperation`.
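    /// Illustrative operations this method can parse, one per line; which forms are
    /// accepted depends on the active dialect (identifiers are placeholders):
    /// ```sql
    /// ADD COLUMN IF NOT EXISTS note TEXT
    /// DROP COLUMN IF EXISTS legacy_flag CASCADE
    /// RENAME COLUMN qty TO quantity
    /// ALTER COLUMN price SET NOT NULL
    /// ```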
9774    pub fn parse_alter_table_operation(&mut self) -> Result<AlterTableOperation, ParserError> {
9775        let operation = if self.parse_keyword(Keyword::ADD) {
9776            if let Some(constraint) = self.parse_optional_table_constraint()? {
9777                let not_valid = self.parse_keywords(&[Keyword::NOT, Keyword::VALID]);
9778                AlterTableOperation::AddConstraint {
9779                    constraint,
9780                    not_valid,
9781                }
9782            } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
9783                && self.parse_keyword(Keyword::PROJECTION)
9784            {
9785                return self.parse_alter_table_add_projection();
9786            } else {
9787                let if_not_exists =
9788                    self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
9789                let mut new_partitions = vec![];
9790                loop {
9791                    if self.parse_keyword(Keyword::PARTITION) {
9792                        new_partitions.push(self.parse_partition()?);
9793                    } else {
9794                        break;
9795                    }
9796                }
9797                if !new_partitions.is_empty() {
9798                    AlterTableOperation::AddPartitions {
9799                        if_not_exists,
9800                        new_partitions,
9801                    }
9802                } else {
9803                    let column_keyword = self.parse_keyword(Keyword::COLUMN);
9804
9805                    let if_not_exists = if dialect_of!(self is PostgreSqlDialect | BigQueryDialect | DuckDbDialect | GenericDialect)
9806                    {
9807                        self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS])
9808                            || if_not_exists
9809                    } else {
9810                        false
9811                    };
9812
9813                    let column_def = self.parse_column_def()?;
9814
9815                    let column_position = self.parse_column_position()?;
9816
9817                    AlterTableOperation::AddColumn {
9818                        column_keyword,
9819                        if_not_exists,
9820                        column_def,
9821                        column_position,
9822                    }
9823                }
9824            }
9825        } else if self.parse_keyword(Keyword::RENAME) {
9826            if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::CONSTRAINT) {
9827                let old_name = self.parse_identifier()?;
9828                self.expect_keyword_is(Keyword::TO)?;
9829                let new_name = self.parse_identifier()?;
9830                AlterTableOperation::RenameConstraint { old_name, new_name }
9831            } else if self.parse_keyword(Keyword::TO) {
9832                let table_name = self.parse_object_name(false)?;
9833                AlterTableOperation::RenameTable {
9834                    table_name: RenameTableNameKind::To(table_name),
9835                }
9836            } else if self.parse_keyword(Keyword::AS) {
9837                let table_name = self.parse_object_name(false)?;
9838                AlterTableOperation::RenameTable {
9839                    table_name: RenameTableNameKind::As(table_name),
9840                }
9841            } else {
9842                let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
9843                let old_column_name = self.parse_identifier()?;
9844                self.expect_keyword_is(Keyword::TO)?;
9845                let new_column_name = self.parse_identifier()?;
9846                AlterTableOperation::RenameColumn {
9847                    old_column_name,
9848                    new_column_name,
9849                }
9850            }
9851        } else if self.parse_keyword(Keyword::DISABLE) {
9852            if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
9853                AlterTableOperation::DisableRowLevelSecurity {}
9854            } else if self.parse_keyword(Keyword::RULE) {
9855                let name = self.parse_identifier()?;
9856                AlterTableOperation::DisableRule { name }
9857            } else if self.parse_keyword(Keyword::TRIGGER) {
9858                let name = self.parse_identifier()?;
9859                AlterTableOperation::DisableTrigger { name }
9860            } else {
9861                return self.expected(
9862                    "ROW LEVEL SECURITY, RULE, or TRIGGER after DISABLE",
9863                    self.peek_token(),
9864                );
9865            }
9866        } else if self.parse_keyword(Keyword::ENABLE) {
9867            if self.parse_keywords(&[Keyword::ALWAYS, Keyword::RULE]) {
9868                let name = self.parse_identifier()?;
9869                AlterTableOperation::EnableAlwaysRule { name }
9870            } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::TRIGGER]) {
9871                let name = self.parse_identifier()?;
9872                AlterTableOperation::EnableAlwaysTrigger { name }
9873            } else if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
9874                AlterTableOperation::EnableRowLevelSecurity {}
9875            } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::RULE]) {
9876                let name = self.parse_identifier()?;
9877                AlterTableOperation::EnableReplicaRule { name }
9878            } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::TRIGGER]) {
9879                let name = self.parse_identifier()?;
9880                AlterTableOperation::EnableReplicaTrigger { name }
9881            } else if self.parse_keyword(Keyword::RULE) {
9882                let name = self.parse_identifier()?;
9883                AlterTableOperation::EnableRule { name }
9884            } else if self.parse_keyword(Keyword::TRIGGER) {
9885                let name = self.parse_identifier()?;
9886                AlterTableOperation::EnableTrigger { name }
9887            } else {
9888                return self.expected(
9889                    "ALWAYS, REPLICA, ROW LEVEL SECURITY, RULE, or TRIGGER after ENABLE",
9890                    self.peek_token(),
9891                );
9892            }
9893        } else if self.parse_keywords(&[
9894            Keyword::FORCE,
9895            Keyword::ROW,
9896            Keyword::LEVEL,
9897            Keyword::SECURITY,
9898        ]) {
9899            AlterTableOperation::ForceRowLevelSecurity
9900        } else if self.parse_keywords(&[
9901            Keyword::NO,
9902            Keyword::FORCE,
9903            Keyword::ROW,
9904            Keyword::LEVEL,
9905            Keyword::SECURITY,
9906        ]) {
9907            AlterTableOperation::NoForceRowLevelSecurity
9908        } else if self.parse_keywords(&[Keyword::CLEAR, Keyword::PROJECTION])
9909            && dialect_of!(self is ClickHouseDialect|GenericDialect)
9910        {
9911            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9912            let name = self.parse_identifier()?;
9913            let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
9914                Some(self.parse_identifier()?)
9915            } else {
9916                None
9917            };
9918            AlterTableOperation::ClearProjection {
9919                if_exists,
9920                name,
9921                partition,
9922            }
9923        } else if self.parse_keywords(&[Keyword::MATERIALIZE, Keyword::PROJECTION])
9924            && dialect_of!(self is ClickHouseDialect|GenericDialect)
9925        {
9926            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9927            let name = self.parse_identifier()?;
9928            let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
9929                Some(self.parse_identifier()?)
9930            } else {
9931                None
9932            };
9933            AlterTableOperation::MaterializeProjection {
9934                if_exists,
9935                name,
9936                partition,
9937            }
9938        } else if self.parse_keyword(Keyword::DROP) {
9939            if self.parse_keywords(&[Keyword::IF, Keyword::EXISTS, Keyword::PARTITION]) {
9940                self.expect_token(&Token::LParen)?;
9941                let partitions = self.parse_comma_separated(Parser::parse_expr)?;
9942                self.expect_token(&Token::RParen)?;
9943                AlterTableOperation::DropPartitions {
9944                    partitions,
9945                    if_exists: true,
9946                }
9947            } else if self.parse_keyword(Keyword::PARTITION) {
9948                self.expect_token(&Token::LParen)?;
9949                let partitions = self.parse_comma_separated(Parser::parse_expr)?;
9950                self.expect_token(&Token::RParen)?;
9951                AlterTableOperation::DropPartitions {
9952                    partitions,
9953                    if_exists: false,
9954                }
9955            } else if self.parse_keyword(Keyword::CONSTRAINT) {
9956                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9957                let name = self.parse_identifier()?;
9958                let drop_behavior = self.parse_optional_drop_behavior();
9959                AlterTableOperation::DropConstraint {
9960                    if_exists,
9961                    name,
9962                    drop_behavior,
9963                }
9964            } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
9965                let drop_behavior = self.parse_optional_drop_behavior();
9966                AlterTableOperation::DropPrimaryKey { drop_behavior }
9967            } else if self.parse_keywords(&[Keyword::FOREIGN, Keyword::KEY]) {
9968                let name = self.parse_identifier()?;
9969                let drop_behavior = self.parse_optional_drop_behavior();
9970                AlterTableOperation::DropForeignKey {
9971                    name,
9972                    drop_behavior,
9973                }
9974            } else if self.parse_keyword(Keyword::INDEX) {
9975                let name = self.parse_identifier()?;
9976                AlterTableOperation::DropIndex { name }
9977            } else if self.parse_keyword(Keyword::PROJECTION)
9978                && dialect_of!(self is ClickHouseDialect|GenericDialect)
9979            {
9980                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9981                let name = self.parse_identifier()?;
9982                AlterTableOperation::DropProjection { if_exists, name }
9983            } else if self.parse_keywords(&[Keyword::CLUSTERING, Keyword::KEY]) {
9984                AlterTableOperation::DropClusteringKey
9985            } else {
9986                let has_column_keyword = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
9987                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9988                let column_names = if self.dialect.supports_comma_separated_drop_column_list() {
9989                    self.parse_comma_separated(Parser::parse_identifier)?
9990                } else {
9991                    vec![self.parse_identifier()?]
9992                };
9993                let drop_behavior = self.parse_optional_drop_behavior();
9994                AlterTableOperation::DropColumn {
9995                    has_column_keyword,
9996                    column_names,
9997                    if_exists,
9998                    drop_behavior,
9999                }
10000            }
10001        } else if self.parse_keyword(Keyword::PARTITION) {
10002            self.expect_token(&Token::LParen)?;
10003            let before = self.parse_comma_separated(Parser::parse_expr)?;
10004            self.expect_token(&Token::RParen)?;
10005            self.expect_keyword_is(Keyword::RENAME)?;
10006            self.expect_keywords(&[Keyword::TO, Keyword::PARTITION])?;
10007            self.expect_token(&Token::LParen)?;
10008            let renames = self.parse_comma_separated(Parser::parse_expr)?;
10009            self.expect_token(&Token::RParen)?;
10010            AlterTableOperation::RenamePartitions {
10011                old_partitions: before,
10012                new_partitions: renames,
10013            }
10014        } else if self.parse_keyword(Keyword::CHANGE) {
10015            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
10016            let old_name = self.parse_identifier()?;
10017            let new_name = self.parse_identifier()?;
10018            let data_type = self.parse_data_type()?;
10019            let mut options = vec![];
10020            while let Some(option) = self.parse_optional_column_option()? {
10021                options.push(option);
10022            }
10023
10024            let column_position = self.parse_column_position()?;
10025
10026            AlterTableOperation::ChangeColumn {
10027                old_name,
10028                new_name,
10029                data_type,
10030                options,
10031                column_position,
10032            }
10033        } else if self.parse_keyword(Keyword::MODIFY) {
10034            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
10035            let col_name = self.parse_identifier()?;
10036            let data_type = self.parse_data_type()?;
10037            let mut options = vec![];
10038            while let Some(option) = self.parse_optional_column_option()? {
10039                options.push(option);
10040            }
10041
10042            let column_position = self.parse_column_position()?;
10043
10044            AlterTableOperation::ModifyColumn {
10045                col_name,
10046                data_type,
10047                options,
10048                column_position,
10049            }
10050        } else if self.parse_keyword(Keyword::ALTER) {
10051            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
10052            let column_name = self.parse_identifier()?;
10053            let is_postgresql = dialect_of!(self is PostgreSqlDialect);
10054
10055            let op: AlterColumnOperation = if self.parse_keywords(&[
10056                Keyword::SET,
10057                Keyword::NOT,
10058                Keyword::NULL,
10059            ]) {
10060                AlterColumnOperation::SetNotNull {}
10061            } else if self.parse_keywords(&[Keyword::DROP, Keyword::NOT, Keyword::NULL]) {
10062                AlterColumnOperation::DropNotNull {}
10063            } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
10064                AlterColumnOperation::SetDefault {
10065                    value: self.parse_expr()?,
10066                }
10067            } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
10068                AlterColumnOperation::DropDefault {}
10069            } else if self.parse_keywords(&[Keyword::SET, Keyword::DATA, Keyword::TYPE]) {
10070                self.parse_set_data_type(true)?
10071            } else if self.parse_keyword(Keyword::TYPE) {
10072                self.parse_set_data_type(false)?
10073            } else if self.parse_keywords(&[Keyword::ADD, Keyword::GENERATED]) {
10074                let generated_as = if self.parse_keyword(Keyword::ALWAYS) {
10075                    Some(GeneratedAs::Always)
10076                } else if self.parse_keywords(&[Keyword::BY, Keyword::DEFAULT]) {
10077                    Some(GeneratedAs::ByDefault)
10078                } else {
10079                    None
10080                };
10081
10082                self.expect_keywords(&[Keyword::AS, Keyword::IDENTITY])?;
10083
10084                let mut sequence_options: Option<Vec<SequenceOptions>> = None;
10085
10086                if self.peek_token().token == Token::LParen {
10087                    self.expect_token(&Token::LParen)?;
10088                    sequence_options = Some(self.parse_create_sequence_options()?);
10089                    self.expect_token(&Token::RParen)?;
10090                }
10091
10092                AlterColumnOperation::AddGenerated {
10093                    generated_as,
10094                    sequence_options,
10095                }
10096            } else {
10097                let message = if is_postgresql {
10098                    "SET/DROP NOT NULL, SET DEFAULT, SET DATA TYPE, or ADD GENERATED after ALTER COLUMN"
10099                } else {
10100                    "SET/DROP NOT NULL, SET DEFAULT, or SET DATA TYPE after ALTER COLUMN"
10101                };
10102
10103                return self.expected(message, self.peek_token());
10104            };
10105            AlterTableOperation::AlterColumn { column_name, op }
10106        } else if self.parse_keyword(Keyword::SWAP) {
10107            self.expect_keyword_is(Keyword::WITH)?;
10108            let table_name = self.parse_object_name(false)?;
10109            AlterTableOperation::SwapWith { table_name }
10110        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect)
10111            && self.parse_keywords(&[Keyword::OWNER, Keyword::TO])
10112        {
10113            let new_owner = self.parse_owner()?;
10114            AlterTableOperation::OwnerTo { new_owner }
10115        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10116            && self.parse_keyword(Keyword::ATTACH)
10117        {
10118            AlterTableOperation::AttachPartition {
10119                partition: self.parse_part_or_partition()?,
10120            }
10121        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10122            && self.parse_keyword(Keyword::DETACH)
10123        {
10124            AlterTableOperation::DetachPartition {
10125                partition: self.parse_part_or_partition()?,
10126            }
10127        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10128            && self.parse_keyword(Keyword::FREEZE)
10129        {
10130            let partition = self.parse_part_or_partition()?;
10131            let with_name = if self.parse_keyword(Keyword::WITH) {
10132                self.expect_keyword_is(Keyword::NAME)?;
10133                Some(self.parse_identifier()?)
10134            } else {
10135                None
10136            };
10137            AlterTableOperation::FreezePartition {
10138                partition,
10139                with_name,
10140            }
10141        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10142            && self.parse_keyword(Keyword::UNFREEZE)
10143        {
10144            let partition = self.parse_part_or_partition()?;
10145            let with_name = if self.parse_keyword(Keyword::WITH) {
10146                self.expect_keyword_is(Keyword::NAME)?;
10147                Some(self.parse_identifier()?)
10148            } else {
10149                None
10150            };
10151            AlterTableOperation::UnfreezePartition {
10152                partition,
10153                with_name,
10154            }
10155        } else if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
10156            self.expect_token(&Token::LParen)?;
10157            let exprs = self.parse_comma_separated(|parser| parser.parse_expr())?;
10158            self.expect_token(&Token::RParen)?;
10159            AlterTableOperation::ClusterBy { exprs }
10160        } else if self.parse_keywords(&[Keyword::SUSPEND, Keyword::RECLUSTER]) {
10161            AlterTableOperation::SuspendRecluster
10162        } else if self.parse_keywords(&[Keyword::RESUME, Keyword::RECLUSTER]) {
10163            AlterTableOperation::ResumeRecluster
10164        } else if self.parse_keyword(Keyword::LOCK) {
10165            let equals = self.consume_token(&Token::Eq);
10166            let lock = match self.parse_one_of_keywords(&[
10167                Keyword::DEFAULT,
10168                Keyword::EXCLUSIVE,
10169                Keyword::NONE,
10170                Keyword::SHARED,
10171            ]) {
10172                Some(Keyword::DEFAULT) => AlterTableLock::Default,
10173                Some(Keyword::EXCLUSIVE) => AlterTableLock::Exclusive,
10174                Some(Keyword::NONE) => AlterTableLock::None,
10175                Some(Keyword::SHARED) => AlterTableLock::Shared,
10176                _ => self.expected(
10177                    "DEFAULT, EXCLUSIVE, NONE or SHARED after LOCK [=]",
10178                    self.peek_token(),
10179                )?,
10180            };
10181            AlterTableOperation::Lock { equals, lock }
10182        } else if self.parse_keyword(Keyword::ALGORITHM) {
10183            let equals = self.consume_token(&Token::Eq);
10184            let algorithm = match self.parse_one_of_keywords(&[
10185                Keyword::DEFAULT,
10186                Keyword::INSTANT,
10187                Keyword::INPLACE,
10188                Keyword::COPY,
10189            ]) {
10190                Some(Keyword::DEFAULT) => AlterTableAlgorithm::Default,
10191                Some(Keyword::INSTANT) => AlterTableAlgorithm::Instant,
10192                Some(Keyword::INPLACE) => AlterTableAlgorithm::Inplace,
10193                Some(Keyword::COPY) => AlterTableAlgorithm::Copy,
10194                _ => self.expected(
10195                    "DEFAULT, INSTANT, INPLACE, or COPY after ALGORITHM [=]",
10196                    self.peek_token(),
10197                )?,
10198            };
10199            AlterTableOperation::Algorithm { equals, algorithm }
10200        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
10201            let equals = self.consume_token(&Token::Eq);
10202            let value = self.parse_number_value()?;
10203            AlterTableOperation::AutoIncrement { equals, value }
10204        } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::IDENTITY]) {
10205            let identity = if self.parse_keyword(Keyword::NOTHING) {
10206                ReplicaIdentity::Nothing
10207            } else if self.parse_keyword(Keyword::FULL) {
10208                ReplicaIdentity::Full
10209            } else if self.parse_keyword(Keyword::DEFAULT) {
10210                ReplicaIdentity::Default
10211            } else if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
10212                ReplicaIdentity::Index(self.parse_identifier()?)
10213            } else {
10214                return self.expected(
10215                    "NOTHING, FULL, DEFAULT, or USING INDEX index_name after REPLICA IDENTITY",
10216                    self.peek_token(),
10217                );
10218            };
10219
10220            AlterTableOperation::ReplicaIdentity { identity }
10221        } else if self.parse_keywords(&[Keyword::VALIDATE, Keyword::CONSTRAINT]) {
10222            let name = self.parse_identifier()?;
10223            AlterTableOperation::ValidateConstraint { name }
10224        } else {
10225            let mut options =
10226                self.parse_options_with_keywords(&[Keyword::SET, Keyword::TBLPROPERTIES])?;
10227            if !options.is_empty() {
10228                AlterTableOperation::SetTblProperties {
10229                    table_properties: options,
10230                }
10231            } else {
10232                options = self.parse_options(Keyword::SET)?;
10233                if !options.is_empty() {
10234                    AlterTableOperation::SetOptionsParens { options }
10235                } else {
10236                    return self.expected(
10237                    "ADD, RENAME, PARTITION, SWAP, DROP, REPLICA IDENTITY, SET, or SET TBLPROPERTIES after ALTER TABLE",
10238                    self.peek_token(),
10239                  );
10240                }
10241            }
10242        };
10243        Ok(operation)
10244    }
10245
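    /// Parse what follows `SET DATA TYPE`/`TYPE` in an `ALTER COLUMN` operation:
    /// the new data type and, for dialects that support it, an optional `USING`
    /// expression, e.g. (illustrative, PostgreSQL-style):
    /// ```sql
    /// ALTER COLUMN price SET DATA TYPE numeric(10, 2) USING price::numeric(10, 2)
    /// ```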
10246    fn parse_set_data_type(&mut self, had_set: bool) -> Result<AlterColumnOperation, ParserError> {
10247        let data_type = self.parse_data_type()?;
10248        let using = if self.dialect.supports_alter_column_type_using()
10249            && self.parse_keyword(Keyword::USING)
10250        {
10251            Some(self.parse_expr()?)
10252        } else {
10253            None
10254        };
10255        Ok(AlterColumnOperation::SetDataType {
10256            data_type,
10257            using,
10258            had_set,
10259        })
10260    }
10261
10262    fn parse_part_or_partition(&mut self) -> Result<Partition, ParserError> {
10263        let keyword = self.expect_one_of_keywords(&[Keyword::PART, Keyword::PARTITION])?;
10264        match keyword {
10265            Keyword::PART => Ok(Partition::Part(self.parse_expr()?)),
10266            Keyword::PARTITION => Ok(Partition::Expr(self.parse_expr()?)),
10267            // unreachable because expect_one_of_keywords used above
10268            unexpected_keyword => Err(ParserError::ParserError(
10269                format!("Internal parser error: expected any of {{PART, PARTITION}}, got {unexpected_keyword:?}"),
10270            )),
10271        }
10272    }
10273
10274    /// Parse an `ALTER <object>` statement and dispatch to the appropriate alter handler.
10275    pub fn parse_alter(&mut self) -> Result<Statement, ParserError> {
10276        let object_type = self.expect_one_of_keywords(&[
10277            Keyword::VIEW,
10278            Keyword::TYPE,
10279            Keyword::TABLE,
10280            Keyword::INDEX,
10281            Keyword::ROLE,
10282            Keyword::POLICY,
10283            Keyword::CONNECTOR,
10284            Keyword::ICEBERG,
10285            Keyword::SCHEMA,
10286            Keyword::USER,
10287            Keyword::OPERATOR,
10288        ])?;
10289        match object_type {
10290            Keyword::SCHEMA => {
10291                self.prev_token();
10292                self.prev_token();
10293                self.parse_alter_schema()
10294            }
10295            Keyword::VIEW => self.parse_alter_view(),
10296            Keyword::TYPE => self.parse_alter_type(),
10297            Keyword::TABLE => self.parse_alter_table(false),
10298            Keyword::ICEBERG => {
10299                self.expect_keyword(Keyword::TABLE)?;
10300                self.parse_alter_table(true)
10301            }
10302            Keyword::INDEX => {
10303                let index_name = self.parse_object_name(false)?;
10304                let operation = if self.parse_keyword(Keyword::RENAME) {
10305                    if self.parse_keyword(Keyword::TO) {
10306                        let index_name = self.parse_object_name(false)?;
10307                        AlterIndexOperation::RenameIndex { index_name }
10308                    } else {
10309                        return self.expected("TO after RENAME", self.peek_token());
10310                    }
10311                } else {
10312                    return self.expected("RENAME after ALTER INDEX", self.peek_token());
10313                };
10314
10315                Ok(Statement::AlterIndex {
10316                    name: index_name,
10317                    operation,
10318                })
10319            }
10320            Keyword::OPERATOR => {
10321                if self.parse_keyword(Keyword::FAMILY) {
10322                    self.parse_alter_operator_family().map(Into::into)
10323                } else if self.parse_keyword(Keyword::CLASS) {
10324                    self.parse_alter_operator_class().map(Into::into)
10325                } else {
10326                    self.parse_alter_operator().map(Into::into)
10327                }
10328            }
10329            Keyword::ROLE => self.parse_alter_role(),
10330            Keyword::POLICY => self.parse_alter_policy().map(Into::into),
10331            Keyword::CONNECTOR => self.parse_alter_connector(),
10332            Keyword::USER => self.parse_alter_user().map(Into::into),
10333            // unreachable because expect_one_of_keywords used above
10334            unexpected_keyword => Err(ParserError::ParserError(
10335                format!("Internal parser error: expected any of {{VIEW, TYPE, TABLE, INDEX, ROLE, POLICY, CONNECTOR, ICEBERG, SCHEMA, USER, OPERATOR}}, got {unexpected_keyword:?}"),
10336            )),
10337        }
10338    }
10339
10340    /// Parse a [Statement::AlterTable]
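    /// For example, everything after `ALTER TABLE` in this illustrative statement
    /// (identifiers are placeholders):
    /// ```sql
    /// ALTER TABLE IF EXISTS ONLY public.orders RENAME TO orders_old
    /// ```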
10341    pub fn parse_alter_table(&mut self, iceberg: bool) -> Result<Statement, ParserError> {
10342        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10343        let only = self.parse_keyword(Keyword::ONLY); // [ ONLY ]
10344        let table_name = self.parse_object_name(false)?;
10345        let on_cluster = self.parse_optional_on_cluster()?;
10346        let operations = self.parse_comma_separated(Parser::parse_alter_table_operation)?;
10347
10348        let mut location = None;
10349        if self.parse_keyword(Keyword::LOCATION) {
10350            location = Some(HiveSetLocation {
10351                has_set: false,
10352                location: self.parse_identifier()?,
10353            });
10354        } else if self.parse_keywords(&[Keyword::SET, Keyword::LOCATION]) {
10355            location = Some(HiveSetLocation {
10356                has_set: true,
10357                location: self.parse_identifier()?,
10358            });
10359        }
10360
10361        let end_token = if self.peek_token_ref().token == Token::SemiColon {
10362            self.peek_token_ref().clone()
10363        } else {
10364            self.get_current_token().clone()
10365        };
10366
10367        Ok(AlterTable {
10368            name: table_name,
10369            if_exists,
10370            only,
10371            operations,
10372            location,
10373            on_cluster,
10374            table_type: if iceberg {
10375                Some(AlterTableType::Iceberg)
10376            } else {
10377                None
10378            },
10379            end_token: AttachedToken(end_token),
10380        }
10381        .into())
10382    }
10383
10384    /// Parse an `ALTER VIEW` statement.
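    /// An illustrative example (identifiers are placeholders):
    /// ```sql
    /// ALTER VIEW active_users (id, name) AS SELECT id, name FROM users
    /// ```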
10385    pub fn parse_alter_view(&mut self) -> Result<Statement, ParserError> {
10386        let name = self.parse_object_name(false)?;
10387        let columns = self.parse_parenthesized_column_list(Optional, false)?;
10388
10389        let with_options = self.parse_options(Keyword::WITH)?;
10390
10391        self.expect_keyword_is(Keyword::AS)?;
10392        let query = self.parse_query()?;
10393
10394        Ok(Statement::AlterView {
10395            name,
10396            columns,
10397            query,
10398            with_options,
10399        })
10400    }
10401
10402    /// Parse a [Statement::AlterType]
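    /// Illustrative PostgreSQL-style examples (type and value names are placeholders):
    /// ```sql
    /// ALTER TYPE mood RENAME TO sentiment
    /// ALTER TYPE mood ADD VALUE IF NOT EXISTS 'content' AFTER 'neutral'
    /// ```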
10403    pub fn parse_alter_type(&mut self) -> Result<Statement, ParserError> {
10404        let name = self.parse_object_name(false)?;
10405
10406        if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
10407            let new_name = self.parse_identifier()?;
10408            Ok(Statement::AlterType(AlterType {
10409                name,
10410                operation: AlterTypeOperation::Rename(AlterTypeRename { new_name }),
10411            }))
10412        } else if self.parse_keywords(&[Keyword::ADD, Keyword::VALUE]) {
10413            let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
10414            let new_enum_value = self.parse_identifier()?;
10415            let position = if self.parse_keyword(Keyword::BEFORE) {
10416                Some(AlterTypeAddValuePosition::Before(self.parse_identifier()?))
10417            } else if self.parse_keyword(Keyword::AFTER) {
10418                Some(AlterTypeAddValuePosition::After(self.parse_identifier()?))
10419            } else {
10420                None
10421            };
10422
10423            Ok(Statement::AlterType(AlterType {
10424                name,
10425                operation: AlterTypeOperation::AddValue(AlterTypeAddValue {
10426                    if_not_exists,
10427                    value: new_enum_value,
10428                    position,
10429                }),
10430            }))
10431        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::VALUE]) {
10432            let existing_enum_value = self.parse_identifier()?;
10433            self.expect_keyword(Keyword::TO)?;
10434            let new_enum_value = self.parse_identifier()?;
10435
10436            Ok(Statement::AlterType(AlterType {
10437                name,
10438                operation: AlterTypeOperation::RenameValue(AlterTypeRenameValue {
10439                    from: existing_enum_value,
10440                    to: new_enum_value,
10441                }),
10442            }))
10443        } else {
10444            self.expected_ref(
10445                "{RENAME TO | { RENAME | ADD } VALUE}",
10446                self.peek_token_ref(),
10447            )
10448        }
10449    }
10450
10451    /// Parse a [Statement::AlterOperator]
10452    ///
10453    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-alteroperator.html)
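    ///
    /// Illustrative examples, adapted from the linked PostgreSQL documentation:
    /// ```sql
    /// ALTER OPERATOR @@ (text, text) OWNER TO joe
    /// ALTER OPERATOR && (_int4, _int4) SET (RESTRICT = _int_contsel, JOIN = _int_contjoinsel)
    /// ```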
10454    pub fn parse_alter_operator(&mut self) -> Result<AlterOperator, ParserError> {
10455        let name = self.parse_operator_name()?;
10456
10457        // Parse (left_type, right_type)
10458        self.expect_token(&Token::LParen)?;
10459
10460        let left_type = if self.parse_keyword(Keyword::NONE) {
10461            None
10462        } else {
10463            Some(self.parse_data_type()?)
10464        };
10465
10466        self.expect_token(&Token::Comma)?;
10467        let right_type = self.parse_data_type()?;
10468        self.expect_token(&Token::RParen)?;
10469
10470        // Parse the operation
10471        let operation = if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
10472            let owner = if self.parse_keyword(Keyword::CURRENT_ROLE) {
10473                Owner::CurrentRole
10474            } else if self.parse_keyword(Keyword::CURRENT_USER) {
10475                Owner::CurrentUser
10476            } else if self.parse_keyword(Keyword::SESSION_USER) {
10477                Owner::SessionUser
10478            } else {
10479                Owner::Ident(self.parse_identifier()?)
10480            };
10481            AlterOperatorOperation::OwnerTo(owner)
10482        } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
10483            let schema_name = self.parse_object_name(false)?;
10484            AlterOperatorOperation::SetSchema { schema_name }
10485        } else if self.parse_keyword(Keyword::SET) {
10486            self.expect_token(&Token::LParen)?;
10487
10488            let mut options = Vec::new();
10489            loop {
10490                let keyword = self.expect_one_of_keywords(&[
10491                    Keyword::RESTRICT,
10492                    Keyword::JOIN,
10493                    Keyword::COMMUTATOR,
10494                    Keyword::NEGATOR,
10495                    Keyword::HASHES,
10496                    Keyword::MERGES,
10497                ])?;
10498
10499                match keyword {
10500                    Keyword::RESTRICT => {
10501                        self.expect_token(&Token::Eq)?;
10502                        let proc_name = if self.parse_keyword(Keyword::NONE) {
10503                            None
10504                        } else {
10505                            Some(self.parse_object_name(false)?)
10506                        };
10507                        options.push(OperatorOption::Restrict(proc_name));
10508                    }
10509                    Keyword::JOIN => {
10510                        self.expect_token(&Token::Eq)?;
10511                        let proc_name = if self.parse_keyword(Keyword::NONE) {
10512                            None
10513                        } else {
10514                            Some(self.parse_object_name(false)?)
10515                        };
10516                        options.push(OperatorOption::Join(proc_name));
10517                    }
10518                    Keyword::COMMUTATOR => {
10519                        self.expect_token(&Token::Eq)?;
10520                        let op_name = self.parse_operator_name()?;
10521                        options.push(OperatorOption::Commutator(op_name));
10522                    }
10523                    Keyword::NEGATOR => {
10524                        self.expect_token(&Token::Eq)?;
10525                        let op_name = self.parse_operator_name()?;
10526                        options.push(OperatorOption::Negator(op_name));
10527                    }
10528                    Keyword::HASHES => {
10529                        options.push(OperatorOption::Hashes);
10530                    }
10531                    Keyword::MERGES => {
10532                        options.push(OperatorOption::Merges);
10533                    }
10534                    unexpected_keyword => return Err(ParserError::ParserError(
10535                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in operator option"),
10536                    )),
10537                }
10538
10539                if !self.consume_token(&Token::Comma) {
10540                    break;
10541                }
10542            }
10543
10544            self.expect_token(&Token::RParen)?;
10545            AlterOperatorOperation::Set { options }
10546        } else {
10547            return self.expected_ref(
10548                "OWNER TO, SET SCHEMA, or SET after ALTER OPERATOR",
10549                self.peek_token_ref(),
10550            );
10551        };
10552
10553        Ok(AlterOperator {
10554            name,
10555            left_type,
10556            right_type,
10557            operation,
10558        })
10559    }
10560
10561    /// Parse an operator item for ALTER OPERATOR FAMILY ADD operations
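    ///
    /// e.g. the `OPERATOR 1 < (int4, int2)` item in
    /// `ALTER OPERATOR FAMILY ... ADD OPERATOR 1 < (int4, int2)`
    /// (illustrative, PostgreSQL-style syntax).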
10562    fn parse_operator_family_add_operator(&mut self) -> Result<OperatorFamilyItem, ParserError> {
10563        let strategy_number = self.parse_literal_uint()?;
10564        let operator_name = self.parse_operator_name()?;
10565
10566        // Operator argument types (required for ALTER OPERATOR FAMILY)
10567        self.expect_token(&Token::LParen)?;
10568        let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
10569        self.expect_token(&Token::RParen)?;
10570
10571        // Optional purpose
10572        let purpose = if self.parse_keyword(Keyword::FOR) {
10573            if self.parse_keyword(Keyword::SEARCH) {
10574                Some(OperatorPurpose::ForSearch)
10575            } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
10576                let sort_family = self.parse_object_name(false)?;
10577                Some(OperatorPurpose::ForOrderBy { sort_family })
10578            } else {
10579                return self.expected("SEARCH or ORDER BY after FOR", self.peek_token());
10580            }
10581        } else {
10582            None
10583        };
10584
10585        Ok(OperatorFamilyItem::Operator {
10586            strategy_number,
10587            operator_name,
10588            op_types,
10589            purpose,
10590        })
10591    }
10592
10593    /// Parse a function item for ALTER OPERATOR FAMILY ADD operations
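    ///
    /// e.g. the `FUNCTION 1 btint42cmp(int4, int2)` item in
    /// `ALTER OPERATOR FAMILY ... ADD FUNCTION 1 btint42cmp(int4, int2)`
    /// (illustrative, PostgreSQL-style syntax).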
10594    fn parse_operator_family_add_function(&mut self) -> Result<OperatorFamilyItem, ParserError> {
10595        let support_number = self.parse_literal_uint()?;
10596
10597        // Optional operator types
        let op_types = if self.consume_token(&Token::LParen) {
            if self.peek_token() == Token::RParen {
                // Empty "()" list of operator types
                self.expect_token(&Token::RParen)?;
                Some(vec![])
            } else {
                let types = self.parse_comma_separated(Parser::parse_data_type)?;
                self.expect_token(&Token::RParen)?;
                Some(types)
            }
        } else {
            None
        };
10608
10609        let function_name = self.parse_object_name(false)?;
10610
10611        // Function argument types
10612        let argument_types = if self.consume_token(&Token::LParen) {
10613            if self.peek_token() == Token::RParen {
10614                self.expect_token(&Token::RParen)?;
10615                vec![]
10616            } else {
10617                let types = self.parse_comma_separated(Parser::parse_data_type)?;
10618                self.expect_token(&Token::RParen)?;
10619                types
10620            }
10621        } else {
10622            vec![]
10623        };
10624
10625        Ok(OperatorFamilyItem::Function {
10626            support_number,
10627            op_types,
10628            function_name,
10629            argument_types,
10630        })
10631    }
10632
10633    /// Parse an operator item for ALTER OPERATOR FAMILY DROP operations
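    ///
    /// e.g. the `OPERATOR 1 (int4, int2)` item in
    /// `ALTER OPERATOR FAMILY ... DROP OPERATOR 1 (int4, int2)`
    /// (illustrative, PostgreSQL-style syntax).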
10634    fn parse_operator_family_drop_operator(
10635        &mut self,
10636    ) -> Result<OperatorFamilyDropItem, ParserError> {
10637        let strategy_number = self.parse_literal_uint()?;
10638
10639        // Operator argument types (required for DROP)
10640        self.expect_token(&Token::LParen)?;
10641        let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
10642        self.expect_token(&Token::RParen)?;
10643
10644        Ok(OperatorFamilyDropItem::Operator {
10645            strategy_number,
10646            op_types,
10647        })
10648    }
10649
10650    /// Parse a function item for ALTER OPERATOR FAMILY DROP operations
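    ///
    /// e.g. the `FUNCTION 1 (int4, int2)` item in
    /// `ALTER OPERATOR FAMILY ... DROP FUNCTION 1 (int4, int2)`
    /// (illustrative, PostgreSQL-style syntax).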
10651    fn parse_operator_family_drop_function(
10652        &mut self,
10653    ) -> Result<OperatorFamilyDropItem, ParserError> {
10654        let support_number = self.parse_literal_uint()?;
10655
10656        // Operator types (required for DROP)
10657        self.expect_token(&Token::LParen)?;
10658        let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
10659        self.expect_token(&Token::RParen)?;
10660
10661        Ok(OperatorFamilyDropItem::Function {
10662            support_number,
10663            op_types,
10664        })
10665    }
10666
10667    /// Parse an operator family item for ADD operations (dispatches to operator or function parsing)
10668    fn parse_operator_family_add_item(&mut self) -> Result<OperatorFamilyItem, ParserError> {
10669        if self.parse_keyword(Keyword::OPERATOR) {
10670            self.parse_operator_family_add_operator()
10671        } else if self.parse_keyword(Keyword::FUNCTION) {
10672            self.parse_operator_family_add_function()
10673        } else {
10674            self.expected("OPERATOR or FUNCTION", self.peek_token())
10675        }
10676    }
10677
10678    /// Parse an operator family item for DROP operations (dispatches to operator or function parsing)
10679    fn parse_operator_family_drop_item(&mut self) -> Result<OperatorFamilyDropItem, ParserError> {
10680        if self.parse_keyword(Keyword::OPERATOR) {
10681            self.parse_operator_family_drop_operator()
10682        } else if self.parse_keyword(Keyword::FUNCTION) {
10683            self.parse_operator_family_drop_function()
10684        } else {
10685            self.expected("OPERATOR or FUNCTION", self.peek_token())
10686        }
10687    }
10688
10689    /// Parse a [Statement::AlterOperatorFamily]
10690    /// See <https://www.postgresql.org/docs/current/sql-alteropfamily.html>
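    ///
    /// Illustrative examples (PostgreSQL-style syntax; not exhaustive):
    /// `ALTER OPERATOR FAMILY integer_ops USING btree ADD OPERATOR 1 < (int4, int2)`
    /// `ALTER OPERATOR FAMILY integer_ops USING btree RENAME TO int_ops`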
10691    pub fn parse_alter_operator_family(&mut self) -> Result<AlterOperatorFamily, ParserError> {
10692        let name = self.parse_object_name(false)?;
10693        self.expect_keyword(Keyword::USING)?;
10694        let using = self.parse_identifier()?;
10695
10696        let operation = if self.parse_keyword(Keyword::ADD) {
10697            let items = self.parse_comma_separated(Parser::parse_operator_family_add_item)?;
10698            AlterOperatorFamilyOperation::Add { items }
10699        } else if self.parse_keyword(Keyword::DROP) {
10700            let items = self.parse_comma_separated(Parser::parse_operator_family_drop_item)?;
10701            AlterOperatorFamilyOperation::Drop { items }
10702        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
10703            let new_name = self.parse_object_name(false)?;
10704            AlterOperatorFamilyOperation::RenameTo { new_name }
10705        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
10706            let owner = self.parse_owner()?;
10707            AlterOperatorFamilyOperation::OwnerTo(owner)
10708        } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
10709            let schema_name = self.parse_object_name(false)?;
10710            AlterOperatorFamilyOperation::SetSchema { schema_name }
10711        } else {
10712            return self.expected_ref(
10713                "ADD, DROP, RENAME TO, OWNER TO, or SET SCHEMA after ALTER OPERATOR FAMILY",
10714                self.peek_token_ref(),
10715            );
10716        };
10717
10718        Ok(AlterOperatorFamily {
10719            name,
10720            using,
10721            operation,
10722        })
10723    }
10724
10725    /// Parse an `ALTER OPERATOR CLASS` statement.
10726    ///
10727    /// Handles operations like `RENAME TO`, `OWNER TO`, and `SET SCHEMA`.
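    ///
    /// e.g. `ALTER OPERATOR CLASS int4_ops USING btree SET SCHEMA archive`
    /// (illustrative, PostgreSQL-style syntax).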
10728    pub fn parse_alter_operator_class(&mut self) -> Result<AlterOperatorClass, ParserError> {
10729        let name = self.parse_object_name(false)?;
10730        self.expect_keyword(Keyword::USING)?;
10731        let using = self.parse_identifier()?;
10732
10733        let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
10734            let new_name = self.parse_object_name(false)?;
10735            AlterOperatorClassOperation::RenameTo { new_name }
10736        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
10737            let owner = self.parse_owner()?;
10738            AlterOperatorClassOperation::OwnerTo(owner)
10739        } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
10740            let schema_name = self.parse_object_name(false)?;
10741            AlterOperatorClassOperation::SetSchema { schema_name }
10742        } else {
10743            return self.expected_ref(
10744                "RENAME TO, OWNER TO, or SET SCHEMA after ALTER OPERATOR CLASS",
10745                self.peek_token_ref(),
10746            );
10747        };
10748
10749        Ok(AlterOperatorClass {
10750            name,
10751            using,
10752            operation,
10753        })
10754    }
10755
10756    /// Parse an `ALTER SCHEMA` statement.
10757    ///
10758    /// Supports operations such as setting options, renaming, adding/dropping replicas, and changing owner.
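    ///
    /// Illustrative examples (not exhaustive):
    /// `ALTER SCHEMA staging RENAME TO archive`
    /// `ALTER SCHEMA IF EXISTS staging OWNER TO admin`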
10759    pub fn parse_alter_schema(&mut self) -> Result<Statement, ParserError> {
10760        self.expect_keywords(&[Keyword::ALTER, Keyword::SCHEMA])?;
10761        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10762        let name = self.parse_object_name(false)?;
10763        let operation = if self.parse_keywords(&[Keyword::SET, Keyword::OPTIONS]) {
10764            self.prev_token();
10765            let options = self.parse_options(Keyword::OPTIONS)?;
10766            AlterSchemaOperation::SetOptionsParens { options }
10767        } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT, Keyword::COLLATE]) {
10768            let collate = self.parse_expr()?;
10769            AlterSchemaOperation::SetDefaultCollate { collate }
10770        } else if self.parse_keywords(&[Keyword::ADD, Keyword::REPLICA]) {
10771            let replica = self.parse_identifier()?;
10772            let options = if self.peek_keyword(Keyword::OPTIONS) {
10773                Some(self.parse_options(Keyword::OPTIONS)?)
10774            } else {
10775                None
10776            };
10777            AlterSchemaOperation::AddReplica { replica, options }
10778        } else if self.parse_keywords(&[Keyword::DROP, Keyword::REPLICA]) {
10779            let replica = self.parse_identifier()?;
10780            AlterSchemaOperation::DropReplica { replica }
10781        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
10782            let new_name = self.parse_object_name(false)?;
10783            AlterSchemaOperation::Rename { name: new_name }
10784        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
10785            let owner = self.parse_owner()?;
10786            AlterSchemaOperation::OwnerTo { owner }
10787        } else {
10788            return self.expected_ref("ALTER SCHEMA operation", self.peek_token_ref());
10789        };
10790        Ok(Statement::AlterSchema(AlterSchema {
10791            name,
10792            if_exists,
10793            operations: vec![operation],
10794        }))
10795    }
10796
10797    /// Parse a `CALL procedure_name(arg1, arg2, ...)`
10798    /// or `CALL procedure_name` statement
10799    pub fn parse_call(&mut self) -> Result<Statement, ParserError> {
10800        let object_name = self.parse_object_name(false)?;
10801        if self.peek_token().token == Token::LParen {
10802            match self.parse_function(object_name)? {
10803                Expr::Function(f) => Ok(Statement::Call(f)),
10804                other => parser_err!(
10805                    format!("Expected a simple procedure call but found: {other}"),
10806                    self.peek_token().span.start
10807                ),
10808            }
10809        } else {
10810            Ok(Statement::Call(Function {
10811                name: object_name,
10812                uses_odbc_syntax: false,
10813                parameters: FunctionArguments::None,
10814                args: FunctionArguments::None,
10815                over: None,
10816                filter: None,
10817                null_treatment: None,
10818                within_group: vec![],
10819            }))
10820        }
10821    }
10822
10823    /// Parse a `COPY` statement
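    ///
    /// Illustrative examples (PostgreSQL-style syntax; not exhaustive):
    /// `COPY country TO STDOUT (DELIMITER '|')`
    /// `COPY country (name, population) FROM '/tmp/country.csv' WITH (FORMAT csv, HEADER true)`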
10824    pub fn parse_copy(&mut self) -> Result<Statement, ParserError> {
10825        let source;
10826        if self.consume_token(&Token::LParen) {
10827            source = CopySource::Query(self.parse_query()?);
10828            self.expect_token(&Token::RParen)?;
10829        } else {
10830            let table_name = self.parse_object_name(false)?;
10831            let columns = self.parse_parenthesized_column_list(Optional, false)?;
10832            source = CopySource::Table {
10833                table_name,
10834                columns,
10835            };
10836        }
10837        let to = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::TO]) {
10838            Some(Keyword::FROM) => false,
10839            Some(Keyword::TO) => true,
10840            _ => self.expected("FROM or TO", self.peek_token())?,
10841        };
10842        if !to {
10843            // Use a separate if statement to prevent the Rust compiler from complaining about
10844            // "if statement in this position is unstable: https://github.com/rust-lang/rust/issues/53667"
10845            if let CopySource::Query(_) = source {
10846                return Err(ParserError::ParserError(
10847                    "COPY ... FROM does not support query as a source".to_string(),
10848                ));
10849            }
10850        }
10851        let target = if self.parse_keyword(Keyword::STDIN) {
10852            CopyTarget::Stdin
10853        } else if self.parse_keyword(Keyword::STDOUT) {
10854            CopyTarget::Stdout
10855        } else if self.parse_keyword(Keyword::PROGRAM) {
10856            CopyTarget::Program {
10857                command: self.parse_literal_string()?,
10858            }
10859        } else {
10860            CopyTarget::File {
10861                filename: self.parse_literal_string()?,
10862            }
10863        };
10864        let _ = self.parse_keyword(Keyword::WITH); // [ WITH ]
10865        let mut options = vec![];
10866        if self.consume_token(&Token::LParen) {
10867            options = self.parse_comma_separated(Parser::parse_copy_option)?;
10868            self.expect_token(&Token::RParen)?;
10869        }
10870        let mut legacy_options = vec![];
10871        while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
10872            legacy_options.push(opt);
10873        }
10874        let values = if let CopyTarget::Stdin = target {
10875            self.expect_token(&Token::SemiColon)?;
10876            self.parse_tsv()
10877        } else {
10878            vec![]
10879        };
10880        Ok(Statement::Copy {
10881            source,
10882            to,
10883            target,
10884            options,
10885            legacy_options,
10886            values,
10887        })
10888    }
10889
10890    /// Parse [Statement::Open]
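    ///
    /// e.g. `OPEN my_cursor` (illustrative).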
10891    fn parse_open(&mut self) -> Result<Statement, ParserError> {
10892        self.expect_keyword(Keyword::OPEN)?;
10893        Ok(Statement::Open(OpenStatement {
10894            cursor_name: self.parse_identifier()?,
10895        }))
10896    }
10897
10898    /// Parse a `CLOSE` cursor statement.
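    ///
    /// e.g. `CLOSE my_cursor` or `CLOSE ALL` (illustrative).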
10899    pub fn parse_close(&mut self) -> Result<Statement, ParserError> {
10900        let cursor = if self.parse_keyword(Keyword::ALL) {
10901            CloseCursor::All
10902        } else {
10903            let name = self.parse_identifier()?;
10904
10905            CloseCursor::Specific { name }
10906        };
10907
10908        Ok(Statement::Close { cursor })
10909    }
10910
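    /// Parse a single option from the parenthesized option list of a `COPY`
    /// statement, e.g. the `FORMAT csv` or `DELIMITER '|'` in
    /// `COPY ... WITH (FORMAT csv, DELIMITER '|')` (illustrative).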
10911    fn parse_copy_option(&mut self) -> Result<CopyOption, ParserError> {
10912        let ret = match self.parse_one_of_keywords(&[
10913            Keyword::FORMAT,
10914            Keyword::FREEZE,
10915            Keyword::DELIMITER,
10916            Keyword::NULL,
10917            Keyword::HEADER,
10918            Keyword::QUOTE,
10919            Keyword::ESCAPE,
10920            Keyword::FORCE_QUOTE,
10921            Keyword::FORCE_NOT_NULL,
10922            Keyword::FORCE_NULL,
10923            Keyword::ENCODING,
10924        ]) {
10925            Some(Keyword::FORMAT) => CopyOption::Format(self.parse_identifier()?),
10926            Some(Keyword::FREEZE) => CopyOption::Freeze(!matches!(
10927                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
10928                Some(Keyword::FALSE)
10929            )),
10930            Some(Keyword::DELIMITER) => CopyOption::Delimiter(self.parse_literal_char()?),
10931            Some(Keyword::NULL) => CopyOption::Null(self.parse_literal_string()?),
10932            Some(Keyword::HEADER) => CopyOption::Header(!matches!(
10933                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
10934                Some(Keyword::FALSE)
10935            )),
10936            Some(Keyword::QUOTE) => CopyOption::Quote(self.parse_literal_char()?),
10937            Some(Keyword::ESCAPE) => CopyOption::Escape(self.parse_literal_char()?),
10938            Some(Keyword::FORCE_QUOTE) => {
10939                CopyOption::ForceQuote(self.parse_parenthesized_column_list(Mandatory, false)?)
10940            }
10941            Some(Keyword::FORCE_NOT_NULL) => {
10942                CopyOption::ForceNotNull(self.parse_parenthesized_column_list(Mandatory, false)?)
10943            }
10944            Some(Keyword::FORCE_NULL) => {
10945                CopyOption::ForceNull(self.parse_parenthesized_column_list(Mandatory, false)?)
10946            }
10947            Some(Keyword::ENCODING) => CopyOption::Encoding(self.parse_literal_string()?),
10948            _ => self.expected("option", self.peek_token())?,
10949        };
10950        Ok(ret)
10951    }
10952
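    /// Parse a single legacy (space-separated) `COPY` option; the accepted
    /// keywords also cover Redshift-style `COPY`/`UNLOAD` options such as
    /// `IAM_ROLE` and `MAXFILESIZE`.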
10953    fn parse_copy_legacy_option(&mut self) -> Result<CopyLegacyOption, ParserError> {
10954        // FORMAT [ AS ] is optional
10955        if self.parse_keyword(Keyword::FORMAT) {
10956            let _ = self.parse_keyword(Keyword::AS);
10957        }
10958
10959        let ret = match self.parse_one_of_keywords(&[
10960            Keyword::ACCEPTANYDATE,
10961            Keyword::ACCEPTINVCHARS,
10962            Keyword::ADDQUOTES,
10963            Keyword::ALLOWOVERWRITE,
10964            Keyword::BINARY,
10965            Keyword::BLANKSASNULL,
10966            Keyword::BZIP2,
10967            Keyword::CLEANPATH,
10968            Keyword::COMPUPDATE,
10969            Keyword::CSV,
10970            Keyword::DATEFORMAT,
10971            Keyword::DELIMITER,
10972            Keyword::EMPTYASNULL,
10973            Keyword::ENCRYPTED,
10974            Keyword::ESCAPE,
10975            Keyword::EXTENSION,
10976            Keyword::FIXEDWIDTH,
10977            Keyword::GZIP,
10978            Keyword::HEADER,
10979            Keyword::IAM_ROLE,
10980            Keyword::IGNOREHEADER,
10981            Keyword::JSON,
10982            Keyword::MANIFEST,
10983            Keyword::MAXFILESIZE,
10984            Keyword::NULL,
10985            Keyword::PARALLEL,
10986            Keyword::PARQUET,
10987            Keyword::PARTITION,
10988            Keyword::REGION,
10989            Keyword::REMOVEQUOTES,
10990            Keyword::ROWGROUPSIZE,
10991            Keyword::STATUPDATE,
10992            Keyword::TIMEFORMAT,
10993            Keyword::TRUNCATECOLUMNS,
10994            Keyword::ZSTD,
10995        ]) {
10996            Some(Keyword::ACCEPTANYDATE) => CopyLegacyOption::AcceptAnyDate,
10997            Some(Keyword::ACCEPTINVCHARS) => {
10998                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
10999                let ch = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) {
11000                    Some(self.parse_literal_string()?)
11001                } else {
11002                    None
11003                };
11004                CopyLegacyOption::AcceptInvChars(ch)
11005            }
11006            Some(Keyword::ADDQUOTES) => CopyLegacyOption::AddQuotes,
11007            Some(Keyword::ALLOWOVERWRITE) => CopyLegacyOption::AllowOverwrite,
11008            Some(Keyword::BINARY) => CopyLegacyOption::Binary,
11009            Some(Keyword::BLANKSASNULL) => CopyLegacyOption::BlankAsNull,
11010            Some(Keyword::BZIP2) => CopyLegacyOption::Bzip2,
11011            Some(Keyword::CLEANPATH) => CopyLegacyOption::CleanPath,
11012            Some(Keyword::COMPUPDATE) => {
11013                let preset = self.parse_keyword(Keyword::PRESET);
11014                let enabled = match self.parse_one_of_keywords(&[
11015                    Keyword::TRUE,
11016                    Keyword::FALSE,
11017                    Keyword::ON,
11018                    Keyword::OFF,
11019                ]) {
11020                    Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
11021                    Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
11022                    _ => None,
11023                };
11024                CopyLegacyOption::CompUpdate { preset, enabled }
11025            }
11026            Some(Keyword::CSV) => CopyLegacyOption::Csv({
11027                let mut opts = vec![];
11028                while let Some(opt) =
11029                    self.maybe_parse(|parser| parser.parse_copy_legacy_csv_option())?
11030                {
11031                    opts.push(opt);
11032                }
11033                opts
11034            }),
11035            Some(Keyword::DATEFORMAT) => {
11036                let _ = self.parse_keyword(Keyword::AS);
11037                let fmt = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) {
11038                    Some(self.parse_literal_string()?)
11039                } else {
11040                    None
11041                };
11042                CopyLegacyOption::DateFormat(fmt)
11043            }
11044            Some(Keyword::DELIMITER) => {
11045                let _ = self.parse_keyword(Keyword::AS);
11046                CopyLegacyOption::Delimiter(self.parse_literal_char()?)
11047            }
11048            Some(Keyword::EMPTYASNULL) => CopyLegacyOption::EmptyAsNull,
11049            Some(Keyword::ENCRYPTED) => {
11050                let auto = self.parse_keyword(Keyword::AUTO);
11051                CopyLegacyOption::Encrypted { auto }
11052            }
11053            Some(Keyword::ESCAPE) => CopyLegacyOption::Escape,
11054            Some(Keyword::EXTENSION) => {
11055                let ext = self.parse_literal_string()?;
11056                CopyLegacyOption::Extension(ext)
11057            }
11058            Some(Keyword::FIXEDWIDTH) => {
11059                let spec = self.parse_literal_string()?;
11060                CopyLegacyOption::FixedWidth(spec)
11061            }
11062            Some(Keyword::GZIP) => CopyLegacyOption::Gzip,
11063            Some(Keyword::HEADER) => CopyLegacyOption::Header,
11064            Some(Keyword::IAM_ROLE) => CopyLegacyOption::IamRole(self.parse_iam_role_kind()?),
11065            Some(Keyword::IGNOREHEADER) => {
11066                let _ = self.parse_keyword(Keyword::AS);
11067                let num_rows = self.parse_literal_uint()?;
11068                CopyLegacyOption::IgnoreHeader(num_rows)
11069            }
11070            Some(Keyword::JSON) => {
11071                let _ = self.parse_keyword(Keyword::AS);
11072                let fmt = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) {
11073                    Some(self.parse_literal_string()?)
11074                } else {
11075                    None
11076                };
11077                CopyLegacyOption::Json(fmt)
11078            }
11079            Some(Keyword::MANIFEST) => {
11080                let verbose = self.parse_keyword(Keyword::VERBOSE);
11081                CopyLegacyOption::Manifest { verbose }
11082            }
11083            Some(Keyword::MAXFILESIZE) => {
11084                let _ = self.parse_keyword(Keyword::AS);
11085                let size = self.parse_number_value()?.value;
11086                let unit = match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
11087                    Some(Keyword::MB) => Some(FileSizeUnit::MB),
11088                    Some(Keyword::GB) => Some(FileSizeUnit::GB),
11089                    _ => None,
11090                };
11091                CopyLegacyOption::MaxFileSize(FileSize { size, unit })
11092            }
11093            Some(Keyword::NULL) => {
11094                let _ = self.parse_keyword(Keyword::AS);
11095                CopyLegacyOption::Null(self.parse_literal_string()?)
11096            }
11097            Some(Keyword::PARALLEL) => {
11098                let enabled = match self.parse_one_of_keywords(&[
11099                    Keyword::TRUE,
11100                    Keyword::FALSE,
11101                    Keyword::ON,
11102                    Keyword::OFF,
11103                ]) {
11104                    Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
11105                    Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
11106                    _ => None,
11107                };
11108                CopyLegacyOption::Parallel(enabled)
11109            }
11110            Some(Keyword::PARQUET) => CopyLegacyOption::Parquet,
11111            Some(Keyword::PARTITION) => {
11112                self.expect_keyword(Keyword::BY)?;
11113                let columns = self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?;
11114                let include = self.parse_keyword(Keyword::INCLUDE);
11115                CopyLegacyOption::PartitionBy(UnloadPartitionBy { columns, include })
11116            }
11117            Some(Keyword::REGION) => {
11118                let _ = self.parse_keyword(Keyword::AS);
11119                let region = self.parse_literal_string()?;
11120                CopyLegacyOption::Region(region)
11121            }
11122            Some(Keyword::REMOVEQUOTES) => CopyLegacyOption::RemoveQuotes,
11123            Some(Keyword::ROWGROUPSIZE) => {
11124                let _ = self.parse_keyword(Keyword::AS);
11125                let file_size = self.parse_file_size()?;
11126                CopyLegacyOption::RowGroupSize(file_size)
11127            }
11128            Some(Keyword::STATUPDATE) => {
11129                let enabled = match self.parse_one_of_keywords(&[
11130                    Keyword::TRUE,
11131                    Keyword::FALSE,
11132                    Keyword::ON,
11133                    Keyword::OFF,
11134                ]) {
11135                    Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
11136                    Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
11137                    _ => None,
11138                };
11139                CopyLegacyOption::StatUpdate(enabled)
11140            }
11141            Some(Keyword::TIMEFORMAT) => {
11142                let _ = self.parse_keyword(Keyword::AS);
11143                let fmt = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) {
11144                    Some(self.parse_literal_string()?)
11145                } else {
11146                    None
11147                };
11148                CopyLegacyOption::TimeFormat(fmt)
11149            }
11150            Some(Keyword::TRUNCATECOLUMNS) => CopyLegacyOption::TruncateColumns,
11151            Some(Keyword::ZSTD) => CopyLegacyOption::Zstd,
11152            _ => self.expected("option", self.peek_token())?,
11153        };
11154        Ok(ret)
11155    }
11156
11157    fn parse_file_size(&mut self) -> Result<FileSize, ParserError> {
11158        let size = self.parse_number_value()?.value;
11159        let unit = self.maybe_parse_file_size_unit();
11160        Ok(FileSize { size, unit })
11161    }
11162
11163    fn maybe_parse_file_size_unit(&mut self) -> Option<FileSizeUnit> {
11164        match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
11165            Some(Keyword::MB) => Some(FileSizeUnit::MB),
11166            Some(Keyword::GB) => Some(FileSizeUnit::GB),
11167            _ => None,
11168        }
11169    }
11170
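    /// Parse an IAM role specification: either the `DEFAULT` keyword or a
    /// string literal containing the role ARN.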
11171    fn parse_iam_role_kind(&mut self) -> Result<IamRoleKind, ParserError> {
11172        if self.parse_keyword(Keyword::DEFAULT) {
11173            Ok(IamRoleKind::Default)
11174        } else {
11175            let arn = self.parse_literal_string()?;
11176            Ok(IamRoleKind::Arn(arn))
11177        }
11178    }
11179
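    /// Parse a single option following the legacy `CSV` keyword,
    /// e.g. `HEADER`, `QUOTE AS '"'`, or `FORCE NOT NULL col1, col2`
    /// (illustrative).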
11180    fn parse_copy_legacy_csv_option(&mut self) -> Result<CopyLegacyCsvOption, ParserError> {
11181        let ret = match self.parse_one_of_keywords(&[
11182            Keyword::HEADER,
11183            Keyword::QUOTE,
11184            Keyword::ESCAPE,
11185            Keyword::FORCE,
11186        ]) {
11187            Some(Keyword::HEADER) => CopyLegacyCsvOption::Header,
11188            Some(Keyword::QUOTE) => {
11189                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
11190                CopyLegacyCsvOption::Quote(self.parse_literal_char()?)
11191            }
11192            Some(Keyword::ESCAPE) => {
11193                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
11194                CopyLegacyCsvOption::Escape(self.parse_literal_char()?)
11195            }
11196            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) => {
11197                CopyLegacyCsvOption::ForceNotNull(
11198                    self.parse_comma_separated(|p| p.parse_identifier())?,
11199                )
11200            }
11201            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::QUOTE]) => {
11202                CopyLegacyCsvOption::ForceQuote(
11203                    self.parse_comma_separated(|p| p.parse_identifier())?,
11204                )
11205            }
11206            _ => self.expected("csv option", self.peek_token())?,
11207        };
11208        Ok(ret)
11209    }
11210
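    /// Parse a string literal that must contain exactly one character,
    /// e.g. the `'|'` in `DELIMITER '|'`.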
11211    fn parse_literal_char(&mut self) -> Result<char, ParserError> {
11212        let s = self.parse_literal_string()?;
11213        if s.len() != 1 {
11214            let loc = self
11215                .tokens
11216                .get(self.index - 1)
11217                .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
11218            return parser_err!(format!("Expect a char, found {s:?}"), loc);
11219        }
11220        Ok(s.chars().next().unwrap())
11221    }
11222
11223    /// Parse tab-separated values in a
11224    /// `COPY` payload
11225    pub fn parse_tsv(&mut self) -> Vec<Option<String>> {
11226        self.parse_tab_value()
11227    }
11228
11229    /// Parse a single tab-separated value row used by `COPY` payload parsing.
11230    pub fn parse_tab_value(&mut self) -> Vec<Option<String>> {
11231        let mut values = vec![];
11232        let mut content = String::new();
11233        while let Some(t) = self.next_token_no_skip().map(|t| &t.token) {
11234            match t {
11235                Token::Whitespace(Whitespace::Tab) => {
11236                    values.push(Some(core::mem::take(&mut content)));
11237                }
11238                Token::Whitespace(Whitespace::Newline) => {
11239                    values.push(Some(core::mem::take(&mut content)));
11240                }
11241                Token::Backslash => {
11242                    if self.consume_token(&Token::Period) {
11243                        return values;
11244                    }
11245                    if let Token::Word(w) = self.next_token().token {
11246                        if w.value == "N" {
11247                            values.push(None);
11248                        }
11249                    }
11250                }
11251                _ => {
11252                    content.push_str(&t.to_string());
11253                }
11254            }
11255        }
11256        values
11257    }
11258
11259    /// Parse a literal value (numbers, strings, date/time, booleans)
11260    pub fn parse_value(&mut self) -> Result<ValueWithSpan, ParserError> {
11261        let next_token = self.next_token();
11262        let span = next_token.span;
11263        let ok_value = |value: Value| Ok(value.with_span(span));
11264        match next_token.token {
11265            Token::Word(w) => match w.keyword {
11266                Keyword::TRUE if self.dialect.supports_boolean_literals() => {
11267                    ok_value(Value::Boolean(true))
11268                }
11269                Keyword::FALSE if self.dialect.supports_boolean_literals() => {
11270                    ok_value(Value::Boolean(false))
11271                }
11272                Keyword::NULL => ok_value(Value::Null),
11273                Keyword::NoKeyword if w.quote_style.is_some() => match w.quote_style {
11274                    Some('"') => ok_value(Value::DoubleQuotedString(w.value)),
11275                    Some('\'') => ok_value(Value::SingleQuotedString(w.value)),
11276                    _ => self.expected(
11277                        "A value?",
11278                        TokenWithSpan {
11279                            token: Token::Word(w),
11280                            span,
11281                        },
11282                    )?,
11283                },
11284                _ => self.expected(
11285                    "a concrete value",
11286                    TokenWithSpan {
11287                        token: Token::Word(w),
11288                        span,
11289                    },
11290                ),
11291            },
11292            // The call to Self::parse(n, ..) returns a bigdecimal when the
11293            // bigdecimal feature is enabled, and is otherwise a no-op
11294            // (i.e., it returns the input string).
11295            Token::Number(n, l) => ok_value(Value::Number(Self::parse(n, span.start)?, l)),
11296            Token::SingleQuotedString(ref s) => ok_value(Value::SingleQuotedString(
11297                self.maybe_concat_string_literal(s.to_string()),
11298            )),
11299            Token::DoubleQuotedString(ref s) => ok_value(Value::DoubleQuotedString(
11300                self.maybe_concat_string_literal(s.to_string()),
11301            )),
11302            Token::TripleSingleQuotedString(ref s) => {
11303                ok_value(Value::TripleSingleQuotedString(s.to_string()))
11304            }
11305            Token::TripleDoubleQuotedString(ref s) => {
11306                ok_value(Value::TripleDoubleQuotedString(s.to_string()))
11307            }
11308            Token::DollarQuotedString(ref s) => ok_value(Value::DollarQuotedString(s.clone())),
11309            Token::SingleQuotedByteStringLiteral(ref s) => {
11310                ok_value(Value::SingleQuotedByteStringLiteral(s.clone()))
11311            }
11312            Token::DoubleQuotedByteStringLiteral(ref s) => {
11313                ok_value(Value::DoubleQuotedByteStringLiteral(s.clone()))
11314            }
11315            Token::TripleSingleQuotedByteStringLiteral(ref s) => {
11316                ok_value(Value::TripleSingleQuotedByteStringLiteral(s.clone()))
11317            }
11318            Token::TripleDoubleQuotedByteStringLiteral(ref s) => {
11319                ok_value(Value::TripleDoubleQuotedByteStringLiteral(s.clone()))
11320            }
11321            Token::SingleQuotedRawStringLiteral(ref s) => {
11322                ok_value(Value::SingleQuotedRawStringLiteral(s.clone()))
11323            }
11324            Token::DoubleQuotedRawStringLiteral(ref s) => {
11325                ok_value(Value::DoubleQuotedRawStringLiteral(s.clone()))
11326            }
11327            Token::TripleSingleQuotedRawStringLiteral(ref s) => {
11328                ok_value(Value::TripleSingleQuotedRawStringLiteral(s.clone()))
11329            }
11330            Token::TripleDoubleQuotedRawStringLiteral(ref s) => {
11331                ok_value(Value::TripleDoubleQuotedRawStringLiteral(s.clone()))
11332            }
11333            Token::NationalStringLiteral(ref s) => {
11334                ok_value(Value::NationalStringLiteral(s.to_string()))
11335            }
11336            Token::QuoteDelimitedStringLiteral(v) => {
11337                ok_value(Value::QuoteDelimitedStringLiteral(v))
11338            }
11339            Token::NationalQuoteDelimitedStringLiteral(v) => {
11340                ok_value(Value::NationalQuoteDelimitedStringLiteral(v))
11341            }
11342            Token::EscapedStringLiteral(ref s) => {
11343                ok_value(Value::EscapedStringLiteral(s.to_string()))
11344            }
11345            Token::UnicodeStringLiteral(ref s) => {
11346                ok_value(Value::UnicodeStringLiteral(s.to_string()))
11347            }
11348            Token::HexStringLiteral(ref s) => ok_value(Value::HexStringLiteral(s.to_string())),
11349            Token::Placeholder(ref s) => ok_value(Value::Placeholder(s.to_string())),
11350            tok @ Token::Colon | tok @ Token::AtSign => {
11351                // 1. Not calling self.parse_identifier(false)?
11352                //    because only in placeholders do we want to accept
11353                //    numbers as identifiers. This is because Snowflake
11354                //    allows numbers as placeholders
11355                // 2. Not calling self.next_token() to enforce `tok`
11356                //    be followed immediately by a word/number, ie.
11357                //    without any whitespace in between
11358                let next_token = self.next_token_no_skip().unwrap_or(&EOF_TOKEN).clone();
11359                let ident = match next_token.token {
11360                    Token::Word(w) => Ok(w.into_ident(next_token.span)),
11361                    Token::Number(w, false) => Ok(Ident::with_span(next_token.span, w)),
11362                    _ => self.expected("placeholder", next_token),
11363                }?;
11364                Ok(Value::Placeholder(format!("{tok}{}", ident.value))
11365                    .with_span(Span::new(span.start, ident.span.end)))
11366            }
11367            unexpected => self.expected(
11368                "a value",
11369                TokenWithSpan {
11370                    token: unexpected,
11371                    span,
11372                },
11373            ),
11374        }
11375    }
11376
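    /// Concatenate adjacent string literals onto `str` when the dialect
    /// supports implicit string literal concatenation (optionally requiring a
    /// newline between the literals), e.g. `'foo' 'bar'` read as `'foobar'`.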
11377    fn maybe_concat_string_literal(&mut self, mut str: String) -> String {
11378        if self.dialect.supports_string_literal_concatenation() {
11379            while let Token::SingleQuotedString(ref s) | Token::DoubleQuotedString(ref s) =
11380                self.peek_token_ref().token
11381            {
11382                str.push_str(s);
11383                self.advance_token();
11384            }
11385        } else if self
11386            .dialect
11387            .supports_string_literal_concatenation_with_newline()
11388        {
11389            // We are iterating over tokens including whitespaces, to identify
11390            // string literals separated by newlines so we can concatenate them.
11391            let mut after_newline = false;
11392            loop {
11393                match self.peek_token_no_skip().token {
11394                    Token::Whitespace(Whitespace::Newline) => {
11395                        after_newline = true;
11396                        self.next_token_no_skip();
11397                    }
11398                    Token::Whitespace(_) => {
11399                        self.next_token_no_skip();
11400                    }
11401                    Token::SingleQuotedString(ref s) | Token::DoubleQuotedString(ref s)
11402                        if after_newline =>
11403                    {
11404                        str.push_str(s.clone().as_str());
11405                        self.next_token_no_skip();
11406                        after_newline = false;
11407                    }
11408                    _ => break,
11409                }
11410            }
11411        }
11412
11413        str
11414    }
11415
11416    /// Parse an unsigned numeric literal
11417    pub fn parse_number_value(&mut self) -> Result<ValueWithSpan, ParserError> {
11418        let value_wrapper = self.parse_value()?;
11419        match &value_wrapper.value {
11420            Value::Number(_, _) => Ok(value_wrapper),
11421            Value::Placeholder(_) => Ok(value_wrapper),
11422            _ => {
11423                self.prev_token();
11424                self.expected("literal number", self.peek_token())
11425            }
11426        }
11427    }
11428
11429    /// Parse a numeric literal as an expression. Returns an [`Expr::UnaryOp`] if the number is signed,
11430    /// otherwise returns an [`Expr::Value`].
11431    pub fn parse_number(&mut self) -> Result<Expr, ParserError> {
11432        let next_token = self.next_token();
11433        match next_token.token {
11434            Token::Plus => Ok(Expr::UnaryOp {
11435                op: UnaryOperator::Plus,
11436                expr: Box::new(Expr::Value(self.parse_number_value()?)),
11437            }),
11438            Token::Minus => Ok(Expr::UnaryOp {
11439                op: UnaryOperator::Minus,
11440                expr: Box::new(Expr::Value(self.parse_number_value()?)),
11441            }),
11442            _ => {
11443                self.prev_token();
11444                Ok(Expr::Value(self.parse_number_value()?))
11445            }
11446        }
11447    }
11448
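    /// Parse the string expression that follows a character set introducer,
    /// e.g. the `'abc'` in MySQL-style `_utf8mb4'abc'` (illustrative).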
11449    fn parse_introduced_string_expr(&mut self) -> Result<Expr, ParserError> {
11450        let next_token = self.next_token();
11451        let span = next_token.span;
11452        match next_token.token {
11453            Token::SingleQuotedString(ref s) => Ok(Expr::Value(
11454                Value::SingleQuotedString(s.to_string()).with_span(span),
11455            )),
11456            Token::DoubleQuotedString(ref s) => Ok(Expr::Value(
11457                Value::DoubleQuotedString(s.to_string()).with_span(span),
11458            )),
11459            Token::HexStringLiteral(ref s) => Ok(Expr::Value(
11460                Value::HexStringLiteral(s.to_string()).with_span(span),
11461            )),
11462            unexpected => self.expected(
11463                "a string value",
11464                TokenWithSpan {
11465                    token: unexpected,
11466                    span,
11467                },
11468            ),
11469        }
11470    }
11471
11472    /// Parse an unsigned literal integer/long
11473    pub fn parse_literal_uint(&mut self) -> Result<u64, ParserError> {
11474        let next_token = self.next_token();
11475        match next_token.token {
11476            Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start),
11477            _ => self.expected("literal int", next_token),
11478        }
11479    }
11480
11481    /// Parse the body of a `CREATE FUNCTION` specified as a string.
11482    /// e.g. `CREATE FUNCTION ... AS $$ body $$`.
11483    fn parse_create_function_body_string(&mut self) -> Result<CreateFunctionBody, ParserError> {
11484        let parse_string_expr = |parser: &mut Parser| -> Result<Expr, ParserError> {
11485            let peek_token = parser.peek_token();
11486            let span = peek_token.span;
11487            match peek_token.token {
11488                Token::DollarQuotedString(s) if dialect_of!(parser is PostgreSqlDialect | GenericDialect) =>
11489                {
11490                    parser.next_token();
11491                    Ok(Expr::Value(Value::DollarQuotedString(s).with_span(span)))
11492                }
11493                _ => Ok(Expr::Value(
11494                    Value::SingleQuotedString(parser.parse_literal_string()?).with_span(span),
11495                )),
11496            }
11497        };
11498
11499        Ok(CreateFunctionBody::AsBeforeOptions {
11500            body: parse_string_expr(self)?,
11501            link_symbol: if self.consume_token(&Token::Comma) {
11502                Some(parse_string_expr(self)?)
11503            } else {
11504                None
11505            },
11506        })
11507    }
11508
11509    /// Parse a literal string
11510    pub fn parse_literal_string(&mut self) -> Result<String, ParserError> {
11511        let next_token = self.next_token();
11512        match next_token.token {
11513            Token::Word(Word {
11514                value,
11515                keyword: Keyword::NoKeyword,
11516                ..
11517            }) => Ok(value),
11518            Token::SingleQuotedString(s) => Ok(s),
11519            Token::DoubleQuotedString(s) => Ok(s),
11520            Token::EscapedStringLiteral(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
11521                Ok(s)
11522            }
11523            Token::UnicodeStringLiteral(s) => Ok(s),
11524            _ => self.expected("literal string", next_token),
11525        }
11526    }
11527
11528    /// Parse a boolean string
11529    pub(crate) fn parse_boolean_string(&mut self) -> Result<bool, ParserError> {
11530        match self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]) {
11531            Some(Keyword::TRUE) => Ok(true),
11532            Some(Keyword::FALSE) => Ok(false),
11533            _ => self.expected("TRUE or FALSE", self.peek_token()),
11534        }
11535    }
11536
11537    /// Parse a unicode normalization clause: `IS [NOT] [form] NORMALIZED`
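    ///
    /// e.g. the `IS NFC NORMALIZED` in `name IS NFC NORMALIZED`
    /// (illustrative, PostgreSQL-style syntax).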
11538    pub fn parse_unicode_is_normalized(&mut self, expr: Expr) -> Result<Expr, ParserError> {
11539        let neg = self.parse_keyword(Keyword::NOT);
11540        let normalized_form = self.maybe_parse(|parser| {
11541            match parser.parse_one_of_keywords(&[
11542                Keyword::NFC,
11543                Keyword::NFD,
11544                Keyword::NFKC,
11545                Keyword::NFKD,
11546            ]) {
11547                Some(Keyword::NFC) => Ok(NormalizationForm::NFC),
11548                Some(Keyword::NFD) => Ok(NormalizationForm::NFD),
11549                Some(Keyword::NFKC) => Ok(NormalizationForm::NFKC),
11550                Some(Keyword::NFKD) => Ok(NormalizationForm::NFKD),
11551                _ => parser.expected("unicode normalization form", parser.peek_token()),
11552            }
11553        })?;
11554        if self.parse_keyword(Keyword::NORMALIZED) {
11555            return Ok(Expr::IsNormalized {
11556                expr: Box::new(expr),
11557                form: normalized_form,
11558                negated: neg,
11559            });
11560        }
11561        self.expected("unicode normalization form", self.peek_token())
11562    }
11563
11564    /// Parse parenthesized enum members, used with `ENUM(...)` type definitions.
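    ///
    /// e.g. the `('small', 'medium', 'large')` in `ENUM('small', 'medium', 'large')`,
    /// or named members such as `('a' = 1, 'b' = 2)` (illustrative).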
11565    pub fn parse_enum_values(&mut self) -> Result<Vec<EnumMember>, ParserError> {
11566        self.expect_token(&Token::LParen)?;
11567        let values = self.parse_comma_separated(|parser| {
11568            let name = parser.parse_literal_string()?;
11569            let e = if parser.consume_token(&Token::Eq) {
11570                let value = parser.parse_number()?;
11571                EnumMember::NamedValue(name, value)
11572            } else {
11573                EnumMember::Name(name)
11574            };
11575            Ok(e)
11576        })?;
11577        self.expect_token(&Token::RParen)?;
11578
11579        Ok(values)
11580    }
11581
11582    /// Parse a SQL datatype (in the context of a CREATE TABLE statement for example)
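    ///
    /// e.g. `INT`, `VARCHAR(255)`, `DOUBLE PRECISION`, or `CHARACTER VARYING(10)`.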
11583    pub fn parse_data_type(&mut self) -> Result<DataType, ParserError> {
11584        let (ty, trailing_bracket) = self.parse_data_type_helper()?;
11585        if trailing_bracket.0 {
11586            return parser_err!(
11587                format!("unmatched > after parsing data type {ty}"),
11588                self.peek_token()
11589            );
11590        }
11591
11592        Ok(ty)
11593    }
11594
11595    fn parse_data_type_helper(
11596        &mut self,
11597    ) -> Result<(DataType, MatchedTrailingBracket), ParserError> {
11598        let dialect = self.dialect;
11599        self.advance_token();
11600        let next_token = self.get_current_token();
11601        let next_token_index = self.get_current_index();
11602
11603        let mut trailing_bracket: MatchedTrailingBracket = false.into();
11604        let mut data = match &next_token.token {
11605            Token::Word(w) => match w.keyword {
11606                Keyword::BOOLEAN => Ok(DataType::Boolean),
11607                Keyword::BOOL => Ok(DataType::Bool),
11608                Keyword::FLOAT => {
11609                    let precision = self.parse_exact_number_optional_precision_scale()?;
11610
11611                    if self.parse_keyword(Keyword::UNSIGNED) {
11612                        Ok(DataType::FloatUnsigned(precision))
11613                    } else {
11614                        Ok(DataType::Float(precision))
11615                    }
11616                }
11617                Keyword::REAL => {
11618                    if self.parse_keyword(Keyword::UNSIGNED) {
11619                        Ok(DataType::RealUnsigned)
11620                    } else {
11621                        Ok(DataType::Real)
11622                    }
11623                }
11624                Keyword::FLOAT4 => Ok(DataType::Float4),
11625                Keyword::FLOAT32 => Ok(DataType::Float32),
11626                Keyword::FLOAT64 => Ok(DataType::Float64),
11627                Keyword::FLOAT8 => Ok(DataType::Float8),
11628                Keyword::DOUBLE => {
11629                    if self.parse_keyword(Keyword::PRECISION) {
11630                        if self.parse_keyword(Keyword::UNSIGNED) {
11631                            Ok(DataType::DoublePrecisionUnsigned)
11632                        } else {
11633                            Ok(DataType::DoublePrecision)
11634                        }
11635                    } else {
11636                        let precision = self.parse_exact_number_optional_precision_scale()?;
11637
11638                        if self.parse_keyword(Keyword::UNSIGNED) {
11639                            Ok(DataType::DoubleUnsigned(precision))
11640                        } else {
11641                            Ok(DataType::Double(precision))
11642                        }
11643                    }
11644                }
11645                Keyword::TINYINT => {
11646                    let optional_precision = self.parse_optional_precision();
11647                    if self.parse_keyword(Keyword::UNSIGNED) {
11648                        Ok(DataType::TinyIntUnsigned(optional_precision?))
11649                    } else {
11650                        if dialect.supports_data_type_signed_suffix() {
11651                            let _ = self.parse_keyword(Keyword::SIGNED);
11652                        }
11653                        Ok(DataType::TinyInt(optional_precision?))
11654                    }
11655                }
11656                Keyword::INT2 => {
11657                    let optional_precision = self.parse_optional_precision();
11658                    if self.parse_keyword(Keyword::UNSIGNED) {
11659                        Ok(DataType::Int2Unsigned(optional_precision?))
11660                    } else {
11661                        Ok(DataType::Int2(optional_precision?))
11662                    }
11663                }
11664                Keyword::SMALLINT => {
11665                    let optional_precision = self.parse_optional_precision();
11666                    if self.parse_keyword(Keyword::UNSIGNED) {
11667                        Ok(DataType::SmallIntUnsigned(optional_precision?))
11668                    } else {
11669                        if dialect.supports_data_type_signed_suffix() {
11670                            let _ = self.parse_keyword(Keyword::SIGNED);
11671                        }
11672                        Ok(DataType::SmallInt(optional_precision?))
11673                    }
11674                }
11675                Keyword::MEDIUMINT => {
11676                    let optional_precision = self.parse_optional_precision();
11677                    if self.parse_keyword(Keyword::UNSIGNED) {
11678                        Ok(DataType::MediumIntUnsigned(optional_precision?))
11679                    } else {
11680                        if dialect.supports_data_type_signed_suffix() {
11681                            let _ = self.parse_keyword(Keyword::SIGNED);
11682                        }
11683                        Ok(DataType::MediumInt(optional_precision?))
11684                    }
11685                }
11686                Keyword::INT => {
11687                    let optional_precision = self.parse_optional_precision();
11688                    if self.parse_keyword(Keyword::UNSIGNED) {
11689                        Ok(DataType::IntUnsigned(optional_precision?))
11690                    } else {
11691                        if dialect.supports_data_type_signed_suffix() {
11692                            let _ = self.parse_keyword(Keyword::SIGNED);
11693                        }
11694                        Ok(DataType::Int(optional_precision?))
11695                    }
11696                }
11697                Keyword::INT4 => {
11698                    let optional_precision = self.parse_optional_precision();
11699                    if self.parse_keyword(Keyword::UNSIGNED) {
11700                        Ok(DataType::Int4Unsigned(optional_precision?))
11701                    } else {
11702                        Ok(DataType::Int4(optional_precision?))
11703                    }
11704                }
11705                Keyword::INT8 => {
11706                    let optional_precision = self.parse_optional_precision();
11707                    if self.parse_keyword(Keyword::UNSIGNED) {
11708                        Ok(DataType::Int8Unsigned(optional_precision?))
11709                    } else {
11710                        Ok(DataType::Int8(optional_precision?))
11711                    }
11712                }
11713                Keyword::INT16 => Ok(DataType::Int16),
11714                Keyword::INT32 => Ok(DataType::Int32),
11715                Keyword::INT64 => Ok(DataType::Int64),
11716                Keyword::INT128 => Ok(DataType::Int128),
11717                Keyword::INT256 => Ok(DataType::Int256),
11718                Keyword::INTEGER => {
11719                    let optional_precision = self.parse_optional_precision();
11720                    if self.parse_keyword(Keyword::UNSIGNED) {
11721                        Ok(DataType::IntegerUnsigned(optional_precision?))
11722                    } else {
11723                        if dialect.supports_data_type_signed_suffix() {
11724                            let _ = self.parse_keyword(Keyword::SIGNED);
11725                        }
11726                        Ok(DataType::Integer(optional_precision?))
11727                    }
11728                }
11729                Keyword::BIGINT => {
11730                    let optional_precision = self.parse_optional_precision();
11731                    if self.parse_keyword(Keyword::UNSIGNED) {
11732                        Ok(DataType::BigIntUnsigned(optional_precision?))
11733                    } else {
11734                        if dialect.supports_data_type_signed_suffix() {
11735                            let _ = self.parse_keyword(Keyword::SIGNED);
11736                        }
11737                        Ok(DataType::BigInt(optional_precision?))
11738                    }
11739                }
11740                Keyword::HUGEINT => Ok(DataType::HugeInt),
11741                Keyword::UBIGINT => Ok(DataType::UBigInt),
11742                Keyword::UHUGEINT => Ok(DataType::UHugeInt),
11743                Keyword::USMALLINT => Ok(DataType::USmallInt),
11744                Keyword::UTINYINT => Ok(DataType::UTinyInt),
11745                Keyword::UINT8 => Ok(DataType::UInt8),
11746                Keyword::UINT16 => Ok(DataType::UInt16),
11747                Keyword::UINT32 => Ok(DataType::UInt32),
11748                Keyword::UINT64 => Ok(DataType::UInt64),
11749                Keyword::UINT128 => Ok(DataType::UInt128),
11750                Keyword::UINT256 => Ok(DataType::UInt256),
11751                Keyword::VARCHAR => Ok(DataType::Varchar(self.parse_optional_character_length()?)),
11752                Keyword::NVARCHAR => {
11753                    Ok(DataType::Nvarchar(self.parse_optional_character_length()?))
11754                }
11755                Keyword::CHARACTER => {
11756                    if self.parse_keyword(Keyword::VARYING) {
11757                        Ok(DataType::CharacterVarying(
11758                            self.parse_optional_character_length()?,
11759                        ))
11760                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
11761                        Ok(DataType::CharacterLargeObject(
11762                            self.parse_optional_precision()?,
11763                        ))
11764                    } else {
11765                        Ok(DataType::Character(self.parse_optional_character_length()?))
11766                    }
11767                }
11768                Keyword::CHAR => {
11769                    if self.parse_keyword(Keyword::VARYING) {
11770                        Ok(DataType::CharVarying(
11771                            self.parse_optional_character_length()?,
11772                        ))
11773                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
11774                        Ok(DataType::CharLargeObject(self.parse_optional_precision()?))
11775                    } else {
11776                        Ok(DataType::Char(self.parse_optional_character_length()?))
11777                    }
11778                }
11779                Keyword::CLOB => Ok(DataType::Clob(self.parse_optional_precision()?)),
11780                Keyword::BINARY => Ok(DataType::Binary(self.parse_optional_precision()?)),
11781                Keyword::VARBINARY => Ok(DataType::Varbinary(self.parse_optional_binary_length()?)),
11782                Keyword::BLOB => Ok(DataType::Blob(self.parse_optional_precision()?)),
11783                Keyword::TINYBLOB => Ok(DataType::TinyBlob),
11784                Keyword::MEDIUMBLOB => Ok(DataType::MediumBlob),
11785                Keyword::LONGBLOB => Ok(DataType::LongBlob),
11786                Keyword::BYTES => Ok(DataType::Bytes(self.parse_optional_precision()?)),
11787                Keyword::BIT => {
11788                    if self.parse_keyword(Keyword::VARYING) {
11789                        Ok(DataType::BitVarying(self.parse_optional_precision()?))
11790                    } else {
11791                        Ok(DataType::Bit(self.parse_optional_precision()?))
11792                    }
11793                }
11794                Keyword::VARBIT => Ok(DataType::VarBit(self.parse_optional_precision()?)),
11795                Keyword::UUID => Ok(DataType::Uuid),
11796                Keyword::DATE => Ok(DataType::Date),
11797                Keyword::DATE32 => Ok(DataType::Date32),
11798                Keyword::DATETIME => Ok(DataType::Datetime(self.parse_optional_precision()?)),
11799                Keyword::DATETIME64 => {
11800                    self.prev_token();
11801                    let (precision, time_zone) = self.parse_datetime_64()?;
11802                    Ok(DataType::Datetime64(precision, time_zone))
11803                }
11804                Keyword::TIMESTAMP => {
11805                    let precision = self.parse_optional_precision()?;
11806                    let tz = if self.parse_keyword(Keyword::WITH) {
11807                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
11808                        TimezoneInfo::WithTimeZone
11809                    } else if self.parse_keyword(Keyword::WITHOUT) {
11810                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
11811                        TimezoneInfo::WithoutTimeZone
11812                    } else {
11813                        TimezoneInfo::None
11814                    };
11815                    Ok(DataType::Timestamp(precision, tz))
11816                }
11817                Keyword::TIMESTAMPTZ => Ok(DataType::Timestamp(
11818                    self.parse_optional_precision()?,
11819                    TimezoneInfo::Tz,
11820                )),
11821                Keyword::TIMESTAMP_NTZ => {
11822                    Ok(DataType::TimestampNtz(self.parse_optional_precision()?))
11823                }
11824                Keyword::TIME => {
11825                    let precision = self.parse_optional_precision()?;
11826                    let tz = if self.parse_keyword(Keyword::WITH) {
11827                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
11828                        TimezoneInfo::WithTimeZone
11829                    } else if self.parse_keyword(Keyword::WITHOUT) {
11830                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
11831                        TimezoneInfo::WithoutTimeZone
11832                    } else {
11833                        TimezoneInfo::None
11834                    };
11835                    Ok(DataType::Time(precision, tz))
11836                }
11837                Keyword::TIMETZ => Ok(DataType::Time(
11838                    self.parse_optional_precision()?,
11839                    TimezoneInfo::Tz,
11840                )),
11841                Keyword::INTERVAL => {
11842                    if self.dialect.supports_interval_options() {
11843                        let fields = self.maybe_parse_optional_interval_fields()?;
11844                        let precision = self.parse_optional_precision()?;
11845                        Ok(DataType::Interval { fields, precision })
11846                    } else {
11847                        Ok(DataType::Interval {
11848                            fields: None,
11849                            precision: None,
11850                        })
11851                    }
11852                }
11853                Keyword::JSON => Ok(DataType::JSON),
11854                Keyword::JSONB => Ok(DataType::JSONB),
11855                Keyword::REGCLASS => Ok(DataType::Regclass),
11856                Keyword::STRING => Ok(DataType::String(self.parse_optional_precision()?)),
11857                Keyword::FIXEDSTRING => {
11858                    self.expect_token(&Token::LParen)?;
11859                    let character_length = self.parse_literal_uint()?;
11860                    self.expect_token(&Token::RParen)?;
11861                    Ok(DataType::FixedString(character_length))
11862                }
11863                Keyword::TEXT => Ok(DataType::Text),
11864                Keyword::TINYTEXT => Ok(DataType::TinyText),
11865                Keyword::MEDIUMTEXT => Ok(DataType::MediumText),
11866                Keyword::LONGTEXT => Ok(DataType::LongText),
11867                Keyword::BYTEA => Ok(DataType::Bytea),
11868                Keyword::NUMERIC => Ok(DataType::Numeric(
11869                    self.parse_exact_number_optional_precision_scale()?,
11870                )),
11871                Keyword::DECIMAL => {
11872                    let precision = self.parse_exact_number_optional_precision_scale()?;
11873
11874                    if self.parse_keyword(Keyword::UNSIGNED) {
11875                        Ok(DataType::DecimalUnsigned(precision))
11876                    } else {
11877                        Ok(DataType::Decimal(precision))
11878                    }
11879                }
11880                Keyword::DEC => {
11881                    let precision = self.parse_exact_number_optional_precision_scale()?;
11882
11883                    if self.parse_keyword(Keyword::UNSIGNED) {
11884                        Ok(DataType::DecUnsigned(precision))
11885                    } else {
11886                        Ok(DataType::Dec(precision))
11887                    }
11888                }
11889                Keyword::BIGNUMERIC => Ok(DataType::BigNumeric(
11890                    self.parse_exact_number_optional_precision_scale()?,
11891                )),
11892                Keyword::BIGDECIMAL => Ok(DataType::BigDecimal(
11893                    self.parse_exact_number_optional_precision_scale()?,
11894                )),
11895                Keyword::ENUM => Ok(DataType::Enum(self.parse_enum_values()?, None)),
11896                Keyword::ENUM8 => Ok(DataType::Enum(self.parse_enum_values()?, Some(8))),
11897                Keyword::ENUM16 => Ok(DataType::Enum(self.parse_enum_values()?, Some(16))),
11898                Keyword::SET => Ok(DataType::Set(self.parse_string_values()?)),
11899                Keyword::ARRAY => {
11900                    if self.dialect.supports_array_typedef_without_element_type() {
11901                        Ok(DataType::Array(ArrayElemTypeDef::None))
11902                    } else if dialect_of!(self is ClickHouseDialect) {
11903                        Ok(self.parse_sub_type(|internal_type| {
11904                            DataType::Array(ArrayElemTypeDef::Parenthesis(internal_type))
11905                        })?)
11906                    } else {
11907                        self.expect_token(&Token::Lt)?;
11908                        let (inside_type, _trailing_bracket) = self.parse_data_type_helper()?;
11909                        trailing_bracket = self.expect_closing_angle_bracket(_trailing_bracket)?;
11910                        Ok(DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
11911                            inside_type,
11912                        ))))
11913                    }
11914                }
11915                Keyword::STRUCT if dialect_is!(dialect is DuckDbDialect) => {
11916                    self.prev_token();
11917                    let field_defs = self.parse_duckdb_struct_type_def()?;
11918                    Ok(DataType::Struct(field_defs, StructBracketKind::Parentheses))
11919                }
11920                Keyword::STRUCT if dialect_is!(dialect is BigQueryDialect | GenericDialect) => {
11921                    self.prev_token();
11922                    let (field_defs, _trailing_bracket) =
11923                        self.parse_struct_type_def(Self::parse_struct_field_def)?;
11924                    trailing_bracket = _trailing_bracket;
11925                    Ok(DataType::Struct(
11926                        field_defs,
11927                        StructBracketKind::AngleBrackets,
11928                    ))
11929                }
11930                Keyword::UNION if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
11931                    self.prev_token();
11932                    let fields = self.parse_union_type_def()?;
11933                    Ok(DataType::Union(fields))
11934                }
11935                Keyword::NULLABLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11936                    Ok(self.parse_sub_type(DataType::Nullable)?)
11937                }
11938                Keyword::LOWCARDINALITY if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11939                    Ok(self.parse_sub_type(DataType::LowCardinality)?)
11940                }
11941                Keyword::MAP if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11942                    self.prev_token();
11943                    let (key_data_type, value_data_type) = self.parse_click_house_map_def()?;
11944                    Ok(DataType::Map(
11945                        Box::new(key_data_type),
11946                        Box::new(value_data_type),
11947                    ))
11948                }
11949                Keyword::NESTED if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11950                    self.expect_token(&Token::LParen)?;
11951                    let field_defs = self.parse_comma_separated(Parser::parse_column_def)?;
11952                    self.expect_token(&Token::RParen)?;
11953                    Ok(DataType::Nested(field_defs))
11954                }
11955                Keyword::TUPLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
11956                    self.prev_token();
11957                    let field_defs = self.parse_click_house_tuple_def()?;
11958                    Ok(DataType::Tuple(field_defs))
11959                }
11960                Keyword::TRIGGER => Ok(DataType::Trigger),
11961                Keyword::ANY if self.peek_keyword(Keyword::TYPE) => {
11962                    let _ = self.parse_keyword(Keyword::TYPE);
11963                    Ok(DataType::AnyType)
11964                }
11965                Keyword::TABLE => {
11966                    // an LParen after the TABLE keyword indicates that table columns are being defined
11967                    // whereas no LParen indicates an anonymous table expression will be returned
11968                    if self.peek_token() == Token::LParen {
11969                        let columns = self.parse_returns_table_columns()?;
11970                        Ok(DataType::Table(Some(columns)))
11971                    } else {
11972                        Ok(DataType::Table(None))
11973                    }
11974                }
11975                Keyword::SIGNED => {
11976                    if self.parse_keyword(Keyword::INTEGER) {
11977                        Ok(DataType::SignedInteger)
11978                    } else {
11979                        Ok(DataType::Signed)
11980                    }
11981                }
11982                Keyword::UNSIGNED => {
11983                    if self.parse_keyword(Keyword::INTEGER) {
11984                        Ok(DataType::UnsignedInteger)
11985                    } else {
11986                        Ok(DataType::Unsigned)
11987                    }
11988                }
11989                Keyword::TSVECTOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
11990                    Ok(DataType::TsVector)
11991                }
11992                Keyword::TSQUERY if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
11993                    Ok(DataType::TsQuery)
11994                }
11995                _ => {
11996                    self.prev_token();
11997                    let type_name = self.parse_object_name(false)?;
11998                    if let Some(modifiers) = self.parse_optional_type_modifiers()? {
11999                        Ok(DataType::Custom(type_name, modifiers))
12000                    } else {
12001                        Ok(DataType::Custom(type_name, vec![]))
12002                    }
12003                }
12004            },
12005            _ => self.expected_at("a data type name", next_token_index),
12006        }?;
12007
12008        if self.dialect.supports_array_typedef_with_brackets() {
12009            while self.consume_token(&Token::LBracket) {
12010                // Parse optional array data type size
12011                let size = self.maybe_parse(|p| p.parse_literal_uint())?;
12012                self.expect_token(&Token::RBracket)?;
12013                data = DataType::Array(ArrayElemTypeDef::SquareBracket(Box::new(data), size))
12014            }
12015        }
12016        Ok((data, trailing_bracket))
12017    }
12018
12019    fn parse_returns_table_column(&mut self) -> Result<ColumnDef, ParserError> {
12020        self.parse_column_def()
12021    }
12022
12023    fn parse_returns_table_columns(&mut self) -> Result<Vec<ColumnDef>, ParserError> {
12024        self.expect_token(&Token::LParen)?;
12025        let columns = self.parse_comma_separated(Parser::parse_returns_table_column)?;
12026        self.expect_token(&Token::RParen)?;
12027        Ok(columns)
12028    }
12029
12030    /// Parse a parenthesized, comma-separated list of single-quoted strings.
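    ///
    /// A minimal usage sketch (illustrative only), assuming the `GenericDialect`:
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("('a', 'b', 'c')").unwrap();
    /// let values = parser.parse_string_values().unwrap();
    /// assert_eq!(values, vec!["a".to_string(), "b".to_string(), "c".to_string()]);
    /// ```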
12031    pub fn parse_string_values(&mut self) -> Result<Vec<String>, ParserError> {
12032        self.expect_token(&Token::LParen)?;
12033        let mut values = Vec::new();
12034        loop {
12035            let next_token = self.next_token();
12036            match next_token.token {
12037                Token::SingleQuotedString(value) => values.push(value),
12038                _ => self.expected("a string", next_token)?,
12039            }
12040            let next_token = self.next_token();
12041            match next_token.token {
12042                Token::Comma => (),
12043                Token::RParen => break,
12044                _ => self.expected(", or )", next_token)?,
12045            }
12046        }
12047        Ok(values)
12048    }
12049
12050    /// Strictly parse `identifier AS identifier`
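    ///
    /// A minimal usage sketch (illustrative only), assuming the `GenericDialect`:
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("foo AS bar").unwrap();
    /// let with_alias = parser.parse_identifier_with_alias().unwrap();
    /// assert_eq!(with_alias.ident.value, "foo");
    /// assert_eq!(with_alias.alias.value, "bar");
    /// ```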
12051    pub fn parse_identifier_with_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
12052        let ident = self.parse_identifier()?;
12053        self.expect_keyword_is(Keyword::AS)?;
12054        let alias = self.parse_identifier()?;
12055        Ok(IdentWithAlias { ident, alias })
12056    }
12057
12058    /// Parse `identifier [AS] identifier` where the AS keyword is optional
12059    fn parse_identifier_with_optional_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
12060        let ident = self.parse_identifier()?;
12061        let _after_as = self.parse_keyword(Keyword::AS);
12062        let alias = self.parse_identifier()?;
12063        Ok(IdentWithAlias { ident, alias })
12064    }
12065
12066    /// Parse comma-separated list of parenthesized queries for pipe operators
12067    fn parse_pipe_operator_queries(&mut self) -> Result<Vec<Query>, ParserError> {
12068        self.parse_comma_separated(|parser| {
12069            parser.expect_token(&Token::LParen)?;
12070            let query = parser.parse_query()?;
12071            parser.expect_token(&Token::RParen)?;
12072            Ok(*query)
12073        })
12074    }
12075
12076    /// Parse set quantifier for pipe operators that require DISTINCT. E.g. INTERSECT and EXCEPT
12077    fn parse_distinct_required_set_quantifier(
12078        &mut self,
12079        operator_name: &str,
12080    ) -> Result<SetQuantifier, ParserError> {
12081        let quantifier = self.parse_set_quantifier(&Some(SetOperator::Intersect));
12082        match quantifier {
12083            SetQuantifier::Distinct | SetQuantifier::DistinctByName => Ok(quantifier),
12084            _ => Err(ParserError::ParserError(format!(
12085                "{operator_name} pipe operator requires DISTINCT modifier",
12086            ))),
12087        }
12088    }
12089
12090    /// Parse optional identifier alias (with or without AS keyword)
12091    fn parse_identifier_optional_alias(&mut self) -> Result<Option<Ident>, ParserError> {
12092        if self.parse_keyword(Keyword::AS) {
12093            Ok(Some(self.parse_identifier()?))
12094        } else {
12095            // Check if the next token is an identifier (implicit alias)
12096            self.maybe_parse(|parser| parser.parse_identifier())
12097        }
12098    }
12099
12100    /// Optionally parses an alias for a select list item
12101    fn maybe_parse_select_item_alias(&mut self) -> Result<Option<Ident>, ParserError> {
12102        fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
12103            parser.dialect.is_select_item_alias(explicit, kw, parser)
12104        }
12105        self.parse_optional_alias_inner(None, validator)
12106    }
12107
12108    /// Optionally parses an alias for a table like in `... FROM generate_series(1, 10) AS t (col)`.
12109    /// In this case, the alias is allowed to optionally name the columns in the table, in
12110    /// addition to the table itself.
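    ///
    /// A minimal usage sketch (illustrative only), assuming the `GenericDialect`:
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("AS t (a, b)").unwrap();
    /// let alias = parser.maybe_parse_table_alias().unwrap().unwrap();
    /// assert_eq!(alias.name.value, "t");
    /// assert_eq!(alias.columns.len(), 2);
    /// ```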
12111    pub fn maybe_parse_table_alias(&mut self) -> Result<Option<TableAlias>, ParserError> {
12112        fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
12113            parser.dialect.is_table_factor_alias(explicit, kw, parser)
12114        }
12115        let explicit = self.peek_keyword(Keyword::AS);
12116        match self.parse_optional_alias_inner(None, validator)? {
12117            Some(name) => {
12118                let columns = self.parse_table_alias_column_defs()?;
12119                Ok(Some(TableAlias {
12120                    explicit,
12121                    name,
12122                    columns,
12123                }))
12124            }
12125            None => Ok(None),
12126        }
12127    }
12128
12129    fn parse_table_index_hints(&mut self) -> Result<Vec<TableIndexHints>, ParserError> {
12130        let mut hints = vec![];
12131        while let Some(hint_type) =
12132            self.parse_one_of_keywords(&[Keyword::USE, Keyword::IGNORE, Keyword::FORCE])
12133        {
12134            let hint_type = match hint_type {
12135                Keyword::USE => TableIndexHintType::Use,
12136                Keyword::IGNORE => TableIndexHintType::Ignore,
12137                Keyword::FORCE => TableIndexHintType::Force,
12138                _ => {
12139                    return self.expected(
12140                        "expected to match USE/IGNORE/FORCE keyword",
12141                        self.peek_token(),
12142                    )
12143                }
12144            };
12145            let index_type = match self.parse_one_of_keywords(&[Keyword::INDEX, Keyword::KEY]) {
12146                Some(Keyword::INDEX) => TableIndexType::Index,
12147                Some(Keyword::KEY) => TableIndexType::Key,
12148                _ => {
12149                    return self.expected("expected to match INDEX/KEY keyword", self.peek_token())
12150                }
12151            };
12152            let for_clause = if self.parse_keyword(Keyword::FOR) {
12153                let clause = if self.parse_keyword(Keyword::JOIN) {
12154                    TableIndexHintForClause::Join
12155                } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
12156                    TableIndexHintForClause::OrderBy
12157                } else if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
12158                    TableIndexHintForClause::GroupBy
12159                } else {
12160                    return self.expected(
12161                        "expected to match JOIN/ORDER BY/GROUP BY table hint in for clause",
12162                        self.peek_token(),
12163                    );
12164                };
12165                Some(clause)
12166            } else {
12167                None
12168            };
12169
12170            self.expect_token(&Token::LParen)?;
12171            let index_names = if self.peek_token().token != Token::RParen {
12172                self.parse_comma_separated(Parser::parse_identifier)?
12173            } else {
12174                vec![]
12175            };
12176            self.expect_token(&Token::RParen)?;
12177            hints.push(TableIndexHints {
12178                hint_type,
12179                index_type,
12180                for_clause,
12181                index_names,
12182            });
12183        }
12184        Ok(hints)
12185    }
12186
12187    /// Wrapper for parse_optional_alias_inner, kept for backwards-compatibility;
12188    /// new flows should use the context-specific methods such as `maybe_parse_select_item_alias`
12189    /// and `maybe_parse_table_alias`.
12190    pub fn parse_optional_alias(
12191        &mut self,
12192        reserved_kwds: &[Keyword],
12193    ) -> Result<Option<Ident>, ParserError> {
12194        fn validator(_explicit: bool, _kw: &Keyword, _parser: &mut Parser) -> bool {
12195            false
12196        }
12197        self.parse_optional_alias_inner(Some(reserved_kwds), validator)
12198    }
12199
12200    /// Parses an optional alias after a SQL element such as a select list item
12201    /// or a table name.
12202    ///
12203    /// This method accepts an optional list of reserved keywords and a validator
12204    /// function used to decide whether a keyword should be parsed as an alias, allowing
12205    /// callers to customize the parsing logic based on their context.
12206    fn parse_optional_alias_inner<F>(
12207        &mut self,
12208        reserved_kwds: Option<&[Keyword]>,
12209        validator: F,
12210    ) -> Result<Option<Ident>, ParserError>
12211    where
12212        F: Fn(bool, &Keyword, &mut Parser) -> bool,
12213    {
12214        let after_as = self.parse_keyword(Keyword::AS);
12215
12216        let next_token = self.next_token();
12217        match next_token.token {
12218            // Accepts a keyword as an alias if the AS keyword explicitly indicates an alias or if the
12219            // caller provided a list of reserved keywords and the keyword is not on that list.
12220            Token::Word(w)
12221                if reserved_kwds.is_some()
12222                    && (after_as || reserved_kwds.is_some_and(|x| !x.contains(&w.keyword))) =>
12223            {
12224                Ok(Some(w.into_ident(next_token.span)))
12225            }
12226            // Accepts a keyword as an alias based on the caller's context, i.e. what SQL element
12227            // this word could be an alias of, as determined by the validator callback. This allows for
12228            // dialect-specific logic.
12229            Token::Word(w) if validator(after_as, &w.keyword, self) => {
12230                Ok(Some(w.into_ident(next_token.span)))
12231            }
12232            // For backwards-compatibility, we accept quoted strings as aliases regardless of the context.
12233            Token::SingleQuotedString(s) => Ok(Some(Ident::with_quote('\'', s))),
12234            Token::DoubleQuotedString(s) => Ok(Some(Ident::with_quote('\"', s))),
12235            _ => {
12236                if after_as {
12237                    return self.expected("an identifier after AS", next_token);
12238                }
12239                self.prev_token();
12240                Ok(None) // no alias found
12241            }
12242        }
12243    }
12244
12245    /// Parse an optional `GROUP BY` clause, returning `Some(GroupByExpr)` when present.
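    ///
    /// A minimal usage sketch (illustrative only); assumes the `GenericDialect` and that
    /// `GroupByExpr` is exported from `sqlparser::ast`:
    ///
    /// ```rust
    /// use sqlparser::ast::GroupByExpr;
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("GROUP BY a, b").unwrap();
    /// let group_by = parser.parse_optional_group_by().unwrap();
    /// assert!(matches!(
    ///     group_by,
    ///     Some(GroupByExpr::Expressions(ref exprs, _)) if exprs.len() == 2
    /// ));
    /// ```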
12246    pub fn parse_optional_group_by(&mut self) -> Result<Option<GroupByExpr>, ParserError> {
12247        if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
12248            let expressions = if self.parse_keyword(Keyword::ALL) {
12249                None
12250            } else {
12251                Some(self.parse_comma_separated(Parser::parse_group_by_expr)?)
12252            };
12253
12254            let mut modifiers = vec![];
12255            if self.dialect.supports_group_by_with_modifier() {
12256                loop {
12257                    if !self.parse_keyword(Keyword::WITH) {
12258                        break;
12259                    }
12260                    let keyword = self.expect_one_of_keywords(&[
12261                        Keyword::ROLLUP,
12262                        Keyword::CUBE,
12263                        Keyword::TOTALS,
12264                    ])?;
12265                    modifiers.push(match keyword {
12266                        Keyword::ROLLUP => GroupByWithModifier::Rollup,
12267                        Keyword::CUBE => GroupByWithModifier::Cube,
12268                        Keyword::TOTALS => GroupByWithModifier::Totals,
12269                        _ => {
12270                            return parser_err!(
12271                                "BUG: expected to match GroupBy modifier keyword",
12272                                self.peek_token().span.start
12273                            )
12274                        }
12275                    });
12276                }
12277            }
12278            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
12279                self.expect_token(&Token::LParen)?;
12280                let result = self.parse_comma_separated(|p| {
12281                    if p.peek_token_ref().token == Token::LParen {
12282                        p.parse_tuple(true, true)
12283                    } else {
12284                        Ok(vec![p.parse_expr()?])
12285                    }
12286                })?;
12287                self.expect_token(&Token::RParen)?;
12288                modifiers.push(GroupByWithModifier::GroupingSets(Expr::GroupingSets(
12289                    result,
12290                )));
12291            };
12292            let group_by = match expressions {
12293                None => GroupByExpr::All(modifiers),
12294                Some(exprs) => GroupByExpr::Expressions(exprs, modifiers),
12295            };
12296            Ok(Some(group_by))
12297        } else {
12298            Ok(None)
12299        }
12300    }
12301
12302    /// Parse an optional `ORDER BY` clause, returning `Some(OrderBy)` when present.
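    ///
    /// A minimal usage sketch (illustrative only); assumes the `GenericDialect` and that
    /// `OrderByKind` is exported from `sqlparser::ast`:
    ///
    /// ```rust
    /// use sqlparser::ast::OrderByKind;
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("ORDER BY a DESC").unwrap();
    /// let order_by = parser.parse_optional_order_by().unwrap().unwrap();
    /// assert!(matches!(order_by.kind, OrderByKind::Expressions(ref exprs) if exprs.len() == 1));
    /// ```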
12303    pub fn parse_optional_order_by(&mut self) -> Result<Option<OrderBy>, ParserError> {
12304        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
12305            let order_by =
12306                if self.dialect.supports_order_by_all() && self.parse_keyword(Keyword::ALL) {
12307                    let order_by_options = self.parse_order_by_options()?;
12308                    OrderBy {
12309                        kind: OrderByKind::All(order_by_options),
12310                        interpolate: None,
12311                    }
12312                } else {
12313                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
12314                    let interpolate = if self.dialect.supports_interpolate() {
12315                        self.parse_interpolations()?
12316                    } else {
12317                        None
12318                    };
12319                    OrderBy {
12320                        kind: OrderByKind::Expressions(exprs),
12321                        interpolate,
12322                    }
12323                };
12324            Ok(Some(order_by))
12325        } else {
12326            Ok(None)
12327        }
12328    }
12329
12330    fn parse_optional_limit_clause(&mut self) -> Result<Option<LimitClause>, ParserError> {
12331        let mut offset = if self.parse_keyword(Keyword::OFFSET) {
12332            Some(self.parse_offset()?)
12333        } else {
12334            None
12335        };
12336
12337        let (limit, limit_by) = if self.parse_keyword(Keyword::LIMIT) {
12338            let expr = self.parse_limit()?;
12339
12340            if self.dialect.supports_limit_comma()
12341                && offset.is_none()
12342                && expr.is_some() // ALL not supported with comma
12343                && self.consume_token(&Token::Comma)
12344            {
12345                let offset = expr.ok_or_else(|| {
12346                    ParserError::ParserError(
12347                        "Missing offset for LIMIT <offset>, <limit>".to_string(),
12348                    )
12349                })?;
12350                return Ok(Some(LimitClause::OffsetCommaLimit {
12351                    offset,
12352                    limit: self.parse_expr()?,
12353                }));
12354            }
12355
12356            let limit_by = if self.dialect.supports_limit_by() && self.parse_keyword(Keyword::BY) {
12357                Some(self.parse_comma_separated(Parser::parse_expr)?)
12358            } else {
12359                None
12360            };
12361
12362            (Some(expr), limit_by)
12363        } else {
12364            (None, None)
12365        };
12366
12367        if offset.is_none() && limit.is_some() && self.parse_keyword(Keyword::OFFSET) {
12368            offset = Some(self.parse_offset()?);
12369        }
12370
12371        if offset.is_some() || (limit.is_some() && limit != Some(None)) || limit_by.is_some() {
12372            Ok(Some(LimitClause::LimitOffset {
12373                limit: limit.unwrap_or_default(),
12374                offset,
12375                limit_by: limit_by.unwrap_or_default(),
12376            }))
12377        } else {
12378            Ok(None)
12379        }
12380    }
12381
12382    /// Parse a table object for insertion
12383    /// e.g. `some_database.some_table` or `FUNCTION some_table_func(...)`
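    ///
    /// A minimal usage sketch (illustrative only); assumes the `GenericDialect` and that
    /// `TableObject` is exported from `sqlparser::ast`:
    ///
    /// ```rust
    /// use sqlparser::ast::TableObject;
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect)
    ///     .try_with_sql("some_database.some_table")
    ///     .unwrap();
    /// let table = parser.parse_table_object().unwrap();
    /// assert!(matches!(
    ///     table,
    ///     TableObject::TableName(ref name) if name.to_string() == "some_database.some_table"
    /// ));
    /// ```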
12384    pub fn parse_table_object(&mut self) -> Result<TableObject, ParserError> {
12385        if self.dialect.supports_insert_table_function() && self.parse_keyword(Keyword::FUNCTION) {
12386            let fn_name = self.parse_object_name(false)?;
12387            self.parse_function_call(fn_name)
12388                .map(TableObject::TableFunction)
12389        } else {
12390            self.parse_object_name(false).map(TableObject::TableName)
12391        }
12392    }
12393
12394    /// Parse a possibly qualified, possibly quoted identifier, e.g.
12395    /// `foo` or `myschema."table"`
12396    ///
12397    /// The `in_table_clause` parameter indicates whether the object name is a table in a FROM, JOIN,
12398    /// or similar table clause. Currently, this is used only to support unquoted hyphenated identifiers
12399    /// in this context on BigQuery.
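    ///
    /// A minimal usage sketch (illustrative only), assuming the `GenericDialect`:
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect)
    ///     .try_with_sql(r#"myschema."table""#)
    ///     .unwrap();
    /// let name = parser.parse_object_name(false).unwrap();
    /// assert_eq!(name.to_string(), r#"myschema."table""#);
    /// ```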
12400    pub fn parse_object_name(&mut self, in_table_clause: bool) -> Result<ObjectName, ParserError> {
12401        self.parse_object_name_inner(in_table_clause, false)
12402    }
12403
12404    /// Parse a possibly qualified, possibly quoted identifier, e.g.
12405    /// `foo` or `myschema."table"`
12406    ///
12407    /// The `in_table_clause` parameter indicates whether the object name is a table in a FROM, JOIN,
12408    /// or similar table clause. Currently, this is used only to support unquoted hyphenated identifiers
12409    /// in this context on BigQuery.
12410    ///
12411    /// The `allow_wildcards` parameter indicates whether to allow for wildcards in the object name
12412    /// e.g. *, *.*, `foo`.*, or "foo"."bar"
12413    fn parse_object_name_inner(
12414        &mut self,
12415        in_table_clause: bool,
12416        allow_wildcards: bool,
12417    ) -> Result<ObjectName, ParserError> {
12418        let mut parts = vec![];
12419        if dialect_of!(self is BigQueryDialect) && in_table_clause {
12420            loop {
12421                let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
12422                parts.push(ObjectNamePart::Identifier(ident));
12423                if !self.consume_token(&Token::Period) && !end_with_period {
12424                    break;
12425                }
12426            }
12427        } else {
12428            loop {
12429                if allow_wildcards && self.peek_token().token == Token::Mul {
12430                    let span = self.next_token().span;
12431                    parts.push(ObjectNamePart::Identifier(Ident {
12432                        value: Token::Mul.to_string(),
12433                        quote_style: None,
12434                        span,
12435                    }));
12436                } else if dialect_of!(self is BigQueryDialect) && in_table_clause {
12437                    let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
12438                    parts.push(ObjectNamePart::Identifier(ident));
12439                    if !self.consume_token(&Token::Period) && !end_with_period {
12440                        break;
12441                    }
12442                } else if self.dialect.supports_object_name_double_dot_notation()
12443                    && parts.len() == 1
12444                    && matches!(self.peek_token().token, Token::Period)
12445                {
12446                    // Empty string here means default schema
12447                    parts.push(ObjectNamePart::Identifier(Ident::new("")));
12448                } else {
12449                    let ident = self.parse_identifier()?;
12450                    let part = if self
12451                        .dialect
12452                        .is_identifier_generating_function_name(&ident, &parts)
12453                    {
12454                        self.expect_token(&Token::LParen)?;
12455                        let args: Vec<FunctionArg> =
12456                            self.parse_comma_separated0(Self::parse_function_args, Token::RParen)?;
12457                        self.expect_token(&Token::RParen)?;
12458                        ObjectNamePart::Function(ObjectNamePartFunction { name: ident, args })
12459                    } else {
12460                        ObjectNamePart::Identifier(ident)
12461                    };
12462                    parts.push(part);
12463                }
12464
12465                if !self.consume_token(&Token::Period) {
12466                    break;
12467                }
12468            }
12469        }
12470
12471        // BigQuery accepts any number of quoted identifiers in a table name.
12472        // https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#quoted_identifiers
12473        if dialect_of!(self is BigQueryDialect)
12474            && parts.iter().any(|part| {
12475                part.as_ident()
12476                    .is_some_and(|ident| ident.value.contains('.'))
12477            })
12478        {
12479            parts = parts
12480                .into_iter()
12481                .flat_map(|part| match part.as_ident() {
12482                    Some(ident) => ident
12483                        .value
12484                        .split('.')
12485                        .map(|value| {
12486                            ObjectNamePart::Identifier(Ident {
12487                                value: value.into(),
12488                                quote_style: ident.quote_style,
12489                                span: ident.span,
12490                            })
12491                        })
12492                        .collect::<Vec<_>>(),
12493                    None => vec![part],
12494                })
12495                .collect()
12496        }
12497
12498        Ok(ObjectName(parts))
12499    }
12500
12501    /// Parse identifiers
12502    pub fn parse_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
12503        let mut idents = vec![];
12504        loop {
12505            let token = self.peek_token_ref();
12506            match &token.token {
12507                Token::Word(w) => {
12508                    idents.push(w.to_ident(token.span));
12509                }
12510                Token::EOF | Token::Eq | Token::SemiColon | Token::VerticalBarRightAngleBracket => {
12511                    break
12512                }
12513                _ => {}
12514            }
12515            self.advance_token();
12516        }
12517        Ok(idents)
12518    }
12519
12520    /// Parse identifiers of form ident1[.identN]*
12521    ///
12522    /// Similar in functionality to [parse_identifiers], except that this function is much
12523    /// stricter about parsing a valid multipart identifier: it does not allow extraneous
12524    /// tokens to be parsed and fails if any are encountered.
12525    ///
12526    /// For example:
12527    ///
12528    /// ```rust
12529    /// use sqlparser::ast::Ident;
12530    /// use sqlparser::dialect::GenericDialect;
12531    /// use sqlparser::parser::Parser;
12532    ///
12533    /// let dialect = GenericDialect {};
12534    /// let expected = vec![Ident::new("one"), Ident::new("two")];
12535    ///
12536    /// // expected usage
12537    /// let sql = "one.two";
12538    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
12539    /// let actual = parser.parse_multipart_identifier().unwrap();
12540    /// assert_eq!(&actual, &expected);
12541    ///
12542    /// // parse_identifiers is more loose on what it allows, parsing successfully
12543    /// let sql = "one + two";
12544    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
12545    /// let actual = parser.parse_identifiers().unwrap();
12546    /// assert_eq!(&actual, &expected);
12547    ///
12548    /// // expected to strictly fail due to + separator
12549    /// let sql = "one + two";
12550    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
12551    /// let actual = parser.parse_multipart_identifier().unwrap_err();
12552    /// assert_eq!(
12553    ///     actual.to_string(),
12554    ///     "sql parser error: Unexpected token in identifier: +"
12555    /// );
12556    /// ```
12557    ///
12558    /// [parse_identifiers]: Parser::parse_identifiers
12559    pub fn parse_multipart_identifier(&mut self) -> Result<Vec<Ident>, ParserError> {
12560        let mut idents = vec![];
12561
12562        // expecting at least one word for identifier
12563        let next_token = self.next_token();
12564        match next_token.token {
12565            Token::Word(w) => idents.push(w.into_ident(next_token.span)),
12566            Token::EOF => {
12567                return Err(ParserError::ParserError(
12568                    "Empty input when parsing identifier".to_string(),
12569                ))?
12570            }
12571            token => {
12572                return Err(ParserError::ParserError(format!(
12573                    "Unexpected token in identifier: {token}"
12574                )))?
12575            }
12576        };
12577        // parse optional subsequent parts, if any exist
12578        // parse optional next parts if exist
12579        loop {
12580            match self.next_token().token {
12581                // ensure that the optional period is followed by another identifier
12582                Token::Period => {
12583                    let next_token = self.next_token();
12584                    match next_token.token {
12585                        Token::Word(w) => idents.push(w.into_ident(next_token.span)),
12586                        Token::EOF => {
12587                            return Err(ParserError::ParserError(
12588                                "Trailing period in identifier".to_string(),
12589                            ))?
12590                        }
12591                        token => {
12592                            return Err(ParserError::ParserError(format!(
12593                                "Unexpected token following period in identifier: {token}"
12594                            )))?
12595                        }
12596                    }
12597                }
12598                Token::EOF => break,
12599                token => {
12600                    return Err(ParserError::ParserError(format!(
12601                        "Unexpected token in identifier: {token}"
12602                    )))?;
12603                }
12604            }
12605        }
12606
12607        Ok(idents)
12608    }
12609
12610    /// Parse a simple one-word identifier (possibly quoted, possibly a keyword)
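    ///
    /// A minimal usage sketch (illustrative only), assuming the `GenericDialect`:
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql(r#""quoted name""#).unwrap();
    /// let ident = parser.parse_identifier().unwrap();
    /// assert_eq!(ident.value, "quoted name");
    /// assert_eq!(ident.quote_style, Some('"'));
    /// ```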
12611    pub fn parse_identifier(&mut self) -> Result<Ident, ParserError> {
12612        let next_token = self.next_token();
12613        match next_token.token {
12614            Token::Word(w) => Ok(w.into_ident(next_token.span)),
12615            Token::SingleQuotedString(s) => Ok(Ident::with_quote('\'', s)),
12616            Token::DoubleQuotedString(s) => Ok(Ident::with_quote('\"', s)),
12617            _ => self.expected("identifier", next_token),
12618        }
12619    }
12620
12621    /// On BigQuery, hyphens are permitted in unquoted identifiers inside of a FROM or
12622    /// TABLE clause.
12623    ///
12624    /// The first segment must be an ordinary unquoted identifier, e.g. it must not start
12625    /// with a digit. Subsequent segments must be either valid identifiers or
12626    /// integers, e.g. foo-123 is allowed, but foo-123a is not.
12627    ///
12628    /// [BigQuery-lexical](https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical)
12629    ///
12630    /// Return a tuple of the identifier and a boolean indicating whether it ends with a period.
12631    fn parse_unquoted_hyphenated_identifier(&mut self) -> Result<(Ident, bool), ParserError> {
12632        match self.peek_token().token {
12633            Token::Word(w) => {
12634                let quote_style_is_none = w.quote_style.is_none();
12635                let mut requires_whitespace = false;
12636                let mut ident = w.into_ident(self.next_token().span);
12637                if quote_style_is_none {
12638                    while matches!(self.peek_token_no_skip().token, Token::Minus) {
12639                        self.next_token();
12640                        ident.value.push('-');
12641
12642                        let token = self
12643                            .next_token_no_skip()
12644                            .cloned()
12645                            .unwrap_or(TokenWithSpan::wrap(Token::EOF));
12646                        requires_whitespace = match token.token {
12647                            Token::Word(next_word) if next_word.quote_style.is_none() => {
12648                                ident.value.push_str(&next_word.value);
12649                                false
12650                            }
12651                            Token::Number(s, false) => {
12652                                // A number token can represent a decimal value ending with a period, e.g., `Number('123.')`.
12653                                // However, for an [ObjectName], it is part of a hyphenated identifier, e.g., `foo-123.bar`.
12654                                //
12655                                // If a number token is followed by a period, it is part of an [ObjectName].
12656                                // Return the identifier with `true` if the number token is followed by a period, indicating that
12657                                // parsing should continue for the next part of the hyphenated identifier.
12658                                if s.ends_with('.') {
12659                                    let Some(s) = s.split('.').next().filter(|s| {
12660                                        !s.is_empty() && s.chars().all(|c| c.is_ascii_digit())
12661                                    }) else {
12662                                        return self.expected(
12663                                            "continuation of hyphenated identifier",
12664                                            TokenWithSpan::new(Token::Number(s, false), token.span),
12665                                        );
12666                                    };
12667                                    ident.value.push_str(s);
12668                                    return Ok((ident, true));
12669                                } else {
12670                                    ident.value.push_str(&s);
12671                                }
12672                                // If the next token is a period, then it is part of an ObjectName and we don't expect whitespace
12673                                // after the number.
12674                                !matches!(self.peek_token().token, Token::Period)
12675                            }
12676                            _ => {
12677                                return self
12678                                    .expected("continuation of hyphenated identifier", token);
12679                            }
12680                        }
12681                    }
12682
12683                    // If the last segment was a number, we must check that it's followed by whitespace,
12684                    // otherwise foo-123a will be parsed as `foo-123` with the alias `a`.
12685                    if requires_whitespace {
12686                        let token = self.next_token();
12687                        if !matches!(token.token, Token::EOF | Token::Whitespace(_)) {
12688                            return self
12689                                .expected("whitespace following hyphenated identifier", token);
12690                        }
12691                    }
12692                }
12693                Ok((ident, false))
12694            }
12695            _ => Ok((self.parse_identifier()?, false)),
12696        }
12697    }
12698
12699    /// Parses a parenthesized, comma-separated list of column definitions within a view.
12700    fn parse_view_columns(&mut self) -> Result<Vec<ViewColumnDef>, ParserError> {
12701        if self.consume_token(&Token::LParen) {
12702            if self.peek_token().token == Token::RParen {
12703                self.next_token();
12704                Ok(vec![])
12705            } else {
12706                let cols = self.parse_comma_separated_with_trailing_commas(
12707                    Parser::parse_view_column,
12708                    self.dialect.supports_column_definition_trailing_commas(),
12709                    Self::is_reserved_for_column_alias,
12710                )?;
12711                self.expect_token(&Token::RParen)?;
12712                Ok(cols)
12713            }
12714        } else {
12715            Ok(vec![])
12716        }
12717    }
12718
12719    /// Parses a column definition within a view.
12720    fn parse_view_column(&mut self) -> Result<ViewColumnDef, ParserError> {
12721        let name = self.parse_identifier()?;
12722        let options = self.parse_view_column_options()?;
12723        let data_type = if dialect_of!(self is ClickHouseDialect) {
12724            Some(self.parse_data_type()?)
12725        } else {
12726            None
12727        };
12728        Ok(ViewColumnDef {
12729            name,
12730            data_type,
12731            options,
12732        })
12733    }
12734
12735    fn parse_view_column_options(&mut self) -> Result<Option<ColumnOptions>, ParserError> {
12736        let mut options = Vec::new();
12737        loop {
12738            let option = self.parse_optional_column_option()?;
12739            if let Some(option) = option {
12740                options.push(option);
12741            } else {
12742                break;
12743            }
12744        }
12745        if options.is_empty() {
12746            Ok(None)
12747        } else if self.dialect.supports_space_separated_column_options() {
12748            Ok(Some(ColumnOptions::SpaceSeparated(options)))
12749        } else {
12750            Ok(Some(ColumnOptions::CommaSeparated(options)))
12751        }
12752    }
12753
12754    /// Parses a parenthesized comma-separated list of unqualified, possibly quoted identifiers.
12755    /// For example: `(col1, "col 2", ...)`
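    ///
    /// A minimal usage sketch (illustrative only); assumes the `GenericDialect` and that
    /// `IsOptional` is importable from the `parser` module:
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::{IsOptional, Parser};
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql(r#"(col1, "col 2")"#).unwrap();
    /// let cols = parser
    ///     .parse_parenthesized_column_list(IsOptional::Mandatory, false)
    ///     .unwrap();
    /// assert_eq!(cols.len(), 2);
    /// assert_eq!(cols[1].value, "col 2");
    /// ```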
12756    pub fn parse_parenthesized_column_list(
12757        &mut self,
12758        optional: IsOptional,
12759        allow_empty: bool,
12760    ) -> Result<Vec<Ident>, ParserError> {
12761        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| p.parse_identifier())
12762    }
12763
12764    /// Parse a parenthesized list of compound identifiers as expressions.
12765    pub fn parse_parenthesized_compound_identifier_list(
12766        &mut self,
12767        optional: IsOptional,
12768        allow_empty: bool,
12769    ) -> Result<Vec<Expr>, ParserError> {
12770        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
12771            Ok(Expr::CompoundIdentifier(
12772                p.parse_period_separated(|p| p.parse_identifier())?,
12773            ))
12774        })
12775    }
12776
12777    /// Parses a parenthesized comma-separated list of index columns, which can be arbitrary
12778    /// expressions with ordering information (and an opclass in some dialects).
12779    fn parse_parenthesized_index_column_list(&mut self) -> Result<Vec<IndexColumn>, ParserError> {
12780        self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
12781            p.parse_create_index_expr()
12782        })
12783    }
12784
12785    /// Parses a parenthesized comma-separated list of qualified, possibly quoted identifiers.
12786    /// For example: `(db1.sc1.tbl1.col1, db1.sc1.tbl1."col 2", ...)`
12787    pub fn parse_parenthesized_qualified_column_list(
12788        &mut self,
12789        optional: IsOptional,
12790        allow_empty: bool,
12791    ) -> Result<Vec<ObjectName>, ParserError> {
12792        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
12793            p.parse_object_name(true)
12794        })
12795    }
12796
12797    /// Parses a parenthesized comma-separated list of columns using
12798    /// the provided function to parse each element.
12799    fn parse_parenthesized_column_list_inner<F, T>(
12800        &mut self,
12801        optional: IsOptional,
12802        allow_empty: bool,
12803        mut f: F,
12804    ) -> Result<Vec<T>, ParserError>
12805    where
12806        F: FnMut(&mut Parser) -> Result<T, ParserError>,
12807    {
12808        if self.consume_token(&Token::LParen) {
12809            if allow_empty && self.peek_token().token == Token::RParen {
12810                self.next_token();
12811                Ok(vec![])
12812            } else {
12813                let cols = self.parse_comma_separated(|p| f(p))?;
12814                self.expect_token(&Token::RParen)?;
12815                Ok(cols)
12816            }
12817        } else if optional == Optional {
12818            Ok(vec![])
12819        } else {
12820            self.expected("a list of columns in parentheses", self.peek_token())
12821        }
12822    }
12823
12824    /// Parses a parenthesized comma-separated list of table alias column definitions.
12825    fn parse_table_alias_column_defs(&mut self) -> Result<Vec<TableAliasColumnDef>, ParserError> {
12826        if self.consume_token(&Token::LParen) {
12827            let cols = self.parse_comma_separated(|p| {
12828                let name = p.parse_identifier()?;
12829                let data_type = p.maybe_parse(|p| p.parse_data_type())?;
12830                Ok(TableAliasColumnDef { name, data_type })
12831            })?;
12832            self.expect_token(&Token::RParen)?;
12833            Ok(cols)
12834        } else {
12835            Ok(vec![])
12836        }
12837    }
12838
12839    /// Parse an unsigned precision value enclosed in parentheses, e.g. `(10)`.
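    ///
    /// A minimal usage sketch (illustrative only), assuming the `GenericDialect`:
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("(10)").unwrap();
    /// assert_eq!(parser.parse_precision().unwrap(), 10);
    /// ```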
12840    pub fn parse_precision(&mut self) -> Result<u64, ParserError> {
12841        self.expect_token(&Token::LParen)?;
12842        let n = self.parse_literal_uint()?;
12843        self.expect_token(&Token::RParen)?;
12844        Ok(n)
12845    }
12846
12847    /// Parse an optional precision `(n)` and return it as `Some(n)` when present.
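    ///
    /// A minimal usage sketch (illustrative only), assuming the `GenericDialect`:
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("(38) NOT NULL").unwrap();
    /// assert_eq!(parser.parse_optional_precision().unwrap(), Some(38));
    /// // No opening parenthesis follows, so the next call yields `None`.
    /// assert_eq!(parser.parse_optional_precision().unwrap(), None);
    /// ```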
12848    pub fn parse_optional_precision(&mut self) -> Result<Option<u64>, ParserError> {
12849        if self.consume_token(&Token::LParen) {
12850            let n = self.parse_literal_uint()?;
12851            self.expect_token(&Token::RParen)?;
12852            Ok(Some(n))
12853        } else {
12854            Ok(None)
12855        }
12856    }
12857
12858    fn maybe_parse_optional_interval_fields(
12859        &mut self,
12860    ) -> Result<Option<IntervalFields>, ParserError> {
12861        match self.parse_one_of_keywords(&[
12862            // Can be followed by `TO` option
12863            Keyword::YEAR,
12864            Keyword::DAY,
12865            Keyword::HOUR,
12866            Keyword::MINUTE,
12867            // No `TO` option
12868            Keyword::MONTH,
12869            Keyword::SECOND,
12870        ]) {
12871            Some(Keyword::YEAR) => {
12872                if self.peek_keyword(Keyword::TO) {
12873                    self.expect_keyword(Keyword::TO)?;
12874                    self.expect_keyword(Keyword::MONTH)?;
12875                    Ok(Some(IntervalFields::YearToMonth))
12876                } else {
12877                    Ok(Some(IntervalFields::Year))
12878                }
12879            }
12880            Some(Keyword::DAY) => {
12881                if self.peek_keyword(Keyword::TO) {
12882                    self.expect_keyword(Keyword::TO)?;
12883                    match self.expect_one_of_keywords(&[
12884                        Keyword::HOUR,
12885                        Keyword::MINUTE,
12886                        Keyword::SECOND,
12887                    ])? {
12888                        Keyword::HOUR => Ok(Some(IntervalFields::DayToHour)),
12889                        Keyword::MINUTE => Ok(Some(IntervalFields::DayToMinute)),
12890                        Keyword::SECOND => Ok(Some(IntervalFields::DayToSecond)),
12891                        _ => {
12892                            self.prev_token();
12893                            self.expected("HOUR, MINUTE, or SECOND", self.peek_token())
12894                        }
12895                    }
12896                } else {
12897                    Ok(Some(IntervalFields::Day))
12898                }
12899            }
12900            Some(Keyword::HOUR) => {
12901                if self.peek_keyword(Keyword::TO) {
12902                    self.expect_keyword(Keyword::TO)?;
12903                    match self.expect_one_of_keywords(&[Keyword::MINUTE, Keyword::SECOND])? {
12904                        Keyword::MINUTE => Ok(Some(IntervalFields::HourToMinute)),
12905                        Keyword::SECOND => Ok(Some(IntervalFields::HourToSecond)),
12906                        _ => {
12907                            self.prev_token();
12908                            self.expected("MINUTE or SECOND", self.peek_token())
12909                        }
12910                    }
12911                } else {
12912                    Ok(Some(IntervalFields::Hour))
12913                }
12914            }
12915            Some(Keyword::MINUTE) => {
12916                if self.peek_keyword(Keyword::TO) {
12917                    self.expect_keyword(Keyword::TO)?;
12918                    self.expect_keyword(Keyword::SECOND)?;
12919                    Ok(Some(IntervalFields::MinuteToSecond))
12920                } else {
12921                    Ok(Some(IntervalFields::Minute))
12922                }
12923            }
12924            Some(Keyword::MONTH) => Ok(Some(IntervalFields::Month)),
12925            Some(Keyword::SECOND) => Ok(Some(IntervalFields::Second)),
12926            Some(_) => {
12927                self.prev_token();
12928                self.expected(
12929                    "YEAR, MONTH, DAY, HOUR, MINUTE, or SECOND",
12930                    self.peek_token(),
12931                )
12932            }
12933            None => Ok(None),
12934        }
12935    }
12936
12937    /// Parse datetime64 [1]
12938    /// Syntax
12939    /// ```sql
12940    /// DateTime64(precision[, timezone])
12941    /// ```
12942    ///
12943    /// [1]: https://clickhouse.com/docs/en/sql-reference/data-types/datetime64
12944    pub fn parse_datetime_64(&mut self) -> Result<(u64, Option<String>), ParserError> {
12945        self.expect_keyword_is(Keyword::DATETIME64)?;
12946        self.expect_token(&Token::LParen)?;
12947        let precision = self.parse_literal_uint()?;
12948        let time_zone = if self.consume_token(&Token::Comma) {
12949            Some(self.parse_literal_string()?)
12950        } else {
12951            None
12952        };
12953        self.expect_token(&Token::RParen)?;
12954        Ok((precision, time_zone))
12955    }
12956
12957    /// Parse an optional character length specification `(n [CHARACTERS | OCTETS] | MAX)`.
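    /// For example, the `(100 CHARACTERS)` in the following (the `CHARACTERS`/`OCTETS`
    /// units are ANSI syntax and not accepted by every dialect):
    /// ```sql
    /// CREATE TABLE t (name VARCHAR(100 CHARACTERS))
    /// ```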
12958    pub fn parse_optional_character_length(
12959        &mut self,
12960    ) -> Result<Option<CharacterLength>, ParserError> {
12961        if self.consume_token(&Token::LParen) {
12962            let character_length = self.parse_character_length()?;
12963            self.expect_token(&Token::RParen)?;
12964            Ok(Some(character_length))
12965        } else {
12966            Ok(None)
12967        }
12968    }
12969
12970    /// Parse an optional binary length specification like `(n)`.
12971    pub fn parse_optional_binary_length(&mut self) -> Result<Option<BinaryLength>, ParserError> {
12972        if self.consume_token(&Token::LParen) {
12973            let binary_length = self.parse_binary_length()?;
12974            self.expect_token(&Token::RParen)?;
12975            Ok(Some(binary_length))
12976        } else {
12977            Ok(None)
12978        }
12979    }
12980
12981    /// Parse a character length, handling `MAX` or integer lengths with optional units.
12982    pub fn parse_character_length(&mut self) -> Result<CharacterLength, ParserError> {
12983        if self.parse_keyword(Keyword::MAX) {
12984            return Ok(CharacterLength::Max);
12985        }
12986        let length = self.parse_literal_uint()?;
12987        let unit = if self.parse_keyword(Keyword::CHARACTERS) {
12988            Some(CharLengthUnits::Characters)
12989        } else if self.parse_keyword(Keyword::OCTETS) {
12990            Some(CharLengthUnits::Octets)
12991        } else {
12992            None
12993        };
12994        Ok(CharacterLength::IntegerLength { length, unit })
12995    }
12996
12997    /// Parse a binary length specification, returning `BinaryLength`.
12998    pub fn parse_binary_length(&mut self) -> Result<BinaryLength, ParserError> {
12999        if self.parse_keyword(Keyword::MAX) {
13000            return Ok(BinaryLength::Max);
13001        }
13002        let length = self.parse_literal_uint()?;
13003        Ok(BinaryLength::IntegerLength { length })
13004    }
13005
13006    /// Parse an optional `(precision[, scale])` and return `(Option<precision>, Option<scale>)`.
13007    pub fn parse_optional_precision_scale(
13008        &mut self,
13009    ) -> Result<(Option<u64>, Option<u64>), ParserError> {
13010        if self.consume_token(&Token::LParen) {
13011            let n = self.parse_literal_uint()?;
13012            let scale = if self.consume_token(&Token::Comma) {
13013                Some(self.parse_literal_uint()?)
13014            } else {
13015                None
13016            };
13017            self.expect_token(&Token::RParen)?;
13018            Ok((Some(n), scale))
13019        } else {
13020            Ok((None, None))
13021        }
13022    }
13023
13024    /// Parse exact-number precision/scale info like `(precision[, scale])` for decimal types.
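    /// For example, the `(10, 2)` in the following (a negative scale is also
    /// accepted here, though only meaningful in some dialects):
    /// ```sql
    /// CREATE TABLE t (amount DECIMAL(10, 2))
    /// ```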
13025    pub fn parse_exact_number_optional_precision_scale(
13026        &mut self,
13027    ) -> Result<ExactNumberInfo, ParserError> {
13028        if self.consume_token(&Token::LParen) {
13029            let precision = self.parse_literal_uint()?;
13030            let scale = if self.consume_token(&Token::Comma) {
13031                Some(self.parse_signed_integer()?)
13032            } else {
13033                None
13034            };
13035
13036            self.expect_token(&Token::RParen)?;
13037
13038            match scale {
13039                None => Ok(ExactNumberInfo::Precision(precision)),
13040                Some(scale) => Ok(ExactNumberInfo::PrecisionAndScale(precision, scale)),
13041            }
13042        } else {
13043            Ok(ExactNumberInfo::None)
13044        }
13045    }
13046
13047    /// Parse an optionally signed integer literal.
13048    fn parse_signed_integer(&mut self) -> Result<i64, ParserError> {
13049        let is_negative = self.consume_token(&Token::Minus);
13050
13051        if !is_negative {
13052            let _ = self.consume_token(&Token::Plus);
13053        }
13054
13055        let current_token = self.peek_token_ref();
13056        match &current_token.token {
13057            Token::Number(s, _) => {
13058                let s = s.clone();
13059                let span_start = current_token.span.start;
13060                self.advance_token();
13061                let value = Self::parse::<i64>(s, span_start)?;
13062                Ok(if is_negative { -value } else { value })
13063            }
13064            _ => self.expected_ref("number", current_token),
13065        }
13066    }
13067
13068    /// Parse optional type modifiers appearing in parentheses, e.g. `(UNSIGNED, ZEROFILL)`.
13069    pub fn parse_optional_type_modifiers(&mut self) -> Result<Option<Vec<String>>, ParserError> {
13070        if self.consume_token(&Token::LParen) {
13071            let mut modifiers = Vec::new();
13072            loop {
13073                let next_token = self.next_token();
13074                match next_token.token {
13075                    Token::Word(w) => modifiers.push(w.to_string()),
13076                    Token::Number(n, _) => modifiers.push(n),
13077                    Token::SingleQuotedString(s) => modifiers.push(s),
13078
13079                    Token::Comma => {
13080                        continue;
13081                    }
13082                    Token::RParen => {
13083                        break;
13084                    }
13085                    _ => self.expected("type modifiers", next_token)?,
13086                }
13087            }
13088
13089            Ok(Some(modifiers))
13090        } else {
13091            Ok(None)
13092        }
13093    }
13094
13095    /// Parse a parenthesized sub data type
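    /// For example, the `(String)` in a ClickHouse-style nested type
    /// (illustrative only):
    /// ```sql
    /// CREATE TABLE t (tags Array(String))
    /// ```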
13096    fn parse_sub_type<F>(&mut self, parent_type: F) -> Result<DataType, ParserError>
13097    where
13098        F: FnOnce(Box<DataType>) -> DataType,
13099    {
13100        self.expect_token(&Token::LParen)?;
13101        let inside_type = self.parse_data_type()?;
13102        self.expect_token(&Token::RParen)?;
13103        Ok(parent_type(inside_type.into()))
13104    }
13105
13106    /// Parse a DELETE statement, returning a `Box`ed SetExpr
13107    ///
13108    /// This is used to reduce the size of the stack frames in debug builds
13109    fn parse_delete_setexpr_boxed(
13110        &mut self,
13111        delete_token: TokenWithSpan,
13112    ) -> Result<Box<SetExpr>, ParserError> {
13113        Ok(Box::new(SetExpr::Delete(self.parse_delete(delete_token)?)))
13114    }
13115
13116    /// Parse a `DELETE` statement and return `Statement::Delete`.
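    /// An illustrative example exercising the optional clauses (table names are
    /// hypothetical; `USING` and `RETURNING` are dialect-dependent):
    /// ```sql
    /// DELETE FROM tasks USING archive
    /// WHERE tasks.id = archive.id
    /// RETURNING tasks.id
    /// ```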
13117    pub fn parse_delete(&mut self, delete_token: TokenWithSpan) -> Result<Statement, ParserError> {
13118        let optimizer_hint = self.maybe_parse_optimizer_hint()?;
13119        let (tables, with_from_keyword) = if !self.parse_keyword(Keyword::FROM) {
13120            // `FROM` keyword is optional in BigQuery SQL.
13121            // https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax#delete_statement
13122            if dialect_of!(self is BigQueryDialect | OracleDialect | GenericDialect) {
13123                (vec![], false)
13124            } else {
13125                let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
13126                self.expect_keyword_is(Keyword::FROM)?;
13127                (tables, true)
13128            }
13129        } else {
13130            (vec![], true)
13131        };
13132
13133        let from = self.parse_comma_separated(Parser::parse_table_and_joins)?;
13134        let using = if self.parse_keyword(Keyword::USING) {
13135            Some(self.parse_comma_separated(Parser::parse_table_and_joins)?)
13136        } else {
13137            None
13138        };
13139        let selection = if self.parse_keyword(Keyword::WHERE) {
13140            Some(self.parse_expr()?)
13141        } else {
13142            None
13143        };
13144        let returning = if self.parse_keyword(Keyword::RETURNING) {
13145            Some(self.parse_comma_separated(Parser::parse_select_item)?)
13146        } else {
13147            None
13148        };
13149        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
13150            self.parse_comma_separated(Parser::parse_order_by_expr)?
13151        } else {
13152            vec![]
13153        };
13154        let limit = if self.parse_keyword(Keyword::LIMIT) {
13155            self.parse_limit()?
13156        } else {
13157            None
13158        };
13159
13160        Ok(Statement::Delete(Delete {
13161            delete_token: delete_token.into(),
13162            optimizer_hint,
13163            tables,
13164            from: if with_from_keyword {
13165                FromTable::WithFromKeyword(from)
13166            } else {
13167                FromTable::WithoutKeyword(from)
13168            },
13169            using,
13170            selection,
13171            returning,
13172            order_by,
13173            limit,
13174        }))
13175    }
13176
13177    /// Parse a `KILL` statement, optionally specifying `CONNECTION`, `QUERY`, or `MUTATION`:
13178    /// `KILL [CONNECTION | QUERY | MUTATION] processlist_id`
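    /// For example (the process list id is illustrative):
    /// ```sql
    /// KILL QUERY 12345
    /// ```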
13179    pub fn parse_kill(&mut self) -> Result<Statement, ParserError> {
13180        let modifier_keyword =
13181            self.parse_one_of_keywords(&[Keyword::CONNECTION, Keyword::QUERY, Keyword::MUTATION]);
13182
13183        let id = self.parse_literal_uint()?;
13184
13185        let modifier = match modifier_keyword {
13186            Some(Keyword::CONNECTION) => Some(KillType::Connection),
13187            Some(Keyword::QUERY) => Some(KillType::Query),
13188            Some(Keyword::MUTATION) => {
13189                if dialect_of!(self is ClickHouseDialect | GenericDialect) {
13190                    Some(KillType::Mutation)
13191                } else {
13192                    self.expected(
13193                        "Unsupported type for KILL, allowed: CONNECTION | QUERY",
13194                        self.peek_token(),
13195                    )?
13196                }
13197            }
13198            _ => None,
13199        };
13200
13201        Ok(Statement::Kill { modifier, id })
13202    }
13203
13204    /// Parse an `EXPLAIN` statement, handling dialect-specific options and modifiers.
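    /// Two illustrative forms (the parenthesized utility options follow
    /// PostgreSQL-style syntax and are only accepted by dialects that support them):
    /// ```sql
    /// EXPLAIN ANALYZE SELECT * FROM t;
    /// EXPLAIN (FORMAT JSON) SELECT * FROM t;
    /// ```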
13205    pub fn parse_explain(
13206        &mut self,
13207        describe_alias: DescribeAlias,
13208    ) -> Result<Statement, ParserError> {
13209        let mut analyze = false;
13210        let mut verbose = false;
13211        let mut query_plan = false;
13212        let mut estimate = false;
13213        let mut format = None;
13214        let mut options = None;
13215
13216        // Note: DuckDB is compatible with PostgreSQL syntax for this statement,
13217        // although not all features may be implemented.
13218        if describe_alias == DescribeAlias::Explain
13219            && self.dialect.supports_explain_with_utility_options()
13220            && self.peek_token().token == Token::LParen
13221        {
13222            options = Some(self.parse_utility_options()?)
13223        } else if self.parse_keywords(&[Keyword::QUERY, Keyword::PLAN]) {
13224            query_plan = true;
13225        } else if self.parse_keyword(Keyword::ESTIMATE) {
13226            estimate = true;
13227        } else {
13228            analyze = self.parse_keyword(Keyword::ANALYZE);
13229            verbose = self.parse_keyword(Keyword::VERBOSE);
13230            if self.parse_keyword(Keyword::FORMAT) {
13231                format = Some(self.parse_analyze_format_kind()?);
13232            }
13233        }
13234
13235        match self.maybe_parse(|parser| parser.parse_statement())? {
13236            Some(Statement::Explain { .. }) | Some(Statement::ExplainTable { .. }) => Err(
13237                ParserError::ParserError("Explain must be root of the plan".to_string()),
13238            ),
13239            Some(statement) => Ok(Statement::Explain {
13240                describe_alias,
13241                analyze,
13242                verbose,
13243                query_plan,
13244                estimate,
13245                statement: Box::new(statement),
13246                format,
13247                options,
13248            }),
13249            _ => {
13250                let hive_format =
13251                    match self.parse_one_of_keywords(&[Keyword::EXTENDED, Keyword::FORMATTED]) {
13252                        Some(Keyword::EXTENDED) => Some(HiveDescribeFormat::Extended),
13253                        Some(Keyword::FORMATTED) => Some(HiveDescribeFormat::Formatted),
13254                        _ => None,
13255                    };
13256
13257                let has_table_keyword = if self.dialect.describe_requires_table_keyword() {
13258                    // only allow to use TABLE keyword for DESC|DESCRIBE statement
13259                    self.parse_keyword(Keyword::TABLE)
13260                } else {
13261                    false
13262                };
13263
13264                let table_name = self.parse_object_name(false)?;
13265                Ok(Statement::ExplainTable {
13266                    describe_alias,
13267                    hive_format,
13268                    has_table_keyword,
13269                    table_name,
13270                })
13271            }
13272        }
13273    }
13274
13275    /// Parse a query expression, i.e. a `SELECT` statement optionally
13276    /// preceded with some `WITH` CTE declarations and optionally followed
13277    /// by `ORDER BY`. Unlike some other parse_... methods, this one doesn't
13278    /// expect the initial keyword to be already consumed
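    /// An illustrative query with a CTE, ordering, and a limit (table and column
    /// names are hypothetical):
    /// ```sql
    /// WITH recent AS (SELECT id FROM orders ORDER BY created_at DESC LIMIT 10)
    /// SELECT id FROM recent ORDER BY id LIMIT 5
    /// ```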
13279    #[cfg_attr(feature = "recursive-protection", recursive::recursive)]
13280    pub fn parse_query(&mut self) -> Result<Box<Query>, ParserError> {
13281        let _guard = self.recursion_counter.try_decrease()?;
13282        let with = if self.parse_keyword(Keyword::WITH) {
13283            let with_token = self.get_current_token();
13284            Some(With {
13285                with_token: with_token.clone().into(),
13286                recursive: self.parse_keyword(Keyword::RECURSIVE),
13287                cte_tables: self.parse_comma_separated(Parser::parse_cte)?,
13288            })
13289        } else {
13290            None
13291        };
13292        if self.parse_keyword(Keyword::INSERT) {
13293            Ok(Query {
13294                with,
13295                body: self.parse_insert_setexpr_boxed(self.get_current_token().clone())?,
13296                order_by: None,
13297                limit_clause: None,
13298                fetch: None,
13299                locks: vec![],
13300                for_clause: None,
13301                settings: None,
13302                format_clause: None,
13303                pipe_operators: vec![],
13304            }
13305            .into())
13306        } else if self.parse_keyword(Keyword::UPDATE) {
13307            Ok(Query {
13308                with,
13309                body: self.parse_update_setexpr_boxed(self.get_current_token().clone())?,
13310                order_by: None,
13311                limit_clause: None,
13312                fetch: None,
13313                locks: vec![],
13314                for_clause: None,
13315                settings: None,
13316                format_clause: None,
13317                pipe_operators: vec![],
13318            }
13319            .into())
13320        } else if self.parse_keyword(Keyword::DELETE) {
13321            Ok(Query {
13322                with,
13323                body: self.parse_delete_setexpr_boxed(self.get_current_token().clone())?,
13324                limit_clause: None,
13325                order_by: None,
13326                fetch: None,
13327                locks: vec![],
13328                for_clause: None,
13329                settings: None,
13330                format_clause: None,
13331                pipe_operators: vec![],
13332            }
13333            .into())
13334        } else if self.parse_keyword(Keyword::MERGE) {
13335            Ok(Query {
13336                with,
13337                body: self.parse_merge_setexpr_boxed(self.get_current_token().clone())?,
13338                limit_clause: None,
13339                order_by: None,
13340                fetch: None,
13341                locks: vec![],
13342                for_clause: None,
13343                settings: None,
13344                format_clause: None,
13345                pipe_operators: vec![],
13346            }
13347            .into())
13348        } else {
13349            let body = self.parse_query_body(self.dialect.prec_unknown())?;
13350
13351            let order_by = self.parse_optional_order_by()?;
13352
13353            let limit_clause = self.parse_optional_limit_clause()?;
13354
13355            let settings = self.parse_settings()?;
13356
13357            let fetch = if self.parse_keyword(Keyword::FETCH) {
13358                Some(self.parse_fetch()?)
13359            } else {
13360                None
13361            };
13362
13363            let mut for_clause = None;
13364            let mut locks = Vec::new();
13365            while self.parse_keyword(Keyword::FOR) {
13366                if let Some(parsed_for_clause) = self.parse_for_clause()? {
13367                    for_clause = Some(parsed_for_clause);
13368                    break;
13369                } else {
13370                    locks.push(self.parse_lock()?);
13371                }
13372            }
13373            let format_clause =
13374                if self.dialect.supports_select_format() && self.parse_keyword(Keyword::FORMAT) {
13375                    if self.parse_keyword(Keyword::NULL) {
13376                        Some(FormatClause::Null)
13377                    } else {
13378                        let ident = self.parse_identifier()?;
13379                        Some(FormatClause::Identifier(ident))
13380                    }
13381                } else {
13382                    None
13383                };
13384
13385            let pipe_operators = if self.dialect.supports_pipe_operator() {
13386                self.parse_pipe_operators()?
13387            } else {
13388                Vec::new()
13389            };
13390
13391            Ok(Query {
13392                with,
13393                body,
13394                order_by,
13395                limit_clause,
13396                fetch,
13397                locks,
13398                for_clause,
13399                settings,
13400                format_clause,
13401                pipe_operators,
13402            }
13403            .into())
13404        }
13405    }
13406
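    /// Parses a chain of `|>` pipe operators following a query, e.g. BigQuery-style
    /// pipe syntax (illustrative; availability depends on
    /// `Dialect::supports_pipe_operator`):
    /// ```sql
    /// FROM orders
    /// |> WHERE amount > 100
    /// |> AGGREGATE COUNT(*) AS n GROUP BY customer_id
    /// |> ORDER BY n DESC
    /// ```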
13407    fn parse_pipe_operators(&mut self) -> Result<Vec<PipeOperator>, ParserError> {
13408        let mut pipe_operators = Vec::new();
13409
13410        while self.consume_token(&Token::VerticalBarRightAngleBracket) {
13411            let kw = self.expect_one_of_keywords(&[
13412                Keyword::SELECT,
13413                Keyword::EXTEND,
13414                Keyword::SET,
13415                Keyword::DROP,
13416                Keyword::AS,
13417                Keyword::WHERE,
13418                Keyword::LIMIT,
13419                Keyword::AGGREGATE,
13420                Keyword::ORDER,
13421                Keyword::TABLESAMPLE,
13422                Keyword::RENAME,
13423                Keyword::UNION,
13424                Keyword::INTERSECT,
13425                Keyword::EXCEPT,
13426                Keyword::CALL,
13427                Keyword::PIVOT,
13428                Keyword::UNPIVOT,
13429                Keyword::JOIN,
13430                Keyword::INNER,
13431                Keyword::LEFT,
13432                Keyword::RIGHT,
13433                Keyword::FULL,
13434                Keyword::CROSS,
13435            ])?;
13436            match kw {
13437                Keyword::SELECT => {
13438                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
13439                    pipe_operators.push(PipeOperator::Select { exprs })
13440                }
13441                Keyword::EXTEND => {
13442                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
13443                    pipe_operators.push(PipeOperator::Extend { exprs })
13444                }
13445                Keyword::SET => {
13446                    let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
13447                    pipe_operators.push(PipeOperator::Set { assignments })
13448                }
13449                Keyword::DROP => {
13450                    let columns = self.parse_identifiers()?;
13451                    pipe_operators.push(PipeOperator::Drop { columns })
13452                }
13453                Keyword::AS => {
13454                    let alias = self.parse_identifier()?;
13455                    pipe_operators.push(PipeOperator::As { alias })
13456                }
13457                Keyword::WHERE => {
13458                    let expr = self.parse_expr()?;
13459                    pipe_operators.push(PipeOperator::Where { expr })
13460                }
13461                Keyword::LIMIT => {
13462                    let expr = self.parse_expr()?;
13463                    let offset = if self.parse_keyword(Keyword::OFFSET) {
13464                        Some(self.parse_expr()?)
13465                    } else {
13466                        None
13467                    };
13468                    pipe_operators.push(PipeOperator::Limit { expr, offset })
13469                }
13470                Keyword::AGGREGATE => {
13471                    let full_table_exprs = if self.peek_keyword(Keyword::GROUP) {
13472                        vec![]
13473                    } else {
13474                        self.parse_comma_separated(|parser| {
13475                            parser.parse_expr_with_alias_and_order_by()
13476                        })?
13477                    };
13478
13479                    let group_by_expr = if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
13480                        self.parse_comma_separated(|parser| {
13481                            parser.parse_expr_with_alias_and_order_by()
13482                        })?
13483                    } else {
13484                        vec![]
13485                    };
13486
13487                    pipe_operators.push(PipeOperator::Aggregate {
13488                        full_table_exprs,
13489                        group_by_expr,
13490                    })
13491                }
13492                Keyword::ORDER => {
13493                    self.expect_one_of_keywords(&[Keyword::BY])?;
13494                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
13495                    pipe_operators.push(PipeOperator::OrderBy { exprs })
13496                }
13497                Keyword::TABLESAMPLE => {
13498                    let sample = self.parse_table_sample(TableSampleModifier::TableSample)?;
13499                    pipe_operators.push(PipeOperator::TableSample { sample });
13500                }
13501                Keyword::RENAME => {
13502                    let mappings =
13503                        self.parse_comma_separated(Parser::parse_identifier_with_optional_alias)?;
13504                    pipe_operators.push(PipeOperator::Rename { mappings });
13505                }
13506                Keyword::UNION => {
13507                    let set_quantifier = self.parse_set_quantifier(&Some(SetOperator::Union));
13508                    let queries = self.parse_pipe_operator_queries()?;
13509                    pipe_operators.push(PipeOperator::Union {
13510                        set_quantifier,
13511                        queries,
13512                    });
13513                }
13514                Keyword::INTERSECT => {
13515                    let set_quantifier =
13516                        self.parse_distinct_required_set_quantifier("INTERSECT")?;
13517                    let queries = self.parse_pipe_operator_queries()?;
13518                    pipe_operators.push(PipeOperator::Intersect {
13519                        set_quantifier,
13520                        queries,
13521                    });
13522                }
13523                Keyword::EXCEPT => {
13524                    let set_quantifier = self.parse_distinct_required_set_quantifier("EXCEPT")?;
13525                    let queries = self.parse_pipe_operator_queries()?;
13526                    pipe_operators.push(PipeOperator::Except {
13527                        set_quantifier,
13528                        queries,
13529                    });
13530                }
13531                Keyword::CALL => {
13532                    let function_name = self.parse_object_name(false)?;
13533                    let function_expr = self.parse_function(function_name)?;
13534                    if let Expr::Function(function) = function_expr {
13535                        let alias = self.parse_identifier_optional_alias()?;
13536                        pipe_operators.push(PipeOperator::Call { function, alias });
13537                    } else {
13538                        return Err(ParserError::ParserError(
13539                            "Expected function call after CALL".to_string(),
13540                        ));
13541                    }
13542                }
13543                Keyword::PIVOT => {
13544                    self.expect_token(&Token::LParen)?;
13545                    let aggregate_functions =
13546                        self.parse_comma_separated(Self::parse_aliased_function_call)?;
13547                    self.expect_keyword_is(Keyword::FOR)?;
13548                    let value_column = self.parse_period_separated(|p| p.parse_identifier())?;
13549                    self.expect_keyword_is(Keyword::IN)?;
13550
13551                    self.expect_token(&Token::LParen)?;
13552                    let value_source = if self.parse_keyword(Keyword::ANY) {
13553                        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
13554                            self.parse_comma_separated(Parser::parse_order_by_expr)?
13555                        } else {
13556                            vec![]
13557                        };
13558                        PivotValueSource::Any(order_by)
13559                    } else if self.peek_sub_query() {
13560                        PivotValueSource::Subquery(self.parse_query()?)
13561                    } else {
13562                        PivotValueSource::List(
13563                            self.parse_comma_separated(Self::parse_expr_with_alias)?,
13564                        )
13565                    };
13566                    self.expect_token(&Token::RParen)?;
13567                    self.expect_token(&Token::RParen)?;
13568
13569                    let alias = self.parse_identifier_optional_alias()?;
13570
13571                    pipe_operators.push(PipeOperator::Pivot {
13572                        aggregate_functions,
13573                        value_column,
13574                        value_source,
13575                        alias,
13576                    });
13577                }
13578                Keyword::UNPIVOT => {
13579                    self.expect_token(&Token::LParen)?;
13580                    let value_column = self.parse_identifier()?;
13581                    self.expect_keyword(Keyword::FOR)?;
13582                    let name_column = self.parse_identifier()?;
13583                    self.expect_keyword(Keyword::IN)?;
13584
13585                    self.expect_token(&Token::LParen)?;
13586                    let unpivot_columns = self.parse_comma_separated(Parser::parse_identifier)?;
13587                    self.expect_token(&Token::RParen)?;
13588
13589                    self.expect_token(&Token::RParen)?;
13590
13591                    let alias = self.parse_identifier_optional_alias()?;
13592
13593                    pipe_operators.push(PipeOperator::Unpivot {
13594                        value_column,
13595                        name_column,
13596                        unpivot_columns,
13597                        alias,
13598                    });
13599                }
13600                Keyword::JOIN
13601                | Keyword::INNER
13602                | Keyword::LEFT
13603                | Keyword::RIGHT
13604                | Keyword::FULL
13605                | Keyword::CROSS => {
13606                    self.prev_token();
13607                    let mut joins = self.parse_joins()?;
13608                    if joins.len() != 1 {
13609                        return Err(ParserError::ParserError(
13610                            "Join pipe operator must have a single join".to_string(),
13611                        ));
13612                    }
13613                    let join = joins.swap_remove(0);
13614                    pipe_operators.push(PipeOperator::Join(join))
13615                }
13616                unhandled => {
13617                    return Err(ParserError::ParserError(format!(
13618                    "`expect_one_of_keywords` further up allowed unhandled keyword: {unhandled:?}"
13619                )))
13620                }
13621            }
13622        }
13623        Ok(pipe_operators)
13624    }
13625
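    /// Parses an optional `SETTINGS key = value, ...` clause (ClickHouse-style),
    /// returning `None` for dialects that do not support it. For example (the
    /// setting name is illustrative):
    /// ```sql
    /// SELECT * FROM t SETTINGS max_threads = 4
    /// ```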
13626    fn parse_settings(&mut self) -> Result<Option<Vec<Setting>>, ParserError> {
13627        let settings = if self.dialect.supports_settings() && self.parse_keyword(Keyword::SETTINGS)
13628        {
13629            let key_values = self.parse_comma_separated(|p| {
13630                let key = p.parse_identifier()?;
13631                p.expect_token(&Token::Eq)?;
13632                let value = p.parse_expr()?;
13633                Ok(Setting { key, value })
13634            })?;
13635            Some(key_values)
13636        } else {
13637            None
13638        };
13639        Ok(settings)
13640    }
13641
13642    /// Parse a mssql `FOR [XML | JSON | BROWSE]` clause
13643    pub fn parse_for_clause(&mut self) -> Result<Option<ForClause>, ParserError> {
13644        if self.parse_keyword(Keyword::XML) {
13645            Ok(Some(self.parse_for_xml()?))
13646        } else if self.parse_keyword(Keyword::JSON) {
13647            Ok(Some(self.parse_for_json()?))
13648        } else if self.parse_keyword(Keyword::BROWSE) {
13649            Ok(Some(ForClause::Browse))
13650        } else {
13651            Ok(None)
13652        }
13653    }
13654
13655    /// Parse a mssql `FOR XML` clause
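    /// For example (illustrative SQL Server syntax):
    /// ```sql
    /// SELECT id, name FROM t FOR XML PATH('row'), ROOT('rows'), ELEMENTS
    /// ```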
13656    pub fn parse_for_xml(&mut self) -> Result<ForClause, ParserError> {
13657        let for_xml = if self.parse_keyword(Keyword::RAW) {
13658            let mut element_name = None;
13659            if self.peek_token().token == Token::LParen {
13660                self.expect_token(&Token::LParen)?;
13661                element_name = Some(self.parse_literal_string()?);
13662                self.expect_token(&Token::RParen)?;
13663            }
13664            ForXml::Raw(element_name)
13665        } else if self.parse_keyword(Keyword::AUTO) {
13666            ForXml::Auto
13667        } else if self.parse_keyword(Keyword::EXPLICIT) {
13668            ForXml::Explicit
13669        } else if self.parse_keyword(Keyword::PATH) {
13670            let mut element_name = None;
13671            if self.peek_token().token == Token::LParen {
13672                self.expect_token(&Token::LParen)?;
13673                element_name = Some(self.parse_literal_string()?);
13674                self.expect_token(&Token::RParen)?;
13675            }
13676            ForXml::Path(element_name)
13677        } else {
13678            return Err(ParserError::ParserError(
13679                "Expected FOR XML [RAW | AUTO | EXPLICIT | PATH ]".to_string(),
13680            ));
13681        };
13682        let mut elements = false;
13683        let mut binary_base64 = false;
13684        let mut root = None;
13685        let mut r#type = false;
13686        while self.peek_token().token == Token::Comma {
13687            self.next_token();
13688            if self.parse_keyword(Keyword::ELEMENTS) {
13689                elements = true;
13690            } else if self.parse_keyword(Keyword::BINARY) {
13691                self.expect_keyword_is(Keyword::BASE64)?;
13692                binary_base64 = true;
13693            } else if self.parse_keyword(Keyword::ROOT) {
13694                self.expect_token(&Token::LParen)?;
13695                root = Some(self.parse_literal_string()?);
13696                self.expect_token(&Token::RParen)?;
13697            } else if self.parse_keyword(Keyword::TYPE) {
13698                r#type = true;
13699            }
13700        }
13701        Ok(ForClause::Xml {
13702            for_xml,
13703            elements,
13704            binary_base64,
13705            root,
13706            r#type,
13707        })
13708    }
13709
13710    /// Parse a mssql `FOR JSON` clause
13711    pub fn parse_for_json(&mut self) -> Result<ForClause, ParserError> {
13712        let for_json = if self.parse_keyword(Keyword::AUTO) {
13713            ForJson::Auto
13714        } else if self.parse_keyword(Keyword::PATH) {
13715            ForJson::Path
13716        } else {
13717            return Err(ParserError::ParserError(
13718                "Expected FOR JSON [AUTO | PATH ]".to_string(),
13719            ));
13720        };
13721        let mut root = None;
13722        let mut include_null_values = false;
13723        let mut without_array_wrapper = false;
13724        while self.peek_token().token == Token::Comma {
13725            self.next_token();
13726            if self.parse_keyword(Keyword::ROOT) {
13727                self.expect_token(&Token::LParen)?;
13728                root = Some(self.parse_literal_string()?);
13729                self.expect_token(&Token::RParen)?;
13730            } else if self.parse_keyword(Keyword::INCLUDE_NULL_VALUES) {
13731                include_null_values = true;
13732            } else if self.parse_keyword(Keyword::WITHOUT_ARRAY_WRAPPER) {
13733                without_array_wrapper = true;
13734            }
13735        }
13736        Ok(ForClause::Json {
13737            for_json,
13738            root,
13739            include_null_values,
13740            without_array_wrapper,
13741        })
13742    }
13743
13744    /// Parse a CTE (`alias [( col1, col2, ... )] AS (subquery)`)
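    /// For example, the `t (a, b) AS (SELECT 1, 2)` part of the following
    /// (names are illustrative):
    /// ```sql
    /// WITH t (a, b) AS (SELECT 1, 2) SELECT a, b FROM t
    /// ```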
13745    pub fn parse_cte(&mut self) -> Result<Cte, ParserError> {
13746        let name = self.parse_identifier()?;
13747
13748        let mut cte = if self.parse_keyword(Keyword::AS) {
13749            let mut is_materialized = None;
13750            if dialect_of!(self is PostgreSqlDialect) {
13751                if self.parse_keyword(Keyword::MATERIALIZED) {
13752                    is_materialized = Some(CteAsMaterialized::Materialized);
13753                } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
13754                    is_materialized = Some(CteAsMaterialized::NotMaterialized);
13755                }
13756            }
13757            self.expect_token(&Token::LParen)?;
13758
13759            let query = self.parse_query()?;
13760            let closing_paren_token = self.expect_token(&Token::RParen)?;
13761
13762            let alias = TableAlias {
13763                explicit: false,
13764                name,
13765                columns: vec![],
13766            };
13767            Cte {
13768                alias,
13769                query,
13770                from: None,
13771                materialized: is_materialized,
13772                closing_paren_token: closing_paren_token.into(),
13773            }
13774        } else {
13775            let columns = self.parse_table_alias_column_defs()?;
13776            self.expect_keyword_is(Keyword::AS)?;
13777            let mut is_materialized = None;
13778            if dialect_of!(self is PostgreSqlDialect) {
13779                if self.parse_keyword(Keyword::MATERIALIZED) {
13780                    is_materialized = Some(CteAsMaterialized::Materialized);
13781                } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
13782                    is_materialized = Some(CteAsMaterialized::NotMaterialized);
13783                }
13784            }
13785            self.expect_token(&Token::LParen)?;
13786
13787            let query = self.parse_query()?;
13788            let closing_paren_token = self.expect_token(&Token::RParen)?;
13789
13790            let alias = TableAlias {
13791                explicit: false,
13792                name,
13793                columns,
13794            };
13795            Cte {
13796                alias,
13797                query,
13798                from: None,
13799                materialized: is_materialized,
13800                closing_paren_token: closing_paren_token.into(),
13801            }
13802        };
13803        if self.parse_keyword(Keyword::FROM) {
13804            cte.from = Some(self.parse_identifier()?);
13805        }
13806        Ok(cte)
13807    }
13808
13809    /// Parse a "query body", which is an expression with roughly the
13810    /// following grammar:
13811    /// ```sql
13812    ///   query_body ::= restricted_select | '(' subquery ')' | set_operation
13813    ///   restricted_select ::= 'SELECT' [expr_list] [ from ] [ where ] [ groupby_having ]
13814    ///   subquery ::= query_body [ order_by_limit ]
13815    ///   set_operation ::= query_body { 'UNION' | 'EXCEPT' | 'INTERSECT' } [ 'ALL' ] query_body
13816    /// ```
13817    pub fn parse_query_body(&mut self, precedence: u8) -> Result<Box<SetExpr>, ParserError> {
13818        // We parse the expression using a Pratt parser, as in `parse_expr()`.
13819        // Start by parsing a restricted SELECT or a `(subquery)`:
13820        let expr = if self.peek_keyword(Keyword::SELECT)
13821            || (self.peek_keyword(Keyword::FROM) && self.dialect.supports_from_first_select())
13822        {
13823            SetExpr::Select(self.parse_select().map(Box::new)?)
13824        } else if self.consume_token(&Token::LParen) {
13825            // CTEs are not allowed here, but the parser currently accepts them
13826            let subquery = self.parse_query()?;
13827            self.expect_token(&Token::RParen)?;
13828            SetExpr::Query(subquery)
13829        } else if self.parse_keyword(Keyword::VALUES) {
13830            let is_mysql = dialect_of!(self is MySqlDialect);
13831            SetExpr::Values(self.parse_values(is_mysql, false)?)
13832        } else if self.parse_keyword(Keyword::VALUE) {
13833            let is_mysql = dialect_of!(self is MySqlDialect);
13834            SetExpr::Values(self.parse_values(is_mysql, true)?)
13835        } else if self.parse_keyword(Keyword::TABLE) {
13836            SetExpr::Table(Box::new(self.parse_as_table()?))
13837        } else {
13838            return self.expected(
13839                "SELECT, VALUES, or a subquery in the query body",
13840                self.peek_token(),
13841            );
13842        };
13843
13844        self.parse_remaining_set_exprs(expr, precedence)
13845    }
13846
13847    /// Parse any extra set expressions that may be present in a query body
13848    ///
13849    /// (this is its own function to reduce required stack size in debug builds)
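    /// Because `INTERSECT` is given a higher binding power than `UNION`/`EXCEPT`,
    /// an input such as the following (illustrative) groups as
    /// `a UNION (b INTERSECT c)`:
    /// ```sql
    /// SELECT * FROM a UNION SELECT * FROM b INTERSECT SELECT * FROM c
    /// ```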
13850    fn parse_remaining_set_exprs(
13851        &mut self,
13852        mut expr: SetExpr,
13853        precedence: u8,
13854    ) -> Result<Box<SetExpr>, ParserError> {
13855        loop {
13856            // The query can be optionally followed by a set operator:
13857            let op = self.parse_set_operator(&self.peek_token().token);
13858            let next_precedence = match op {
13859                // UNION and EXCEPT have the same binding power and evaluate left-to-right
13860                Some(SetOperator::Union) | Some(SetOperator::Except) | Some(SetOperator::Minus) => {
13861                    10
13862                }
13863                // INTERSECT has higher precedence than UNION/EXCEPT
13864                Some(SetOperator::Intersect) => 20,
13865                // Unexpected token or EOF => stop parsing the query body
13866                None => break,
13867            };
13868            if precedence >= next_precedence {
13869                break;
13870            }
13871            self.next_token(); // skip past the set operator
13872            let set_quantifier = self.parse_set_quantifier(&op);
13873            expr = SetExpr::SetOperation {
13874                left: Box::new(expr),
13875                op: op.unwrap(),
13876                set_quantifier,
13877                right: self.parse_query_body(next_precedence)?,
13878            };
13879        }
13880
13881        Ok(expr.into())
13882    }
13883
13884    /// Parse a set operator token into its `SetOperator` variant.
13885    pub fn parse_set_operator(&mut self, token: &Token) -> Option<SetOperator> {
13886        match token {
13887            Token::Word(w) if w.keyword == Keyword::UNION => Some(SetOperator::Union),
13888            Token::Word(w) if w.keyword == Keyword::EXCEPT => Some(SetOperator::Except),
13889            Token::Word(w) if w.keyword == Keyword::INTERSECT => Some(SetOperator::Intersect),
13890            Token::Word(w) if w.keyword == Keyword::MINUS => Some(SetOperator::Minus),
13891            _ => None,
13892        }
13893    }
13894
13895    /// Parse a set quantifier (e.g., `ALL`, `DISTINCT BY NAME`) for the given set operator.
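    /// For example, the `ALL BY NAME` in the following (the `BY NAME` variants are
    /// DuckDB-style and not accepted by every dialect):
    /// ```sql
    /// SELECT * FROM a UNION ALL BY NAME SELECT * FROM b
    /// ```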
13896    pub fn parse_set_quantifier(&mut self, op: &Option<SetOperator>) -> SetQuantifier {
13897        match op {
13898            Some(
13899                SetOperator::Except
13900                | SetOperator::Intersect
13901                | SetOperator::Union
13902                | SetOperator::Minus,
13903            ) => {
13904                if self.parse_keywords(&[Keyword::DISTINCT, Keyword::BY, Keyword::NAME]) {
13905                    SetQuantifier::DistinctByName
13906                } else if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
13907                    SetQuantifier::ByName
13908                } else if self.parse_keyword(Keyword::ALL) {
13909                    if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
13910                        SetQuantifier::AllByName
13911                    } else {
13912                        SetQuantifier::All
13913                    }
13914                } else if self.parse_keyword(Keyword::DISTINCT) {
13915                    SetQuantifier::Distinct
13916                } else {
13917                    SetQuantifier::None
13918                }
13919            }
13920            _ => SetQuantifier::None,
13921        }
13922    }
13923
13924    /// Parse a restricted `SELECT` statement (no CTEs / `UNION` / `ORDER BY`)
13925    pub fn parse_select(&mut self) -> Result<Select, ParserError> {
13926        let mut from_first = None;
13927
13928        if self.dialect.supports_from_first_select() && self.peek_keyword(Keyword::FROM) {
13929            let from_token = self.expect_keyword(Keyword::FROM)?;
13930            let from = self.parse_table_with_joins()?;
13931            if !self.peek_keyword(Keyword::SELECT) {
13932                return Ok(Select {
13933                    select_token: AttachedToken(from_token),
13934                    optimizer_hint: None,
13935                    distinct: None,
13936                    select_modifiers: None,
13937                    top: None,
13938                    top_before_distinct: false,
13939                    projection: vec![],
13940                    exclude: None,
13941                    into: None,
13942                    from,
13943                    lateral_views: vec![],
13944                    prewhere: None,
13945                    selection: None,
13946                    group_by: GroupByExpr::Expressions(vec![], vec![]),
13947                    cluster_by: vec![],
13948                    distribute_by: vec![],
13949                    sort_by: vec![],
13950                    having: None,
13951                    named_window: vec![],
13952                    window_before_qualify: false,
13953                    qualify: None,
13954                    value_table_mode: None,
13955                    connect_by: vec![],
13956                    flavor: SelectFlavor::FromFirstNoSelect,
13957                });
13958            }
13959            from_first = Some(from);
13960        }
13961
13962        let select_token = self.expect_keyword(Keyword::SELECT)?;
13963        let optimizer_hint = self.maybe_parse_optimizer_hint()?;
13964        let value_table_mode = self.parse_value_table_mode()?;
13965
13966        let (select_modifiers, distinct_select_modifier) =
13967            if self.dialect.supports_select_modifiers() {
13968                self.parse_select_modifiers()?
13969            } else {
13970                (None, None)
13971            };
13972
13973        let mut top_before_distinct = false;
13974        let mut top = None;
13975        if self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
13976            top = Some(self.parse_top()?);
13977            top_before_distinct = true;
13978        }
13979
13980        let distinct = if distinct_select_modifier.is_some() {
13981            distinct_select_modifier
13982        } else {
13983            self.parse_all_or_distinct()?
13984        };
13985
13986        if !self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
13987            top = Some(self.parse_top()?);
13988        }
13989
13990        let projection =
13991            if self.dialect.supports_empty_projections() && self.peek_keyword(Keyword::FROM) {
13992                vec![]
13993            } else {
13994                self.parse_projection()?
13995            };
13996
13997        let exclude = if self.dialect.supports_select_exclude() {
13998            self.parse_optional_select_item_exclude()?
13999        } else {
14000            None
14001        };
14002
14003        let into = if self.parse_keyword(Keyword::INTO) {
14004            Some(self.parse_select_into()?)
14005        } else {
14006            None
14007        };
14008
14009        // Note that for keywords to be properly handled here, they need to be
14010        // added to `RESERVED_FOR_COLUMN_ALIAS` / `RESERVED_FOR_TABLE_ALIAS`,
14011        // otherwise they may be parsed as an alias as part of the `projection`
14012        // or `from`.
14013
14014        let (from, from_first) = if let Some(from) = from_first.take() {
14015            (from, true)
14016        } else if self.parse_keyword(Keyword::FROM) {
14017            (self.parse_table_with_joins()?, false)
14018        } else {
14019            (vec![], false)
14020        };
14021
14022        let mut lateral_views = vec![];
14023        loop {
14024            if self.parse_keywords(&[Keyword::LATERAL, Keyword::VIEW]) {
14025                let outer = self.parse_keyword(Keyword::OUTER);
14026                let lateral_view = self.parse_expr()?;
14027                let lateral_view_name = self.parse_object_name(false)?;
14028                let lateral_col_alias = self
14029                    .parse_comma_separated(|parser| {
14030                        parser.parse_optional_alias(&[
14031                            Keyword::WHERE,
14032                            Keyword::GROUP,
14033                            Keyword::CLUSTER,
14034                            Keyword::HAVING,
14035                            Keyword::LATERAL,
14036                        ]) // This couldn't possibly be a bad idea
14037                    })?
14038                    .into_iter()
14039                    .flatten()
14040                    .collect();
14041
14042                lateral_views.push(LateralView {
14043                    lateral_view,
14044                    lateral_view_name,
14045                    lateral_col_alias,
14046                    outer,
14047                });
14048            } else {
14049                break;
14050            }
14051        }
14052
14053        let prewhere = if self.dialect.supports_prewhere() && self.parse_keyword(Keyword::PREWHERE)
14054        {
14055            Some(self.parse_expr()?)
14056        } else {
14057            None
14058        };
14059
14060        let selection = if self.parse_keyword(Keyword::WHERE) {
14061            Some(self.parse_expr()?)
14062        } else {
14063            None
14064        };
14065
14066        let connect_by = self.maybe_parse_connect_by()?;
14067
14068        let group_by = self
14069            .parse_optional_group_by()?
14070            .unwrap_or_else(|| GroupByExpr::Expressions(vec![], vec![]));
14071
14072        let cluster_by = if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
14073            self.parse_comma_separated(Parser::parse_expr)?
14074        } else {
14075            vec![]
14076        };
14077
14078        let distribute_by = if self.parse_keywords(&[Keyword::DISTRIBUTE, Keyword::BY]) {
14079            self.parse_comma_separated(Parser::parse_expr)?
14080        } else {
14081            vec![]
14082        };
14083
14084        let sort_by = if self.parse_keywords(&[Keyword::SORT, Keyword::BY]) {
14085            self.parse_comma_separated(Parser::parse_order_by_expr)?
14086        } else {
14087            vec![]
14088        };
14089
14090        let having = if self.parse_keyword(Keyword::HAVING) {
14091            Some(self.parse_expr()?)
14092        } else {
14093            None
14094        };
14095
14096        // Accept QUALIFY and WINDOW in any order and flag accordingly.
14097        let (named_windows, qualify, window_before_qualify) = if self.parse_keyword(Keyword::WINDOW)
14098        {
14099            let named_windows = self.parse_comma_separated(Parser::parse_named_window)?;
14100            if self.parse_keyword(Keyword::QUALIFY) {
14101                (named_windows, Some(self.parse_expr()?), true)
14102            } else {
14103                (named_windows, None, true)
14104            }
14105        } else if self.parse_keyword(Keyword::QUALIFY) {
14106            let qualify = Some(self.parse_expr()?);
14107            if self.parse_keyword(Keyword::WINDOW) {
14108                (
14109                    self.parse_comma_separated(Parser::parse_named_window)?,
14110                    qualify,
14111                    false,
14112                )
14113            } else {
14114                (Default::default(), qualify, false)
14115            }
14116        } else {
14117            Default::default()
14118        };
14119
14120        Ok(Select {
14121            select_token: AttachedToken(select_token),
14122            optimizer_hint,
14123            distinct,
14124            select_modifiers,
14125            top,
14126            top_before_distinct,
14127            projection,
14128            exclude,
14129            into,
14130            from,
14131            lateral_views,
14132            prewhere,
14133            selection,
14134            group_by,
14135            cluster_by,
14136            distribute_by,
14137            sort_by,
14138            having,
14139            named_window: named_windows,
14140            window_before_qualify,
14141            qualify,
14142            value_table_mode,
14143            connect_by,
14144            flavor: if from_first {
14145                SelectFlavor::FromFirst
14146            } else {
14147                SelectFlavor::Standard
14148            },
14149        })
14150    }
14151
14152    /// Parses an optional optimizer hint at the current token position
14153    ///
14154    /// [MySQL](https://dev.mysql.com/doc/refman/8.4/en/optimizer-hints.html#optimizer-hints-overview)
14155    /// [Oracle](https://docs.oracle.com/en/database/oracle/oracle-database/21/sqlrf/Comments.html#GUID-D316D545-89E2-4D54-977F-FC97815CD62E)
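    /// For example, the `/*+ ... */` comment in the following (the hint text is
    /// passed through verbatim; the specific hint shown is illustrative):
    /// ```sql
    /// SELECT /*+ MAX_EXECUTION_TIME(1000) */ * FROM t
    /// ```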
14156    fn maybe_parse_optimizer_hint(&mut self) -> Result<Option<OptimizerHint>, ParserError> {
14157        let supports_hints = self.dialect.supports_comment_optimizer_hint();
14158        if !supports_hints {
14159            return Ok(None);
14160        }
14161        loop {
14162            let t = self.peek_nth_token_no_skip_ref(0);
14163            match &t.token {
14164                Token::Whitespace(ws) => {
14165                    match ws {
14166                        Whitespace::SingleLineComment { comment, .. }
14167                        | Whitespace::MultiLineComment(comment) => {
14168                            return Ok(match comment.strip_prefix("+") {
14169                                None => None,
14170                                Some(text) => {
14171                                    let hint = OptimizerHint {
14172                                        text: text.into(),
14173                                        style: if let Whitespace::SingleLineComment {
14174                                            prefix, ..
14175                                        } = ws
14176                                        {
14177                                            OptimizerHintStyle::SingleLine {
14178                                                prefix: prefix.clone(),
14179                                            }
14180                                        } else {
14181                                            OptimizerHintStyle::MultiLine
14182                                        },
14183                                    };
14184                                    // Consume the comment token
14185                                    self.next_token_no_skip();
14186                                    Some(hint)
14187                                }
14188                            });
14189                        }
14190                        Whitespace::Space | Whitespace::Tab | Whitespace::Newline => {
14191                            // Consume the token and try with the next whitespace or comment
14192                            self.next_token_no_skip();
14193                        }
14194                    }
14195                }
14196                _ => return Ok(None),
14197            }
14198        }
14199    }
14200
14201    /// Parses MySQL SELECT modifiers and DISTINCT/ALL in any order.
14202    ///
14203    /// Manual testing shows modifiers can appear in any order, and modifiers other than DISTINCT/ALL
14204    /// can be repeated.
14205    ///
14206    /// <https://dev.mysql.com/doc/refman/8.4/en/select.html>
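    ///
    /// For example (illustrative only), this accepts
    /// `SELECT HIGH_PRIORITY SQL_NO_CACHE DISTINCT col FROM t`, returning the collected
    /// modifiers alongside the parsed `DISTINCT`.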
14207    fn parse_select_modifiers(
14208        &mut self,
14209    ) -> Result<(Option<SelectModifiers>, Option<Distinct>), ParserError> {
14210        let mut modifiers = SelectModifiers::default();
14211        let mut distinct = None;
14212
14213        let keywords = &[
14214            Keyword::ALL,
14215            Keyword::DISTINCT,
14216            Keyword::DISTINCTROW,
14217            Keyword::HIGH_PRIORITY,
14218            Keyword::STRAIGHT_JOIN,
14219            Keyword::SQL_SMALL_RESULT,
14220            Keyword::SQL_BIG_RESULT,
14221            Keyword::SQL_BUFFER_RESULT,
14222            Keyword::SQL_NO_CACHE,
14223            Keyword::SQL_CALC_FOUND_ROWS,
14224        ];
14225
14226        while let Some(keyword) = self.parse_one_of_keywords(keywords) {
14227            match keyword {
14228                Keyword::ALL | Keyword::DISTINCT if distinct.is_none() => {
14229                    self.prev_token();
14230                    distinct = self.parse_all_or_distinct()?;
14231                }
14232                // DISTINCTROW is a MySQL-specific legacy (but not deprecated) alias for DISTINCT
14233                Keyword::DISTINCTROW if distinct.is_none() => {
14234                    distinct = Some(Distinct::Distinct);
14235                }
14236                Keyword::HIGH_PRIORITY => modifiers.high_priority = true,
14237                Keyword::STRAIGHT_JOIN => modifiers.straight_join = true,
14238                Keyword::SQL_SMALL_RESULT => modifiers.sql_small_result = true,
14239                Keyword::SQL_BIG_RESULT => modifiers.sql_big_result = true,
14240                Keyword::SQL_BUFFER_RESULT => modifiers.sql_buffer_result = true,
14241                Keyword::SQL_NO_CACHE => modifiers.sql_no_cache = true,
14242                Keyword::SQL_CALC_FOUND_ROWS => modifiers.sql_calc_found_rows = true,
14243                _ => {
14244                    self.prev_token();
14245                    return self.expected(
14246                        "HIGH_PRIORITY, STRAIGHT_JOIN, or other MySQL select modifier",
14247                        self.peek_token(),
14248                    );
14249                }
14250            }
14251        }
14252
14253        // Avoid polluting the AST with an empty `Some(SelectModifiers::default())` value unless
14254        // some modifiers were actually set.
14255        let select_modifiers = if modifiers.is_any_set() {
14256            Some(modifiers)
14257        } else {
14258            None
14259        };
14260        Ok((select_modifiers, distinct))
14261    }
14262
14263    fn parse_value_table_mode(&mut self) -> Result<Option<ValueTableMode>, ParserError> {
14264        if !dialect_of!(self is BigQueryDialect) {
14265            return Ok(None);
14266        }
14267
14268        let mode = if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::VALUE]) {
14269            Some(ValueTableMode::DistinctAsValue)
14270        } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::STRUCT]) {
14271            Some(ValueTableMode::DistinctAsStruct)
14272        } else if self.parse_keywords(&[Keyword::AS, Keyword::VALUE])
14273            || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::VALUE])
14274        {
14275            Some(ValueTableMode::AsValue)
14276        } else if self.parse_keywords(&[Keyword::AS, Keyword::STRUCT])
14277            || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::STRUCT])
14278        {
14279            Some(ValueTableMode::AsStruct)
14280        } else if self.parse_keyword(Keyword::AS) {
14281            self.expected("VALUE or STRUCT", self.peek_token())?
14282        } else {
14283            None
14284        };
14285
14286        Ok(mode)
14287    }
14288
14289    /// Invoke `f` after first setting the parser's `ParserState` to `state`.
14290    ///
14291    /// Upon return, restores the parser's state to what it started at.
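    ///
    /// A minimal usage sketch, mirroring how `CONNECT BY` relationships are parsed
    /// further below:
    ///
    /// ```ignore
    /// let relationships = self.with_state(ParserState::ConnectBy, |parser| {
    ///     parser.parse_comma_separated(Parser::parse_expr)
    /// })?;
    /// ```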
14292    fn with_state<T, F>(&mut self, state: ParserState, mut f: F) -> Result<T, ParserError>
14293    where
14294        F: FnMut(&mut Parser) -> Result<T, ParserError>,
14295    {
14296        let current_state = self.state;
14297        self.state = state;
14298        let res = f(self);
14299        self.state = current_state;
14300        res
14301    }
14302
14303    /// Parse a `CONNECT BY` clause (Oracle-style hierarchical query support).
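    ///
    /// For example (Oracle-style, illustrative only):
    /// `SELECT employee_id FROM employees START WITH manager_id IS NULL
    /// CONNECT BY PRIOR employee_id = manager_id`; an optional `NOCYCLE` may follow
    /// `CONNECT BY`.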
14304    pub fn maybe_parse_connect_by(&mut self) -> Result<Vec<ConnectByKind>, ParserError> {
14305        let mut clauses = Vec::with_capacity(2);
14306        loop {
14307            if let Some(idx) = self.parse_keywords_indexed(&[Keyword::START, Keyword::WITH]) {
14308                clauses.push(ConnectByKind::StartWith {
14309                    start_token: self.token_at(idx).clone().into(),
14310                    condition: self.parse_expr()?.into(),
14311                });
14312            } else if let Some(idx) = self.parse_keywords_indexed(&[Keyword::CONNECT, Keyword::BY])
14313            {
14314                clauses.push(ConnectByKind::ConnectBy {
14315                    connect_token: self.token_at(idx).clone().into(),
14316                    nocycle: self.parse_keyword(Keyword::NOCYCLE),
14317                    relationships: self.with_state(ParserState::ConnectBy, |parser| {
14318                        parser.parse_comma_separated(Parser::parse_expr)
14319                    })?,
14320                });
14321            } else {
14322                break;
14323            }
14324        }
14325        Ok(clauses)
14326    }
14327
14328    /// Parse `CREATE TABLE x AS TABLE y`
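    ///
    /// Only the trailing table reference is parsed here, e.g. the `s1.t1` in
    /// `CREATE TABLE t2 AS TABLE s1.t1` (illustrative names).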
14329    pub fn parse_as_table(&mut self) -> Result<Table, ParserError> {
14330        let token1 = self.next_token();
14331        let token2 = self.next_token();
14332        let token3 = self.next_token();
14333
14334        let table_name;
14335        let schema_name;
14336        if token2 == Token::Period {
14337            match token1.token {
14338                Token::Word(w) => {
14339                    schema_name = w.value;
14340                }
14341                _ => {
14342                    return self.expected("Schema name", token1);
14343                }
14344            }
14345            match token3.token {
14346                Token::Word(w) => {
14347                    table_name = w.value;
14348                }
14349                _ => {
14350                    return self.expected("Table name", token3);
14351                }
14352            }
14353            Ok(Table {
14354                table_name: Some(table_name),
14355                schema_name: Some(schema_name),
14356            })
14357        } else {
14358            match token1.token {
14359                Token::Word(w) => {
14360                    table_name = w.value;
14361                }
14362                _ => {
14363                    return self.expected("Table name", token1);
14364                }
14365            }
14366            Ok(Table {
14367                table_name: Some(table_name),
14368                schema_name: None,
14369            })
14370        }
14371    }
14372
14373    /// Parse a `SET ROLE` statement. Expects SET to be consumed already.
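    ///
    /// For example (illustrative only): `SET ROLE NONE` clears the role, while
    /// `SET SESSION ROLE my_role` selects one with a session scope.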
14374    fn parse_set_role(
14375        &mut self,
14376        modifier: Option<ContextModifier>,
14377    ) -> Result<Statement, ParserError> {
14378        self.expect_keyword_is(Keyword::ROLE)?;
14379
14380        let role_name = if self.parse_keyword(Keyword::NONE) {
14381            None
14382        } else {
14383            Some(self.parse_identifier()?)
14384        };
14385        Ok(Statement::Set(Set::SetRole {
14386            context_modifier: modifier,
14387            role_name,
14388        }))
14389    }
14390
14391    fn parse_set_values(
14392        &mut self,
14393        parenthesized_assignment: bool,
14394    ) -> Result<Vec<Expr>, ParserError> {
14395        let mut values = vec![];
14396
14397        if parenthesized_assignment {
14398            self.expect_token(&Token::LParen)?;
14399        }
14400
14401        loop {
14402            let value = if let Some(expr) = self.try_parse_expr_sub_query()? {
14403                expr
14404            } else if let Ok(expr) = self.parse_expr() {
14405                expr
14406            } else {
14407                self.expected("variable value", self.peek_token())?
14408            };
14409
14410            values.push(value);
14411            if self.consume_token(&Token::Comma) {
14412                continue;
14413            }
14414
14415            if parenthesized_assignment {
14416                self.expect_token(&Token::RParen)?;
14417            }
14418            return Ok(values);
14419        }
14420    }
14421
14422    fn parse_context_modifier(&mut self) -> Option<ContextModifier> {
14423        let modifier =
14424            self.parse_one_of_keywords(&[Keyword::SESSION, Keyword::LOCAL, Keyword::GLOBAL])?;
14425
14426        Self::keyword_to_modifier(modifier)
14427    }
14428
14429    /// Parse a single SET statement assignment `var = expr`.
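    ///
    /// Accepts either `=` or `TO` as the assignment operator, e.g.
    /// `search_path TO 'public'` or `autocommit = 1` (illustrative values).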
14430    fn parse_set_assignment(&mut self) -> Result<SetAssignment, ParserError> {
14431        let scope = self.parse_context_modifier();
14432
14433        let name = if self.dialect.supports_parenthesized_set_variables()
14434            && self.consume_token(&Token::LParen)
14435        {
14436            // Parenthesized assignments are handled in the `parse_set` function after
14437            // trying to parse a list of assignments using this function.
14438            // If a dialect supports both, and we find a LParen, we early exit from this function.
14439            self.expected("Unparenthesized assignment", self.peek_token())?
14440        } else {
14441            self.parse_object_name(false)?
14442        };
14443
14444        if !(self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO)) {
14445            return self.expected("assignment operator", self.peek_token());
14446        }
14447
14448        let value = self.parse_expr()?;
14449
14450        Ok(SetAssignment { scope, name, value })
14451    }
14452
14453    fn parse_set(&mut self) -> Result<Statement, ParserError> {
14454        let hivevar = self.parse_keyword(Keyword::HIVEVAR);
14455
14456        // Modifier is either HIVEVAR: or a ContextModifier (LOCAL, SESSION, etc), not both
14457        let scope = if !hivevar {
14458            self.parse_context_modifier()
14459        } else {
14460            None
14461        };
14462
14463        if hivevar {
14464            self.expect_token(&Token::Colon)?;
14465        }
14466
14467        if let Some(set_role_stmt) = self.maybe_parse(|parser| parser.parse_set_role(scope))? {
14468            return Ok(set_role_stmt);
14469        }
14470
14471        // Handle special cases first
14472        if self.parse_keywords(&[Keyword::TIME, Keyword::ZONE])
14473            || self.parse_keyword(Keyword::TIMEZONE)
14474        {
14475            if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
14476                return Ok(Set::SingleAssignment {
14477                    scope,
14478                    hivevar,
14479                    variable: ObjectName::from(vec!["TIMEZONE".into()]),
14480                    values: self.parse_set_values(false)?,
14481                }
14482                .into());
14483            } else {
14484                // A shorthand alias for SET TIME ZONE that doesn't require
14485                // the assignment operator. It's originally PostgreSQL-specific,
14486                // but we allow it for all dialects.
14487                return Ok(Set::SetTimeZone {
14488                    local: scope == Some(ContextModifier::Local),
14489                    value: self.parse_expr()?,
14490                }
14491                .into());
14492            }
14493        } else if self.dialect.supports_set_names() && self.parse_keyword(Keyword::NAMES) {
14494            if self.parse_keyword(Keyword::DEFAULT) {
14495                return Ok(Set::SetNamesDefault {}.into());
14496            }
14497            let charset_name = self.parse_identifier()?;
14498            let collation_name = if self.parse_one_of_keywords(&[Keyword::COLLATE]).is_some() {
14499                Some(self.parse_literal_string()?)
14500            } else {
14501                None
14502            };
14503
14504            return Ok(Set::SetNames {
14505                charset_name,
14506                collation_name,
14507            }
14508            .into());
14509        } else if self.parse_keyword(Keyword::CHARACTERISTICS) {
14510            self.expect_keywords(&[Keyword::AS, Keyword::TRANSACTION])?;
14511            return Ok(Set::SetTransaction {
14512                modes: self.parse_transaction_modes()?,
14513                snapshot: None,
14514                session: true,
14515            }
14516            .into());
14517        } else if self.parse_keyword(Keyword::TRANSACTION) {
14518            if self.parse_keyword(Keyword::SNAPSHOT) {
14519                let snapshot_id = self.parse_value()?.value;
14520                return Ok(Set::SetTransaction {
14521                    modes: vec![],
14522                    snapshot: Some(snapshot_id),
14523                    session: false,
14524                }
14525                .into());
14526            }
14527            return Ok(Set::SetTransaction {
14528                modes: self.parse_transaction_modes()?,
14529                snapshot: None,
14530                session: false,
14531            }
14532            .into());
14533        } else if self.parse_keyword(Keyword::AUTHORIZATION) {
14534            let auth_value = if self.parse_keyword(Keyword::DEFAULT) {
14535                SetSessionAuthorizationParamKind::Default
14536            } else {
14537                let value = self.parse_identifier()?;
14538                SetSessionAuthorizationParamKind::User(value)
14539            };
14540            return Ok(Set::SetSessionAuthorization(SetSessionAuthorizationParam {
14541                scope: scope.expect("SET ... AUTHORIZATION must have a scope"),
14542                kind: auth_value,
14543            })
14544            .into());
14545        }
14546
14547        if self.dialect.supports_comma_separated_set_assignments() {
14548            if scope.is_some() {
14549                self.prev_token();
14550            }
14551
14552            if let Some(assignments) = self
14553                .maybe_parse(|parser| parser.parse_comma_separated(Parser::parse_set_assignment))?
14554            {
14555                return if assignments.len() > 1 {
14556                    Ok(Set::MultipleAssignments { assignments }.into())
14557                } else {
14558                    let SetAssignment { scope, name, value } =
14559                        assignments.into_iter().next().ok_or_else(|| {
14560                            ParserError::ParserError("Expected at least one assignment".to_string())
14561                        })?;
14562
14563                    Ok(Set::SingleAssignment {
14564                        scope,
14565                        hivevar,
14566                        variable: name,
14567                        values: vec![value],
14568                    }
14569                    .into())
14570                };
14571            }
14572        }
14573
14574        let variables = if self.dialect.supports_parenthesized_set_variables()
14575            && self.consume_token(&Token::LParen)
14576        {
14577            let vars = OneOrManyWithParens::Many(
14578                self.parse_comma_separated(|parser: &mut Parser<'a>| parser.parse_identifier())?
14579                    .into_iter()
14580                    .map(|ident| ObjectName::from(vec![ident]))
14581                    .collect(),
14582            );
14583            self.expect_token(&Token::RParen)?;
14584            vars
14585        } else {
14586            OneOrManyWithParens::One(self.parse_object_name(false)?)
14587        };
14588
14589        if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
14590            let stmt = match variables {
14591                OneOrManyWithParens::One(var) => Set::SingleAssignment {
14592                    scope,
14593                    hivevar,
14594                    variable: var,
14595                    values: self.parse_set_values(false)?,
14596                },
14597                OneOrManyWithParens::Many(vars) => Set::ParenthesizedAssignments {
14598                    variables: vars,
14599                    values: self.parse_set_values(true)?,
14600                },
14601            };
14602
14603            return Ok(stmt.into());
14604        }
14605
14606        if self.dialect.supports_set_stmt_without_operator() {
14607            self.prev_token();
14608            return self.parse_set_session_params();
14609        };
14610
14611        self.expected("equals sign or TO", self.peek_token())
14612    }
14613
14614    /// Parse session parameter assignments after `SET` when no `=` or `TO` is present.
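    ///
    /// For example (MSSQL-style, illustrative only): `SET STATISTICS IO ON`,
    /// `SET IDENTITY_INSERT dbo.my_table ON`, or `SET OFFSETS SELECT, FROM OFF`.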
14615    pub fn parse_set_session_params(&mut self) -> Result<Statement, ParserError> {
14616        if self.parse_keyword(Keyword::STATISTICS) {
14617            let topic = match self.parse_one_of_keywords(&[
14618                Keyword::IO,
14619                Keyword::PROFILE,
14620                Keyword::TIME,
14621                Keyword::XML,
14622            ]) {
14623                Some(Keyword::IO) => SessionParamStatsTopic::IO,
14624                Some(Keyword::PROFILE) => SessionParamStatsTopic::Profile,
14625                Some(Keyword::TIME) => SessionParamStatsTopic::Time,
14626                Some(Keyword::XML) => SessionParamStatsTopic::Xml,
14627                _ => return self.expected("IO, PROFILE, TIME or XML", self.peek_token()),
14628            };
14629            let value = self.parse_session_param_value()?;
14630            Ok(
14631                Set::SetSessionParam(SetSessionParamKind::Statistics(SetSessionParamStatistics {
14632                    topic,
14633                    value,
14634                }))
14635                .into(),
14636            )
14637        } else if self.parse_keyword(Keyword::IDENTITY_INSERT) {
14638            let obj = self.parse_object_name(false)?;
14639            let value = self.parse_session_param_value()?;
14640            Ok(Set::SetSessionParam(SetSessionParamKind::IdentityInsert(
14641                SetSessionParamIdentityInsert { obj, value },
14642            ))
14643            .into())
14644        } else if self.parse_keyword(Keyword::OFFSETS) {
14645            let keywords = self.parse_comma_separated(|parser| {
14646                let next_token = parser.next_token();
14647                match &next_token.token {
14648                    Token::Word(w) => Ok(w.to_string()),
14649                    _ => parser.expected("SQL keyword", next_token),
14650                }
14651            })?;
14652            let value = self.parse_session_param_value()?;
14653            Ok(
14654                Set::SetSessionParam(SetSessionParamKind::Offsets(SetSessionParamOffsets {
14655                    keywords,
14656                    value,
14657                }))
14658                .into(),
14659            )
14660        } else {
14661            let names = self.parse_comma_separated(|parser| {
14662                let next_token = parser.next_token();
14663                match next_token.token {
14664                    Token::Word(w) => Ok(w.to_string()),
14665                    _ => parser.expected("Session param name", next_token),
14666                }
14667            })?;
14668            let value = self.parse_expr()?.to_string();
14669            Ok(
14670                Set::SetSessionParam(SetSessionParamKind::Generic(SetSessionParamGeneric {
14671                    names,
14672                    value,
14673                }))
14674                .into(),
14675            )
14676        }
14677    }
14678
14679    fn parse_session_param_value(&mut self) -> Result<SessionParamValue, ParserError> {
14680        if self.parse_keyword(Keyword::ON) {
14681            Ok(SessionParamValue::On)
14682        } else if self.parse_keyword(Keyword::OFF) {
14683            Ok(SessionParamValue::Off)
14684        } else {
14685            self.expected("ON or OFF", self.peek_token())
14686        }
14687    }
14688
14689    /// Parse a `SHOW` statement and dispatch to specific SHOW handlers.
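    ///
    /// A minimal usage sketch (assuming the crate's public `Parser::parse_sql` entry
    /// point and `GenericDialect`; names are for illustration):
    ///
    /// ```ignore
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// // `SHOW TABLES` is dispatched to `parse_show_tables` by this method.
    /// let stmts = Parser::parse_sql(&GenericDialect {}, "SHOW TABLES").unwrap();
    /// assert_eq!(stmts.len(), 1);
    /// ```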
14690    pub fn parse_show(&mut self) -> Result<Statement, ParserError> {
14691        let terse = self.parse_keyword(Keyword::TERSE);
14692        let extended = self.parse_keyword(Keyword::EXTENDED);
14693        let full = self.parse_keyword(Keyword::FULL);
14694        let session = self.parse_keyword(Keyword::SESSION);
14695        let global = self.parse_keyword(Keyword::GLOBAL);
14696        let external = self.parse_keyword(Keyword::EXTERNAL);
14697        if self
14698            .parse_one_of_keywords(&[Keyword::COLUMNS, Keyword::FIELDS])
14699            .is_some()
14700        {
14701            Ok(self.parse_show_columns(extended, full)?)
14702        } else if self.parse_keyword(Keyword::TABLES) {
14703            Ok(self.parse_show_tables(terse, extended, full, external)?)
14704        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEWS]) {
14705            Ok(self.parse_show_views(terse, true)?)
14706        } else if self.parse_keyword(Keyword::VIEWS) {
14707            Ok(self.parse_show_views(terse, false)?)
14708        } else if self.parse_keyword(Keyword::FUNCTIONS) {
14709            Ok(self.parse_show_functions()?)
14710        } else if extended || full {
14711            Err(ParserError::ParserError(
14712                "EXTENDED/FULL are not supported with this type of SHOW query".to_string(),
14713            ))
14714        } else if self.parse_one_of_keywords(&[Keyword::CREATE]).is_some() {
14715            Ok(self.parse_show_create()?)
14716        } else if self.parse_keyword(Keyword::COLLATION) {
14717            Ok(self.parse_show_collation()?)
14718        } else if self.parse_keyword(Keyword::VARIABLES)
14719            && dialect_of!(self is MySqlDialect | GenericDialect)
14720        {
14721            Ok(Statement::ShowVariables {
14722                filter: self.parse_show_statement_filter()?,
14723                session,
14724                global,
14725            })
14726        } else if self.parse_keyword(Keyword::STATUS)
14727            && dialect_of!(self is MySqlDialect | GenericDialect)
14728        {
14729            Ok(Statement::ShowStatus {
14730                filter: self.parse_show_statement_filter()?,
14731                session,
14732                global,
14733            })
14734        } else if self.parse_keyword(Keyword::DATABASES) {
14735            self.parse_show_databases(terse)
14736        } else if self.parse_keyword(Keyword::SCHEMAS) {
14737            self.parse_show_schemas(terse)
14738        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
14739            self.parse_show_charset(false)
14740        } else if self.parse_keyword(Keyword::CHARSET) {
14741            self.parse_show_charset(true)
14742        } else {
14743            Ok(Statement::ShowVariable {
14744                variable: self.parse_identifiers()?,
14745            })
14746        }
14747    }
14748
14749    fn parse_show_charset(&mut self, is_shorthand: bool) -> Result<Statement, ParserError> {
14750        // The `CHARACTER SET` / `CHARSET` keywords were consumed by the caller; parse the optional filter.
14751        Ok(Statement::ShowCharset(ShowCharset {
14752            is_shorthand,
14753            filter: self.parse_show_statement_filter()?,
14754        }))
14755    }
14756
14757    fn parse_show_databases(&mut self, terse: bool) -> Result<Statement, ParserError> {
14758        let history = self.parse_keyword(Keyword::HISTORY);
14759        let show_options = self.parse_show_stmt_options()?;
14760        Ok(Statement::ShowDatabases {
14761            terse,
14762            history,
14763            show_options,
14764        })
14765    }
14766
14767    fn parse_show_schemas(&mut self, terse: bool) -> Result<Statement, ParserError> {
14768        let history = self.parse_keyword(Keyword::HISTORY);
14769        let show_options = self.parse_show_stmt_options()?;
14770        Ok(Statement::ShowSchemas {
14771            terse,
14772            history,
14773            show_options,
14774        })
14775    }
14776
14777    /// Parse `SHOW CREATE <object>` returning the corresponding `ShowCreate` statement.
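    ///
    /// For example (illustrative only): `SHOW CREATE TABLE my_table` or
    /// `SHOW CREATE VIEW my_view`.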
14778    pub fn parse_show_create(&mut self) -> Result<Statement, ParserError> {
14779        let obj_type = match self.expect_one_of_keywords(&[
14780            Keyword::TABLE,
14781            Keyword::TRIGGER,
14782            Keyword::FUNCTION,
14783            Keyword::PROCEDURE,
14784            Keyword::EVENT,
14785            Keyword::VIEW,
14786        ])? {
14787            Keyword::TABLE => Ok(ShowCreateObject::Table),
14788            Keyword::TRIGGER => Ok(ShowCreateObject::Trigger),
14789            Keyword::FUNCTION => Ok(ShowCreateObject::Function),
14790            Keyword::PROCEDURE => Ok(ShowCreateObject::Procedure),
14791            Keyword::EVENT => Ok(ShowCreateObject::Event),
14792            Keyword::VIEW => Ok(ShowCreateObject::View),
14793            keyword => Err(ParserError::ParserError(format!(
14794                "Unable to map keyword to ShowCreateObject: {keyword:?}"
14795            ))),
14796        }?;
14797
14798        let obj_name = self.parse_object_name(false)?;
14799
14800        Ok(Statement::ShowCreate { obj_type, obj_name })
14801    }
14802
14803    /// Parse `SHOW COLUMNS`/`SHOW FIELDS` and return a `ShowColumns` statement.
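    ///
    /// For example (illustrative only): `SHOW COLUMNS FROM my_table` or
    /// `SHOW FULL COLUMNS FROM my_table LIKE 'id%'`.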
14804    pub fn parse_show_columns(
14805        &mut self,
14806        extended: bool,
14807        full: bool,
14808    ) -> Result<Statement, ParserError> {
14809        let show_options = self.parse_show_stmt_options()?;
14810        Ok(Statement::ShowColumns {
14811            extended,
14812            full,
14813            show_options,
14814        })
14815    }
14816
14817    fn parse_show_tables(
14818        &mut self,
14819        terse: bool,
14820        extended: bool,
14821        full: bool,
14822        external: bool,
14823    ) -> Result<Statement, ParserError> {
14824        let history = !external && self.parse_keyword(Keyword::HISTORY);
14825        let show_options = self.parse_show_stmt_options()?;
14826        Ok(Statement::ShowTables {
14827            terse,
14828            history,
14829            extended,
14830            full,
14831            external,
14832            show_options,
14833        })
14834    }
14835
14836    fn parse_show_views(
14837        &mut self,
14838        terse: bool,
14839        materialized: bool,
14840    ) -> Result<Statement, ParserError> {
14841        let show_options = self.parse_show_stmt_options()?;
14842        Ok(Statement::ShowViews {
14843            materialized,
14844            terse,
14845            show_options,
14846        })
14847    }
14848
14849    /// Parse `SHOW FUNCTIONS` and optional filter.
14850    pub fn parse_show_functions(&mut self) -> Result<Statement, ParserError> {
14851        let filter = self.parse_show_statement_filter()?;
14852        Ok(Statement::ShowFunctions { filter })
14853    }
14854
14855    /// Parse `SHOW COLLATION` and optional filter.
14856    pub fn parse_show_collation(&mut self) -> Result<Statement, ParserError> {
14857        let filter = self.parse_show_statement_filter()?;
14858        Ok(Statement::ShowCollation { filter })
14859    }
14860
14861    /// Parse an optional filter used by `SHOW` statements (LIKE, ILIKE, WHERE, or literal).
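    ///
    /// For example (illustrative only): the trailing `LIKE 'emp%'` in
    /// `SHOW TABLES LIKE 'emp%'`, or a bare string literal with no keyword.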
14862    pub fn parse_show_statement_filter(
14863        &mut self,
14864    ) -> Result<Option<ShowStatementFilter>, ParserError> {
14865        if self.parse_keyword(Keyword::LIKE) {
14866            Ok(Some(ShowStatementFilter::Like(
14867                self.parse_literal_string()?,
14868            )))
14869        } else if self.parse_keyword(Keyword::ILIKE) {
14870            Ok(Some(ShowStatementFilter::ILike(
14871                self.parse_literal_string()?,
14872            )))
14873        } else if self.parse_keyword(Keyword::WHERE) {
14874            Ok(Some(ShowStatementFilter::Where(self.parse_expr()?)))
14875        } else {
14876            self.maybe_parse(|parser| -> Result<String, ParserError> {
14877                parser.parse_literal_string()
14878            })?
14879            .map_or(Ok(None), |filter| {
14880                Ok(Some(ShowStatementFilter::NoKeyword(filter)))
14881            })
14882        }
14883    }
14884
14885    /// Parse a `USE` statement (database/catalog/schema/warehouse/role selection).
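    ///
    /// For example (illustrative only): `USE my_db`, Databricks `USE CATALOG main`, or
    /// Snowflake `USE WAREHOUSE my_wh` and `USE SECONDARY ROLES ALL`.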
14886    pub fn parse_use(&mut self) -> Result<Statement, ParserError> {
14887        // Determine which keywords are recognized by the current dialect
14888        let parsed_keyword = if dialect_of!(self is HiveDialect) {
14889            // HiveDialect accepts a `USE DEFAULT;` statement without any database specified
14890            if self.parse_keyword(Keyword::DEFAULT) {
14891                return Ok(Statement::Use(Use::Default));
14892            }
14893            None // HiveDialect doesn't expect any other specific keyword after `USE`
14894        } else if dialect_of!(self is DatabricksDialect) {
14895            self.parse_one_of_keywords(&[Keyword::CATALOG, Keyword::DATABASE, Keyword::SCHEMA])
14896        } else if dialect_of!(self is SnowflakeDialect) {
14897            self.parse_one_of_keywords(&[
14898                Keyword::DATABASE,
14899                Keyword::SCHEMA,
14900                Keyword::WAREHOUSE,
14901                Keyword::ROLE,
14902                Keyword::SECONDARY,
14903            ])
14904        } else {
14905            None // No specific keywords for other dialects, including GenericDialect
14906        };
14907
14908        let result = if matches!(parsed_keyword, Some(Keyword::SECONDARY)) {
14909            self.parse_secondary_roles()?
14910        } else {
14911            let obj_name = self.parse_object_name(false)?;
14912            match parsed_keyword {
14913                Some(Keyword::CATALOG) => Use::Catalog(obj_name),
14914                Some(Keyword::DATABASE) => Use::Database(obj_name),
14915                Some(Keyword::SCHEMA) => Use::Schema(obj_name),
14916                Some(Keyword::WAREHOUSE) => Use::Warehouse(obj_name),
14917                Some(Keyword::ROLE) => Use::Role(obj_name),
14918                _ => Use::Object(obj_name),
14919            }
14920        };
14921
14922        Ok(Statement::Use(result))
14923    }
14924
14925    fn parse_secondary_roles(&mut self) -> Result<Use, ParserError> {
14926        self.expect_one_of_keywords(&[Keyword::ROLES, Keyword::ROLE])?;
14927        if self.parse_keyword(Keyword::NONE) {
14928            Ok(Use::SecondaryRoles(SecondaryRoles::None))
14929        } else if self.parse_keyword(Keyword::ALL) {
14930            Ok(Use::SecondaryRoles(SecondaryRoles::All))
14931        } else {
14932            let roles = self.parse_comma_separated(|parser| parser.parse_identifier())?;
14933            Ok(Use::SecondaryRoles(SecondaryRoles::List(roles)))
14934        }
14935    }
14936
14937    /// Parse a table factor followed by any join clauses, returning `TableWithJoins`.
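    ///
    /// For example (illustrative only): the FROM-clause item
    /// `t1 LEFT JOIN t2 ON t1.id = t2.id` yields `t1` as the relation plus one join.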
14938    pub fn parse_table_and_joins(&mut self) -> Result<TableWithJoins, ParserError> {
14939        let relation = self.parse_table_factor()?;
14940        // Note that for keywords to be properly handled here, they need to be
14941        // added to `RESERVED_FOR_TABLE_ALIAS`, otherwise they may be parsed as
14942        // a table alias.
14943        let joins = self.parse_joins()?;
14944        Ok(TableWithJoins { relation, joins })
14945    }
14946
14947    fn parse_joins(&mut self) -> Result<Vec<Join>, ParserError> {
14948        let mut joins = vec![];
14949        loop {
14950            let global = self.parse_keyword(Keyword::GLOBAL);
14951            let join = if self.parse_keyword(Keyword::CROSS) {
14952                let join_operator = if self.parse_keyword(Keyword::JOIN) {
14953                    JoinOperator::CrossJoin(JoinConstraint::None)
14954                } else if self.parse_keyword(Keyword::APPLY) {
14955                    // MSSQL extension, similar to CROSS JOIN LATERAL
14956                    JoinOperator::CrossApply
14957                } else {
14958                    return self.expected("JOIN or APPLY after CROSS", self.peek_token());
14959                };
14960                let relation = self.parse_table_factor()?;
14961                let join_operator = if matches!(join_operator, JoinOperator::CrossJoin(_))
14962                    && self.dialect.supports_cross_join_constraint()
14963                {
14964                    let constraint = self.parse_join_constraint(false)?;
14965                    JoinOperator::CrossJoin(constraint)
14966                } else {
14967                    join_operator
14968                };
14969                Join {
14970                    relation,
14971                    global,
14972                    join_operator,
14973                }
14974            } else if self.parse_keyword(Keyword::OUTER) {
14975                // MSSQL extension, similar to LEFT JOIN LATERAL .. ON 1=1
14976                self.expect_keyword_is(Keyword::APPLY)?;
14977                Join {
14978                    relation: self.parse_table_factor()?,
14979                    global,
14980                    join_operator: JoinOperator::OuterApply,
14981                }
14982            } else if self.parse_keyword(Keyword::ASOF) {
14983                self.expect_keyword_is(Keyword::JOIN)?;
14984                let relation = self.parse_table_factor()?;
14985                self.expect_keyword_is(Keyword::MATCH_CONDITION)?;
14986                let match_condition = self.parse_parenthesized(Self::parse_expr)?;
14987                Join {
14988                    relation,
14989                    global,
14990                    join_operator: JoinOperator::AsOf {
14991                        match_condition,
14992                        constraint: self.parse_join_constraint(false)?,
14993                    },
14994                }
14995            } else {
14996                let natural = self.parse_keyword(Keyword::NATURAL);
14997                let peek_keyword = if let Token::Word(w) = self.peek_token().token {
14998                    w.keyword
14999                } else {
15000                    Keyword::NoKeyword
15001                };
15002
15003                let join_operator_type = match peek_keyword {
15004                    Keyword::INNER | Keyword::JOIN => {
15005                        let inner = self.parse_keyword(Keyword::INNER); // [ INNER ]
15006                        self.expect_keyword_is(Keyword::JOIN)?;
15007                        if inner {
15008                            JoinOperator::Inner
15009                        } else {
15010                            JoinOperator::Join
15011                        }
15012                    }
15013                    kw @ Keyword::LEFT | kw @ Keyword::RIGHT => {
15014                        let _ = self.next_token(); // consume LEFT/RIGHT
15015                        let is_left = kw == Keyword::LEFT;
15016                        let join_type = self.parse_one_of_keywords(&[
15017                            Keyword::OUTER,
15018                            Keyword::SEMI,
15019                            Keyword::ANTI,
15020                            Keyword::JOIN,
15021                        ]);
15022                        match join_type {
15023                            Some(Keyword::OUTER) => {
15024                                self.expect_keyword_is(Keyword::JOIN)?;
15025                                if is_left {
15026                                    JoinOperator::LeftOuter
15027                                } else {
15028                                    JoinOperator::RightOuter
15029                                }
15030                            }
15031                            Some(Keyword::SEMI) => {
15032                                self.expect_keyword_is(Keyword::JOIN)?;
15033                                if is_left {
15034                                    JoinOperator::LeftSemi
15035                                } else {
15036                                    JoinOperator::RightSemi
15037                                }
15038                            }
15039                            Some(Keyword::ANTI) => {
15040                                self.expect_keyword_is(Keyword::JOIN)?;
15041                                if is_left {
15042                                    JoinOperator::LeftAnti
15043                                } else {
15044                                    JoinOperator::RightAnti
15045                                }
15046                            }
15047                            Some(Keyword::JOIN) => {
15048                                if is_left {
15049                                    JoinOperator::Left
15050                                } else {
15051                                    JoinOperator::Right
15052                                }
15053                            }
15054                            _ => {
15055                                return Err(ParserError::ParserError(format!(
15056                                    "expected OUTER, SEMI, ANTI or JOIN after {kw:?}"
15057                                )))
15058                            }
15059                        }
15060                    }
15061                    Keyword::ANTI => {
15062                        let _ = self.next_token(); // consume ANTI
15063                        self.expect_keyword_is(Keyword::JOIN)?;
15064                        JoinOperator::Anti
15065                    }
15066                    Keyword::SEMI => {
15067                        let _ = self.next_token(); // consume SEMI
15068                        self.expect_keyword_is(Keyword::JOIN)?;
15069                        JoinOperator::Semi
15070                    }
15071                    Keyword::FULL => {
15072                        let _ = self.next_token(); // consume FULL
15073                        let _ = self.parse_keyword(Keyword::OUTER); // [ OUTER ]
15074                        self.expect_keyword_is(Keyword::JOIN)?;
15075                        JoinOperator::FullOuter
15076                    }
15077                    Keyword::OUTER => {
15078                        return self.expected("LEFT, RIGHT, or FULL", self.peek_token());
15079                    }
15080                    Keyword::STRAIGHT_JOIN => {
15081                        let _ = self.next_token(); // consume STRAIGHT_JOIN
15082                        JoinOperator::StraightJoin
15083                    }
15084                    _ if natural => {
15085                        return self.expected("a join type after NATURAL", self.peek_token());
15086                    }
15087                    _ => break,
15088                };
15089                let mut relation = self.parse_table_factor()?;
15090
15091                if !self
15092                    .dialect
15093                    .supports_left_associative_joins_without_parens()
15094                    && self.peek_parens_less_nested_join()
15095                {
15096                    let joins = self.parse_joins()?;
15097                    relation = TableFactor::NestedJoin {
15098                        table_with_joins: Box::new(TableWithJoins { relation, joins }),
15099                        alias: None,
15100                    };
15101                }
15102
15103                let join_constraint = self.parse_join_constraint(natural)?;
15104                Join {
15105                    relation,
15106                    global,
15107                    join_operator: join_operator_type(join_constraint),
15108                }
15109            };
15110            joins.push(join);
15111        }
15112        Ok(joins)
15113    }
15114
15115    fn peek_parens_less_nested_join(&self) -> bool {
15116        matches!(
15117            self.peek_token_ref().token,
15118            Token::Word(Word {
15119                keyword: Keyword::JOIN
15120                    | Keyword::INNER
15121                    | Keyword::LEFT
15122                    | Keyword::RIGHT
15123                    | Keyword::FULL,
15124                ..
15125            })
15126        )
15127    }
15128
15129    /// A table name or a parenthesized subquery, followed by optional `[AS] alias`
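    ///
    /// For example (illustrative only): a plain table `my_table AS t`, a derived table
    /// `(SELECT 1) AS x`, or a construct such as `UNNEST(arr)` on dialects that
    /// support it.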
15130    #[cfg_attr(feature = "recursive-protection", recursive::recursive)]
15131    pub fn parse_table_factor(&mut self) -> Result<TableFactor, ParserError> {
15132        let _guard = self.recursion_counter.try_decrease()?;
15133        if self.parse_keyword(Keyword::LATERAL) {
15134            // LATERAL must always be followed by a subquery or table function.
15135            if self.consume_token(&Token::LParen) {
15136                self.parse_derived_table_factor(Lateral)
15137            } else {
15138                let name = self.parse_object_name(false)?;
15139                self.expect_token(&Token::LParen)?;
15140                let args = self.parse_optional_args()?;
15141                let alias = self.maybe_parse_table_alias()?;
15142                Ok(TableFactor::Function {
15143                    lateral: true,
15144                    name,
15145                    args,
15146                    alias,
15147                })
15148            }
15149        } else if self.parse_keyword(Keyword::TABLE) {
15150            // parse table function (SELECT * FROM TABLE (<expr>) [ AS <alias> ])
15151            self.expect_token(&Token::LParen)?;
15152            let expr = self.parse_expr()?;
15153            self.expect_token(&Token::RParen)?;
15154            let alias = self.maybe_parse_table_alias()?;
15155            Ok(TableFactor::TableFunction { expr, alias })
15156        } else if self.consume_token(&Token::LParen) {
15157            // A left paren introduces either a derived table (i.e., a subquery)
15158            // or a nested join. It's nearly impossible to determine ahead of
15159            // time which it is... so we just try to parse both.
15160            //
15161            // Here's an example that demonstrates the complexity:
15162            //                     /-------------------------------------------------------\
15163            //                     | /-----------------------------------\                 |
15164            //     SELECT * FROM ( ( ( (SELECT 1) UNION (SELECT 2) ) AS t1 NATURAL JOIN t2 ) )
15165            //                   ^ ^ ^ ^
15166            //                   | | | |
15167            //                   | | | |
15168            //                   | | | (4) belongs to a SetExpr::Query inside the subquery
15169            //                   | | (3) starts a derived table (subquery)
15170            //                   | (2) starts a nested join
15171            //                   (1) an additional set of parens around a nested join
15172            //
15173
15174            // If the recently consumed '(' starts a derived table, the call to
15175            // `parse_derived_table_factor` below will return success after parsing the
15176            // subquery, followed by the closing ')', and the alias of the derived table.
15177            // In the example above this is case (3).
15178            if let Some(mut table) =
15179                self.maybe_parse(|parser| parser.parse_derived_table_factor(NotLateral))?
15180            {
15181                while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT])
15182                {
15183                    table = match kw {
15184                        Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
15185                        Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
15186                        unexpected_keyword => return Err(ParserError::ParserError(
15187                            format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in pivot/unpivot"),
15188                        )),
15189                    }
15190                }
15191                return Ok(table);
15192            }
15193
15194            // A parsing error from `parse_derived_table_factor` indicates that the '(' we've
15195            // recently consumed does not start a derived table (cases 1, 2, or 4).
15196            // `maybe_parse` will ignore such an error and rewind to be after the opening '('.
15197
15198            // Inside the parentheses we expect to find an (A) table factor
15199            // followed by some joins or (B) another level of nesting.
15200            let mut table_and_joins = self.parse_table_and_joins()?;
15201
15202            #[allow(clippy::if_same_then_else)]
15203            if !table_and_joins.joins.is_empty() {
15204                self.expect_token(&Token::RParen)?;
15205                let alias = self.maybe_parse_table_alias()?;
15206                Ok(TableFactor::NestedJoin {
15207                    table_with_joins: Box::new(table_and_joins),
15208                    alias,
15209                }) // (A)
15210            } else if let TableFactor::NestedJoin {
15211                table_with_joins: _,
15212                alias: _,
15213            } = &table_and_joins.relation
15214            {
15215                // (B): `table_and_joins` (what we found inside the parentheses)
15216                // is a nested join `(foo JOIN bar)`, not followed by other joins.
15217                self.expect_token(&Token::RParen)?;
15218                let alias = self.maybe_parse_table_alias()?;
15219                Ok(TableFactor::NestedJoin {
15220                    table_with_joins: Box::new(table_and_joins),
15221                    alias,
15222                })
15223            } else if self.dialect.supports_parens_around_table_factor() {
15224                // Dialect-specific behavior: Snowflake diverges from the
15225                // standard and from most of the other implementations by
15226                // allowing extra parentheses not only around a join (B), but
15227                // around lone table names (e.g. `FROM (mytable [AS alias])`)
15228                // and around derived tables (e.g. `FROM ((SELECT ...)
15229                // [AS alias])`) as well.
15230                self.expect_token(&Token::RParen)?;
15231
15232                if let Some(outer_alias) = self.maybe_parse_table_alias()? {
15233                    // Snowflake also allows specifying an alias *after* parens
15234                    // e.g. `FROM (mytable) AS alias`
15235                    match &mut table_and_joins.relation {
15236                        TableFactor::Derived { alias, .. }
15237                        | TableFactor::Table { alias, .. }
15238                        | TableFactor::Function { alias, .. }
15239                        | TableFactor::UNNEST { alias, .. }
15240                        | TableFactor::JsonTable { alias, .. }
15241                        | TableFactor::XmlTable { alias, .. }
15242                        | TableFactor::OpenJsonTable { alias, .. }
15243                        | TableFactor::TableFunction { alias, .. }
15244                        | TableFactor::Pivot { alias, .. }
15245                        | TableFactor::Unpivot { alias, .. }
15246                        | TableFactor::MatchRecognize { alias, .. }
15247                        | TableFactor::SemanticView { alias, .. }
15248                        | TableFactor::NestedJoin { alias, .. } => {
15249                            // but not `FROM (mytable AS alias1) AS alias2`.
15250                            if let Some(inner_alias) = alias {
15251                                return Err(ParserError::ParserError(format!(
15252                                    "duplicate alias {inner_alias}"
15253                                )));
15254                            }
15255                            // Act as if the alias was specified normally next
15256                            // to the table name: `(mytable) AS alias` ->
15257                            // `(mytable AS alias)`
15258                            alias.replace(outer_alias);
15259                        }
15260                    };
15261                }
15262                // Do not store the extra set of parens in the AST
15263                Ok(table_and_joins.relation)
15264            } else {
15265                // The SQL spec prohibits derived tables and bare tables from
15266                // appearing alone in parentheses (e.g. `FROM (mytable)`)
15267                self.expected("joined table", self.peek_token())
15268            }
15269        } else if self.dialect.supports_values_as_table_factor()
15270            && matches!(
15271                self.peek_tokens(),
15272                [
15273                    Token::Word(Word {
15274                        keyword: Keyword::VALUES,
15275                        ..
15276                    }),
15277                    Token::LParen
15278                ]
15279            )
15280        {
15281            self.expect_keyword_is(Keyword::VALUES)?;
15282
15283            // Snowflake and Databricks allow syntax like below:
15284            // SELECT * FROM VALUES (1, 'a'), (2, 'b') AS t (col1, col2)
15285            // where there are no parentheses around the VALUES clause.
15286            let values = SetExpr::Values(self.parse_values(false, false)?);
15287            let alias = self.maybe_parse_table_alias()?;
15288            Ok(TableFactor::Derived {
15289                lateral: false,
15290                subquery: Box::new(Query {
15291                    with: None,
15292                    body: Box::new(values),
15293                    order_by: None,
15294                    limit_clause: None,
15295                    fetch: None,
15296                    locks: vec![],
15297                    for_clause: None,
15298                    settings: None,
15299                    format_clause: None,
15300                    pipe_operators: vec![],
15301                }),
15302                alias,
15303                sample: None,
15304            })
15305        } else if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
15306            && self.parse_keyword(Keyword::UNNEST)
15307        {
15308            self.expect_token(&Token::LParen)?;
15309            let array_exprs = self.parse_comma_separated(Parser::parse_expr)?;
15310            self.expect_token(&Token::RParen)?;
15311
15312            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
15313            let alias = match self.maybe_parse_table_alias() {
15314                Ok(Some(alias)) => Some(alias),
15315                Ok(None) => None,
15316                Err(e) => return Err(e),
15317            };
15318
15319            let with_offset = match self.expect_keywords(&[Keyword::WITH, Keyword::OFFSET]) {
15320                Ok(()) => true,
15321                Err(_) => false,
15322            };
15323
15324            let with_offset_alias = if with_offset {
15325                match self.parse_optional_alias(keywords::RESERVED_FOR_COLUMN_ALIAS) {
15326                    Ok(Some(alias)) => Some(alias),
15327                    Ok(None) => None,
15328                    Err(e) => return Err(e),
15329                }
15330            } else {
15331                None
15332            };
15333
15334            Ok(TableFactor::UNNEST {
15335                alias,
15336                array_exprs,
15337                with_offset,
15338                with_offset_alias,
15339                with_ordinality,
15340            })
15341        } else if self.parse_keyword_with_tokens(Keyword::JSON_TABLE, &[Token::LParen]) {
15342            let json_expr = self.parse_expr()?;
15343            self.expect_token(&Token::Comma)?;
15344            let json_path = self.parse_value()?.value;
15345            self.expect_keyword_is(Keyword::COLUMNS)?;
15346            self.expect_token(&Token::LParen)?;
15347            let columns = self.parse_comma_separated(Parser::parse_json_table_column_def)?;
15348            self.expect_token(&Token::RParen)?;
15349            self.expect_token(&Token::RParen)?;
15350            let alias = self.maybe_parse_table_alias()?;
15351            Ok(TableFactor::JsonTable {
15352                json_expr,
15353                json_path,
15354                columns,
15355                alias,
15356            })
15357        } else if self.parse_keyword_with_tokens(Keyword::OPENJSON, &[Token::LParen]) {
15358            self.prev_token();
15359            self.parse_open_json_table_factor()
15360        } else if self.parse_keyword_with_tokens(Keyword::XMLTABLE, &[Token::LParen]) {
15361            self.prev_token();
15362            self.parse_xml_table_factor()
15363        } else if self.dialect.supports_semantic_view_table_factor()
15364            && self.peek_keyword_with_tokens(Keyword::SEMANTIC_VIEW, &[Token::LParen])
15365        {
15366            self.parse_semantic_view_table_factor()
15367        } else {
15368            let name = self.parse_object_name(true)?;
15369
15370            let json_path = match self.peek_token().token {
15371                Token::LBracket if self.dialect.supports_partiql() => Some(self.parse_json_path()?),
15372                _ => None,
15373            };
15374
15375            let partitions: Vec<Ident> = if dialect_of!(self is MySqlDialect | GenericDialect)
15376                && self.parse_keyword(Keyword::PARTITION)
15377            {
15378                self.parse_parenthesized_identifiers()?
15379            } else {
15380                vec![]
15381            };
15382
15383            // Parse potential version qualifier
15384            let version = self.maybe_parse_table_version()?;
15385
15386            // Postgres, MSSQL, ClickHouse: table-valued functions:
15387            let args = if self.consume_token(&Token::LParen) {
15388                Some(self.parse_table_function_args()?)
15389            } else {
15390                None
15391            };
15392
15393            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
15394
15395            let mut sample = None;
15396            if self.dialect.supports_table_sample_before_alias() {
15397                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
15398                    sample = Some(TableSampleKind::BeforeTableAlias(parsed_sample));
15399                }
15400            }
15401
15402            let alias = self.maybe_parse_table_alias()?;
15403
15404            // MySQL-specific table hints:
15405            let index_hints = if self.dialect.supports_table_hints() {
15406                self.maybe_parse(|p| p.parse_table_index_hints())?
15407                    .unwrap_or(vec![])
15408            } else {
15409                vec![]
15410            };
15411
15412            // MSSQL-specific table hints:
15413            let mut with_hints = vec![];
15414            if self.parse_keyword(Keyword::WITH) {
15415                if self.consume_token(&Token::LParen) {
15416                    with_hints = self.parse_comma_separated(Parser::parse_expr)?;
15417                    self.expect_token(&Token::RParen)?;
15418                } else {
15419                    // rewind, as WITH may belong to the next statement's CTE
15420                    self.prev_token();
15421                }
15422            };
15423
15424            if !self.dialect.supports_table_sample_before_alias() {
15425                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
15426                    sample = Some(TableSampleKind::AfterTableAlias(parsed_sample));
15427                }
15428            }
15429
15430            let mut table = TableFactor::Table {
15431                name,
15432                alias,
15433                args,
15434                with_hints,
15435                version,
15436                partitions,
15437                with_ordinality,
15438                json_path,
15439                sample,
15440                index_hints,
15441            };
15442
15443            while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT]) {
15444                table = match kw {
15445                    Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
15446                    Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
15447                    unexpected_keyword => return Err(ParserError::ParserError(
15448                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in pivot/unpivot"),
15449                    )),
15450                }
15451            }
15452
15453            if self.dialect.supports_match_recognize()
15454                && self.parse_keyword(Keyword::MATCH_RECOGNIZE)
15455            {
15456                table = self.parse_match_recognize(table)?;
15457            }
15458
15459            Ok(table)
15460        }
15461    }
15462
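    /// Looks for an optional `TABLESAMPLE`/`SAMPLE` clause and parses it if present.
    ///
    /// A minimal sketch, shown through a full query with hypothetical names
    /// (assumes the dialect accepts `TABLESAMPLE` in this position, as `GenericDialect` does):
    ///
    /// ```
    /// # use sqlparser::parser::{Parser, ParserError};
    /// # use sqlparser::dialect::GenericDialect;
    /// # fn main() -> Result<(), ParserError> {
    /// // Hypothetical table name; BERNOULLI sampling with argument 10.
    /// let sql = "SELECT * FROM orders TABLESAMPLE BERNOULLI (10)";
    /// let mut parser = Parser::new(&GenericDialect).try_with_sql(sql)?;
    /// let _stmt = parser.parse_statement()?;
    /// # Ok(())
    /// # }
    /// ```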
15463    fn maybe_parse_table_sample(&mut self) -> Result<Option<Box<TableSample>>, ParserError> {
15464        let modifier = if self.parse_keyword(Keyword::TABLESAMPLE) {
15465            TableSampleModifier::TableSample
15466        } else if self.parse_keyword(Keyword::SAMPLE) {
15467            TableSampleModifier::Sample
15468        } else {
15469            return Ok(None);
15470        };
15471        self.parse_table_sample(modifier).map(Some)
15472    }
15473
15474    fn parse_table_sample(
15475        &mut self,
15476        modifier: TableSampleModifier,
15477    ) -> Result<Box<TableSample>, ParserError> {
15478        let name = match self.parse_one_of_keywords(&[
15479            Keyword::BERNOULLI,
15480            Keyword::ROW,
15481            Keyword::SYSTEM,
15482            Keyword::BLOCK,
15483        ]) {
15484            Some(Keyword::BERNOULLI) => Some(TableSampleMethod::Bernoulli),
15485            Some(Keyword::ROW) => Some(TableSampleMethod::Row),
15486            Some(Keyword::SYSTEM) => Some(TableSampleMethod::System),
15487            Some(Keyword::BLOCK) => Some(TableSampleMethod::Block),
15488            _ => None,
15489        };
15490
15491        let parenthesized = self.consume_token(&Token::LParen);
15492
15493        let (quantity, bucket) = if parenthesized && self.parse_keyword(Keyword::BUCKET) {
15494            let selected_bucket = self.parse_number_value()?.value;
15495            self.expect_keywords(&[Keyword::OUT, Keyword::OF])?;
15496            let total = self.parse_number_value()?.value;
15497            let on = if self.parse_keyword(Keyword::ON) {
15498                Some(self.parse_expr()?)
15499            } else {
15500                None
15501            };
15502            (
15503                None,
15504                Some(TableSampleBucket {
15505                    bucket: selected_bucket,
15506                    total,
15507                    on,
15508                }),
15509            )
15510        } else {
15511            let value = match self.maybe_parse(|p| p.parse_expr())? {
15512                Some(num) => num,
15513                None => {
15514                    let next_token = self.next_token();
15515                    if let Token::Word(w) = next_token.token {
15516                        Expr::Value(Value::Placeholder(w.value).with_span(next_token.span))
15517                    } else {
15518                        return parser_err!(
15519                            "Expecting number or byte length e.g. 100M",
15520                            self.peek_token().span.start
15521                        );
15522                    }
15523                }
15524            };
15525            let unit = if self.parse_keyword(Keyword::ROWS) {
15526                Some(TableSampleUnit::Rows)
15527            } else if self.parse_keyword(Keyword::PERCENT) {
15528                Some(TableSampleUnit::Percent)
15529            } else {
15530                None
15531            };
15532            (
15533                Some(TableSampleQuantity {
15534                    parenthesized,
15535                    value,
15536                    unit,
15537                }),
15538                None,
15539            )
15540        };
15541        if parenthesized {
15542            self.expect_token(&Token::RParen)?;
15543        }
15544
15545        let seed = if self.parse_keyword(Keyword::REPEATABLE) {
15546            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Repeatable)?)
15547        } else if self.parse_keyword(Keyword::SEED) {
15548            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Seed)?)
15549        } else {
15550            None
15551        };
15552
15553        let offset = if self.parse_keyword(Keyword::OFFSET) {
15554            Some(self.parse_expr()?)
15555        } else {
15556            None
15557        };
15558
15559        Ok(Box::new(TableSample {
15560            modifier,
15561            name,
15562            quantity,
15563            seed,
15564            bucket,
15565            offset,
15566        }))
15567    }
15568
15569    fn parse_table_sample_seed(
15570        &mut self,
15571        modifier: TableSampleSeedModifier,
15572    ) -> Result<TableSampleSeed, ParserError> {
15573        self.expect_token(&Token::LParen)?;
15574        let value = self.parse_number_value()?.value;
15575        self.expect_token(&Token::RParen)?;
15576        Ok(TableSampleSeed { modifier, value })
15577    }
15578
15579    /// Parses `OPENJSON( jsonExpression [ , path ] )  [ <with_clause> ]` clause,
15580    /// assuming the `OPENJSON` keyword was already consumed.
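    ///
    /// A minimal sketch, shown through a full query (hypothetical JSON literal
    /// and alias; `OPENJSON` is MSSQL syntax):
    ///
    /// ```
    /// # use sqlparser::parser::{Parser, ParserError};
    /// # use sqlparser::dialect::MsSqlDialect;
    /// # fn main() -> Result<(), ParserError> {
    /// // Hypothetical JSON literal; no path and no WITH clause.
    /// let sql = "SELECT * FROM OPENJSON('[1, 2]') AS j";
    /// let dialect = MsSqlDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql(sql)?;
    /// let _stmt = parser.parse_statement()?;
    /// # Ok(())
    /// # }
    /// ```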
15581    fn parse_open_json_table_factor(&mut self) -> Result<TableFactor, ParserError> {
15582        self.expect_token(&Token::LParen)?;
15583        let json_expr = self.parse_expr()?;
15584        let json_path = if self.consume_token(&Token::Comma) {
15585            Some(self.parse_value()?.value)
15586        } else {
15587            None
15588        };
15589        self.expect_token(&Token::RParen)?;
15590        let columns = if self.parse_keyword(Keyword::WITH) {
15591            self.expect_token(&Token::LParen)?;
15592            let columns = self.parse_comma_separated(Parser::parse_openjson_table_column_def)?;
15593            self.expect_token(&Token::RParen)?;
15594            columns
15595        } else {
15596            Vec::new()
15597        };
15598        let alias = self.maybe_parse_table_alias()?;
15599        Ok(TableFactor::OpenJsonTable {
15600            json_expr,
15601            json_path,
15602            columns,
15603            alias,
15604        })
15605    }
15606
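    /// Parses an `XMLTABLE( ... PASSING ... COLUMNS ... )` table factor,
    /// assuming the `XMLTABLE` keyword was already consumed.
    ///
    /// A minimal sketch, shown through a full query with hypothetical XPath
    /// expressions and column names:
    ///
    /// ```
    /// # use sqlparser::parser::{Parser, ParserError};
    /// # use sqlparser::dialect::GenericDialect;
    /// # fn main() -> Result<(), ParserError> {
    /// // Hypothetical row pattern, passed document and column path.
    /// let sql = "SELECT * FROM XMLTABLE('/root/row' PASSING docs COLUMNS id INT PATH '@id') AS x";
    /// let mut parser = Parser::new(&GenericDialect).try_with_sql(sql)?;
    /// let _stmt = parser.parse_statement()?;
    /// # Ok(())
    /// # }
    /// ```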
15607    fn parse_xml_table_factor(&mut self) -> Result<TableFactor, ParserError> {
15608        self.expect_token(&Token::LParen)?;
15609        let namespaces = if self.parse_keyword(Keyword::XMLNAMESPACES) {
15610            self.expect_token(&Token::LParen)?;
15611            let namespaces = self.parse_comma_separated(Parser::parse_xml_namespace_definition)?;
15612            self.expect_token(&Token::RParen)?;
15613            self.expect_token(&Token::Comma)?;
15614            namespaces
15615        } else {
15616            vec![]
15617        };
15618        let row_expression = self.parse_expr()?;
15619        let passing = self.parse_xml_passing_clause()?;
15620        self.expect_keyword_is(Keyword::COLUMNS)?;
15621        let columns = self.parse_comma_separated(Parser::parse_xml_table_column)?;
15622        self.expect_token(&Token::RParen)?;
15623        let alias = self.maybe_parse_table_alias()?;
15624        Ok(TableFactor::XmlTable {
15625            namespaces,
15626            row_expression,
15627            passing,
15628            columns,
15629            alias,
15630        })
15631    }
15632
15633    fn parse_xml_namespace_definition(&mut self) -> Result<XmlNamespaceDefinition, ParserError> {
15634        let uri = self.parse_expr()?;
15635        self.expect_keyword_is(Keyword::AS)?;
15636        let name = self.parse_identifier()?;
15637        Ok(XmlNamespaceDefinition { uri, name })
15638    }
15639
15640    fn parse_xml_table_column(&mut self) -> Result<XmlTableColumn, ParserError> {
15641        let name = self.parse_identifier()?;
15642
15643        let option = if self.parse_keyword(Keyword::FOR) {
15644            self.expect_keyword(Keyword::ORDINALITY)?;
15645            XmlTableColumnOption::ForOrdinality
15646        } else {
15647            let r#type = self.parse_data_type()?;
15648            let mut path = None;
15649            let mut default = None;
15650
15651            if self.parse_keyword(Keyword::PATH) {
15652                path = Some(self.parse_expr()?);
15653            }
15654
15655            if self.parse_keyword(Keyword::DEFAULT) {
15656                default = Some(self.parse_expr()?);
15657            }
15658
15659            let not_null = self.parse_keywords(&[Keyword::NOT, Keyword::NULL]);
15660            if !not_null {
15661                // NULL is the default but can be specified explicitly
15662                let _ = self.parse_keyword(Keyword::NULL);
15663            }
15664
15665            XmlTableColumnOption::NamedInfo {
15666                r#type,
15667                path,
15668                default,
15669                nullable: !not_null,
15670            }
15671        };
15672        Ok(XmlTableColumn { name, option })
15673    }
15674
15675    fn parse_xml_passing_clause(&mut self) -> Result<XmlPassingClause, ParserError> {
15676        let mut arguments = vec![];
15677        if self.parse_keyword(Keyword::PASSING) {
15678            loop {
15679                let by_value =
15680                    self.parse_keyword(Keyword::BY) && self.expect_keyword(Keyword::VALUE).is_ok();
15681                let expr = self.parse_expr()?;
15682                let alias = if self.parse_keyword(Keyword::AS) {
15683                    Some(self.parse_identifier()?)
15684                } else {
15685                    None
15686                };
15687                arguments.push(XmlPassingArgument {
15688                    expr,
15689                    alias,
15690                    by_value,
15691                });
15692                if !self.consume_token(&Token::Comma) {
15693                    break;
15694                }
15695            }
15696        }
15697        Ok(XmlPassingClause { arguments })
15698    }
15699
15700    /// Parse a [TableFactor::SemanticView]
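    ///
    /// A minimal sketch, shown through a full query with hypothetical semantic
    /// view, dimension and metric names (assumes a dialect that supports this
    /// factor, e.g. Snowflake):
    ///
    /// ```
    /// # use sqlparser::parser::{Parser, ParserError};
    /// # use sqlparser::dialect::SnowflakeDialect;
    /// # fn main() -> Result<(), ParserError> {
    /// // Hypothetical names; DIMENSIONS/METRICS/FACTS/WHERE may appear in any order.
    /// let sql = "SELECT * FROM SEMANTIC_VIEW(sales_model DIMENSIONS region METRICS total_sales) AS sv";
    /// let dialect = SnowflakeDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql(sql)?;
    /// let _stmt = parser.parse_statement()?;
    /// # Ok(())
    /// # }
    /// ```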
15701    fn parse_semantic_view_table_factor(&mut self) -> Result<TableFactor, ParserError> {
15702        self.expect_keyword(Keyword::SEMANTIC_VIEW)?;
15703        self.expect_token(&Token::LParen)?;
15704
15705        let name = self.parse_object_name(true)?;
15706
15707        // Parse DIMENSIONS, METRICS, FACTS and WHERE clauses in flexible order
15708        let mut dimensions = Vec::new();
15709        let mut metrics = Vec::new();
15710        let mut facts = Vec::new();
15711        let mut where_clause = None;
15712
15713        while self.peek_token().token != Token::RParen {
15714            if self.parse_keyword(Keyword::DIMENSIONS) {
15715                if !dimensions.is_empty() {
15716                    return Err(ParserError::ParserError(
15717                        "DIMENSIONS clause can only be specified once".to_string(),
15718                    ));
15719                }
15720                dimensions = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
15721            } else if self.parse_keyword(Keyword::METRICS) {
15722                if !metrics.is_empty() {
15723                    return Err(ParserError::ParserError(
15724                        "METRICS clause can only be specified once".to_string(),
15725                    ));
15726                }
15727                metrics = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
15728            } else if self.parse_keyword(Keyword::FACTS) {
15729                if !facts.is_empty() {
15730                    return Err(ParserError::ParserError(
15731                        "FACTS clause can only be specified once".to_string(),
15732                    ));
15733                }
15734                facts = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
15735            } else if self.parse_keyword(Keyword::WHERE) {
15736                if where_clause.is_some() {
15737                    return Err(ParserError::ParserError(
15738                        "WHERE clause can only be specified once".to_string(),
15739                    ));
15740                }
15741                where_clause = Some(self.parse_expr()?);
15742            } else {
15743                return parser_err!(
15744                    format!(
15745                        "Expected one of DIMENSIONS, METRICS, FACTS or WHERE, got {}",
15746                        self.peek_token().token
15747                    ),
15748                    self.peek_token().span.start
15749                )?;
15750            }
15751        }
15752
15753        self.expect_token(&Token::RParen)?;
15754
15755        let alias = self.maybe_parse_table_alias()?;
15756
15757        Ok(TableFactor::SemanticView {
15758            name,
15759            dimensions,
15760            metrics,
15761            facts,
15762            where_clause,
15763            alias,
15764        })
15765    }
15766
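    /// Parses a `MATCH_RECOGNIZE( ... )` clause wrapping the given table factor,
    /// assuming the `MATCH_RECOGNIZE` keyword was already consumed.
    ///
    /// A minimal sketch, shown through a full query with hypothetical column
    /// and symbol names (assumes a dialect that supports `MATCH_RECOGNIZE`,
    /// e.g. Snowflake):
    ///
    /// ```
    /// # use sqlparser::parser::{Parser, ParserError};
    /// # use sqlparser::dialect::SnowflakeDialect;
    /// # fn main() -> Result<(), ParserError> {
    /// // Hypothetical table, ordering column and symbol definition.
    /// let sql = "SELECT * FROM trades MATCH_RECOGNIZE(ORDER BY ts PATTERN (A+) DEFINE A AS price > 10) AS mr";
    /// let dialect = SnowflakeDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql(sql)?;
    /// let _stmt = parser.parse_statement()?;
    /// # Ok(())
    /// # }
    /// ```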
15767    fn parse_match_recognize(&mut self, table: TableFactor) -> Result<TableFactor, ParserError> {
15768        self.expect_token(&Token::LParen)?;
15769
15770        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
15771            self.parse_comma_separated(Parser::parse_expr)?
15772        } else {
15773            vec![]
15774        };
15775
15776        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
15777            self.parse_comma_separated(Parser::parse_order_by_expr)?
15778        } else {
15779            vec![]
15780        };
15781
15782        let measures = if self.parse_keyword(Keyword::MEASURES) {
15783            self.parse_comma_separated(|p| {
15784                let expr = p.parse_expr()?;
15785                let _ = p.parse_keyword(Keyword::AS);
15786                let alias = p.parse_identifier()?;
15787                Ok(Measure { expr, alias })
15788            })?
15789        } else {
15790            vec![]
15791        };
15792
15793        let rows_per_match =
15794            if self.parse_keywords(&[Keyword::ONE, Keyword::ROW, Keyword::PER, Keyword::MATCH]) {
15795                Some(RowsPerMatch::OneRow)
15796            } else if self.parse_keywords(&[
15797                Keyword::ALL,
15798                Keyword::ROWS,
15799                Keyword::PER,
15800                Keyword::MATCH,
15801            ]) {
15802                Some(RowsPerMatch::AllRows(
15803                    if self.parse_keywords(&[Keyword::SHOW, Keyword::EMPTY, Keyword::MATCHES]) {
15804                        Some(EmptyMatchesMode::Show)
15805                    } else if self.parse_keywords(&[
15806                        Keyword::OMIT,
15807                        Keyword::EMPTY,
15808                        Keyword::MATCHES,
15809                    ]) {
15810                        Some(EmptyMatchesMode::Omit)
15811                    } else if self.parse_keywords(&[
15812                        Keyword::WITH,
15813                        Keyword::UNMATCHED,
15814                        Keyword::ROWS,
15815                    ]) {
15816                        Some(EmptyMatchesMode::WithUnmatched)
15817                    } else {
15818                        None
15819                    },
15820                ))
15821            } else {
15822                None
15823            };
15824
15825        let after_match_skip =
15826            if self.parse_keywords(&[Keyword::AFTER, Keyword::MATCH, Keyword::SKIP]) {
15827                if self.parse_keywords(&[Keyword::PAST, Keyword::LAST, Keyword::ROW]) {
15828                    Some(AfterMatchSkip::PastLastRow)
15829                } else if self.parse_keywords(&[Keyword::TO, Keyword::NEXT, Keyword::ROW]) {
15830                    Some(AfterMatchSkip::ToNextRow)
15831                } else if self.parse_keywords(&[Keyword::TO, Keyword::FIRST]) {
15832                    Some(AfterMatchSkip::ToFirst(self.parse_identifier()?))
15833                } else if self.parse_keywords(&[Keyword::TO, Keyword::LAST]) {
15834                    Some(AfterMatchSkip::ToLast(self.parse_identifier()?))
15835                } else {
15836                    let found = self.next_token();
15837                    return self.expected("after match skip option", found);
15838                }
15839            } else {
15840                None
15841            };
15842
15843        self.expect_keyword_is(Keyword::PATTERN)?;
15844        let pattern = self.parse_parenthesized(Self::parse_pattern)?;
15845
15846        self.expect_keyword_is(Keyword::DEFINE)?;
15847
15848        let symbols = self.parse_comma_separated(|p| {
15849            let symbol = p.parse_identifier()?;
15850            p.expect_keyword_is(Keyword::AS)?;
15851            let definition = p.parse_expr()?;
15852            Ok(SymbolDefinition { symbol, definition })
15853        })?;
15854
15855        self.expect_token(&Token::RParen)?;
15856
15857        let alias = self.maybe_parse_table_alias()?;
15858
15859        Ok(TableFactor::MatchRecognize {
15860            table: Box::new(table),
15861            partition_by,
15862            order_by,
15863            measures,
15864            rows_per_match,
15865            after_match_skip,
15866            pattern,
15867            symbols,
15868            alias,
15869        })
15870    }
15871
15872    fn parse_base_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
15873        match self.next_token().token {
15874            Token::Caret => Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::Start)),
15875            Token::Placeholder(s) if s == "$" => {
15876                Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::End))
15877            }
15878            Token::LBrace => {
15879                self.expect_token(&Token::Minus)?;
15880                let symbol = self.parse_identifier().map(MatchRecognizeSymbol::Named)?;
15881                self.expect_token(&Token::Minus)?;
15882                self.expect_token(&Token::RBrace)?;
15883                Ok(MatchRecognizePattern::Exclude(symbol))
15884            }
15885            Token::Word(Word {
15886                value,
15887                quote_style: None,
15888                ..
15889            }) if value == "PERMUTE" => {
15890                self.expect_token(&Token::LParen)?;
15891                let symbols = self.parse_comma_separated(|p| {
15892                    p.parse_identifier().map(MatchRecognizeSymbol::Named)
15893                })?;
15894                self.expect_token(&Token::RParen)?;
15895                Ok(MatchRecognizePattern::Permute(symbols))
15896            }
15897            Token::LParen => {
15898                let pattern = self.parse_pattern()?;
15899                self.expect_token(&Token::RParen)?;
15900                Ok(MatchRecognizePattern::Group(Box::new(pattern)))
15901            }
15902            _ => {
15903                self.prev_token();
15904                self.parse_identifier()
15905                    .map(MatchRecognizeSymbol::Named)
15906                    .map(MatchRecognizePattern::Symbol)
15907            }
15908        }
15909    }
15910
15911    fn parse_repetition_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
15912        let mut pattern = self.parse_base_pattern()?;
15913        loop {
15914            let token = self.next_token();
15915            let quantifier = match token.token {
15916                Token::Mul => RepetitionQuantifier::ZeroOrMore,
15917                Token::Plus => RepetitionQuantifier::OneOrMore,
15918                Token::Placeholder(s) if s == "?" => RepetitionQuantifier::AtMostOne,
15919                Token::LBrace => {
15920                    // quantifier is a range like {n} or {n,} or {,m} or {n,m}
15921                    let token = self.next_token();
15922                    match token.token {
15923                        Token::Comma => {
15924                            let next_token = self.next_token();
15925                            let Token::Number(n, _) = next_token.token else {
15926                                return self.expected("literal number", next_token);
15927                            };
15928                            self.expect_token(&Token::RBrace)?;
15929                            RepetitionQuantifier::AtMost(Self::parse(n, token.span.start)?)
15930                        }
15931                        Token::Number(n, _) if self.consume_token(&Token::Comma) => {
15932                            let next_token = self.next_token();
15933                            match next_token.token {
15934                                Token::Number(m, _) => {
15935                                    self.expect_token(&Token::RBrace)?;
15936                                    RepetitionQuantifier::Range(
15937                                        Self::parse(n, token.span.start)?,
15938                                        Self::parse(m, token.span.start)?,
15939                                    )
15940                                }
15941                                Token::RBrace => {
15942                                    RepetitionQuantifier::AtLeast(Self::parse(n, token.span.start)?)
15943                                }
15944                                _ => {
15945                                    return self.expected("} or upper bound", next_token);
15946                                }
15947                            }
15948                        }
15949                        Token::Number(n, _) => {
15950                            self.expect_token(&Token::RBrace)?;
15951                            RepetitionQuantifier::Exactly(Self::parse(n, token.span.start)?)
15952                        }
15953                        _ => return self.expected("quantifier range", token),
15954                    }
15955                }
15956                _ => {
15957                    self.prev_token();
15958                    break;
15959                }
15960            };
15961            pattern = MatchRecognizePattern::Repetition(Box::new(pattern), quantifier);
15962        }
15963        Ok(pattern)
15964    }
15965
15966    fn parse_concat_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
15967        let mut patterns = vec![self.parse_repetition_pattern()?];
15968        while !matches!(self.peek_token().token, Token::RParen | Token::Pipe) {
15969            patterns.push(self.parse_repetition_pattern()?);
15970        }
15971        match <[MatchRecognizePattern; 1]>::try_from(patterns) {
15972            Ok([pattern]) => Ok(pattern),
15973            Err(patterns) => Ok(MatchRecognizePattern::Concat(patterns)),
15974        }
15975    }
15976
15977    fn parse_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
15978        let pattern = self.parse_concat_pattern()?;
15979        if self.consume_token(&Token::Pipe) {
15980            match self.parse_pattern()? {
15981                // flatten nested alternations
15982                MatchRecognizePattern::Alternation(mut patterns) => {
15983                    patterns.insert(0, pattern);
15984                    Ok(MatchRecognizePattern::Alternation(patterns))
15985                }
15986                next => Ok(MatchRecognizePattern::Alternation(vec![pattern, next])),
15987            }
15988        } else {
15989            Ok(pattern)
15990        }
15991    }
15992
15993    /// Parses an optional table version specifier (e.g. `FOR SYSTEM_TIME AS OF`, `TIMESTAMP AS OF`, `VERSION AS OF`) used to query historical data
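    ///
    /// A minimal sketch, shown through a full query (hypothetical table name;
    /// assumes a dialect with table versioning support, e.g. BigQuery's
    /// `FOR SYSTEM_TIME AS OF` time travel):
    ///
    /// ```
    /// # use sqlparser::parser::{Parser, ParserError};
    /// # use sqlparser::dialect::BigQueryDialect;
    /// # fn main() -> Result<(), ParserError> {
    /// // Hypothetical table and timestamp literal.
    /// let sql = "SELECT * FROM orders FOR SYSTEM_TIME AS OF '2019-01-01'";
    /// let dialect = BigQueryDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql(sql)?;
    /// let _stmt = parser.parse_statement()?;
    /// # Ok(())
    /// # }
    /// ```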
15994    pub fn maybe_parse_table_version(&mut self) -> Result<Option<TableVersion>, ParserError> {
15995        if self.dialect.supports_table_versioning() {
15996            if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
15997            {
15998                let expr = self.parse_expr()?;
15999                return Ok(Some(TableVersion::ForSystemTimeAsOf(expr)));
16000            } else if self.peek_keyword(Keyword::AT) || self.peek_keyword(Keyword::BEFORE) {
16001                let func_name = self.parse_object_name(true)?;
16002                let func = self.parse_function(func_name)?;
16003                return Ok(Some(TableVersion::Function(func)));
16004            } else if self.parse_keywords(&[Keyword::TIMESTAMP, Keyword::AS, Keyword::OF]) {
16005                let expr = self.parse_expr()?;
16006                return Ok(Some(TableVersion::TimestampAsOf(expr)));
16007            } else if self.parse_keywords(&[Keyword::VERSION, Keyword::AS, Keyword::OF]) {
16008                let expr = Expr::Value(self.parse_number_value()?);
16009                return Ok(Some(TableVersion::VersionAsOf(expr)));
16010            }
16011        }
16012        Ok(None)
16013    }
16014
16015    /// Parses MySQL's JSON_TABLE column definition.
16016    /// For example: `id INT EXISTS PATH '$' DEFAULT '0' ON EMPTY ERROR ON ERROR`
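    ///
    /// A minimal sketch of a full `JSON_TABLE` query using such a column
    /// definition (hypothetical JSON literal and paths):
    ///
    /// ```
    /// # use sqlparser::parser::{Parser, ParserError};
    /// # use sqlparser::dialect::MySqlDialect;
    /// # fn main() -> Result<(), ParserError> {
    /// // Hypothetical JSON document, row path and column path.
    /// let sql = "SELECT * FROM JSON_TABLE('[1, 2]', '$[*]' COLUMNS(x INT PATH '$')) AS t";
    /// let dialect = MySqlDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql(sql)?;
    /// let _stmt = parser.parse_statement()?;
    /// # Ok(())
    /// # }
    /// ```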
16017    pub fn parse_json_table_column_def(&mut self) -> Result<JsonTableColumn, ParserError> {
16018        if self.parse_keyword(Keyword::NESTED) {
16019            let _has_path_keyword = self.parse_keyword(Keyword::PATH);
16020            let path = self.parse_value()?.value;
16021            self.expect_keyword_is(Keyword::COLUMNS)?;
16022            let columns = self.parse_parenthesized(|p| {
16023                p.parse_comma_separated(Self::parse_json_table_column_def)
16024            })?;
16025            return Ok(JsonTableColumn::Nested(JsonTableNestedColumn {
16026                path,
16027                columns,
16028            }));
16029        }
16030        let name = self.parse_identifier()?;
16031        if self.parse_keyword(Keyword::FOR) {
16032            self.expect_keyword_is(Keyword::ORDINALITY)?;
16033            return Ok(JsonTableColumn::ForOrdinality(name));
16034        }
16035        let r#type = self.parse_data_type()?;
16036        let exists = self.parse_keyword(Keyword::EXISTS);
16037        self.expect_keyword_is(Keyword::PATH)?;
16038        let path = self.parse_value()?.value;
16039        let mut on_empty = None;
16040        let mut on_error = None;
16041        while let Some(error_handling) = self.parse_json_table_column_error_handling()? {
16042            if self.parse_keyword(Keyword::EMPTY) {
16043                on_empty = Some(error_handling);
16044            } else {
16045                self.expect_keyword_is(Keyword::ERROR)?;
16046                on_error = Some(error_handling);
16047            }
16048        }
16049        Ok(JsonTableColumn::Named(JsonTableNamedColumn {
16050            name,
16051            r#type,
16052            path,
16053            exists,
16054            on_empty,
16055            on_error,
16056        }))
16057    }
16058
16059    /// Parses MSSQL's `OPENJSON WITH` column definition.
16060    ///
16061    /// ```sql
16062    /// colName type [ column_path ] [ AS JSON ]
16063    /// ```
16064    ///
16065    /// Reference: <https://learn.microsoft.com/en-us/sql/t-sql/functions/openjson-transact-sql?view=sql-server-ver16#syntax>
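    ///
    /// A minimal sketch of a full `OPENJSON ... WITH` query using such a
    /// column definition (hypothetical JSON literal and column path):
    ///
    /// ```
    /// # use sqlparser::parser::{Parser, ParserError};
    /// # use sqlparser::dialect::MsSqlDialect;
    /// # fn main() -> Result<(), ParserError> {
    /// // Hypothetical JSON literal; `k INT '$.k'` is the column definition.
    /// let sql = r#"SELECT * FROM OPENJSON('{"k": 1}') WITH (k INT '$.k') AS j"#;
    /// let dialect = MsSqlDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql(sql)?;
    /// let _stmt = parser.parse_statement()?;
    /// # Ok(())
    /// # }
    /// ```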
16066    pub fn parse_openjson_table_column_def(&mut self) -> Result<OpenJsonTableColumn, ParserError> {
16067        let name = self.parse_identifier()?;
16068        let r#type = self.parse_data_type()?;
16069        let path = if let Token::SingleQuotedString(path) = self.peek_token().token {
16070            self.next_token();
16071            Some(path)
16072        } else {
16073            None
16074        };
16075        let as_json = self.parse_keyword(Keyword::AS);
16076        if as_json {
16077            self.expect_keyword_is(Keyword::JSON)?;
16078        }
16079        Ok(OpenJsonTableColumn {
16080            name,
16081            r#type,
16082            path,
16083            as_json,
16084        })
16085    }
16086
16087    fn parse_json_table_column_error_handling(
16088        &mut self,
16089    ) -> Result<Option<JsonTableColumnErrorHandling>, ParserError> {
16090        let res = if self.parse_keyword(Keyword::NULL) {
16091            JsonTableColumnErrorHandling::Null
16092        } else if self.parse_keyword(Keyword::ERROR) {
16093            JsonTableColumnErrorHandling::Error
16094        } else if self.parse_keyword(Keyword::DEFAULT) {
16095            JsonTableColumnErrorHandling::Default(self.parse_value()?.value)
16096        } else {
16097            return Ok(None);
16098        };
16099        self.expect_keyword_is(Keyword::ON)?;
16100        Ok(Some(res))
16101    }
16102
16103    /// Parse a derived table factor (a parenthesized subquery); the caller indicates via `lateral` whether `LATERAL` preceded it.
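    ///
    /// A minimal sketch, shown through a full query (the opening parenthesis
    /// of the subquery is consumed by the caller before this method runs):
    ///
    /// ```
    /// # use sqlparser::parser::{Parser, ParserError};
    /// # use sqlparser::dialect::GenericDialect;
    /// # fn main() -> Result<(), ParserError> {
    /// // Hypothetical derived table with an alias.
    /// let sql = "SELECT * FROM (SELECT 1) AS t";
    /// let mut parser = Parser::new(&GenericDialect).try_with_sql(sql)?;
    /// let _stmt = parser.parse_statement()?;
    /// # Ok(())
    /// # }
    /// ```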
16104    pub fn parse_derived_table_factor(
16105        &mut self,
16106        lateral: IsLateral,
16107    ) -> Result<TableFactor, ParserError> {
16108        let subquery = self.parse_query()?;
16109        self.expect_token(&Token::RParen)?;
16110        let alias = self.maybe_parse_table_alias()?;
16111
16112        // Parse optional SAMPLE clause after alias
16113        let sample = self
16114            .maybe_parse_table_sample()?
16115            .map(TableSampleKind::AfterTableAlias);
16116
16117        Ok(TableFactor::Derived {
16118            lateral: match lateral {
16119                Lateral => true,
16120                NotLateral => false,
16121            },
16122            subquery,
16123            alias,
16124            sample,
16125        })
16126    }
16127
16128    fn parse_aliased_function_call(&mut self) -> Result<ExprWithAlias, ParserError> {
16129        let function_name = match self.next_token().token {
16130            Token::Word(w) => Ok(w.value),
16131            _ => self.expected("a function identifier", self.peek_token()),
16132        }?;
16133        let expr = self.parse_function(ObjectName::from(vec![Ident::new(function_name)]))?;
16134        let alias = if self.parse_keyword(Keyword::AS) {
16135            Some(self.parse_identifier()?)
16136        } else {
16137            None
16138        };
16139
16140        Ok(ExprWithAlias { expr, alias })
16141    }
16142    /// Parses an expression with an optional alias
16143    ///
16144    /// Examples:
16145    ///
16146    /// ```sql
16147    /// SUM(price) AS total_price
16148    /// ```
16149    /// ```sql
16150    /// SUM(price)
16151    /// ```
16152    ///
16153    /// Example
16154    /// ```
16155    /// # use sqlparser::parser::{Parser, ParserError};
16156    /// # use sqlparser::dialect::GenericDialect;
16157    /// # fn main() ->Result<(), ParserError> {
16158    /// let sql = r#"SUM("a") as "b""#;
16159    /// let mut parser = Parser::new(&GenericDialect).try_with_sql(sql)?;
16160    /// let expr_with_alias = parser.parse_expr_with_alias()?;
16161    /// assert_eq!(Some("b".to_string()), expr_with_alias.alias.map(|x|x.value));
16162    /// # Ok(())
16163    /// # }
    /// ```
16164    pub fn parse_expr_with_alias(&mut self) -> Result<ExprWithAlias, ParserError> {
16165        let expr = self.parse_expr()?;
16166        let alias = if self.parse_keyword(Keyword::AS) {
16167            Some(self.parse_identifier()?)
16168        } else {
16169            None
16170        };
16171
16172        Ok(ExprWithAlias { expr, alias })
16173    }
16174
16175    /// Parse a `PIVOT` table factor (Snowflake/Oracle/MSSQL-style `PIVOT`), returning a TableFactor.
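    ///
    /// A minimal sketch with hypothetical table and column names:
    ///
    /// ```
    /// # use sqlparser::parser::{Parser, ParserError};
    /// # use sqlparser::dialect::GenericDialect;
    /// # fn main() -> Result<(), ParserError> {
    /// // Hypothetical sales table pivoted on the month column.
    /// let sql = "SELECT * FROM monthly_sales PIVOT(SUM(amount) FOR month IN ('JAN', 'FEB')) AS p";
    /// let mut parser = Parser::new(&GenericDialect).try_with_sql(sql)?;
    /// let _stmt = parser.parse_statement()?;
    /// # Ok(())
    /// # }
    /// ```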
16176    pub fn parse_pivot_table_factor(
16177        &mut self,
16178        table: TableFactor,
16179    ) -> Result<TableFactor, ParserError> {
16180        self.expect_token(&Token::LParen)?;
16181        let aggregate_functions = self.parse_comma_separated(Self::parse_aliased_function_call)?;
16182        self.expect_keyword_is(Keyword::FOR)?;
16183        let value_column = if self.peek_token_ref().token == Token::LParen {
16184            self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
16185                p.parse_subexpr(self.dialect.prec_value(Precedence::Between))
16186            })?
16187        } else {
16188            vec![self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?]
16189        };
16190        self.expect_keyword_is(Keyword::IN)?;
16191
16192        self.expect_token(&Token::LParen)?;
16193        let value_source = if self.parse_keyword(Keyword::ANY) {
16194            let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
16195                self.parse_comma_separated(Parser::parse_order_by_expr)?
16196            } else {
16197                vec![]
16198            };
16199            PivotValueSource::Any(order_by)
16200        } else if self.peek_sub_query() {
16201            PivotValueSource::Subquery(self.parse_query()?)
16202        } else {
16203            PivotValueSource::List(self.parse_comma_separated(Self::parse_expr_with_alias)?)
16204        };
16205        self.expect_token(&Token::RParen)?;
16206
16207        let default_on_null =
16208            if self.parse_keywords(&[Keyword::DEFAULT, Keyword::ON, Keyword::NULL]) {
16209                self.expect_token(&Token::LParen)?;
16210                let expr = self.parse_expr()?;
16211                self.expect_token(&Token::RParen)?;
16212                Some(expr)
16213            } else {
16214                None
16215            };
16216
16217        self.expect_token(&Token::RParen)?;
16218        let alias = self.maybe_parse_table_alias()?;
16219        Ok(TableFactor::Pivot {
16220            table: Box::new(table),
16221            aggregate_functions,
16222            value_column,
16223            value_source,
16224            default_on_null,
16225            alias,
16226        })
16227    }
16228
16229    /// Parse an UNPIVOT table factor, returning a TableFactor.
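    ///
    /// A minimal sketch with hypothetical table and column names:
    ///
    /// ```
    /// # use sqlparser::parser::{Parser, ParserError};
    /// # use sqlparser::dialect::GenericDialect;
    /// # fn main() -> Result<(), ParserError> {
    /// // Hypothetical quarterly columns unpivoted into (quarter, quantity) rows.
    /// let sql = "SELECT * FROM sales UNPIVOT(quantity FOR quarter IN (Q1, Q2)) AS u";
    /// let mut parser = Parser::new(&GenericDialect).try_with_sql(sql)?;
    /// let _stmt = parser.parse_statement()?;
    /// # Ok(())
    /// # }
    /// ```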
16230    pub fn parse_unpivot_table_factor(
16231        &mut self,
16232        table: TableFactor,
16233    ) -> Result<TableFactor, ParserError> {
16234        let null_inclusion = if self.parse_keyword(Keyword::INCLUDE) {
16235            self.expect_keyword_is(Keyword::NULLS)?;
16236            Some(NullInclusion::IncludeNulls)
16237        } else if self.parse_keyword(Keyword::EXCLUDE) {
16238            self.expect_keyword_is(Keyword::NULLS)?;
16239            Some(NullInclusion::ExcludeNulls)
16240        } else {
16241            None
16242        };
16243        self.expect_token(&Token::LParen)?;
16244        let value = self.parse_expr()?;
16245        self.expect_keyword_is(Keyword::FOR)?;
16246        let name = self.parse_identifier()?;
16247        self.expect_keyword_is(Keyword::IN)?;
16248        let columns = self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
16249            p.parse_expr_with_alias()
16250        })?;
16251        self.expect_token(&Token::RParen)?;
16252        let alias = self.maybe_parse_table_alias()?;
16253        Ok(TableFactor::Unpivot {
16254            table: Box::new(table),
16255            value,
16256            null_inclusion,
16257            name,
16258            columns,
16259            alias,
16260        })
16261    }
16262
16263    /// Parse a JOIN constraint (`NATURAL`, `ON <expr>`, `USING (...)`, or no constraint).
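    ///
    /// A minimal sketch, shown through a full query with hypothetical table names:
    ///
    /// ```
    /// # use sqlparser::parser::{Parser, ParserError};
    /// # use sqlparser::dialect::GenericDialect;
    /// # fn main() -> Result<(), ParserError> {
    /// // An ON constraint; NATURAL and USING (...) are handled by the same method.
    /// let sql = "SELECT * FROM t1 JOIN t2 ON t1.id = t2.id";
    /// let mut parser = Parser::new(&GenericDialect).try_with_sql(sql)?;
    /// let _stmt = parser.parse_statement()?;
    /// # Ok(())
    /// # }
    /// ```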
16264    pub fn parse_join_constraint(&mut self, natural: bool) -> Result<JoinConstraint, ParserError> {
16265        if natural {
16266            Ok(JoinConstraint::Natural)
16267        } else if self.parse_keyword(Keyword::ON) {
16268            let constraint = self.parse_expr()?;
16269            Ok(JoinConstraint::On(constraint))
16270        } else if self.parse_keyword(Keyword::USING) {
16271            let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
16272            Ok(JoinConstraint::Using(columns))
16273        } else {
16274            Ok(JoinConstraint::None)
16275            //self.expected("ON, or USING after JOIN", self.peek_token())
16276        }
16277    }
16278
16279    /// Parse a GRANT statement.
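    ///
    /// A minimal sketch with hypothetical object and grantee names:
    ///
    /// ```
    /// # use sqlparser::parser::{Parser, ParserError};
    /// # use sqlparser::dialect::GenericDialect;
    /// # fn main() -> Result<(), ParserError> {
    /// // Hypothetical table and grantee.
    /// let sql = "GRANT SELECT, INSERT ON customers TO alice";
    /// let mut parser = Parser::new(&GenericDialect).try_with_sql(sql)?;
    /// let _stmt = parser.parse_statement()?;
    /// # Ok(())
    /// # }
    /// ```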
16280    pub fn parse_grant(&mut self) -> Result<Grant, ParserError> {
16281        let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
16282
16283        self.expect_keyword_is(Keyword::TO)?;
16284        let grantees = self.parse_grantees()?;
16285
16286        let with_grant_option =
16287            self.parse_keywords(&[Keyword::WITH, Keyword::GRANT, Keyword::OPTION]);
16288
16289        let current_grants =
16290            if self.parse_keywords(&[Keyword::COPY, Keyword::CURRENT, Keyword::GRANTS]) {
16291                Some(CurrentGrantsKind::CopyCurrentGrants)
16292            } else if self.parse_keywords(&[Keyword::REVOKE, Keyword::CURRENT, Keyword::GRANTS]) {
16293                Some(CurrentGrantsKind::RevokeCurrentGrants)
16294            } else {
16295                None
16296            };
16297
16298        let as_grantor = if self.parse_keywords(&[Keyword::AS]) {
16299            Some(self.parse_identifier()?)
16300        } else {
16301            None
16302        };
16303
16304        let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
16305            Some(self.parse_identifier()?)
16306        } else {
16307            None
16308        };
16309
16310        Ok(Grant {
16311            privileges,
16312            objects,
16313            grantees,
16314            with_grant_option,
16315            as_grantor,
16316            granted_by,
16317            current_grants,
16318        })
16319    }
16320
16321    fn parse_grantees(&mut self) -> Result<Vec<Grantee>, ParserError> {
16322        let mut values = vec![];
16323        let mut grantee_type = GranteesType::None;
16324        loop {
16325            let new_grantee_type = if self.parse_keyword(Keyword::ROLE) {
16326                GranteesType::Role
16327            } else if self.parse_keyword(Keyword::USER) {
16328                GranteesType::User
16329            } else if self.parse_keyword(Keyword::SHARE) {
16330                GranteesType::Share
16331            } else if self.parse_keyword(Keyword::GROUP) {
16332                GranteesType::Group
16333            } else if self.parse_keyword(Keyword::PUBLIC) {
16334                GranteesType::Public
16335            } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
16336                GranteesType::DatabaseRole
16337            } else if self.parse_keywords(&[Keyword::APPLICATION, Keyword::ROLE]) {
16338                GranteesType::ApplicationRole
16339            } else if self.parse_keyword(Keyword::APPLICATION) {
16340                GranteesType::Application
16341            } else {
16342                grantee_type.clone() // keep from previous iteration, if not specified
16343            };
16344
16345            if self
16346                .dialect
16347                .get_reserved_grantees_types()
16348                .contains(&new_grantee_type)
16349            {
16350                self.prev_token();
16351            } else {
16352                grantee_type = new_grantee_type;
16353            }
16354
16355            let grantee = if grantee_type == GranteesType::Public {
16356                Grantee {
16357                    grantee_type: grantee_type.clone(),
16358                    name: None,
16359                }
16360            } else {
16361                let mut name = self.parse_grantee_name()?;
16362                if self.consume_token(&Token::Colon) {
16363                    // Redshift supports namespace prefix for external users and groups:
16364                    // <Namespace>:<GroupName> or <Namespace>:<UserName>
16365                    // https://docs.aws.amazon.com/redshift/latest/mgmt/redshift-iam-access-control-native-idp.html
16366                    let ident = self.parse_identifier()?;
16367                    if let GranteeName::ObjectName(namespace) = name {
16368                        name = GranteeName::ObjectName(ObjectName::from(vec![Ident::new(
16369                            format!("{namespace}:{ident}"),
16370                        )]));
16371                    };
16372                }
16373                Grantee {
16374                    grantee_type: grantee_type.clone(),
16375                    name: Some(name),
16376                }
16377            };
16378
16379            values.push(grantee);
16380
16381            if !self.consume_token(&Token::Comma) {
16382                break;
16383            }
16384        }
16385
16386        Ok(values)
16387    }
16388
16389    /// Parse privileges and optional target objects for GRANT/DENY/REVOKE statements.
16390    pub fn parse_grant_deny_revoke_privileges_objects(
16391        &mut self,
16392    ) -> Result<(Privileges, Option<GrantObjects>), ParserError> {
16393        let privileges = if self.parse_keyword(Keyword::ALL) {
16394            Privileges::All {
16395                with_privileges_keyword: self.parse_keyword(Keyword::PRIVILEGES),
16396            }
16397        } else {
16398            let actions = self.parse_actions_list()?;
16399            Privileges::Actions(actions)
16400        };
16401
16402        let objects = if self.parse_keyword(Keyword::ON) {
16403            if self.parse_keywords(&[Keyword::ALL, Keyword::TABLES, Keyword::IN, Keyword::SCHEMA]) {
16404                Some(GrantObjects::AllTablesInSchema {
16405                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
16406                })
16407            } else if self.parse_keywords(&[
16408                Keyword::ALL,
16409                Keyword::EXTERNAL,
16410                Keyword::TABLES,
16411                Keyword::IN,
16412                Keyword::SCHEMA,
16413            ]) {
16414                Some(GrantObjects::AllExternalTablesInSchema {
16415                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
16416                })
16417            } else if self.parse_keywords(&[
16418                Keyword::ALL,
16419                Keyword::VIEWS,
16420                Keyword::IN,
16421                Keyword::SCHEMA,
16422            ]) {
16423                Some(GrantObjects::AllViewsInSchema {
16424                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
16425                })
16426            } else if self.parse_keywords(&[
16427                Keyword::ALL,
16428                Keyword::MATERIALIZED,
16429                Keyword::VIEWS,
16430                Keyword::IN,
16431                Keyword::SCHEMA,
16432            ]) {
16433                Some(GrantObjects::AllMaterializedViewsInSchema {
16434                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
16435                })
16436            } else if self.parse_keywords(&[
16437                Keyword::ALL,
16438                Keyword::FUNCTIONS,
16439                Keyword::IN,
16440                Keyword::SCHEMA,
16441            ]) {
16442                Some(GrantObjects::AllFunctionsInSchema {
16443                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
16444                })
16445            } else if self.parse_keywords(&[
16446                Keyword::FUTURE,
16447                Keyword::SCHEMAS,
16448                Keyword::IN,
16449                Keyword::DATABASE,
16450            ]) {
16451                Some(GrantObjects::FutureSchemasInDatabase {
16452                    databases: self.parse_comma_separated(|p| p.parse_object_name(false))?,
16453                })
16454            } else if self.parse_keywords(&[
16455                Keyword::FUTURE,
16456                Keyword::TABLES,
16457                Keyword::IN,
16458                Keyword::SCHEMA,
16459            ]) {
16460                Some(GrantObjects::FutureTablesInSchema {
16461                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
16462                })
16463            } else if self.parse_keywords(&[
16464                Keyword::FUTURE,
16465                Keyword::EXTERNAL,
16466                Keyword::TABLES,
16467                Keyword::IN,
16468                Keyword::SCHEMA,
16469            ]) {
16470                Some(GrantObjects::FutureExternalTablesInSchema {
16471                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
16472                })
16473            } else if self.parse_keywords(&[
16474                Keyword::FUTURE,
16475                Keyword::VIEWS,
16476                Keyword::IN,
16477                Keyword::SCHEMA,
16478            ]) {
16479                Some(GrantObjects::FutureViewsInSchema {
16480                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
16481                })
16482            } else if self.parse_keywords(&[
16483                Keyword::FUTURE,
16484                Keyword::MATERIALIZED,
16485                Keyword::VIEWS,
16486                Keyword::IN,
16487                Keyword::SCHEMA,
16488            ]) {
16489                Some(GrantObjects::FutureMaterializedViewsInSchema {
16490                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
16491                })
16492            } else if self.parse_keywords(&[
16493                Keyword::ALL,
16494                Keyword::SEQUENCES,
16495                Keyword::IN,
16496                Keyword::SCHEMA,
16497            ]) {
16498                Some(GrantObjects::AllSequencesInSchema {
16499                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
16500                })
16501            } else if self.parse_keywords(&[
16502                Keyword::FUTURE,
16503                Keyword::SEQUENCES,
16504                Keyword::IN,
16505                Keyword::SCHEMA,
16506            ]) {
16507                Some(GrantObjects::FutureSequencesInSchema {
16508                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
16509                })
16510            } else if self.parse_keywords(&[Keyword::RESOURCE, Keyword::MONITOR]) {
16511                Some(GrantObjects::ResourceMonitors(
16512                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
16513                ))
16514            } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
16515                Some(GrantObjects::ComputePools(
16516                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
16517                ))
16518            } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
16519                Some(GrantObjects::FailoverGroup(
16520                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
16521                ))
16522            } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
16523                Some(GrantObjects::ReplicationGroup(
16524                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
16525                ))
16526            } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
16527                Some(GrantObjects::ExternalVolumes(
16528                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
16529                ))
16530            } else {
16531                let object_type = self.parse_one_of_keywords(&[
16532                    Keyword::SEQUENCE,
16533                    Keyword::DATABASE,
16534                    Keyword::SCHEMA,
16535                    Keyword::TABLE,
16536                    Keyword::VIEW,
16537                    Keyword::WAREHOUSE,
16538                    Keyword::INTEGRATION,
16542                    Keyword::USER,
16543                    Keyword::CONNECTION,
16544                    Keyword::PROCEDURE,
16545                    Keyword::FUNCTION,
16546                ]);
16547                let objects =
16548                    self.parse_comma_separated(|p| p.parse_object_name_inner(false, true));
16549                match object_type {
16550                    Some(Keyword::DATABASE) => Some(GrantObjects::Databases(objects?)),
16551                    Some(Keyword::SCHEMA) => Some(GrantObjects::Schemas(objects?)),
16552                    Some(Keyword::SEQUENCE) => Some(GrantObjects::Sequences(objects?)),
16553                    Some(Keyword::WAREHOUSE) => Some(GrantObjects::Warehouses(objects?)),
16554                    Some(Keyword::INTEGRATION) => Some(GrantObjects::Integrations(objects?)),
16555                    Some(Keyword::VIEW) => Some(GrantObjects::Views(objects?)),
16556                    Some(Keyword::USER) => Some(GrantObjects::Users(objects?)),
16557                    Some(Keyword::CONNECTION) => Some(GrantObjects::Connections(objects?)),
16558                    kw @ (Some(Keyword::PROCEDURE) | Some(Keyword::FUNCTION)) => {
16559                        if let Some(name) = objects?.first() {
16560                            self.parse_grant_procedure_or_function(name, &kw)?
16561                        } else {
16562                            self.expected("procedure or function name", self.peek_token())?
16563                        }
16564                    }
16565                    Some(Keyword::TABLE) | None => Some(GrantObjects::Tables(objects?)),
16566                    Some(unexpected_keyword) => return Err(ParserError::ParserError(
16567                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in grant objects"),
16568                    )),
16569                }
16570            }
16571        } else {
16572            None
16573        };
16574
16575        Ok((privileges, objects))
16576    }
16577
16578    fn parse_grant_procedure_or_function(
16579        &mut self,
16580        name: &ObjectName,
16581        kw: &Option<Keyword>,
16582    ) -> Result<Option<GrantObjects>, ParserError> {
16583        let arg_types = if self.consume_token(&Token::LParen) {
16584            let list = self.parse_comma_separated0(Self::parse_data_type, Token::RParen)?;
16585            self.expect_token(&Token::RParen)?;
16586            list
16587        } else {
16588            vec![]
16589        };
16590        match kw {
16591            Some(Keyword::PROCEDURE) => Ok(Some(GrantObjects::Procedure {
16592                name: name.clone(),
16593                arg_types,
16594            })),
16595            Some(Keyword::FUNCTION) => Ok(Some(GrantObjects::Function {
16596                name: name.clone(),
16597                arg_types,
16598            })),
16599            _ => self.expected("procedure or function keywords", self.peek_token())?,
16600        }
16601    }
16602
16603    /// Parse a single grantable permission/action (used within GRANT statements).
16604    pub fn parse_grant_permission(&mut self) -> Result<Action, ParserError> {
16605        fn parse_columns(parser: &mut Parser) -> Result<Option<Vec<Ident>>, ParserError> {
16606            let columns = parser.parse_parenthesized_column_list(Optional, false)?;
16607            if columns.is_empty() {
16608                Ok(None)
16609            } else {
16610                Ok(Some(columns))
16611            }
16612        }
16613
16614        // Multi-word privileges
16615        if self.parse_keywords(&[Keyword::IMPORTED, Keyword::PRIVILEGES]) {
16616            Ok(Action::ImportedPrivileges)
16617        } else if self.parse_keywords(&[Keyword::ADD, Keyword::SEARCH, Keyword::OPTIMIZATION]) {
16618            Ok(Action::AddSearchOptimization)
16619        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::LISTING]) {
16620            Ok(Action::AttachListing)
16621        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::POLICY]) {
16622            Ok(Action::AttachPolicy)
16623        } else if self.parse_keywords(&[Keyword::BIND, Keyword::SERVICE, Keyword::ENDPOINT]) {
16624            Ok(Action::BindServiceEndpoint)
16625        } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
16626            let role = self.parse_object_name(false)?;
16627            Ok(Action::DatabaseRole { role })
16628        } else if self.parse_keywords(&[Keyword::EVOLVE, Keyword::SCHEMA]) {
16629            Ok(Action::EvolveSchema)
16630        } else if self.parse_keywords(&[Keyword::IMPORT, Keyword::SHARE]) {
16631            Ok(Action::ImportShare)
16632        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::VERSIONS]) {
16633            Ok(Action::ManageVersions)
16634        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::RELEASES]) {
16635            Ok(Action::ManageReleases)
16636        } else if self.parse_keywords(&[Keyword::OVERRIDE, Keyword::SHARE, Keyword::RESTRICTIONS]) {
16637            Ok(Action::OverrideShareRestrictions)
16638        } else if self.parse_keywords(&[
16639            Keyword::PURCHASE,
16640            Keyword::DATA,
16641            Keyword::EXCHANGE,
16642            Keyword::LISTING,
16643        ]) {
16644            Ok(Action::PurchaseDataExchangeListing)
16645        } else if self.parse_keywords(&[Keyword::RESOLVE, Keyword::ALL]) {
16646            Ok(Action::ResolveAll)
16647        } else if self.parse_keywords(&[Keyword::READ, Keyword::SESSION]) {
16648            Ok(Action::ReadSession)
16649
16650        // Single-word privileges
16651        } else if self.parse_keyword(Keyword::APPLY) {
16652            let apply_type = self.parse_action_apply_type()?;
16653            Ok(Action::Apply { apply_type })
16654        } else if self.parse_keyword(Keyword::APPLYBUDGET) {
16655            Ok(Action::ApplyBudget)
16656        } else if self.parse_keyword(Keyword::AUDIT) {
16657            Ok(Action::Audit)
16658        } else if self.parse_keyword(Keyword::CONNECT) {
16659            Ok(Action::Connect)
16660        } else if self.parse_keyword(Keyword::CREATE) {
16661            let obj_type = self.maybe_parse_action_create_object_type();
16662            Ok(Action::Create { obj_type })
16663        } else if self.parse_keyword(Keyword::DELETE) {
16664            Ok(Action::Delete)
16665        } else if self.parse_keyword(Keyword::EXEC) {
16666            let obj_type = self.maybe_parse_action_execute_obj_type();
16667            Ok(Action::Exec { obj_type })
16668        } else if self.parse_keyword(Keyword::EXECUTE) {
16669            let obj_type = self.maybe_parse_action_execute_obj_type();
16670            Ok(Action::Execute { obj_type })
16671        } else if self.parse_keyword(Keyword::FAILOVER) {
16672            Ok(Action::Failover)
16673        } else if self.parse_keyword(Keyword::INSERT) {
16674            Ok(Action::Insert {
16675                columns: parse_columns(self)?,
16676            })
16677        } else if self.parse_keyword(Keyword::MANAGE) {
16678            let manage_type = self.parse_action_manage_type()?;
16679            Ok(Action::Manage { manage_type })
16680        } else if self.parse_keyword(Keyword::MODIFY) {
16681            let modify_type = self.parse_action_modify_type();
16682            Ok(Action::Modify { modify_type })
16683        } else if self.parse_keyword(Keyword::MONITOR) {
16684            let monitor_type = self.parse_action_monitor_type();
16685            Ok(Action::Monitor { monitor_type })
16686        } else if self.parse_keyword(Keyword::OPERATE) {
16687            Ok(Action::Operate)
16688        } else if self.parse_keyword(Keyword::REFERENCES) {
16689            Ok(Action::References {
16690                columns: parse_columns(self)?,
16691            })
16692        } else if self.parse_keyword(Keyword::READ) {
16693            Ok(Action::Read)
16694        } else if self.parse_keyword(Keyword::REPLICATE) {
16695            Ok(Action::Replicate)
16696        } else if self.parse_keyword(Keyword::ROLE) {
16697            let role = self.parse_object_name(false)?;
16698            Ok(Action::Role { role })
16699        } else if self.parse_keyword(Keyword::SELECT) {
16700            Ok(Action::Select {
16701                columns: parse_columns(self)?,
16702            })
16703        } else if self.parse_keyword(Keyword::TEMPORARY) {
16704            Ok(Action::Temporary)
16705        } else if self.parse_keyword(Keyword::TRIGGER) {
16706            Ok(Action::Trigger)
16707        } else if self.parse_keyword(Keyword::TRUNCATE) {
16708            Ok(Action::Truncate)
16709        } else if self.parse_keyword(Keyword::UPDATE) {
16710            Ok(Action::Update {
16711                columns: parse_columns(self)?,
16712            })
16713        } else if self.parse_keyword(Keyword::USAGE) {
16714            Ok(Action::Usage)
16715        } else if self.parse_keyword(Keyword::OWNERSHIP) {
16716            Ok(Action::Ownership)
16717        } else if self.parse_keyword(Keyword::DROP) {
16718            Ok(Action::Drop)
16719        } else {
16720            self.expected("a privilege keyword", self.peek_token())?
16721        }
16722    }
16723
16724    fn maybe_parse_action_create_object_type(&mut self) -> Option<ActionCreateObjectType> {
16725        // Multi-word object types
16726        if self.parse_keywords(&[Keyword::APPLICATION, Keyword::PACKAGE]) {
16727            Some(ActionCreateObjectType::ApplicationPackage)
16728        } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
16729            Some(ActionCreateObjectType::ComputePool)
16730        } else if self.parse_keywords(&[Keyword::DATA, Keyword::EXCHANGE, Keyword::LISTING]) {
16731            Some(ActionCreateObjectType::DataExchangeListing)
16732        } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
16733            Some(ActionCreateObjectType::ExternalVolume)
16734        } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
16735            Some(ActionCreateObjectType::FailoverGroup)
16736        } else if self.parse_keywords(&[Keyword::NETWORK, Keyword::POLICY]) {
16737            Some(ActionCreateObjectType::NetworkPolicy)
16738        } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::LISTING]) {
16739            Some(ActionCreateObjectType::OrganiationListing)
16740        } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
16741            Some(ActionCreateObjectType::ReplicationGroup)
16742        }
16743        // Single-word object types
16744        else if self.parse_keyword(Keyword::ACCOUNT) {
16745            Some(ActionCreateObjectType::Account)
16746        } else if self.parse_keyword(Keyword::APPLICATION) {
16747            Some(ActionCreateObjectType::Application)
16748        } else if self.parse_keyword(Keyword::DATABASE) {
16749            Some(ActionCreateObjectType::Database)
16750        } else if self.parse_keyword(Keyword::INTEGRATION) {
16751            Some(ActionCreateObjectType::Integration)
16752        } else if self.parse_keyword(Keyword::ROLE) {
16753            Some(ActionCreateObjectType::Role)
16754        } else if self.parse_keyword(Keyword::SCHEMA) {
16755            Some(ActionCreateObjectType::Schema)
16756        } else if self.parse_keyword(Keyword::SHARE) {
16757            Some(ActionCreateObjectType::Share)
16758        } else if self.parse_keyword(Keyword::USER) {
16759            Some(ActionCreateObjectType::User)
16760        } else if self.parse_keyword(Keyword::WAREHOUSE) {
16761            Some(ActionCreateObjectType::Warehouse)
16762        } else {
16763            None
16764        }
16765    }
16766
16767    fn parse_action_apply_type(&mut self) -> Result<ActionApplyType, ParserError> {
16768        if self.parse_keywords(&[Keyword::AGGREGATION, Keyword::POLICY]) {
16769            Ok(ActionApplyType::AggregationPolicy)
16770        } else if self.parse_keywords(&[Keyword::AUTHENTICATION, Keyword::POLICY]) {
16771            Ok(ActionApplyType::AuthenticationPolicy)
16772        } else if self.parse_keywords(&[Keyword::JOIN, Keyword::POLICY]) {
16773            Ok(ActionApplyType::JoinPolicy)
16774        } else if self.parse_keywords(&[Keyword::MASKING, Keyword::POLICY]) {
16775            Ok(ActionApplyType::MaskingPolicy)
16776        } else if self.parse_keywords(&[Keyword::PACKAGES, Keyword::POLICY]) {
16777            Ok(ActionApplyType::PackagesPolicy)
16778        } else if self.parse_keywords(&[Keyword::PASSWORD, Keyword::POLICY]) {
16779            Ok(ActionApplyType::PasswordPolicy)
16780        } else if self.parse_keywords(&[Keyword::PROJECTION, Keyword::POLICY]) {
16781            Ok(ActionApplyType::ProjectionPolicy)
16782        } else if self.parse_keywords(&[Keyword::ROW, Keyword::ACCESS, Keyword::POLICY]) {
16783            Ok(ActionApplyType::RowAccessPolicy)
16784        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::POLICY]) {
16785            Ok(ActionApplyType::SessionPolicy)
16786        } else if self.parse_keyword(Keyword::TAG) {
16787            Ok(ActionApplyType::Tag)
16788        } else {
16789            self.expected("GRANT APPLY type", self.peek_token())
16790        }
16791    }
16792
16793    fn maybe_parse_action_execute_obj_type(&mut self) -> Option<ActionExecuteObjectType> {
16794        if self.parse_keywords(&[Keyword::DATA, Keyword::METRIC, Keyword::FUNCTION]) {
16795            Some(ActionExecuteObjectType::DataMetricFunction)
16796        } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::ALERT]) {
16797            Some(ActionExecuteObjectType::ManagedAlert)
16798        } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::TASK]) {
16799            Some(ActionExecuteObjectType::ManagedTask)
16800        } else if self.parse_keyword(Keyword::ALERT) {
16801            Some(ActionExecuteObjectType::Alert)
16802        } else if self.parse_keyword(Keyword::TASK) {
16803            Some(ActionExecuteObjectType::Task)
16804        } else {
16805            None
16806        }
16807    }
16808
16809    fn parse_action_manage_type(&mut self) -> Result<ActionManageType, ParserError> {
16810        if self.parse_keywords(&[Keyword::ACCOUNT, Keyword::SUPPORT, Keyword::CASES]) {
16811            Ok(ActionManageType::AccountSupportCases)
16812        } else if self.parse_keywords(&[Keyword::EVENT, Keyword::SHARING]) {
16813            Ok(ActionManageType::EventSharing)
16814        } else if self.parse_keywords(&[Keyword::LISTING, Keyword::AUTO, Keyword::FULFILLMENT]) {
16815            Ok(ActionManageType::ListingAutoFulfillment)
16816        } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::SUPPORT, Keyword::CASES]) {
16817            Ok(ActionManageType::OrganizationSupportCases)
16818        } else if self.parse_keywords(&[Keyword::USER, Keyword::SUPPORT, Keyword::CASES]) {
16819            Ok(ActionManageType::UserSupportCases)
16820        } else if self.parse_keyword(Keyword::GRANTS) {
16821            Ok(ActionManageType::Grants)
16822        } else if self.parse_keyword(Keyword::WAREHOUSES) {
16823            Ok(ActionManageType::Warehouses)
16824        } else {
16825            self.expected("GRANT MANAGE type", self.peek_token())
16826        }
16827    }
16828
16829    fn parse_action_modify_type(&mut self) -> Option<ActionModifyType> {
16830        if self.parse_keywords(&[Keyword::LOG, Keyword::LEVEL]) {
16831            Some(ActionModifyType::LogLevel)
16832        } else if self.parse_keywords(&[Keyword::TRACE, Keyword::LEVEL]) {
16833            Some(ActionModifyType::TraceLevel)
16834        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::LOG, Keyword::LEVEL]) {
16835            Some(ActionModifyType::SessionLogLevel)
16836        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::TRACE, Keyword::LEVEL]) {
16837            Some(ActionModifyType::SessionTraceLevel)
16838        } else {
16839            None
16840        }
16841    }
16842
16843    fn parse_action_monitor_type(&mut self) -> Option<ActionMonitorType> {
16844        if self.parse_keyword(Keyword::EXECUTION) {
16845            Some(ActionMonitorType::Execution)
16846        } else if self.parse_keyword(Keyword::SECURITY) {
16847            Some(ActionMonitorType::Security)
16848        } else if self.parse_keyword(Keyword::USAGE) {
16849            Some(ActionMonitorType::Usage)
16850        } else {
16851            None
16852        }
16853    }
16854
16855    /// Parse a grantee name, possibly with a host qualifier (user@host).
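    /// For example, with a dialect that supports user/host grantees (MySQL-style;
    /// names are placeholders), `app_user@localhost` below is parsed as a
    /// user/host pair rather than a plain object name:
    ///
    /// ```sql
    /// GRANT SELECT ON db.tbl TO app_user@localhost;
    /// ```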
16856    pub fn parse_grantee_name(&mut self) -> Result<GranteeName, ParserError> {
16857        let mut name = self.parse_object_name(false)?;
16858        if self.dialect.supports_user_host_grantee()
16859            && name.0.len() == 1
16860            && name.0[0].as_ident().is_some()
16861            && self.consume_token(&Token::AtSign)
16862        {
16863            let user = name.0.pop().unwrap().as_ident().unwrap().clone();
16864            let host = self.parse_identifier()?;
16865            Ok(GranteeName::UserHost { user, host })
16866        } else {
16867            Ok(GranteeName::ObjectName(name))
16868        }
16869    }
16870
16871    /// Parse [`Statement::Deny`]
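    ///
    /// An illustrative T-SQL-style example (object and principal names are
    /// placeholders):
    ///
    /// ```sql
    /// DENY SELECT, INSERT ON users TO analyst CASCADE;
    /// ```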
16872    pub fn parse_deny(&mut self) -> Result<Statement, ParserError> {
16873        self.expect_keyword(Keyword::DENY)?;
16874
16875        let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
16876        let objects = match objects {
16877            Some(o) => o,
16878            None => {
16879                return parser_err!(
16880                    "DENY statements must specify an object",
16881                    self.peek_token().span.start
16882                )
16883            }
16884        };
16885
16886        self.expect_keyword_is(Keyword::TO)?;
16887        let grantees = self.parse_grantees()?;
16888        let cascade = self.parse_cascade_option();
16889        let granted_by = if self.parse_keywords(&[Keyword::AS]) {
16890            Some(self.parse_identifier()?)
16891        } else {
16892            None
16893        };
16894
16895        Ok(Statement::Deny(DenyStatement {
16896            privileges,
16897            objects,
16898            grantees,
16899            cascade,
16900            granted_by,
16901        }))
16902    }
16903
16904    /// Parse a REVOKE statement
16905    pub fn parse_revoke(&mut self) -> Result<Revoke, ParserError> {
16906        let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
16907
16908        self.expect_keyword_is(Keyword::FROM)?;
16909        let grantees = self.parse_grantees()?;
16910
16911        let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
16912            Some(self.parse_identifier()?)
16913        } else {
16914            None
16915        };
16916
16917        let cascade = self.parse_cascade_option();
16918
16919        Ok(Revoke {
16920            privileges,
16921            objects,
16922            grantees,
16923            granted_by,
16924            cascade,
16925        })
16926    }
16927
16928    /// Parse a REPLACE statement
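    ///
    /// An illustrative MySQL-style example (table and column names are placeholders):
    ///
    /// ```sql
    /// REPLACE INTO t (id, name) VALUES (1, 'a');
    /// ```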
16929    pub fn parse_replace(
16930        &mut self,
16931        replace_token: TokenWithSpan,
16932    ) -> Result<Statement, ParserError> {
16933        if !dialect_of!(self is MySqlDialect | GenericDialect) {
16934            return parser_err!(
16935                "Unsupported statement REPLACE",
16936                self.peek_token().span.start
16937            );
16938        }
16939
16940        let mut insert = self.parse_insert(replace_token)?;
16941        if let Statement::Insert(Insert { replace_into, .. }) = &mut insert {
16942            *replace_into = true;
16943        }
16944
16945        Ok(insert)
16946    }
16947
16948    /// Parse an INSERT statement, returning a `Box`ed SetExpr
16949    ///
16950    /// This is used to reduce the size of the stack frames in debug builds
16951    fn parse_insert_setexpr_boxed(
16952        &mut self,
16953        insert_token: TokenWithSpan,
16954    ) -> Result<Box<SetExpr>, ParserError> {
16955        Ok(Box::new(SetExpr::Insert(self.parse_insert(insert_token)?)))
16956    }
16957
16958    /// Parse an INSERT statement
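    ///
    /// A couple of illustrative forms this handles (dialect-dependent; names are
    /// placeholders):
    ///
    /// ```sql
    /// INSERT INTO t (a, b) VALUES (1, 'x')
    ///     ON CONFLICT (a) DO UPDATE SET b = 'y' RETURNING a;
    /// INSERT IGNORE INTO t SET a = 1, b = 'x';
    /// ```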
16959    pub fn parse_insert(&mut self, insert_token: TokenWithSpan) -> Result<Statement, ParserError> {
16960        let optimizer_hint = self.maybe_parse_optimizer_hint()?;
16961        let or = self.parse_conflict_clause();
16962        let priority = if !dialect_of!(self is MySqlDialect | GenericDialect) {
16963            None
16964        } else if self.parse_keyword(Keyword::LOW_PRIORITY) {
16965            Some(MysqlInsertPriority::LowPriority)
16966        } else if self.parse_keyword(Keyword::DELAYED) {
16967            Some(MysqlInsertPriority::Delayed)
16968        } else if self.parse_keyword(Keyword::HIGH_PRIORITY) {
16969            Some(MysqlInsertPriority::HighPriority)
16970        } else {
16971            None
16972        };
16973
16974        let ignore = dialect_of!(self is MySqlDialect | GenericDialect)
16975            && self.parse_keyword(Keyword::IGNORE);
16976
16977        let replace_into = false;
16978
16979        let overwrite = self.parse_keyword(Keyword::OVERWRITE);
16980        let into = self.parse_keyword(Keyword::INTO);
16981
16982        let local = self.parse_keyword(Keyword::LOCAL);
16983
16984        if self.parse_keyword(Keyword::DIRECTORY) {
16985            let path = self.parse_literal_string()?;
16986            let file_format = if self.parse_keywords(&[Keyword::STORED, Keyword::AS]) {
16987                Some(self.parse_file_format()?)
16988            } else {
16989                None
16990            };
16991            let source = self.parse_query()?;
16992            Ok(Statement::Directory {
16993                local,
16994                path,
16995                overwrite,
16996                file_format,
16997                source,
16998            })
16999        } else {
17000            // Hive lets you put the TABLE keyword here regardless
17001            let table = self.parse_keyword(Keyword::TABLE);
17002            let table_object = self.parse_table_object()?;
17003
17004            let table_alias =
17005                if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::AS) {
17006                    Some(self.parse_identifier()?)
17007                } else {
17008                    None
17009                };
17010
17011            let is_mysql = dialect_of!(self is MySqlDialect);
17012
17013            let (columns, partitioned, after_columns, source, assignments) = if self
17014                .parse_keywords(&[Keyword::DEFAULT, Keyword::VALUES])
17015            {
17016                (vec![], None, vec![], None, vec![])
17017            } else {
17018                let (columns, partitioned, after_columns) = if !self.peek_subquery_start() {
17019                    let columns = self.parse_parenthesized_column_list(Optional, is_mysql)?;
17020
17021                    let partitioned = self.parse_insert_partition()?;
17022                    // Hive allows you to specify columns after partitions as well if you want.
17023                    let after_columns = if dialect_of!(self is HiveDialect) {
17024                        self.parse_parenthesized_column_list(Optional, false)?
17025                    } else {
17026                        vec![]
17027                    };
17028                    (columns, partitioned, after_columns)
17029                } else {
17030                    Default::default()
17031                };
17032
17033                let (source, assignments) = if self.peek_keyword(Keyword::FORMAT)
17034                    || self.peek_keyword(Keyword::SETTINGS)
17035                {
17036                    (None, vec![])
17037                } else if self.dialect.supports_insert_set() && self.parse_keyword(Keyword::SET) {
17038                    (None, self.parse_comma_separated(Parser::parse_assignment)?)
17039                } else {
17040                    (Some(self.parse_query()?), vec![])
17041                };
17042
17043                (columns, partitioned, after_columns, source, assignments)
17044            };
17045
17046            let (format_clause, settings) = if self.dialect.supports_insert_format() {
17047                // Settings always comes before `FORMAT` for ClickHouse:
17048                // <https://clickhouse.com/docs/en/sql-reference/statements/insert-into>
17049                let settings = self.parse_settings()?;
17050
17051                let format = if self.parse_keyword(Keyword::FORMAT) {
17052                    Some(self.parse_input_format_clause()?)
17053                } else {
17054                    None
17055                };
17056
17057                (format, settings)
17058            } else {
17059                Default::default()
17060            };
17061
17062            let insert_alias = if dialect_of!(self is MySqlDialect | GenericDialect)
17063                && self.parse_keyword(Keyword::AS)
17064            {
17065                let row_alias = self.parse_object_name(false)?;
17066                let col_aliases = Some(self.parse_parenthesized_column_list(Optional, false)?);
17067                Some(InsertAliases {
17068                    row_alias,
17069                    col_aliases,
17070                })
17071            } else {
17072                None
17073            };
17074
17075            let on = if self.parse_keyword(Keyword::ON) {
17076                if self.parse_keyword(Keyword::CONFLICT) {
17077                    let conflict_target =
17078                        if self.parse_keywords(&[Keyword::ON, Keyword::CONSTRAINT]) {
17079                            Some(ConflictTarget::OnConstraint(self.parse_object_name(false)?))
17080                        } else if self.peek_token() == Token::LParen {
17081                            Some(ConflictTarget::Columns(
17082                                self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
17083                            ))
17084                        } else {
17085                            None
17086                        };
17087
17088                    self.expect_keyword_is(Keyword::DO)?;
17089                    let action = if self.parse_keyword(Keyword::NOTHING) {
17090                        OnConflictAction::DoNothing
17091                    } else {
17092                        self.expect_keyword_is(Keyword::UPDATE)?;
17093                        self.expect_keyword_is(Keyword::SET)?;
17094                        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
17095                        let selection = if self.parse_keyword(Keyword::WHERE) {
17096                            Some(self.parse_expr()?)
17097                        } else {
17098                            None
17099                        };
17100                        OnConflictAction::DoUpdate(DoUpdate {
17101                            assignments,
17102                            selection,
17103                        })
17104                    };
17105
17106                    Some(OnInsert::OnConflict(OnConflict {
17107                        conflict_target,
17108                        action,
17109                    }))
17110                } else {
17111                    self.expect_keyword_is(Keyword::DUPLICATE)?;
17112                    self.expect_keyword_is(Keyword::KEY)?;
17113                    self.expect_keyword_is(Keyword::UPDATE)?;
17114                    let l = self.parse_comma_separated(Parser::parse_assignment)?;
17115
17116                    Some(OnInsert::DuplicateKeyUpdate(l))
17117                }
17118            } else {
17119                None
17120            };
17121
17122            let returning = if self.parse_keyword(Keyword::RETURNING) {
17123                Some(self.parse_comma_separated(Parser::parse_select_item)?)
17124            } else {
17125                None
17126            };
17127
17128            Ok(Insert {
17129                insert_token: insert_token.into(),
17130                optimizer_hint,
17131                or,
17132                table: table_object,
17133                table_alias,
17134                ignore,
17135                into,
17136                overwrite,
17137                partitioned,
17138                columns,
17139                after_columns,
17140                source,
17141                assignments,
17142                has_table_keyword: table,
17143                on,
17144                returning,
17145                replace_into,
17146                priority,
17147                insert_alias,
17148                settings,
17149                format_clause,
17150            }
17151            .into())
17152        }
17153    }
17154
17155    /// Parses the input format clause used by ClickHouse.
17156    ///
17157    /// <https://clickhouse.com/docs/en/interfaces/formats>
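    ///
    /// An illustrative ClickHouse-style example (table and column names are
    /// placeholders):
    ///
    /// ```sql
    /// INSERT INTO t (x, y) FORMAT Values (1, 'a'), (2, 'b')
    /// ```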
17158    pub fn parse_input_format_clause(&mut self) -> Result<InputFormatClause, ParserError> {
17159        let ident = self.parse_identifier()?;
17160        let values = self
17161            .maybe_parse(|p| p.parse_comma_separated(|p| p.parse_expr()))?
17162            .unwrap_or_default();
17163
17164        Ok(InputFormatClause { ident, values })
17165    }
17166
17167    /// Returns true if the immediate tokens look like the
17168    /// beginning of a subquery. `(SELECT ...`
17169    fn peek_subquery_start(&mut self) -> bool {
17170        let [maybe_lparen, maybe_select] = self.peek_tokens();
17171        Token::LParen == maybe_lparen
17172            && matches!(maybe_select, Token::Word(w) if w.keyword == Keyword::SELECT)
17173    }
17174
17175    fn parse_conflict_clause(&mut self) -> Option<SqliteOnConflict> {
17176        if self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]) {
17177            Some(SqliteOnConflict::Replace)
17178        } else if self.parse_keywords(&[Keyword::OR, Keyword::ROLLBACK]) {
17179            Some(SqliteOnConflict::Rollback)
17180        } else if self.parse_keywords(&[Keyword::OR, Keyword::ABORT]) {
17181            Some(SqliteOnConflict::Abort)
17182        } else if self.parse_keywords(&[Keyword::OR, Keyword::FAIL]) {
17183            Some(SqliteOnConflict::Fail)
17184        } else if self.parse_keywords(&[Keyword::OR, Keyword::IGNORE]) {
17185            Some(SqliteOnConflict::Ignore)
17186        } else if self.parse_keyword(Keyword::REPLACE) {
17187            Some(SqliteOnConflict::Replace)
17188        } else {
17189            None
17190        }
17191    }
17192
17193    /// Parse an optional `PARTITION (...)` clause for INSERT statements.
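    ///
    /// An illustrative Hive-style example (table, partition, and values are
    /// placeholders):
    ///
    /// ```sql
    /// INSERT INTO TABLE sales PARTITION (year = 2024, region = 'EU') VALUES (1, 'a');
    /// ```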
17194    pub fn parse_insert_partition(&mut self) -> Result<Option<Vec<Expr>>, ParserError> {
17195        if self.parse_keyword(Keyword::PARTITION) {
17196            self.expect_token(&Token::LParen)?;
17197            let partition_cols = Some(self.parse_comma_separated(Parser::parse_expr)?);
17198            self.expect_token(&Token::RParen)?;
17199            Ok(partition_cols)
17200        } else {
17201            Ok(None)
17202        }
17203    }
17204
17205    /// Parse an optional Hive `INPUTFORMAT ... SERDE ...` clause used by LOAD DATA.
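    ///
    /// An illustrative Hive-style example (paths and class names are placeholders);
    /// this method parses only the trailing `INPUTFORMAT ... SERDE ...` part:
    ///
    /// ```sql
    /// LOAD DATA LOCAL INPATH '/tmp/data' INTO TABLE t
    ///     INPUTFORMAT 'com.example.Input' SERDE 'com.example.Serde';
    /// ```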
17206    pub fn parse_load_data_table_format(
17207        &mut self,
17208    ) -> Result<Option<HiveLoadDataFormat>, ParserError> {
17209        if self.parse_keyword(Keyword::INPUTFORMAT) {
17210            let input_format = self.parse_expr()?;
17211            self.expect_keyword_is(Keyword::SERDE)?;
17212            let serde = self.parse_expr()?;
17213            Ok(Some(HiveLoadDataFormat {
17214                input_format,
17215                serde,
17216            }))
17217        } else {
17218            Ok(None)
17219        }
17220    }
17221
17222    /// Parse an UPDATE statement, returning a `Box`ed SetExpr
17223    ///
17224    /// This is used to reduce the size of the stack frames in debug builds
17225    fn parse_update_setexpr_boxed(
17226        &mut self,
17227        update_token: TokenWithSpan,
17228    ) -> Result<Box<SetExpr>, ParserError> {
17229        Ok(Box::new(SetExpr::Update(self.parse_update(update_token)?)))
17230    }
17231
17232    /// Parse an `UPDATE` statement and return `Statement::Update`.
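    ///
    /// Illustrative examples (dialect-dependent; names are placeholders):
    ///
    /// ```sql
    /// UPDATE t SET a = a + 1 FROM u WHERE t.id = u.id RETURNING t.id;
    /// UPDATE t SET a = 1 LIMIT 10;
    /// ```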
17233    pub fn parse_update(&mut self, update_token: TokenWithSpan) -> Result<Statement, ParserError> {
17234        let optimizer_hint = self.maybe_parse_optimizer_hint()?;
17235        let or = self.parse_conflict_clause();
17236        let table = self.parse_table_and_joins()?;
17237        let from_before_set = if self.parse_keyword(Keyword::FROM) {
17238            Some(UpdateTableFromKind::BeforeSet(
17239                self.parse_table_with_joins()?,
17240            ))
17241        } else {
17242            None
17243        };
17244        self.expect_keyword(Keyword::SET)?;
17245        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
17246        let from = if from_before_set.is_none() && self.parse_keyword(Keyword::FROM) {
17247            Some(UpdateTableFromKind::AfterSet(
17248                self.parse_table_with_joins()?,
17249            ))
17250        } else {
17251            from_before_set
17252        };
17253        let selection = if self.parse_keyword(Keyword::WHERE) {
17254            Some(self.parse_expr()?)
17255        } else {
17256            None
17257        };
17258        let returning = if self.parse_keyword(Keyword::RETURNING) {
17259            Some(self.parse_comma_separated(Parser::parse_select_item)?)
17260        } else {
17261            None
17262        };
17263        let limit = if self.parse_keyword(Keyword::LIMIT) {
17264            Some(self.parse_expr()?)
17265        } else {
17266            None
17267        };
17268        Ok(Update {
17269            update_token: update_token.into(),
17270            optimizer_hint,
17271            table,
17272            assignments,
17273            from,
17274            selection,
17275            returning,
17276            or,
17277            limit,
17278        }
17279        .into())
17280    }
17281
17282    /// Parse a `var = expr` assignment, used in an UPDATE statement
17283    pub fn parse_assignment(&mut self) -> Result<Assignment, ParserError> {
17284        let target = self.parse_assignment_target()?;
17285        self.expect_token(&Token::Eq)?;
17286        let value = self.parse_expr()?;
17287        Ok(Assignment { target, value })
17288    }
17289
17290    /// Parse the left-hand side of an assignment, used in an UPDATE statement
17291    pub fn parse_assignment_target(&mut self) -> Result<AssignmentTarget, ParserError> {
17292        if self.consume_token(&Token::LParen) {
17293            let columns = self.parse_comma_separated(|p| p.parse_object_name(false))?;
17294            self.expect_token(&Token::RParen)?;
17295            Ok(AssignmentTarget::Tuple(columns))
17296        } else {
17297            let column = self.parse_object_name(false)?;
17298            Ok(AssignmentTarget::ColumnName(column))
17299        }
17300    }
17301
17302    /// Parse a single function argument, handling named and unnamed variants.
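    ///
    /// For example, a named argument such as `radius => 2` in the call below
    /// (Snowflake-style `=>`; the function name is a placeholder, and the
    /// accepted operator depends on the dialect):
    ///
    /// ```sql
    /// SELECT my_udf(radius => 2, label => 'x');
    /// ```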
17303    pub fn parse_function_args(&mut self) -> Result<FunctionArg, ParserError> {
17304        let arg = if self.dialect.supports_named_fn_args_with_expr_name() {
17305            self.maybe_parse(|p| {
17306                let name = p.parse_expr()?;
17307                let operator = p.parse_function_named_arg_operator()?;
17308                let arg = p.parse_wildcard_expr()?.into();
17309                Ok(FunctionArg::ExprNamed {
17310                    name,
17311                    arg,
17312                    operator,
17313                })
17314            })?
17315        } else {
17316            self.maybe_parse(|p| {
17317                let name = p.parse_identifier()?;
17318                let operator = p.parse_function_named_arg_operator()?;
17319                let arg = p.parse_wildcard_expr()?.into();
17320                Ok(FunctionArg::Named {
17321                    name,
17322                    arg,
17323                    operator,
17324                })
17325            })?
17326        };
17327        if let Some(arg) = arg {
17328            return Ok(arg);
17329        }
17330        Ok(FunctionArg::Unnamed(self.parse_wildcard_expr()?.into()))
17331    }
17332
17333    fn parse_function_named_arg_operator(&mut self) -> Result<FunctionArgOperator, ParserError> {
17334        if self.parse_keyword(Keyword::VALUE) {
17335            return Ok(FunctionArgOperator::Value);
17336        }
17337        let tok = self.next_token();
17338        match tok.token {
17339            Token::RArrow if self.dialect.supports_named_fn_args_with_rarrow_operator() => {
17340                Ok(FunctionArgOperator::RightArrow)
17341            }
17342            Token::Eq if self.dialect.supports_named_fn_args_with_eq_operator() => {
17343                Ok(FunctionArgOperator::Equals)
17344            }
17345            Token::Assignment
17346                if self
17347                    .dialect
17348                    .supports_named_fn_args_with_assignment_operator() =>
17349            {
17350                Ok(FunctionArgOperator::Assignment)
17351            }
17352            Token::Colon if self.dialect.supports_named_fn_args_with_colon_operator() => {
17353                Ok(FunctionArgOperator::Colon)
17354            }
17355            _ => {
17356                self.prev_token();
17357                self.expected("argument operator", tok)
17358            }
17359        }
17360    }
17361
17362    /// Parse an optional, comma-separated list of function arguments (consumes closing paren).
17363    pub fn parse_optional_args(&mut self) -> Result<Vec<FunctionArg>, ParserError> {
17364        if self.consume_token(&Token::RParen) {
17365            Ok(vec![])
17366        } else {
17367            let args = self.parse_comma_separated(Parser::parse_function_args)?;
17368            self.expect_token(&Token::RParen)?;
17369            Ok(args)
17370        }
17371    }
17372
17373    fn parse_table_function_args(&mut self) -> Result<TableFunctionArgs, ParserError> {
17374        if self.consume_token(&Token::RParen) {
17375            return Ok(TableFunctionArgs {
17376                args: vec![],
17377                settings: None,
17378            });
17379        }
17380        let mut args = vec![];
17381        let settings = loop {
17382            if let Some(settings) = self.parse_settings()? {
17383                break Some(settings);
17384            }
17385            args.push(self.parse_function_args()?);
17386            if self.is_parse_comma_separated_end() {
17387                break None;
17388            }
17389        };
17390        self.expect_token(&Token::RParen)?;
17391        Ok(TableFunctionArgs { args, settings })
17392    }
17393
17394    /// Parses a potentially empty list of arguments to a function
17395    /// (including the closing parenthesis).
17396    ///
17397    /// Examples:
17398    /// ```sql
17399    /// FIRST_VALUE(x ORDER BY 1,2,3);
17400    /// FIRST_VALUE(x IGNORE NULLS);
17401    /// ```
17402    fn parse_function_argument_list(&mut self) -> Result<FunctionArgumentList, ParserError> {
17403        let mut clauses = vec![];
17404
17405        // Handle clauses that may exist with an empty argument list
17406
17407        if let Some(null_clause) = self.parse_json_null_clause() {
17408            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
17409        }
17410
17411        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
17412            clauses.push(FunctionArgumentClause::JsonReturningClause(
17413                json_returning_clause,
17414            ));
17415        }
17416
17417        if self.consume_token(&Token::RParen) {
17418            return Ok(FunctionArgumentList {
17419                duplicate_treatment: None,
17420                args: vec![],
17421                clauses,
17422            });
17423        }
17424
17425        let duplicate_treatment = self.parse_duplicate_treatment()?;
17426        let args = self.parse_comma_separated(Parser::parse_function_args)?;
17427
17428        if self.dialect.supports_window_function_null_treatment_arg() {
17429            if let Some(null_treatment) = self.parse_null_treatment()? {
17430                clauses.push(FunctionArgumentClause::IgnoreOrRespectNulls(null_treatment));
17431            }
17432        }
17433
17434        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
17435            clauses.push(FunctionArgumentClause::OrderBy(
17436                self.parse_comma_separated(Parser::parse_order_by_expr)?,
17437            ));
17438        }
17439
17440        if self.parse_keyword(Keyword::LIMIT) {
17441            clauses.push(FunctionArgumentClause::Limit(self.parse_expr()?));
17442        }
17443
17444        if dialect_of!(self is GenericDialect | BigQueryDialect)
17445            && self.parse_keyword(Keyword::HAVING)
17446        {
17447            let kind = match self.expect_one_of_keywords(&[Keyword::MIN, Keyword::MAX])? {
17448                Keyword::MIN => HavingBoundKind::Min,
17449                Keyword::MAX => HavingBoundKind::Max,
17450                unexpected_keyword => return Err(ParserError::ParserError(
17451                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in having bound"),
17452                )),
17453            };
17454            clauses.push(FunctionArgumentClause::Having(HavingBound(
17455                kind,
17456                self.parse_expr()?,
17457            )))
17458        }
17459
17460        if dialect_of!(self is GenericDialect | MySqlDialect)
17461            && self.parse_keyword(Keyword::SEPARATOR)
17462        {
17463            clauses.push(FunctionArgumentClause::Separator(self.parse_value()?.value));
17464        }
17465
17466        if let Some(on_overflow) = self.parse_listagg_on_overflow()? {
17467            clauses.push(FunctionArgumentClause::OnOverflow(on_overflow));
17468        }
17469
17470        if let Some(null_clause) = self.parse_json_null_clause() {
17471            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
17472        }
17473
17474        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
17475            clauses.push(FunctionArgumentClause::JsonReturningClause(
17476                json_returning_clause,
17477            ));
17478        }
17479
17480        self.expect_token(&Token::RParen)?;
17481        Ok(FunctionArgumentList {
17482            duplicate_treatment,
17483            args,
17484            clauses,
17485        })
17486    }
17487
17488    fn parse_json_null_clause(&mut self) -> Option<JsonNullClause> {
17489        if self.parse_keywords(&[Keyword::ABSENT, Keyword::ON, Keyword::NULL]) {
17490            Some(JsonNullClause::AbsentOnNull)
17491        } else if self.parse_keywords(&[Keyword::NULL, Keyword::ON, Keyword::NULL]) {
17492            Some(JsonNullClause::NullOnNull)
17493        } else {
17494            None
17495        }
17496    }
17497
17498    fn maybe_parse_json_returning_clause(
17499        &mut self,
17500    ) -> Result<Option<JsonReturningClause>, ParserError> {
17501        if self.parse_keyword(Keyword::RETURNING) {
17502            let data_type = self.parse_data_type()?;
17503            Ok(Some(JsonReturningClause { data_type }))
17504        } else {
17505            Ok(None)
17506        }
17507    }
17508
17509    fn parse_duplicate_treatment(&mut self) -> Result<Option<DuplicateTreatment>, ParserError> {
17510        let loc = self.peek_token().span.start;
17511        match (
17512            self.parse_keyword(Keyword::ALL),
17513            self.parse_keyword(Keyword::DISTINCT),
17514        ) {
17515            (true, false) => Ok(Some(DuplicateTreatment::All)),
17516            (false, true) => Ok(Some(DuplicateTreatment::Distinct)),
17517            (false, false) => Ok(None),
17518            (true, true) => parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc),
17519        }
17520    }
17521
17522    /// Parse a single projection (select item) from the comma-delimited list following `SELECT`
17523    pub fn parse_select_item(&mut self) -> Result<SelectItem, ParserError> {
17524        let prefix = self
17525            .parse_one_of_keywords(
17526                self.dialect
17527                    .get_reserved_keywords_for_select_item_operator(),
17528            )
17529            .map(|keyword| Ident::new(format!("{keyword:?}")));
17530
17531        match self.parse_wildcard_expr()? {
17532            Expr::QualifiedWildcard(prefix, token) => Ok(SelectItem::QualifiedWildcard(
17533                SelectItemQualifiedWildcardKind::ObjectName(prefix),
17534                self.parse_wildcard_additional_options(token.0)?,
17535            )),
17536            Expr::Wildcard(token) => Ok(SelectItem::Wildcard(
17537                self.parse_wildcard_additional_options(token.0)?,
17538            )),
17539            Expr::Identifier(v) if v.value.to_lowercase() == "from" && v.quote_style.is_none() => {
17540                parser_err!(
17541                    format!("Expected an expression, found: {}", v),
17542                    self.peek_token().span.start
17543                )
17544            }
17545            Expr::BinaryOp {
17546                left,
17547                op: BinaryOperator::Eq,
17548                right,
17549            } if self.dialect.supports_eq_alias_assignment()
17550                && matches!(left.as_ref(), Expr::Identifier(_)) =>
17551            {
17552                let Expr::Identifier(alias) = *left else {
17553                    return parser_err!(
17554                        "BUG: expected identifier expression as alias",
17555                        self.peek_token().span.start
17556                    );
17557                };
17558                Ok(SelectItem::ExprWithAlias {
17559                    expr: *right,
17560                    alias,
17561                })
17562            }
17563            expr if self.dialect.supports_select_expr_star()
17564                && self.consume_tokens(&[Token::Period, Token::Mul]) =>
17565            {
17566                let wildcard_token = self.get_previous_token().clone();
17567                Ok(SelectItem::QualifiedWildcard(
17568                    SelectItemQualifiedWildcardKind::Expr(expr),
17569                    self.parse_wildcard_additional_options(wildcard_token)?,
17570                ))
17571            }
17572            expr => self
17573                .maybe_parse_select_item_alias()
17574                .map(|alias| match alias {
17575                    Some(alias) => SelectItem::ExprWithAlias {
17576                        expr: maybe_prefixed_expr(expr, prefix),
17577                        alias,
17578                    },
17579                    None => SelectItem::UnnamedExpr(maybe_prefixed_expr(expr, prefix)),
17580                }),
17581        }
17582    }
17583
17584    /// Parse [`WildcardAdditionalOptions`] for wildcard select items.
17585    ///
17586    /// Each option that cannot be parsed is returned as `None` in the result.
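    ///
    /// Illustrative Snowflake-style examples (column and table names are
    /// placeholders; each modifier is only parsed if the dialect supports it):
    ///
    /// ```sql
    /// SELECT * ILIKE '%id%' FROM t;
    /// SELECT * EXCLUDE (secret) RENAME (uid AS user_id) FROM t;
    /// SELECT * REPLACE (amount / 100 AS amount) FROM t;
    /// ```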
17587    pub fn parse_wildcard_additional_options(
17588        &mut self,
17589        wildcard_token: TokenWithSpan,
17590    ) -> Result<WildcardAdditionalOptions, ParserError> {
17591        let opt_ilike = if self.dialect.supports_select_wildcard_ilike() {
17592            self.parse_optional_select_item_ilike()?
17593        } else {
17594            None
17595        };
17596        let opt_exclude = if opt_ilike.is_none() && self.dialect.supports_select_wildcard_exclude()
17597        {
17598            self.parse_optional_select_item_exclude()?
17599        } else {
17600            None
17601        };
17602        let opt_except = if self.dialect.supports_select_wildcard_except() {
17603            self.parse_optional_select_item_except()?
17604        } else {
17605            None
17606        };
17607        let opt_replace = if self.dialect.supports_select_wildcard_replace() {
17608            self.parse_optional_select_item_replace()?
17609        } else {
17610            None
17611        };
17612        let opt_rename = if self.dialect.supports_select_wildcard_rename() {
17613            self.parse_optional_select_item_rename()?
17614        } else {
17615            None
17616        };
17617
17618        Ok(WildcardAdditionalOptions {
17619            wildcard_token: wildcard_token.into(),
17620            opt_ilike,
17621            opt_exclude,
17622            opt_except,
17623            opt_rename,
17624            opt_replace,
17625        })
17626    }
17627
17628    /// Parse [`Ilike`](IlikeSelectItem) information for wildcard select items.
17629    ///
17630    /// Returns `Ok(None)` if it is not present.
17631    pub fn parse_optional_select_item_ilike(
17632        &mut self,
17633    ) -> Result<Option<IlikeSelectItem>, ParserError> {
17634        let opt_ilike = if self.parse_keyword(Keyword::ILIKE) {
17635            let next_token = self.next_token();
17636            let pattern = match next_token.token {
17637                Token::SingleQuotedString(s) => s,
17638                _ => return self.expected("ilike pattern", next_token),
17639            };
17640            Some(IlikeSelectItem { pattern })
17641        } else {
17642            None
17643        };
17644        Ok(opt_ilike)
17645    }
17646
17647    /// Parse [`Exclude`](ExcludeSelectItem) information for wildcard select items.
17648    ///
17649    /// Returns `Ok(None)` if it is not present.
17650    pub fn parse_optional_select_item_exclude(
17651        &mut self,
17652    ) -> Result<Option<ExcludeSelectItem>, ParserError> {
17653        let opt_exclude = if self.parse_keyword(Keyword::EXCLUDE) {
17654            if self.consume_token(&Token::LParen) {
17655                let columns = self.parse_comma_separated(|parser| parser.parse_identifier())?;
17656                self.expect_token(&Token::RParen)?;
17657                Some(ExcludeSelectItem::Multiple(columns))
17658            } else {
17659                let column = self.parse_identifier()?;
17660                Some(ExcludeSelectItem::Single(column))
17661            }
17662        } else {
17663            None
17664        };
17665
17666        Ok(opt_exclude)
17667    }
17668
17669    /// Parse [`Except`](ExceptSelectItem) information for wildcard select items.
17670    ///
17671    /// Returns `Ok(None)` if it is not present.
17672    pub fn parse_optional_select_item_except(
17673        &mut self,
17674    ) -> Result<Option<ExceptSelectItem>, ParserError> {
17675        let opt_except = if self.parse_keyword(Keyword::EXCEPT) {
17676            if self.peek_token().token == Token::LParen {
17677                let idents = self.parse_parenthesized_column_list(Mandatory, false)?;
17678                match &idents[..] {
17679                    [] => {
17680                        return self.expected(
17681                            "at least one column should be parsed by the EXCEPT clause",
17682                            self.peek_token(),
17683                        )?;
17684                    }
17685                    [first, idents @ ..] => Some(ExceptSelectItem {
17686                        first_element: first.clone(),
17687                        additional_elements: idents.to_vec(),
17688                    }),
17689                }
17690            } else {
17691                // Clickhouse allows EXCEPT column_name
17692                let ident = self.parse_identifier()?;
17693                Some(ExceptSelectItem {
17694                    first_element: ident,
17695                    additional_elements: vec![],
17696                })
17697            }
17698        } else {
17699            None
17700        };
17701
17702        Ok(opt_except)
17703    }
17704
17705    /// Parse [`Rename`](RenameSelectItem) information for wildcard select items.
17706    pub fn parse_optional_select_item_rename(
17707        &mut self,
17708    ) -> Result<Option<RenameSelectItem>, ParserError> {
17709        let opt_rename = if self.parse_keyword(Keyword::RENAME) {
17710            if self.consume_token(&Token::LParen) {
17711                let idents =
17712                    self.parse_comma_separated(|parser| parser.parse_identifier_with_alias())?;
17713                self.expect_token(&Token::RParen)?;
17714                Some(RenameSelectItem::Multiple(idents))
17715            } else {
17716                let ident = self.parse_identifier_with_alias()?;
17717                Some(RenameSelectItem::Single(ident))
17718            }
17719        } else {
17720            None
17721        };
17722
17723        Ok(opt_rename)
17724    }
17725
17726    /// Parse [`Replace`](ReplaceSelectItem) information for wildcard select items.
17727    pub fn parse_optional_select_item_replace(
17728        &mut self,
17729    ) -> Result<Option<ReplaceSelectItem>, ParserError> {
17730        let opt_replace = if self.parse_keyword(Keyword::REPLACE) {
17731            if self.consume_token(&Token::LParen) {
17732                let items = self.parse_comma_separated(|parser| {
17733                    Ok(Box::new(parser.parse_replace_elements()?))
17734                })?;
17735                self.expect_token(&Token::RParen)?;
17736                Some(ReplaceSelectItem { items })
17737            } else {
17738                let tok = self.next_token();
17739                return self.expected("( after REPLACE", tok);
17740            }
17741        } else {
17742            None
17743        };
17744
17745        Ok(opt_replace)
17746    }
17747    /// Parse a single element of a `REPLACE (...)` select-item clause.
17748    pub fn parse_replace_elements(&mut self) -> Result<ReplaceSelectElement, ParserError> {
17749        let expr = self.parse_expr()?;
17750        let as_keyword = self.parse_keyword(Keyword::AS);
17751        let ident = self.parse_identifier()?;
17752        Ok(ReplaceSelectElement {
17753            expr,
17754            column_name: ident,
17755            as_keyword,
17756        })
17757    }
17758
17759    /// Parse ASC or DESC, returning `Some(true)` for ASC, `Some(false)` for DESC,
17760    /// or `None` if neither is present.
17761    pub fn parse_asc_desc(&mut self) -> Option<bool> {
17762        if self.parse_keyword(Keyword::ASC) {
17763            Some(true)
17764        } else if self.parse_keyword(Keyword::DESC) {
17765            Some(false)
17766        } else {
17767            None
17768        }
17769    }
17770
17771    /// Parse an [OrderByExpr] expression.
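    ///
    /// For example, given the query below this parses the single ordering
    /// expression `created_at DESC NULLS LAST` (names are placeholders):
    ///
    /// ```sql
    /// SELECT * FROM t ORDER BY created_at DESC NULLS LAST;
    /// ```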
17772    pub fn parse_order_by_expr(&mut self) -> Result<OrderByExpr, ParserError> {
17773        self.parse_order_by_expr_inner(false)
17774            .map(|(order_by, _)| order_by)
17775    }
17776
17777    /// Parse an [IndexColumn].
17778    pub fn parse_create_index_expr(&mut self) -> Result<IndexColumn, ParserError> {
17779        self.parse_order_by_expr_inner(true)
17780            .map(|(column, operator_class)| IndexColumn {
17781                column,
17782                operator_class,
17783            })
17784    }
17785
17786    fn parse_order_by_expr_inner(
17787        &mut self,
17788        with_operator_class: bool,
17789    ) -> Result<(OrderByExpr, Option<ObjectName>), ParserError> {
17790        let expr = self.parse_expr()?;
17791
17792        let operator_class: Option<ObjectName> = if with_operator_class {
17793            // If none of the following keywords are present, parse an
17794            // identifier as the operator class.
17795            if self
17796                .peek_one_of_keywords(&[Keyword::ASC, Keyword::DESC, Keyword::NULLS, Keyword::WITH])
17797                .is_some()
17798            {
17799                None
17800            } else {
17801                self.maybe_parse(|parser| parser.parse_object_name(false))?
17802            }
17803        } else {
17804            None
17805        };
17806
17807        let options = self.parse_order_by_options()?;
17808
17809        let with_fill = if self.dialect.supports_with_fill()
17810            && self.parse_keywords(&[Keyword::WITH, Keyword::FILL])
17811        {
17812            Some(self.parse_with_fill()?)
17813        } else {
17814            None
17815        };
17816
17817        Ok((
17818            OrderByExpr {
17819                expr,
17820                options,
17821                with_fill,
17822            },
17823            operator_class,
17824        ))
17825    }
17826
17827    fn parse_order_by_options(&mut self) -> Result<OrderByOptions, ParserError> {
17828        let asc = self.parse_asc_desc();
17829
17830        let nulls_first = if self.parse_keywords(&[Keyword::NULLS, Keyword::FIRST]) {
17831            Some(true)
17832        } else if self.parse_keywords(&[Keyword::NULLS, Keyword::LAST]) {
17833            Some(false)
17834        } else {
17835            None
17836        };
17837
17838        Ok(OrderByOptions { asc, nulls_first })
17839    }
17840
17841    /// Parse a `WITH FILL` clause (ClickHouse dialect) that follows
17842    /// the `WITH FILL` keywords in an ORDER BY clause.
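    ///
    /// An illustrative ClickHouse-style example (names and bounds are placeholders):
    ///
    /// ```sql
    /// SELECT d FROM t ORDER BY d WITH FILL FROM 1 TO 10 STEP 1;
    /// ```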
17844    pub fn parse_with_fill(&mut self) -> Result<WithFill, ParserError> {
17845        let from = if self.parse_keyword(Keyword::FROM) {
17846            Some(self.parse_expr()?)
17847        } else {
17848            None
17849        };
17850
17851        let to = if self.parse_keyword(Keyword::TO) {
17852            Some(self.parse_expr()?)
17853        } else {
17854            None
17855        };
17856
17857        let step = if self.parse_keyword(Keyword::STEP) {
17858            Some(self.parse_expr()?)
17859        } else {
17860            None
17861        };
17862
17863        Ok(WithFill { from, to, step })
17864    }
17865
17866    /// Parse a set of comma-separated INTERPOLATE expressions (ClickHouse dialect)
17867    /// that follow the INTERPOLATE keyword in an ORDER BY clause with the WITH FILL modifier.
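    ///
    /// An illustrative ClickHouse-style example (names are placeholders):
    ///
    /// ```sql
    /// SELECT d, price FROM t ORDER BY d WITH FILL INTERPOLATE (price AS price + 1);
    /// ```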
17868    pub fn parse_interpolations(&mut self) -> Result<Option<Interpolate>, ParserError> {
17869        if !self.parse_keyword(Keyword::INTERPOLATE) {
17870            return Ok(None);
17871        }
17872
17873        if self.consume_token(&Token::LParen) {
17874            let interpolations =
17875                self.parse_comma_separated0(|p| p.parse_interpolation(), Token::RParen)?;
17876            self.expect_token(&Token::RParen)?;
17877            // INTERPOLATE () and INTERPOLATE ( ... ) variants
17878            return Ok(Some(Interpolate {
17879                exprs: Some(interpolations),
17880            }));
17881        }
17882
17883        // INTERPOLATE
17884        Ok(Some(Interpolate { exprs: None }))
17885    }
17886
17887    /// Parse an INTERPOLATE expression (ClickHouse dialect)
17888    pub fn parse_interpolation(&mut self) -> Result<InterpolateExpr, ParserError> {
17889        let column = self.parse_identifier()?;
17890        let expr = if self.parse_keyword(Keyword::AS) {
17891            Some(self.parse_expr()?)
17892        } else {
17893            None
17894        };
17895        Ok(InterpolateExpr { column, expr })
17896    }
17897
17898    /// Parse a TOP clause, the MSSQL equivalent of LIMIT,
17899    /// which follows `SELECT [DISTINCT]`.
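    ///
    /// Illustrative T-SQL-style examples (names are placeholders):
    ///
    /// ```sql
    /// SELECT TOP 10 PERCENT * FROM t;
    /// SELECT TOP 5 WITH TIES * FROM t ORDER BY score DESC;
    /// ```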
17900    pub fn parse_top(&mut self) -> Result<Top, ParserError> {
17901        let quantity = if self.consume_token(&Token::LParen) {
17902            let quantity = self.parse_expr()?;
17903            self.expect_token(&Token::RParen)?;
17904            Some(TopQuantity::Expr(quantity))
17905        } else {
17906            let next_token = self.next_token();
17907            let quantity = match next_token.token {
17908                Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start)?,
17909                _ => self.expected("literal int", next_token)?,
17910            };
17911            Some(TopQuantity::Constant(quantity))
17912        };
17913
17914        let percent = self.parse_keyword(Keyword::PERCENT);
17915
17916        let with_ties = self.parse_keywords(&[Keyword::WITH, Keyword::TIES]);
17917
17918        Ok(Top {
17919            with_ties,
17920            percent,
17921            quantity,
17922        })
17923    }
17924
17925    /// Parse a LIMIT clause
17926    pub fn parse_limit(&mut self) -> Result<Option<Expr>, ParserError> {
17927        if self.parse_keyword(Keyword::ALL) {
17928            Ok(None)
17929        } else {
17930            Ok(Some(self.parse_expr()?))
17931        }
17932    }
17933
17934    /// Parse an OFFSET clause
17935    pub fn parse_offset(&mut self) -> Result<Offset, ParserError> {
17936        let value = self.parse_expr()?;
17937        let rows = if self.parse_keyword(Keyword::ROW) {
17938            OffsetRows::Row
17939        } else if self.parse_keyword(Keyword::ROWS) {
17940            OffsetRows::Rows
17941        } else {
17942            OffsetRows::None
17943        };
17944        Ok(Offset { value, rows })
17945    }
17946
17947    /// Parse a FETCH clause
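    ///
    /// Illustrative examples (names are placeholders):
    ///
    /// ```sql
    /// SELECT * FROM t OFFSET 10 ROWS FETCH NEXT 5 ROWS ONLY;
    /// SELECT * FROM t ORDER BY score FETCH FIRST 10 PERCENT ROWS WITH TIES;
    /// ```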
17948    pub fn parse_fetch(&mut self) -> Result<Fetch, ParserError> {
17949        let _ = self.parse_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT]);
17950
17951        let (quantity, percent) = if self
17952            .parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])
17953            .is_some()
17954        {
17955            (None, false)
17956        } else {
17957            let quantity = Expr::Value(self.parse_value()?);
17958            let percent = self.parse_keyword(Keyword::PERCENT);
17959            let _ = self.parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS]);
17960            (Some(quantity), percent)
17961        };
17962
17963        let with_ties = if self.parse_keyword(Keyword::ONLY) {
17964            false
17965        } else {
17966            self.parse_keywords(&[Keyword::WITH, Keyword::TIES])
17967        };
17968
17969        Ok(Fetch {
17970            with_ties,
17971            percent,
17972            quantity,
17973        })
17974    }
17975
17976    /// Parse a FOR UPDATE/FOR SHARE clause
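    ///
    /// Illustrative examples (table name is a placeholder):
    ///
    /// ```sql
    /// SELECT * FROM accounts FOR UPDATE OF accounts SKIP LOCKED;
    /// SELECT * FROM accounts FOR SHARE NOWAIT;
    /// ```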
17977    pub fn parse_lock(&mut self) -> Result<LockClause, ParserError> {
17978        let lock_type = match self.expect_one_of_keywords(&[Keyword::UPDATE, Keyword::SHARE])? {
17979            Keyword::UPDATE => LockType::Update,
17980            Keyword::SHARE => LockType::Share,
17981            unexpected_keyword => return Err(ParserError::ParserError(
17982                format!("Internal parser error: expected any of {{UPDATE, SHARE}}, got {unexpected_keyword:?}"),
17983            )),
17984        };
17985        let of = if self.parse_keyword(Keyword::OF) {
17986            Some(self.parse_object_name(false)?)
17987        } else {
17988            None
17989        };
17990        let nonblock = if self.parse_keyword(Keyword::NOWAIT) {
17991            Some(NonBlock::Nowait)
17992        } else if self.parse_keywords(&[Keyword::SKIP, Keyword::LOCKED]) {
17993            Some(NonBlock::SkipLocked)
17994        } else {
17995            None
17996        };
17997        Ok(LockClause {
17998            lock_type,
17999            of,
18000            nonblock,
18001        })
18002    }
18003
18004    /// Parse a VALUES clause
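    ///
    /// For example (the `ROW` keyword form is MySQL-style; values invented):
    /// ```sql
    /// VALUES (1, 'one'), (2, 'two');
    /// VALUES ROW(1, 'one'), ROW(2, 'two');
    /// ```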
18005    pub fn parse_values(
18006        &mut self,
18007        allow_empty: bool,
18008        value_keyword: bool,
18009    ) -> Result<Values, ParserError> {
18010        let mut explicit_row = false;
18011
18012        let rows = self.parse_comma_separated(|parser| {
18013            if parser.parse_keyword(Keyword::ROW) {
18014                explicit_row = true;
18015            }
18016
18017            parser.expect_token(&Token::LParen)?;
18018            if allow_empty && parser.peek_token().token == Token::RParen {
18019                parser.next_token();
18020                Ok(vec![])
18021            } else {
18022                let exprs = parser.parse_comma_separated(Parser::parse_expr)?;
18023                parser.expect_token(&Token::RParen)?;
18024                Ok(exprs)
18025            }
18026        })?;
18027        Ok(Values {
18028            explicit_row,
18029            rows,
18030            value_keyword,
18031        })
18032    }
18033
18034    /// Parse a 'START TRANSACTION' statement
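    ///
    /// An illustrative example with optional transaction modes:
    /// ```sql
    /// START TRANSACTION ISOLATION LEVEL SERIALIZABLE, READ ONLY;
    /// ```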
18035    pub fn parse_start_transaction(&mut self) -> Result<Statement, ParserError> {
18036        self.expect_keyword_is(Keyword::TRANSACTION)?;
18037        Ok(Statement::StartTransaction {
18038            modes: self.parse_transaction_modes()?,
18039            begin: false,
18040            transaction: Some(BeginTransactionKind::Transaction),
18041            modifier: None,
18042            statements: vec![],
18043            exception: None,
18044            has_end_keyword: false,
18045        })
18046    }
18047
18048    /// Parse a transaction modifier keyword that can follow a `BEGIN` statement.
18049    pub(crate) fn parse_transaction_modifier(&mut self) -> Option<TransactionModifier> {
18050        if !self.dialect.supports_start_transaction_modifier() {
18051            None
18052        } else if self.parse_keyword(Keyword::DEFERRED) {
18053            Some(TransactionModifier::Deferred)
18054        } else if self.parse_keyword(Keyword::IMMEDIATE) {
18055            Some(TransactionModifier::Immediate)
18056        } else if self.parse_keyword(Keyword::EXCLUSIVE) {
18057            Some(TransactionModifier::Exclusive)
18058        } else if self.parse_keyword(Keyword::TRY) {
18059            Some(TransactionModifier::Try)
18060        } else if self.parse_keyword(Keyword::CATCH) {
18061            Some(TransactionModifier::Catch)
18062        } else {
18063            None
18064        }
18065    }
18066
18067    /// Parse a 'BEGIN' statement
18068    pub fn parse_begin(&mut self) -> Result<Statement, ParserError> {
18069        let modifier = self.parse_transaction_modifier();
18070        let transaction = match self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]) {
18071            Some(Keyword::TRANSACTION) => Some(BeginTransactionKind::Transaction),
18072            Some(Keyword::WORK) => Some(BeginTransactionKind::Work),
18073            _ => None,
18074        };
18075        Ok(Statement::StartTransaction {
18076            modes: self.parse_transaction_modes()?,
18077            begin: true,
18078            transaction,
18079            modifier,
18080            statements: vec![],
18081            exception: None,
18082            has_end_keyword: false,
18083        })
18084    }
18085
18086    /// Parse a 'BEGIN ... EXCEPTION ... END' block
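    ///
    /// A BigQuery-flavored sketch of the accepted shape (statements invented):
    /// ```sql
    /// BEGIN
    ///     SELECT 1;
    /// EXCEPTION WHEN ERROR THEN
    ///     SELECT 'handled';
    /// END
    /// ```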
18087    pub fn parse_begin_exception_end(&mut self) -> Result<Statement, ParserError> {
18088        let statements = self.parse_statement_list(&[Keyword::EXCEPTION, Keyword::END])?;
18089
18090        let exception = if self.parse_keyword(Keyword::EXCEPTION) {
18091            let mut when = Vec::new();
18092
18093            // We can have multiple `WHEN` arms, so we consume all cases until `END`
18094            while !self.peek_keyword(Keyword::END) {
18095                self.expect_keyword(Keyword::WHEN)?;
18096
18097                // Each `WHEN` case can have one or more conditions, e.g.
18098                // WHEN EXCEPTION_1 [OR EXCEPTION_2] THEN
18099                // So we parse identifiers until the `THEN` keyword.
18100                let mut idents = Vec::new();
18101
18102                while !self.parse_keyword(Keyword::THEN) {
18103                    let ident = self.parse_identifier()?;
18104                    idents.push(ident);
18105
18106                    self.maybe_parse(|p| p.expect_keyword(Keyword::OR))?;
18107                }
18108
18109                let statements = self.parse_statement_list(&[Keyword::WHEN, Keyword::END])?;
18110
18111                when.push(ExceptionWhen { idents, statements });
18112            }
18113
18114            Some(when)
18115        } else {
18116            None
18117        };
18118
18119        self.expect_keyword(Keyword::END)?;
18120
18121        Ok(Statement::StartTransaction {
18122            begin: true,
18123            statements,
18124            exception,
18125            has_end_keyword: true,
18126            transaction: None,
18127            modifier: None,
18128            modes: Default::default(),
18129        })
18130    }
18131
18132    /// Parse an 'END' statement
18133    pub fn parse_end(&mut self) -> Result<Statement, ParserError> {
18134        let modifier = if !self.dialect.supports_end_transaction_modifier() {
18135            None
18136        } else if self.parse_keyword(Keyword::TRY) {
18137            Some(TransactionModifier::Try)
18138        } else if self.parse_keyword(Keyword::CATCH) {
18139            Some(TransactionModifier::Catch)
18140        } else {
18141            None
18142        };
18143        Ok(Statement::Commit {
18144            chain: self.parse_commit_rollback_chain()?,
18145            end: true,
18146            modifier,
18147        })
18148    }
18149
18150    /// Parse a list of transaction modes
18151    pub fn parse_transaction_modes(&mut self) -> Result<Vec<TransactionMode>, ParserError> {
18152        let mut modes = vec![];
18153        let mut required = false;
18154        loop {
18155            let mode = if self.parse_keywords(&[Keyword::ISOLATION, Keyword::LEVEL]) {
18156                let iso_level = if self.parse_keywords(&[Keyword::READ, Keyword::UNCOMMITTED]) {
18157                    TransactionIsolationLevel::ReadUncommitted
18158                } else if self.parse_keywords(&[Keyword::READ, Keyword::COMMITTED]) {
18159                    TransactionIsolationLevel::ReadCommitted
18160                } else if self.parse_keywords(&[Keyword::REPEATABLE, Keyword::READ]) {
18161                    TransactionIsolationLevel::RepeatableRead
18162                } else if self.parse_keyword(Keyword::SERIALIZABLE) {
18163                    TransactionIsolationLevel::Serializable
18164                } else if self.parse_keyword(Keyword::SNAPSHOT) {
18165                    TransactionIsolationLevel::Snapshot
18166                } else {
18167                    self.expected("isolation level", self.peek_token())?
18168                };
18169                TransactionMode::IsolationLevel(iso_level)
18170            } else if self.parse_keywords(&[Keyword::READ, Keyword::ONLY]) {
18171                TransactionMode::AccessMode(TransactionAccessMode::ReadOnly)
18172            } else if self.parse_keywords(&[Keyword::READ, Keyword::WRITE]) {
18173                TransactionMode::AccessMode(TransactionAccessMode::ReadWrite)
18174            } else if required {
18175                self.expected("transaction mode", self.peek_token())?
18176            } else {
18177                break;
18178            };
18179            modes.push(mode);
18180            // ANSI requires a comma after each transaction mode, but
18181            // PostgreSQL, for historical reasons, does not. We follow
18182            // PostgreSQL in making the comma optional, since that is strictly
18183            // more general.
18184            required = self.consume_token(&Token::Comma);
18185        }
18186        Ok(modes)
18187    }
18188
18189    /// Parse a 'COMMIT' statement
18190    pub fn parse_commit(&mut self) -> Result<Statement, ParserError> {
18191        Ok(Statement::Commit {
18192            chain: self.parse_commit_rollback_chain()?,
18193            end: false,
18194            modifier: None,
18195        })
18196    }
18197
18198    /// Parse a 'ROLLBACK' statement
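    ///
    /// Illustrative examples (savepoint name invented):
    /// ```sql
    /// ROLLBACK AND CHAIN;
    /// ROLLBACK TO SAVEPOINT before_update;
    /// ```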
18199    pub fn parse_rollback(&mut self) -> Result<Statement, ParserError> {
18200        let chain = self.parse_commit_rollback_chain()?;
18201        let savepoint = self.parse_rollback_savepoint()?;
18202
18203        Ok(Statement::Rollback { chain, savepoint })
18204    }
18205
18206    /// Parse an optional `AND [NO] CHAIN` clause for `COMMIT` and `ROLLBACK` statements
18207    pub fn parse_commit_rollback_chain(&mut self) -> Result<bool, ParserError> {
18208        let _ = self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]);
18209        if self.parse_keyword(Keyword::AND) {
18210            let chain = !self.parse_keyword(Keyword::NO);
18211            self.expect_keyword_is(Keyword::CHAIN)?;
18212            Ok(chain)
18213        } else {
18214            Ok(false)
18215        }
18216    }
18217
18218    /// Parse an optional 'TO SAVEPOINT savepoint_name' clause for ROLLBACK statements
18219    pub fn parse_rollback_savepoint(&mut self) -> Result<Option<Ident>, ParserError> {
18220        if self.parse_keyword(Keyword::TO) {
18221            let _ = self.parse_keyword(Keyword::SAVEPOINT);
18222            let savepoint = self.parse_identifier()?;
18223
18224            Ok(Some(savepoint))
18225        } else {
18226            Ok(None)
18227        }
18228    }
18229
18230    /// Parse a 'RAISERROR' statement
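    ///
    /// An invented MSSQL-style example of the accepted shape:
    /// ```sql
    /// RAISERROR('Row count mismatch: %d', 16, 1, 42) WITH LOG, NOWAIT;
    /// ```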
18231    pub fn parse_raiserror(&mut self) -> Result<Statement, ParserError> {
18232        self.expect_token(&Token::LParen)?;
18233        let message = Box::new(self.parse_expr()?);
18234        self.expect_token(&Token::Comma)?;
18235        let severity = Box::new(self.parse_expr()?);
18236        self.expect_token(&Token::Comma)?;
18237        let state = Box::new(self.parse_expr()?);
18238        let arguments = if self.consume_token(&Token::Comma) {
18239            self.parse_comma_separated(Parser::parse_expr)?
18240        } else {
18241            vec![]
18242        };
18243        self.expect_token(&Token::RParen)?;
18244        let options = if self.parse_keyword(Keyword::WITH) {
18245            self.parse_comma_separated(Parser::parse_raiserror_option)?
18246        } else {
18247            vec![]
18248        };
18249        Ok(Statement::RaisError {
18250            message,
18251            severity,
18252            state,
18253            arguments,
18254            options,
18255        })
18256    }
18257
18258    /// Parse a single `RAISERROR` option
18259    pub fn parse_raiserror_option(&mut self) -> Result<RaisErrorOption, ParserError> {
18260        match self.expect_one_of_keywords(&[Keyword::LOG, Keyword::NOWAIT, Keyword::SETERROR])? {
18261            Keyword::LOG => Ok(RaisErrorOption::Log),
18262            Keyword::NOWAIT => Ok(RaisErrorOption::NoWait),
18263            Keyword::SETERROR => Ok(RaisErrorOption::SetError),
18264            _ => self.expected(
18265                "LOG, NOWAIT, or SETERROR raiserror option",
18266                self.peek_token(),
18267            ),
18268        }
18269    }
18270
18271    /// Parse a SQL `DEALLOCATE` statement
18272    pub fn parse_deallocate(&mut self) -> Result<Statement, ParserError> {
18273        let prepare = self.parse_keyword(Keyword::PREPARE);
18274        let name = self.parse_identifier()?;
18275        Ok(Statement::Deallocate { name, prepare })
18276    }
18277
18278    /// Parse a SQL `EXECUTE` statement
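    ///
    /// Two illustrative forms (identifiers invented; the second is BigQuery-style
    /// `EXECUTE IMMEDIATE`):
    /// ```sql
    /// EXECUTE prepared_insert (1, 'alpha');
    /// EXECUTE IMMEDIATE 'SELECT @answer' USING 42 AS answer;
    /// ```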
18279    pub fn parse_execute(&mut self) -> Result<Statement, ParserError> {
18280        let name = if self.dialect.supports_execute_immediate()
18281            && self.parse_keyword(Keyword::IMMEDIATE)
18282        {
18283            None
18284        } else {
18285            let has_parentheses = self.consume_token(&Token::LParen);
18286            let name = self.parse_object_name(false)?;
18287            if has_parentheses {
18288                self.expect_token(&Token::RParen)?;
18289            }
18290            Some(name)
18291        };
18292
18293        let has_parentheses = self.consume_token(&Token::LParen);
18294
18295        let end_kws = &[Keyword::USING, Keyword::OUTPUT, Keyword::DEFAULT];
18296        let end_token = match (has_parentheses, self.peek_token().token) {
18297            (true, _) => Token::RParen,
18298            (false, Token::EOF) => Token::EOF,
18299            (false, Token::Word(w)) if end_kws.contains(&w.keyword) => Token::Word(w),
18300            (false, _) => Token::SemiColon,
18301        };
18302
18303        let parameters = self.parse_comma_separated0(Parser::parse_expr, end_token)?;
18304
18305        if has_parentheses {
18306            self.expect_token(&Token::RParen)?;
18307        }
18308
18309        let into = if self.parse_keyword(Keyword::INTO) {
18310            self.parse_comma_separated(Self::parse_identifier)?
18311        } else {
18312            vec![]
18313        };
18314
18315        let using = if self.parse_keyword(Keyword::USING) {
18316            self.parse_comma_separated(Self::parse_expr_with_alias)?
18317        } else {
18318            vec![]
18319        };
18320
18321        let output = self.parse_keyword(Keyword::OUTPUT);
18322
18323        let default = self.parse_keyword(Keyword::DEFAULT);
18324
18325        Ok(Statement::Execute {
18326            immediate: name.is_none(),
18327            name,
18328            parameters,
18329            has_parentheses,
18330            into,
18331            using,
18332            output,
18333            default,
18334        })
18335    }
18336
18337    /// Parse a SQL `PREPARE` statement
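    ///
    /// An invented Postgres-style example:
    /// ```sql
    /// PREPARE insert_user (INT, TEXT) AS INSERT INTO users VALUES ($1, $2);
    /// ```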
18338    pub fn parse_prepare(&mut self) -> Result<Statement, ParserError> {
18339        let name = self.parse_identifier()?;
18340
18341        let mut data_types = vec![];
18342        if self.consume_token(&Token::LParen) {
18343            data_types = self.parse_comma_separated(Parser::parse_data_type)?;
18344            self.expect_token(&Token::RParen)?;
18345        }
18346
18347        self.expect_keyword_is(Keyword::AS)?;
18348        let statement = Box::new(self.parse_statement()?);
18349        Ok(Statement::Prepare {
18350            name,
18351            data_types,
18352            statement,
18353        })
18354    }
18355
18356    /// Parse a SQL `UNLOAD` statement
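    ///
    /// An Athena/Redshift-flavored sketch (bucket and options invented):
    /// ```sql
    /// UNLOAD (SELECT * FROM sales) TO 's3://mybucket/prefix/' WITH (format = 'PARQUET');
    /// ```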
18357    pub fn parse_unload(&mut self) -> Result<Statement, ParserError> {
18358        self.expect_keyword(Keyword::UNLOAD)?;
18359        self.expect_token(&Token::LParen)?;
18360        let (query, query_text) = if matches!(self.peek_token().token, Token::SingleQuotedString(_))
18361        {
18362            (None, Some(self.parse_literal_string()?))
18363        } else {
18364            (Some(self.parse_query()?), None)
18365        };
18366        self.expect_token(&Token::RParen)?;
18367
18368        self.expect_keyword_is(Keyword::TO)?;
18369        let to = self.parse_identifier()?;
18370        let auth = if self.parse_keyword(Keyword::IAM_ROLE) {
18371            Some(self.parse_iam_role_kind()?)
18372        } else {
18373            None
18374        };
18375        let with = self.parse_options(Keyword::WITH)?;
18376        let mut options = vec![];
18377        while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
18378            options.push(opt);
18379        }
18380        Ok(Statement::Unload {
18381            query,
18382            query_text,
18383            to,
18384            auth,
18385            with,
18386            options,
18387        })
18388    }
18389
18390    fn parse_select_into(&mut self) -> Result<SelectInto, ParserError> {
18391        let temporary = self
18392            .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
18393            .is_some();
18394        let unlogged = self.parse_keyword(Keyword::UNLOGGED);
18395        let table = self.parse_keyword(Keyword::TABLE);
18396        let name = self.parse_object_name(false)?;
18397
18398        Ok(SelectInto {
18399            temporary,
18400            unlogged,
18401            table,
18402            name,
18403        })
18404    }
18405
18406    fn parse_pragma_value(&mut self) -> Result<Value, ParserError> {
18407        match self.parse_value()?.value {
18408            v @ Value::SingleQuotedString(_) => Ok(v),
18409            v @ Value::DoubleQuotedString(_) => Ok(v),
18410            v @ Value::Number(_, _) => Ok(v),
18411            v @ Value::Placeholder(_) => Ok(v),
18412            _ => {
18413                self.prev_token();
18414                self.expected("number or string or ? placeholder", self.peek_token())
18415            }
18416        }
18417    }
18418
18419    /// PRAGMA [schema-name '.'] pragma-name [('=' pragma-value) | '(' pragma-value ')']
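    ///
    /// Illustrative examples (names and values invented):
    /// ```sql
    /// PRAGMA cache_size = 4000;
    /// PRAGMA main.journal_mode('WAL');
    /// PRAGMA user_version;
    /// ```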
18420    pub fn parse_pragma(&mut self) -> Result<Statement, ParserError> {
18421        let name = self.parse_object_name(false)?;
18422        if self.consume_token(&Token::LParen) {
18423            let value = self.parse_pragma_value()?;
18424            self.expect_token(&Token::RParen)?;
18425            Ok(Statement::Pragma {
18426                name,
18427                value: Some(value),
18428                is_eq: false,
18429            })
18430        } else if self.consume_token(&Token::Eq) {
18431            Ok(Statement::Pragma {
18432                name,
18433                value: Some(self.parse_pragma_value()?),
18434                is_eq: true,
18435            })
18436        } else {
18437            Ok(Statement::Pragma {
18438                name,
18439                value: None,
18440                is_eq: false,
18441            })
18442        }
18443    }
18444
18445    /// `INSTALL [extension_name]`
18446    pub fn parse_install(&mut self) -> Result<Statement, ParserError> {
18447        let extension_name = self.parse_identifier()?;
18448
18449        Ok(Statement::Install { extension_name })
18450    }
18451
18452    /// Parse a SQL LOAD statement
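    ///
    /// Invented examples of the two supported forms (an extension load, and a
    /// Hive-style `LOAD DATA`):
    /// ```sql
    /// LOAD httpfs;
    /// LOAD DATA LOCAL INPATH '/tmp/users.csv' OVERWRITE INTO TABLE users PARTITION (year = 2024);
    /// ```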
18453    pub fn parse_load(&mut self) -> Result<Statement, ParserError> {
18454        if self.dialect.supports_load_extension() {
18455            let extension_name = self.parse_identifier()?;
18456            Ok(Statement::Load { extension_name })
18457        } else if self.parse_keyword(Keyword::DATA) && self.dialect.supports_load_data() {
18458            let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
18459            self.expect_keyword_is(Keyword::INPATH)?;
18460            let inpath = self.parse_literal_string()?;
18461            let overwrite = self.parse_one_of_keywords(&[Keyword::OVERWRITE]).is_some();
18462            self.expect_keyword_is(Keyword::INTO)?;
18463            self.expect_keyword_is(Keyword::TABLE)?;
18464            let table_name = self.parse_object_name(false)?;
18465            let partitioned = self.parse_insert_partition()?;
18466            let table_format = self.parse_load_data_table_format()?;
18467            Ok(Statement::LoadData {
18468                local,
18469                inpath,
18470                overwrite,
18471                table_name,
18472                partitioned,
18473                table_format,
18474            })
18475        } else {
18476            self.expected(
18477                "`DATA` or an extension name after `LOAD`",
18478                self.peek_token(),
18479            )
18480        }
18481    }
18482
18483    /// ```sql
18484    /// OPTIMIZE TABLE [db.]name [ON CLUSTER cluster] [PARTITION partition | PARTITION ID 'partition_id'] [FINAL] [DEDUPLICATE [BY expression]]
18485    /// ```
18486    /// [ClickHouse](https://clickhouse.com/docs/en/sql-reference/statements/optimize)
18487    pub fn parse_optimize_table(&mut self) -> Result<Statement, ParserError> {
18488        self.expect_keyword_is(Keyword::TABLE)?;
18489        let name = self.parse_object_name(false)?;
18490        let on_cluster = self.parse_optional_on_cluster()?;
18491
18492        let partition = if self.parse_keyword(Keyword::PARTITION) {
18493            if self.parse_keyword(Keyword::ID) {
18494                Some(Partition::Identifier(self.parse_identifier()?))
18495            } else {
18496                Some(Partition::Expr(self.parse_expr()?))
18497            }
18498        } else {
18499            None
18500        };
18501
18502        let include_final = self.parse_keyword(Keyword::FINAL);
18503        let deduplicate = if self.parse_keyword(Keyword::DEDUPLICATE) {
18504            if self.parse_keyword(Keyword::BY) {
18505                Some(Deduplicate::ByExpression(self.parse_expr()?))
18506            } else {
18507                Some(Deduplicate::All)
18508            }
18509        } else {
18510            None
18511        };
18512
18513        Ok(Statement::OptimizeTable {
18514            name,
18515            on_cluster,
18516            partition,
18517            include_final,
18518            deduplicate,
18519        })
18520    }
18521
18522    /// ```sql
18523    /// CREATE [ { TEMPORARY | TEMP } ] SEQUENCE [ IF NOT EXISTS ] <sequence_name>
18524    /// ```
18525    ///
18526    /// See [Postgres docs](https://www.postgresql.org/docs/current/sql-createsequence.html) for more details.
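    ///
    /// An invented example exercising most of the supported options:
    /// ```sql
    /// CREATE TEMPORARY SEQUENCE IF NOT EXISTS order_id_seq AS BIGINT
    ///     INCREMENT BY 1 MINVALUE 1 NO MAXVALUE START WITH 1000 CACHE 20 NO CYCLE
    ///     OWNED BY orders.id;
    /// ```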
18527    pub fn parse_create_sequence(&mut self, temporary: bool) -> Result<Statement, ParserError> {
18528        // [ IF NOT EXISTS ]
18529        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
18530        // name
18531        let name = self.parse_object_name(false)?;
18532        // [ AS data_type ]
18533        let mut data_type: Option<DataType> = None;
18534        if self.parse_keywords(&[Keyword::AS]) {
18535            data_type = Some(self.parse_data_type()?)
18536        }
18537        let sequence_options = self.parse_create_sequence_options()?;
18538        // [ OWNED BY { table_name.column_name | NONE } ]
18539        let owned_by = if self.parse_keywords(&[Keyword::OWNED, Keyword::BY]) {
18540            if self.parse_keywords(&[Keyword::NONE]) {
18541                Some(ObjectName::from(vec![Ident::new("NONE")]))
18542            } else {
18543                Some(self.parse_object_name(false)?)
18544            }
18545        } else {
18546            None
18547        };
18548        Ok(Statement::CreateSequence {
18549            temporary,
18550            if_not_exists,
18551            name,
18552            data_type,
18553            sequence_options,
18554            owned_by,
18555        })
18556    }
18557
18558    fn parse_create_sequence_options(&mut self) -> Result<Vec<SequenceOptions>, ParserError> {
18559        let mut sequence_options = vec![];
18560        // [ INCREMENT [ BY ] increment ]
18561        if self.parse_keywords(&[Keyword::INCREMENT]) {
18562            if self.parse_keywords(&[Keyword::BY]) {
18563                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, true));
18564            } else {
18565                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, false));
18566            }
18567        }
18568        // [ MINVALUE minvalue | NO MINVALUE ]
18569        if self.parse_keyword(Keyword::MINVALUE) {
18570            sequence_options.push(SequenceOptions::MinValue(Some(self.parse_number()?)));
18571        } else if self.parse_keywords(&[Keyword::NO, Keyword::MINVALUE]) {
18572            sequence_options.push(SequenceOptions::MinValue(None));
18573        }
18574        // [ MAXVALUE maxvalue | NO MAXVALUE ]
18575        if self.parse_keywords(&[Keyword::MAXVALUE]) {
18576            sequence_options.push(SequenceOptions::MaxValue(Some(self.parse_number()?)));
18577        } else if self.parse_keywords(&[Keyword::NO, Keyword::MAXVALUE]) {
18578            sequence_options.push(SequenceOptions::MaxValue(None));
18579        }
18580
18581        // [ START [ WITH ] start ]
18582        if self.parse_keywords(&[Keyword::START]) {
18583            if self.parse_keywords(&[Keyword::WITH]) {
18584                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, true));
18585            } else {
18586                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, false));
18587            }
18588        }
18589        // [ CACHE cache ]
18590        if self.parse_keywords(&[Keyword::CACHE]) {
18591            sequence_options.push(SequenceOptions::Cache(self.parse_number()?));
18592        }
18593        // [ [ NO ] CYCLE ]
18594        if self.parse_keywords(&[Keyword::NO, Keyword::CYCLE]) {
18595            sequence_options.push(SequenceOptions::Cycle(true));
18596        } else if self.parse_keywords(&[Keyword::CYCLE]) {
18597            sequence_options.push(SequenceOptions::Cycle(false));
18598        }
18599
18600        Ok(sequence_options)
18601    }
18602
18603    /// Parse a `CREATE SERVER` statement.
18604    ///
18605    /// See [Statement::CreateServer]
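    ///
    /// An invented Postgres-style example:
    /// ```sql
    /// CREATE SERVER IF NOT EXISTS films_srv FOREIGN DATA WRAPPER postgres_fdw
    ///     OPTIONS (host 'db.example.com', dbname 'films', port '5432');
    /// ```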
18606    pub fn parse_pg_create_server(&mut self) -> Result<Statement, ParserError> {
18607        let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
18608        let name = self.parse_object_name(false)?;
18609
18610        let server_type = if self.parse_keyword(Keyword::TYPE) {
18611            Some(self.parse_identifier()?)
18612        } else {
18613            None
18614        };
18615
18616        let version = if self.parse_keyword(Keyword::VERSION) {
18617            Some(self.parse_identifier()?)
18618        } else {
18619            None
18620        };
18621
18622        self.expect_keywords(&[Keyword::FOREIGN, Keyword::DATA, Keyword::WRAPPER])?;
18623        let foreign_data_wrapper = self.parse_object_name(false)?;
18624
18625        let mut options = None;
18626        if self.parse_keyword(Keyword::OPTIONS) {
18627            self.expect_token(&Token::LParen)?;
18628            options = Some(self.parse_comma_separated(|p| {
18629                let key = p.parse_identifier()?;
18630                let value = p.parse_identifier()?;
18631                Ok(CreateServerOption { key, value })
18632            })?);
18633            self.expect_token(&Token::RParen)?;
18634        }
18635
18636        Ok(Statement::CreateServer(CreateServerStatement {
18637            name,
18638            if_not_exists: ine,
18639            server_type,
18640            version,
18641            foreign_data_wrapper,
18642            options,
18643        }))
18644    }
18645
18646    /// The index of the first unprocessed token.
18647    pub fn index(&self) -> usize {
18648        self.index
18649    }
18650
18651    /// Parse a named window definition.
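    ///
    /// A named window definition is the `w AS (...)` part of a query such as
    /// (invented example):
    /// ```sql
    /// SELECT sum(amount) OVER w, avg(amount) OVER w
    /// FROM payments
    /// WINDOW w AS (PARTITION BY customer_id ORDER BY paid_at);
    /// ```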
18652    pub fn parse_named_window(&mut self) -> Result<NamedWindowDefinition, ParserError> {
18653        let ident = self.parse_identifier()?;
18654        self.expect_keyword_is(Keyword::AS)?;
18655
18656        let window_expr = if self.consume_token(&Token::LParen) {
18657            NamedWindowExpr::WindowSpec(self.parse_window_spec()?)
18658        } else if self.dialect.supports_window_clause_named_window_reference() {
18659            NamedWindowExpr::NamedWindow(self.parse_identifier()?)
18660        } else {
18661            return self.expected("(", self.peek_token());
18662        };
18663
18664        Ok(NamedWindowDefinition(ident, window_expr))
18665    }
18666
18667    /// Parse a `CREATE PROCEDURE` statement.
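    ///
    /// An invented T-SQL-flavored example of the accepted shape:
    /// ```sql
    /// CREATE OR ALTER PROCEDURE add_user (@id INT, @name VARCHAR(50))
    /// AS BEGIN
    ///     INSERT INTO users VALUES (@id, @name);
    /// END
    /// ```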
18668    pub fn parse_create_procedure(&mut self, or_alter: bool) -> Result<Statement, ParserError> {
18669        let name = self.parse_object_name(false)?;
18670        let params = self.parse_optional_procedure_parameters()?;
18671
18672        let language = if self.parse_keyword(Keyword::LANGUAGE) {
18673            Some(self.parse_identifier()?)
18674        } else {
18675            None
18676        };
18677
18678        self.expect_keyword_is(Keyword::AS)?;
18679
18680        let body = self.parse_conditional_statements(&[Keyword::END])?;
18681
18682        Ok(Statement::CreateProcedure {
18683            name,
18684            or_alter,
18685            params,
18686            language,
18687            body,
18688        })
18689    }
18690
18691    /// Parse a window specification.
18692    pub fn parse_window_spec(&mut self) -> Result<WindowSpec, ParserError> {
18693        let window_name = match self.peek_token().token {
18694            Token::Word(word) if word.keyword == Keyword::NoKeyword => {
18695                self.parse_optional_ident()?
18696            }
18697            _ => None,
18698        };
18699
18700        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
18701            self.parse_comma_separated(Parser::parse_expr)?
18702        } else {
18703            vec![]
18704        };
18705        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
18706            self.parse_comma_separated(Parser::parse_order_by_expr)?
18707        } else {
18708            vec![]
18709        };
18710
18711        let window_frame = if !self.consume_token(&Token::RParen) {
18712            let window_frame = self.parse_window_frame()?;
18713            self.expect_token(&Token::RParen)?;
18714            Some(window_frame)
18715        } else {
18716            None
18717        };
18718        Ok(WindowSpec {
18719            window_name,
18720            partition_by,
18721            order_by,
18722            window_frame,
18723        })
18724    }
18725
18726    /// Parse a `CREATE TYPE` statement.
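    ///
    /// Invented examples of the main variants (enum, composite, and range):
    /// ```sql
    /// CREATE TYPE mood AS ENUM ('sad', 'ok', 'happy');
    /// CREATE TYPE complex AS (r DOUBLE PRECISION, i DOUBLE PRECISION);
    /// CREATE TYPE floatrange AS RANGE (SUBTYPE = FLOAT8);
    /// ```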
18727    pub fn parse_create_type(&mut self) -> Result<Statement, ParserError> {
18728        let name = self.parse_object_name(false)?;
18729
18730        // Check if we have AS keyword
18731        let has_as = self.parse_keyword(Keyword::AS);
18732
18733        if !has_as {
18734            // Two cases: CREATE TYPE name; or CREATE TYPE name (options);
18735            if self.consume_token(&Token::LParen) {
18736                // CREATE TYPE name (options) - SQL definition without AS
18737                let options = self.parse_create_type_sql_definition_options()?;
18738                self.expect_token(&Token::RParen)?;
18739                return Ok(Statement::CreateType {
18740                    name,
18741                    representation: Some(UserDefinedTypeRepresentation::SqlDefinition { options }),
18742                });
18743            }
18744
18745            // CREATE TYPE name; - no representation
18746            return Ok(Statement::CreateType {
18747                name,
18748                representation: None,
18749            });
18750        }
18751
18752        // We have AS keyword
18753        if self.parse_keyword(Keyword::ENUM) {
18754            // CREATE TYPE name AS ENUM (labels)
18755            self.parse_create_type_enum(name)
18756        } else if self.parse_keyword(Keyword::RANGE) {
18757            // CREATE TYPE name AS RANGE (options)
18758            self.parse_create_type_range(name)
18759        } else if self.consume_token(&Token::LParen) {
18760            // CREATE TYPE name AS (attributes) - Composite
18761            self.parse_create_type_composite(name)
18762        } else {
18763            self.expected("ENUM, RANGE, or '(' after AS", self.peek_token())
18764        }
18765    }
18766
18767    /// Parse remainder of `CREATE TYPE AS (attributes)` statement (composite type)
18768    ///
18769    /// See [PostgreSQL](https://www.postgresql.org/docs/current/sql-createtype.html)
18770    fn parse_create_type_composite(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
18771        if self.consume_token(&Token::RParen) {
18772            // Empty composite type
18773            return Ok(Statement::CreateType {
18774                name,
18775                representation: Some(UserDefinedTypeRepresentation::Composite {
18776                    attributes: vec![],
18777                }),
18778            });
18779        }
18780
18781        let mut attributes = vec![];
18782        loop {
18783            let attr_name = self.parse_identifier()?;
18784            let attr_data_type = self.parse_data_type()?;
18785            let attr_collation = if self.parse_keyword(Keyword::COLLATE) {
18786                Some(self.parse_object_name(false)?)
18787            } else {
18788                None
18789            };
18790            attributes.push(UserDefinedTypeCompositeAttributeDef {
18791                name: attr_name,
18792                data_type: attr_data_type,
18793                collation: attr_collation,
18794            });
18795
18796            if !self.consume_token(&Token::Comma) {
18797                break;
18798            }
18799        }
18800        self.expect_token(&Token::RParen)?;
18801
18802        Ok(Statement::CreateType {
18803            name,
18804            representation: Some(UserDefinedTypeRepresentation::Composite { attributes }),
18805        })
18806    }
18807
18808    /// Parse remainder of `CREATE TYPE AS ENUM` statement (see [Statement::CreateType] and [Self::parse_create_type])
18809    ///
18810    /// See [PostgreSQL](https://www.postgresql.org/docs/current/sql-createtype.html)
18811    pub fn parse_create_type_enum(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
18812        self.expect_token(&Token::LParen)?;
18813        let labels = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
18814        self.expect_token(&Token::RParen)?;
18815
18816        Ok(Statement::CreateType {
18817            name,
18818            representation: Some(UserDefinedTypeRepresentation::Enum { labels }),
18819        })
18820    }
18821
18822    /// Parse remainder of `CREATE TYPE AS RANGE` statement
18823    ///
18824    /// See [PostgreSQL](https://www.postgresql.org/docs/current/sql-createtype.html)
18825    fn parse_create_type_range(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
18826        self.expect_token(&Token::LParen)?;
18827        let options = self.parse_comma_separated0(|p| p.parse_range_option(), Token::RParen)?;
18828        self.expect_token(&Token::RParen)?;
18829
18830        Ok(Statement::CreateType {
18831            name,
18832            representation: Some(UserDefinedTypeRepresentation::Range { options }),
18833        })
18834    }
18835
18836    /// Parse a single range option for a `CREATE TYPE AS RANGE` statement
18837    fn parse_range_option(&mut self) -> Result<UserDefinedTypeRangeOption, ParserError> {
18838        let keyword = self.parse_one_of_keywords(&[
18839            Keyword::SUBTYPE,
18840            Keyword::SUBTYPE_OPCLASS,
18841            Keyword::COLLATION,
18842            Keyword::CANONICAL,
18843            Keyword::SUBTYPE_DIFF,
18844            Keyword::MULTIRANGE_TYPE_NAME,
18845        ]);
18846
18847        match keyword {
18848            Some(Keyword::SUBTYPE) => {
18849                self.expect_token(&Token::Eq)?;
18850                let data_type = self.parse_data_type()?;
18851                Ok(UserDefinedTypeRangeOption::Subtype(data_type))
18852            }
18853            Some(Keyword::SUBTYPE_OPCLASS) => {
18854                self.expect_token(&Token::Eq)?;
18855                let name = self.parse_object_name(false)?;
18856                Ok(UserDefinedTypeRangeOption::SubtypeOpClass(name))
18857            }
18858            Some(Keyword::COLLATION) => {
18859                self.expect_token(&Token::Eq)?;
18860                let name = self.parse_object_name(false)?;
18861                Ok(UserDefinedTypeRangeOption::Collation(name))
18862            }
18863            Some(Keyword::CANONICAL) => {
18864                self.expect_token(&Token::Eq)?;
18865                let name = self.parse_object_name(false)?;
18866                Ok(UserDefinedTypeRangeOption::Canonical(name))
18867            }
18868            Some(Keyword::SUBTYPE_DIFF) => {
18869                self.expect_token(&Token::Eq)?;
18870                let name = self.parse_object_name(false)?;
18871                Ok(UserDefinedTypeRangeOption::SubtypeDiff(name))
18872            }
18873            Some(Keyword::MULTIRANGE_TYPE_NAME) => {
18874                self.expect_token(&Token::Eq)?;
18875                let name = self.parse_object_name(false)?;
18876                Ok(UserDefinedTypeRangeOption::MultirangeTypeName(name))
18877            }
18878            _ => self.expected("range option keyword", self.peek_token()),
18879        }
18880    }
18881
18882    /// Parse SQL definition options for CREATE TYPE (options)
18883    fn parse_create_type_sql_definition_options(
18884        &mut self,
18885    ) -> Result<Vec<UserDefinedTypeSqlDefinitionOption>, ParserError> {
18886        self.parse_comma_separated0(|p| p.parse_sql_definition_option(), Token::RParen)
18887    }
18888
18889    /// Parse a single SQL definition option for CREATE TYPE (options)
18890    fn parse_sql_definition_option(
18891        &mut self,
18892    ) -> Result<UserDefinedTypeSqlDefinitionOption, ParserError> {
18893        let keyword = self.parse_one_of_keywords(&[
18894            Keyword::INPUT,
18895            Keyword::OUTPUT,
18896            Keyword::RECEIVE,
18897            Keyword::SEND,
18898            Keyword::TYPMOD_IN,
18899            Keyword::TYPMOD_OUT,
18900            Keyword::ANALYZE,
18901            Keyword::SUBSCRIPT,
18902            Keyword::INTERNALLENGTH,
18903            Keyword::PASSEDBYVALUE,
18904            Keyword::ALIGNMENT,
18905            Keyword::STORAGE,
18906            Keyword::LIKE,
18907            Keyword::CATEGORY,
18908            Keyword::PREFERRED,
18909            Keyword::DEFAULT,
18910            Keyword::ELEMENT,
18911            Keyword::DELIMITER,
18912            Keyword::COLLATABLE,
18913        ]);
18914
18915        match keyword {
18916            Some(Keyword::INPUT) => {
18917                self.expect_token(&Token::Eq)?;
18918                let name = self.parse_object_name(false)?;
18919                Ok(UserDefinedTypeSqlDefinitionOption::Input(name))
18920            }
18921            Some(Keyword::OUTPUT) => {
18922                self.expect_token(&Token::Eq)?;
18923                let name = self.parse_object_name(false)?;
18924                Ok(UserDefinedTypeSqlDefinitionOption::Output(name))
18925            }
18926            Some(Keyword::RECEIVE) => {
18927                self.expect_token(&Token::Eq)?;
18928                let name = self.parse_object_name(false)?;
18929                Ok(UserDefinedTypeSqlDefinitionOption::Receive(name))
18930            }
18931            Some(Keyword::SEND) => {
18932                self.expect_token(&Token::Eq)?;
18933                let name = self.parse_object_name(false)?;
18934                Ok(UserDefinedTypeSqlDefinitionOption::Send(name))
18935            }
18936            Some(Keyword::TYPMOD_IN) => {
18937                self.expect_token(&Token::Eq)?;
18938                let name = self.parse_object_name(false)?;
18939                Ok(UserDefinedTypeSqlDefinitionOption::TypmodIn(name))
18940            }
18941            Some(Keyword::TYPMOD_OUT) => {
18942                self.expect_token(&Token::Eq)?;
18943                let name = self.parse_object_name(false)?;
18944                Ok(UserDefinedTypeSqlDefinitionOption::TypmodOut(name))
18945            }
18946            Some(Keyword::ANALYZE) => {
18947                self.expect_token(&Token::Eq)?;
18948                let name = self.parse_object_name(false)?;
18949                Ok(UserDefinedTypeSqlDefinitionOption::Analyze(name))
18950            }
18951            Some(Keyword::SUBSCRIPT) => {
18952                self.expect_token(&Token::Eq)?;
18953                let name = self.parse_object_name(false)?;
18954                Ok(UserDefinedTypeSqlDefinitionOption::Subscript(name))
18955            }
18956            Some(Keyword::INTERNALLENGTH) => {
18957                self.expect_token(&Token::Eq)?;
18958                if self.parse_keyword(Keyword::VARIABLE) {
18959                    Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
18960                        UserDefinedTypeInternalLength::Variable,
18961                    ))
18962                } else {
18963                    let value = self.parse_literal_uint()?;
18964                    Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
18965                        UserDefinedTypeInternalLength::Fixed(value),
18966                    ))
18967                }
18968            }
18969            Some(Keyword::PASSEDBYVALUE) => Ok(UserDefinedTypeSqlDefinitionOption::PassedByValue),
18970            Some(Keyword::ALIGNMENT) => {
18971                self.expect_token(&Token::Eq)?;
18972                let align_keyword = self.parse_one_of_keywords(&[
18973                    Keyword::CHAR,
18974                    Keyword::INT2,
18975                    Keyword::INT4,
18976                    Keyword::DOUBLE,
18977                ]);
18978                match align_keyword {
18979                    Some(Keyword::CHAR) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
18980                        Alignment::Char,
18981                    )),
18982                    Some(Keyword::INT2) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
18983                        Alignment::Int2,
18984                    )),
18985                    Some(Keyword::INT4) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
18986                        Alignment::Int4,
18987                    )),
18988                    Some(Keyword::DOUBLE) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
18989                        Alignment::Double,
18990                    )),
18991                    _ => self.expected(
18992                        "alignment value (char, int2, int4, or double)",
18993                        self.peek_token(),
18994                    ),
18995                }
18996            }
18997            Some(Keyword::STORAGE) => {
18998                self.expect_token(&Token::Eq)?;
18999                let storage_keyword = self.parse_one_of_keywords(&[
19000                    Keyword::PLAIN,
19001                    Keyword::EXTERNAL,
19002                    Keyword::EXTENDED,
19003                    Keyword::MAIN,
19004                ]);
19005                match storage_keyword {
19006                    Some(Keyword::PLAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
19007                        UserDefinedTypeStorage::Plain,
19008                    )),
19009                    Some(Keyword::EXTERNAL) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
19010                        UserDefinedTypeStorage::External,
19011                    )),
19012                    Some(Keyword::EXTENDED) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
19013                        UserDefinedTypeStorage::Extended,
19014                    )),
19015                    Some(Keyword::MAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
19016                        UserDefinedTypeStorage::Main,
19017                    )),
19018                    _ => self.expected(
19019                        "storage value (plain, external, extended, or main)",
19020                        self.peek_token(),
19021                    ),
19022                }
19023            }
19024            Some(Keyword::LIKE) => {
19025                self.expect_token(&Token::Eq)?;
19026                let name = self.parse_object_name(false)?;
19027                Ok(UserDefinedTypeSqlDefinitionOption::Like(name))
19028            }
19029            Some(Keyword::CATEGORY) => {
19030                self.expect_token(&Token::Eq)?;
19031                let category_str = self.parse_literal_string()?;
19032                let category_char = category_str.chars().next().ok_or_else(|| {
19033                    ParserError::ParserError(
19034                        "CATEGORY value must be a single character".to_string(),
19035                    )
19036                })?;
19037                Ok(UserDefinedTypeSqlDefinitionOption::Category(category_char))
19038            }
19039            Some(Keyword::PREFERRED) => {
19040                self.expect_token(&Token::Eq)?;
19041                let value =
19042                    self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
19043                Ok(UserDefinedTypeSqlDefinitionOption::Preferred(value))
19044            }
19045            Some(Keyword::DEFAULT) => {
19046                self.expect_token(&Token::Eq)?;
19047                let expr = self.parse_expr()?;
19048                Ok(UserDefinedTypeSqlDefinitionOption::Default(expr))
19049            }
19050            Some(Keyword::ELEMENT) => {
19051                self.expect_token(&Token::Eq)?;
19052                let data_type = self.parse_data_type()?;
19053                Ok(UserDefinedTypeSqlDefinitionOption::Element(data_type))
19054            }
19055            Some(Keyword::DELIMITER) => {
19056                self.expect_token(&Token::Eq)?;
19057                let delimiter = self.parse_literal_string()?;
19058                Ok(UserDefinedTypeSqlDefinitionOption::Delimiter(delimiter))
19059            }
19060            Some(Keyword::COLLATABLE) => {
19061                self.expect_token(&Token::Eq)?;
19062                let value =
19063                    self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
19064                Ok(UserDefinedTypeSqlDefinitionOption::Collatable(value))
19065            }
19066            _ => self.expected("SQL definition option keyword", self.peek_token()),
19067        }
19068    }
19069
19070    fn parse_parenthesized_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
19071        self.expect_token(&Token::LParen)?;
19072        let idents = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
19073        self.expect_token(&Token::RParen)?;
19074        Ok(idents)
19075    }
19076
19077    fn parse_column_position(&mut self) -> Result<Option<MySQLColumnPosition>, ParserError> {
19078        if dialect_of!(self is MySqlDialect | GenericDialect) {
19079            if self.parse_keyword(Keyword::FIRST) {
19080                Ok(Some(MySQLColumnPosition::First))
19081            } else if self.parse_keyword(Keyword::AFTER) {
19082                let ident = self.parse_identifier()?;
19083                Ok(Some(MySQLColumnPosition::After(ident)))
19084            } else {
19085                Ok(None)
19086            }
19087        } else {
19088            Ok(None)
19089        }
19090    }
19091
19092    /// Parse [Statement::Print]
19093    fn parse_print(&mut self) -> Result<Statement, ParserError> {
19094        Ok(Statement::Print(PrintStatement {
19095            message: Box::new(self.parse_expr()?),
19096        }))
19097    }
19098
19099    /// Parse [Statement::Return]
19100    fn parse_return(&mut self) -> Result<Statement, ParserError> {
19101        match self.maybe_parse(|p| p.parse_expr())? {
19102            Some(expr) => Ok(Statement::Return(ReturnStatement {
19103                value: Some(ReturnStatementValue::Expr(expr)),
19104            })),
19105            None => Ok(Statement::Return(ReturnStatement { value: None })),
19106        }
19107    }
19108
19109    /// Parse an `EXPORT DATA` statement.
19110    ///
19111    /// See [Statement::ExportData]
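    ///
    /// An invented BigQuery-style example:
    /// ```sql
    /// EXPORT DATA WITH CONNECTION myproject.us.myconnection
    ///     OPTIONS (uri = 'gs://bucket/folder/*.csv', format = 'CSV')
    /// AS SELECT field1, field2 FROM mydataset.table1;
    /// ```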
19112    fn parse_export_data(&mut self) -> Result<Statement, ParserError> {
19113        self.expect_keywords(&[Keyword::EXPORT, Keyword::DATA])?;
19114
19115        let connection = if self.parse_keywords(&[Keyword::WITH, Keyword::CONNECTION]) {
19116            Some(self.parse_object_name(false)?)
19117        } else {
19118            None
19119        };
19120        self.expect_keyword(Keyword::OPTIONS)?;
19121        self.expect_token(&Token::LParen)?;
19122        let options = self.parse_comma_separated(|p| p.parse_sql_option())?;
19123        self.expect_token(&Token::RParen)?;
19124        self.expect_keyword(Keyword::AS)?;
19125        let query = self.parse_query()?;
19126        Ok(Statement::ExportData(ExportData {
19127            options,
19128            query,
19129            connection,
19130        }))
19131    }
19132
19133    fn parse_vacuum(&mut self) -> Result<Statement, ParserError> {
19134        self.expect_keyword(Keyword::VACUUM)?;
19135        let full = self.parse_keyword(Keyword::FULL);
19136        let sort_only = self.parse_keywords(&[Keyword::SORT, Keyword::ONLY]);
19137        let delete_only = self.parse_keywords(&[Keyword::DELETE, Keyword::ONLY]);
19138        let reindex = self.parse_keyword(Keyword::REINDEX);
19139        let recluster = self.parse_keyword(Keyword::RECLUSTER);
19140        let (table_name, threshold, boost) =
19141            match self.maybe_parse(|p| p.parse_object_name(false))? {
19142                Some(table_name) => {
19143                    let threshold = if self.parse_keyword(Keyword::TO) {
19144                        let value = self.parse_value()?;
19145                        self.expect_keyword(Keyword::PERCENT)?;
19146                        Some(value.value)
19147                    } else {
19148                        None
19149                    };
19150                    let boost = self.parse_keyword(Keyword::BOOST);
19151                    (Some(table_name), threshold, boost)
19152                }
19153                _ => (None, None, false),
19154            };
19155        Ok(Statement::Vacuum(VacuumStatement {
19156            full,
19157            sort_only,
19158            delete_only,
19159            reindex,
19160            recluster,
19161            table_name,
19162            threshold,
19163            boost,
19164        }))
19165    }
19166
19167    /// Consume the parser and return its underlying token buffer
19168    pub fn into_tokens(self) -> Vec<TokenWithSpan> {
19169        self.tokens
19170    }
19171
19172    /// Returns true if the next keyword indicates a subquery, i.e., SELECT or WITH
19173    fn peek_sub_query(&mut self) -> bool {
19174        if self
19175            .parse_one_of_keywords(&[Keyword::SELECT, Keyword::WITH])
19176            .is_some()
19177        {
19178            self.prev_token();
19179            return true;
19180        }
19181        false
19182    }
19183
19184    pub(crate) fn parse_show_stmt_options(&mut self) -> Result<ShowStatementOptions, ParserError> {
19185        let show_in;
19186        let mut filter_position = None;
19187        if self.dialect.supports_show_like_before_in() {
19188            if let Some(filter) = self.parse_show_statement_filter()? {
19189                filter_position = Some(ShowStatementFilterPosition::Infix(filter));
19190            }
19191            show_in = self.maybe_parse_show_stmt_in()?;
19192        } else {
19193            show_in = self.maybe_parse_show_stmt_in()?;
19194            if let Some(filter) = self.parse_show_statement_filter()? {
19195                filter_position = Some(ShowStatementFilterPosition::Suffix(filter));
19196            }
19197        }
19198        let starts_with = self.maybe_parse_show_stmt_starts_with()?;
19199        let limit = self.maybe_parse_show_stmt_limit()?;
19200        let from = self.maybe_parse_show_stmt_from()?;
19201        Ok(ShowStatementOptions {
19202            filter_position,
19203            show_in,
19204            starts_with,
19205            limit,
19206            limit_from: from,
19207        })
19208    }
19209
19210    fn maybe_parse_show_stmt_in(&mut self) -> Result<Option<ShowStatementIn>, ParserError> {
19211        let clause = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) {
19212            Some(Keyword::FROM) => ShowStatementInClause::FROM,
19213            Some(Keyword::IN) => ShowStatementInClause::IN,
19214            None => return Ok(None),
19215            _ => return self.expected("FROM or IN", self.peek_token()),
19216        };
19217
19218        let (parent_type, parent_name) = match self.parse_one_of_keywords(&[
19219            Keyword::ACCOUNT,
19220            Keyword::DATABASE,
19221            Keyword::SCHEMA,
19222            Keyword::TABLE,
19223            Keyword::VIEW,
19224        ]) {
19225            // If we see these next keywords it means we don't have a parent name
19226            Some(Keyword::DATABASE)
19227                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
19228                    | self.peek_keyword(Keyword::LIMIT) =>
19229            {
19230                (Some(ShowStatementInParentType::Database), None)
19231            }
19232            Some(Keyword::SCHEMA)
19233                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
19234                    | self.peek_keyword(Keyword::LIMIT) =>
19235            {
19236                (Some(ShowStatementInParentType::Schema), None)
19237            }
19238            Some(parent_kw) => {
19239                // The parent name here is still optional, for example:
19240                // SHOW TABLES IN ACCOUNT, so parsing the object name
19241                // may fail because the statement ends.
19242                let parent_name = self.maybe_parse(|p| p.parse_object_name(false))?;
19243                match parent_kw {
19244                    Keyword::ACCOUNT => (Some(ShowStatementInParentType::Account), parent_name),
19245                    Keyword::DATABASE => (Some(ShowStatementInParentType::Database), parent_name),
19246                    Keyword::SCHEMA => (Some(ShowStatementInParentType::Schema), parent_name),
19247                    Keyword::TABLE => (Some(ShowStatementInParentType::Table), parent_name),
19248                    Keyword::VIEW => (Some(ShowStatementInParentType::View), parent_name),
19249                    _ => {
19250                        return self.expected(
19251                            "one of ACCOUNT, DATABASE, SCHEMA, TABLE or VIEW",
19252                            self.peek_token(),
19253                        )
19254                    }
19255                }
19256            }
19257            None => {
19258                // Parsing MySQL style FROM tbl_name FROM db_name
19259                // which is equivalent to FROM db_name.tbl_name
19260                let mut parent_name = self.parse_object_name(false)?;
19261                if self
19262                    .parse_one_of_keywords(&[Keyword::FROM, Keyword::IN])
19263                    .is_some()
19264                {
19265                    parent_name
19266                        .0
19267                        .insert(0, ObjectNamePart::Identifier(self.parse_identifier()?));
19268                }
19269                (None, Some(parent_name))
19270            }
19271        };
19272
19273        Ok(Some(ShowStatementIn {
19274            clause,
19275            parent_type,
19276            parent_name,
19277        }))
19278    }
19279
19280    fn maybe_parse_show_stmt_starts_with(&mut self) -> Result<Option<Value>, ParserError> {
19281        if self.parse_keywords(&[Keyword::STARTS, Keyword::WITH]) {
19282            Ok(Some(self.parse_value()?.value))
19283        } else {
19284            Ok(None)
19285        }
19286    }
19287
19288    fn maybe_parse_show_stmt_limit(&mut self) -> Result<Option<Expr>, ParserError> {
19289        if self.parse_keyword(Keyword::LIMIT) {
19290            Ok(self.parse_limit()?)
19291        } else {
19292            Ok(None)
19293        }
19294    }
19295
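    /// Parses an optional `FROM <value>` clause used by some SHOW statement
    /// variants, returning `None` when the keyword is not present.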
19296    fn maybe_parse_show_stmt_from(&mut self) -> Result<Option<Value>, ParserError> {
19297        if self.parse_keyword(Keyword::FROM) {
19298            Ok(Some(self.parse_value()?.value))
19299        } else {
19300            Ok(None)
19301        }
19302    }
19303
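    /// Returns true when the parser is currently in the
    /// [`ParserState::ColumnDefinition`] state.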
19304    pub(crate) fn in_column_definition_state(&self) -> bool {
19305        matches!(self.state, ColumnDefinition)
19306    }
19307
19308    /// Parses options provided in key-value format.
19309    ///
19310    /// * `parenthesized` - true if the options are enclosed in parentheses
19311    /// * `end_words` - a list of keywords, any of which indicates the end of the options section
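    ///
    /// For example (illustrative; the actual call sites depend on the dialect),
    /// with `parenthesized = true` an input such as
    ///
    /// ```text
    /// (TYPE = 'CSV' COMPRESSION = AUTO)
    /// ```
    ///
    /// is parsed into a [`KeyValueOptions`] holding the `TYPE` and
    /// `COMPRESSION` options with [`KeyValueOptionsDelimiter::Space`].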
19312    pub(crate) fn parse_key_value_options(
19313        &mut self,
19314        parenthesized: bool,
19315        end_words: &[Keyword],
19316    ) -> Result<KeyValueOptions, ParserError> {
19317        let mut options: Vec<KeyValueOption> = Vec::new();
19318        let mut delimiter = KeyValueOptionsDelimiter::Space;
19319        if parenthesized {
19320            self.expect_token(&Token::LParen)?;
19321        }
19322        loop {
19323            match self.next_token().token {
19324                Token::RParen => {
19325                    if parenthesized {
19326                        break;
19327                    } else {
19328                        return self.expected("another option or EOF", self.peek_token());
19329                    }
19330                }
19331                Token::EOF | Token::SemiColon => break,
19332                Token::Comma => {
19333                    delimiter = KeyValueOptionsDelimiter::Comma;
19334                    continue;
19335                }
19336                Token::Word(w) if !end_words.contains(&w.keyword) => {
19337                    options.push(self.parse_key_value_option(&w)?)
19338                }
19339                Token::Word(w) if end_words.contains(&w.keyword) => {
19340                    self.prev_token();
19341                    break;
19342                }
19343                _ => {
19344                    return self.expected(
19345                        "another option, EOF, SemiColon, Comma or ')'",
19346                        self.peek_token(),
19347                    )
19348                }
19349            };
19350        }
19351
19352        Ok(KeyValueOptions { delimiter, options })
19353    }
19354
19355    /// Parses a `KEY = VALUE` construct based on the specified key
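    ///
    /// The value may be a single-quoted string, a `TRUE`/`FALSE` literal, a
    /// number, a bare word (kept as a placeholder), a parenthesized list of
    /// values, or a nested parenthesized list of key-value options.
    /// Illustrative inputs (not tied to any particular statement):
    ///
    /// ```text
    /// TYPE = 'CSV'
    /// SIZE_LIMIT = 5
    /// CREDENTIALS = (AWS_KEY_ID = 'id' AWS_SECRET_KEY = 'key')
    /// ```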
19356    pub(crate) fn parse_key_value_option(
19357        &mut self,
19358        key: &Word,
19359    ) -> Result<KeyValueOption, ParserError> {
19360        self.expect_token(&Token::Eq)?;
19361        match self.peek_token().token {
19362            Token::SingleQuotedString(_) => Ok(KeyValueOption {
19363                option_name: key.value.clone(),
19364                option_value: KeyValueOptionKind::Single(self.parse_value()?.into()),
19365            }),
19366            Token::Word(word)
19367                if word.keyword == Keyword::TRUE || word.keyword == Keyword::FALSE =>
19368            {
19369                Ok(KeyValueOption {
19370                    option_name: key.value.clone(),
19371                    option_value: KeyValueOptionKind::Single(self.parse_value()?.into()),
19372                })
19373            }
19374            Token::Number(..) => Ok(KeyValueOption {
19375                option_name: key.value.clone(),
19376                option_value: KeyValueOptionKind::Single(self.parse_value()?.into()),
19377            }),
19378            Token::Word(word) => {
19379                self.next_token();
19380                Ok(KeyValueOption {
19381                    option_name: key.value.clone(),
19382                    option_value: KeyValueOptionKind::Single(Value::Placeholder(
19383                        word.value.clone(),
19384                    )),
19385                })
19386            }
19387            Token::LParen => {
19388                // Can be a list of values or a list of key value properties.
19389                // Try to parse a list of values and if that fails, try to parse
19390                // a list of key-value properties.
19391                match self.maybe_parse(|parser| {
19392                    parser.expect_token(&Token::LParen)?;
19393                    let values = parser.parse_comma_separated0(|p| p.parse_value(), Token::RParen);
19394                    parser.expect_token(&Token::RParen)?;
19395                    values
19396                })? {
19397                    Some(values) => {
19398                        let values = values.into_iter().map(|v| v.value).collect();
19399                        Ok(KeyValueOption {
19400                            option_name: key.value.clone(),
19401                            option_value: KeyValueOptionKind::Multi(values),
19402                        })
19403                    }
19404                    None => Ok(KeyValueOption {
19405                        option_name: key.value.clone(),
19406                        option_value: KeyValueOptionKind::KeyValueOptions(Box::new(
19407                            self.parse_key_value_options(true, &[])?,
19408                        )),
19409                    }),
19410                }
19411            }
19412            _ => self.expected("option value", self.peek_token()),
19413        }
19414    }
19415
19416    /// Parses a RESET statement
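    ///
    /// For example (illustrative): `RESET ALL` or `RESET search_path`.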
19417    fn parse_reset(&mut self) -> Result<ResetStatement, ParserError> {
19418        if self.parse_keyword(Keyword::ALL) {
19419            return Ok(ResetStatement { reset: Reset::ALL });
19420        }
19421
19422        let obj = self.parse_object_name(false)?;
19423        Ok(ResetStatement {
19424            reset: Reset::ConfigurationParameter(obj),
19425        })
19426    }
19427}
19428
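/// Wraps `expr` in an [`Expr::Prefixed`] node when `prefix` is `Some`,
/// otherwise returns `expr` unchanged.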
19429fn maybe_prefixed_expr(expr: Expr, prefix: Option<Ident>) -> Expr {
19430    if let Some(prefix) = prefix {
19431        Expr::Prefixed {
19432            prefix,
19433            value: Box::new(expr),
19434        }
19435    } else {
19436        expr
19437    }
19438}
19439
19440impl Word {
19441    /// Convert a reference to this word into an [`Ident`] by cloning the value.
19442    ///
19443    /// Use this method when you need to keep the original `Word` around.
19444    /// If you can consume the `Word`, prefer [`into_ident`](Self::into_ident) instead
19445    /// to avoid cloning.
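    ///
    /// A minimal illustrative sketch (in the parser the span normally comes
    /// from the word's token location rather than [`Span::empty()`]):
    ///
    /// ```ignore
    /// let ident = word.to_ident(Span::empty());
    /// assert_eq!(ident.value, word.value);
    /// ```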
19446    pub fn to_ident(&self, span: Span) -> Ident {
19447        Ident {
19448            value: self.value.clone(),
19449            quote_style: self.quote_style,
19450            span,
19451        }
19452    }
19453
19454    /// Convert this word into an [`Ident`] identifier, consuming the `Word`.
19455    ///
19456    /// This avoids cloning the string value. If you need to keep the original
19457    /// `Word`, use [`to_ident`](Self::to_ident) instead.
19458    pub fn into_ident(self, span: Span) -> Ident {
19459        Ident {
19460            value: self.value,
19461            quote_style: self.quote_style,
19462            span,
19463        }
19464    }
19465}
19466
19467#[cfg(test)]
19468mod tests {
19469    use crate::test_utils::{all_dialects, TestedDialects};
19470
19471    use super::*;
19472
19473    #[test]
19474    fn test_prev_index() {
19475        let sql = "SELECT version";
19476        all_dialects().run_parser_method(sql, |parser| {
19477            assert_eq!(parser.peek_token(), Token::make_keyword("SELECT"));
19478            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
19479            parser.prev_token();
19480            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
19481            assert_eq!(parser.next_token(), Token::make_word("version", None));
19482            parser.prev_token();
19483            assert_eq!(parser.peek_token(), Token::make_word("version", None));
19484            assert_eq!(parser.next_token(), Token::make_word("version", None));
19485            assert_eq!(parser.peek_token(), Token::EOF);
19486            parser.prev_token();
19487            assert_eq!(parser.next_token(), Token::make_word("version", None));
19488            assert_eq!(parser.next_token(), Token::EOF);
19489            assert_eq!(parser.next_token(), Token::EOF);
19490            parser.prev_token();
19491        });
19492    }
19493
19494    #[test]
19495    fn test_peek_tokens() {
19496        all_dialects().run_parser_method("SELECT foo AS bar FROM baz", |parser| {
19497            assert!(matches!(
19498                parser.peek_tokens(),
19499                [Token::Word(Word {
19500                    keyword: Keyword::SELECT,
19501                    ..
19502                })]
19503            ));
19504
19505            assert!(matches!(
19506                parser.peek_tokens(),
19507                [
19508                    Token::Word(Word {
19509                        keyword: Keyword::SELECT,
19510                        ..
19511                    }),
19512                    Token::Word(_),
19513                    Token::Word(Word {
19514                        keyword: Keyword::AS,
19515                        ..
19516                    }),
19517                ]
19518            ));
19519
19520            for _ in 0..4 {
19521                parser.next_token();
19522            }
19523
19524            assert!(matches!(
19525                parser.peek_tokens(),
19526                [
19527                    Token::Word(Word {
19528                        keyword: Keyword::FROM,
19529                        ..
19530                    }),
19531                    Token::Word(_),
19532                    Token::EOF,
19533                    Token::EOF,
19534                ]
19535            ))
19536        })
19537    }
19538
19539    #[cfg(test)]
19540    mod test_parse_data_type {
19541        use crate::ast::{
19542            CharLengthUnits, CharacterLength, DataType, ExactNumberInfo, ObjectName, TimezoneInfo,
19543        };
19544        use crate::dialect::{AnsiDialect, GenericDialect, PostgreSqlDialect};
19545        use crate::test_utils::TestedDialects;
19546
19547        macro_rules! test_parse_data_type {
19548            ($dialect:expr, $input:expr, $expected_type:expr $(,)?) => {{
19549                $dialect.run_parser_method(&*$input, |parser| {
19550                    let data_type = parser.parse_data_type().unwrap();
19551                    assert_eq!($expected_type, data_type);
19552                    assert_eq!($input.to_string(), data_type.to_string());
19553                });
19554            }};
19555        }
19556
19557        #[test]
19558        fn test_ansii_character_string_types() {
19559            // Character string types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#character-string-type>
19560            let dialect =
19561                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
19562
19563            test_parse_data_type!(dialect, "CHARACTER", DataType::Character(None));
19564
19565            test_parse_data_type!(
19566                dialect,
19567                "CHARACTER(20)",
19568                DataType::Character(Some(CharacterLength::IntegerLength {
19569                    length: 20,
19570                    unit: None
19571                }))
19572            );
19573
19574            test_parse_data_type!(
19575                dialect,
19576                "CHARACTER(20 CHARACTERS)",
19577                DataType::Character(Some(CharacterLength::IntegerLength {
19578                    length: 20,
19579                    unit: Some(CharLengthUnits::Characters)
19580                }))
19581            );
19582
19583            test_parse_data_type!(
19584                dialect,
19585                "CHARACTER(20 OCTETS)",
19586                DataType::Character(Some(CharacterLength::IntegerLength {
19587                    length: 20,
19588                    unit: Some(CharLengthUnits::Octets)
19589                }))
19590            );
19591
19592            test_parse_data_type!(dialect, "CHAR", DataType::Char(None));
19593
19594            test_parse_data_type!(
19595                dialect,
19596                "CHAR(20)",
19597                DataType::Char(Some(CharacterLength::IntegerLength {
19598                    length: 20,
19599                    unit: None
19600                }))
19601            );
19602
19603            test_parse_data_type!(
19604                dialect,
19605                "CHAR(20 CHARACTERS)",
19606                DataType::Char(Some(CharacterLength::IntegerLength {
19607                    length: 20,
19608                    unit: Some(CharLengthUnits::Characters)
19609                }))
19610            );
19611
19612            test_parse_data_type!(
19613                dialect,
19614                "CHAR(20 OCTETS)",
19615                DataType::Char(Some(CharacterLength::IntegerLength {
19616                    length: 20,
19617                    unit: Some(CharLengthUnits::Octets)
19618                }))
19619            );
19620
19621            test_parse_data_type!(
19622                dialect,
19623                "CHARACTER VARYING(20)",
19624                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
19625                    length: 20,
19626                    unit: None
19627                }))
19628            );
19629
19630            test_parse_data_type!(
19631                dialect,
19632                "CHARACTER VARYING(20 CHARACTERS)",
19633                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
19634                    length: 20,
19635                    unit: Some(CharLengthUnits::Characters)
19636                }))
19637            );
19638
19639            test_parse_data_type!(
19640                dialect,
19641                "CHARACTER VARYING(20 OCTETS)",
19642                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
19643                    length: 20,
19644                    unit: Some(CharLengthUnits::Octets)
19645                }))
19646            );
19647
19648            test_parse_data_type!(
19649                dialect,
19650                "CHAR VARYING(20)",
19651                DataType::CharVarying(Some(CharacterLength::IntegerLength {
19652                    length: 20,
19653                    unit: None
19654                }))
19655            );
19656
19657            test_parse_data_type!(
19658                dialect,
19659                "CHAR VARYING(20 CHARACTERS)",
19660                DataType::CharVarying(Some(CharacterLength::IntegerLength {
19661                    length: 20,
19662                    unit: Some(CharLengthUnits::Characters)
19663                }))
19664            );
19665
19666            test_parse_data_type!(
19667                dialect,
19668                "CHAR VARYING(20 OCTETS)",
19669                DataType::CharVarying(Some(CharacterLength::IntegerLength {
19670                    length: 20,
19671                    unit: Some(CharLengthUnits::Octets)
19672                }))
19673            );
19674
19675            test_parse_data_type!(
19676                dialect,
19677                "VARCHAR(20)",
19678                DataType::Varchar(Some(CharacterLength::IntegerLength {
19679                    length: 20,
19680                    unit: None
19681                }))
19682            );
19683        }
19684
19685        #[test]
19686        fn test_ansii_character_large_object_types() {
19687            // Character large object types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#character-large-object-length>
19688            let dialect =
19689                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
19690
19691            test_parse_data_type!(
19692                dialect,
19693                "CHARACTER LARGE OBJECT",
19694                DataType::CharacterLargeObject(None)
19695            );
19696            test_parse_data_type!(
19697                dialect,
19698                "CHARACTER LARGE OBJECT(20)",
19699                DataType::CharacterLargeObject(Some(20))
19700            );
19701
19702            test_parse_data_type!(
19703                dialect,
19704                "CHAR LARGE OBJECT",
19705                DataType::CharLargeObject(None)
19706            );
19707            test_parse_data_type!(
19708                dialect,
19709                "CHAR LARGE OBJECT(20)",
19710                DataType::CharLargeObject(Some(20))
19711            );
19712
19713            test_parse_data_type!(dialect, "CLOB", DataType::Clob(None));
19714            test_parse_data_type!(dialect, "CLOB(20)", DataType::Clob(Some(20)));
19715        }
19716
19717        #[test]
19718        fn test_parse_custom_types() {
19719            let dialect =
19720                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
19721
19722            test_parse_data_type!(
19723                dialect,
19724                "GEOMETRY",
19725                DataType::Custom(ObjectName::from(vec!["GEOMETRY".into()]), vec![])
19726            );
19727
19728            test_parse_data_type!(
19729                dialect,
19730                "GEOMETRY(POINT)",
19731                DataType::Custom(
19732                    ObjectName::from(vec!["GEOMETRY".into()]),
19733                    vec!["POINT".to_string()]
19734                )
19735            );
19736
19737            test_parse_data_type!(
19738                dialect,
19739                "GEOMETRY(POINT, 4326)",
19740                DataType::Custom(
19741                    ObjectName::from(vec!["GEOMETRY".into()]),
19742                    vec!["POINT".to_string(), "4326".to_string()]
19743                )
19744            );
19745        }
19746
19747        #[test]
19748        fn test_ansii_exact_numeric_types() {
19749            // Exact numeric types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#exact-numeric-type>
19750            let dialect = TestedDialects::new(vec![
19751                Box::new(GenericDialect {}),
19752                Box::new(AnsiDialect {}),
19753                Box::new(PostgreSqlDialect {}),
19754            ]);
19755
19756            test_parse_data_type!(dialect, "NUMERIC", DataType::Numeric(ExactNumberInfo::None));
19757
19758            test_parse_data_type!(
19759                dialect,
19760                "NUMERIC(2)",
19761                DataType::Numeric(ExactNumberInfo::Precision(2))
19762            );
19763
19764            test_parse_data_type!(
19765                dialect,
19766                "NUMERIC(2,10)",
19767                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(2, 10))
19768            );
19769
19770            test_parse_data_type!(dialect, "DECIMAL", DataType::Decimal(ExactNumberInfo::None));
19771
19772            test_parse_data_type!(
19773                dialect,
19774                "DECIMAL(2)",
19775                DataType::Decimal(ExactNumberInfo::Precision(2))
19776            );
19777
19778            test_parse_data_type!(
19779                dialect,
19780                "DECIMAL(2,10)",
19781                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(2, 10))
19782            );
19783
19784            test_parse_data_type!(dialect, "DEC", DataType::Dec(ExactNumberInfo::None));
19785
19786            test_parse_data_type!(
19787                dialect,
19788                "DEC(2)",
19789                DataType::Dec(ExactNumberInfo::Precision(2))
19790            );
19791
19792            test_parse_data_type!(
19793                dialect,
19794                "DEC(2,10)",
19795                DataType::Dec(ExactNumberInfo::PrecisionAndScale(2, 10))
19796            );
19797
19798            // Test negative scale values.
19799            test_parse_data_type!(
19800                dialect,
19801                "NUMERIC(10,-2)",
19802                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -2))
19803            );
19804
19805            test_parse_data_type!(
19806                dialect,
19807                "DECIMAL(1000,-10)",
19808                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(1000, -10))
19809            );
19810
19811            test_parse_data_type!(
19812                dialect,
19813                "DEC(5,-1000)",
19814                DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -1000))
19815            );
19816
19817            test_parse_data_type!(
19818                dialect,
19819                "NUMERIC(10,-5)",
19820                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -5))
19821            );
19822
19823            test_parse_data_type!(
19824                dialect,
19825                "DECIMAL(20,-10)",
19826                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(20, -10))
19827            );
19828
19829            test_parse_data_type!(
19830                dialect,
19831                "DEC(5,-2)",
19832                DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -2))
19833            );
19834
19835            dialect.run_parser_method("NUMERIC(10,+5)", |parser| {
19836                let data_type = parser.parse_data_type().unwrap();
19837                assert_eq!(
19838                    DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, 5)),
19839                    data_type
19840                );
19841                // Note: Explicit '+' sign is not preserved in output, which is correct
19842                assert_eq!("NUMERIC(10,5)", data_type.to_string());
19843            });
19844        }
19845
19846        #[test]
19847        fn test_ansii_date_type() {
19848            // Datetime types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#datetime-type>
19849            let dialect =
19850                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
19851
19852            test_parse_data_type!(dialect, "DATE", DataType::Date);
19853
19854            test_parse_data_type!(dialect, "TIME", DataType::Time(None, TimezoneInfo::None));
19855
19856            test_parse_data_type!(
19857                dialect,
19858                "TIME(6)",
19859                DataType::Time(Some(6), TimezoneInfo::None)
19860            );
19861
19862            test_parse_data_type!(
19863                dialect,
19864                "TIME WITH TIME ZONE",
19865                DataType::Time(None, TimezoneInfo::WithTimeZone)
19866            );
19867
19868            test_parse_data_type!(
19869                dialect,
19870                "TIME(6) WITH TIME ZONE",
19871                DataType::Time(Some(6), TimezoneInfo::WithTimeZone)
19872            );
19873
19874            test_parse_data_type!(
19875                dialect,
19876                "TIME WITHOUT TIME ZONE",
19877                DataType::Time(None, TimezoneInfo::WithoutTimeZone)
19878            );
19879
19880            test_parse_data_type!(
19881                dialect,
19882                "TIME(6) WITHOUT TIME ZONE",
19883                DataType::Time(Some(6), TimezoneInfo::WithoutTimeZone)
19884            );
19885
19886            test_parse_data_type!(
19887                dialect,
19888                "TIMESTAMP",
19889                DataType::Timestamp(None, TimezoneInfo::None)
19890            );
19891
19892            test_parse_data_type!(
19893                dialect,
19894                "TIMESTAMP(22)",
19895                DataType::Timestamp(Some(22), TimezoneInfo::None)
19896            );
19897
19898            test_parse_data_type!(
19899                dialect,
19900                "TIMESTAMP(22) WITH TIME ZONE",
19901                DataType::Timestamp(Some(22), TimezoneInfo::WithTimeZone)
19902            );
19903
19904            test_parse_data_type!(
19905                dialect,
19906                "TIMESTAMP(33) WITHOUT TIME ZONE",
19907                DataType::Timestamp(Some(33), TimezoneInfo::WithoutTimeZone)
19908            );
19909        }
19910    }
19911
19912    #[test]
19913    fn test_parse_schema_name() {
19914        // The expected name should serialize back to the input string, which is why the macro does not take a separate expected serialization
19915        macro_rules! test_parse_schema_name {
19916            ($input:expr, $expected_name:expr $(,)?) => {{
19917                all_dialects().run_parser_method(&*$input, |parser| {
19918                    let schema_name = parser.parse_schema_name().unwrap();
19919                    // Validate that the structure is the same as expected
19920                    assert_eq!(schema_name, $expected_name);
19921                    // Validate that the input and the expected structure serialization are the same
19922                    assert_eq!(schema_name.to_string(), $input.to_string());
19923                });
19924            }};
19925        }
19926
19927        let dummy_name = ObjectName::from(vec![Ident::new("dummy_name")]);
19928        let dummy_authorization = Ident::new("dummy_authorization");
19929
19930        test_parse_schema_name!(
19931            format!("{dummy_name}"),
19932            SchemaName::Simple(dummy_name.clone())
19933        );
19934
19935        test_parse_schema_name!(
19936            format!("AUTHORIZATION {dummy_authorization}"),
19937            SchemaName::UnnamedAuthorization(dummy_authorization.clone()),
19938        );
19939        test_parse_schema_name!(
19940            format!("{dummy_name} AUTHORIZATION {dummy_authorization}"),
19941            SchemaName::NamedAuthorization(dummy_name.clone(), dummy_authorization.clone()),
19942        );
19943    }
19944
19945    #[test]
19946    fn mysql_parse_index_table_constraint() {
19947        macro_rules! test_parse_table_constraint {
19948            ($dialect:expr, $input:expr, $expected:expr $(,)?) => {{
19949                $dialect.run_parser_method(&*$input, |parser| {
19950                    let constraint = parser.parse_optional_table_constraint().unwrap().unwrap();
19951                    // Validate that the structure is the same as expected
19952                    assert_eq!(constraint, $expected);
19953                    // Validate that the input and the expected structure serialization are the same
19954                    assert_eq!(constraint.to_string(), $input.to_string());
19955                });
19956            }};
19957        }
19958
19959        fn mk_expected_col(name: &str) -> IndexColumn {
19960            IndexColumn {
19961                column: OrderByExpr {
19962                    expr: Expr::Identifier(name.into()),
19963                    options: OrderByOptions {
19964                        asc: None,
19965                        nulls_first: None,
19966                    },
19967                    with_fill: None,
19968                },
19969                operator_class: None,
19970            }
19971        }
19972
19973        let dialect =
19974            TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(MySqlDialect {})]);
19975
19976        test_parse_table_constraint!(
19977            dialect,
19978            "INDEX (c1)",
19979            IndexConstraint {
19980                display_as_key: false,
19981                name: None,
19982                index_type: None,
19983                columns: vec![mk_expected_col("c1")],
19984                index_options: vec![],
19985            }
19986            .into()
19987        );
19988
19989        test_parse_table_constraint!(
19990            dialect,
19991            "KEY (c1)",
19992            IndexConstraint {
19993                display_as_key: true,
19994                name: None,
19995                index_type: None,
19996                columns: vec![mk_expected_col("c1")],
19997                index_options: vec![],
19998            }
19999            .into()
20000        );
20001
20002        test_parse_table_constraint!(
20003            dialect,
20004            "INDEX 'index' (c1, c2)",
20005            TableConstraint::Index(IndexConstraint {
20006                display_as_key: false,
20007                name: Some(Ident::with_quote('\'', "index")),
20008                index_type: None,
20009                columns: vec![mk_expected_col("c1"), mk_expected_col("c2")],
20010                index_options: vec![],
20011            })
20012        );
20013
20014        test_parse_table_constraint!(
20015            dialect,
20016            "INDEX USING BTREE (c1)",
20017            IndexConstraint {
20018                display_as_key: false,
20019                name: None,
20020                index_type: Some(IndexType::BTree),
20021                columns: vec![mk_expected_col("c1")],
20022                index_options: vec![],
20023            }
20024            .into()
20025        );
20026
20027        test_parse_table_constraint!(
20028            dialect,
20029            "INDEX USING HASH (c1)",
20030            IndexConstraint {
20031                display_as_key: false,
20032                name: None,
20033                index_type: Some(IndexType::Hash),
20034                columns: vec![mk_expected_col("c1")],
20035                index_options: vec![],
20036            }
20037            .into()
20038        );
20039
20040        test_parse_table_constraint!(
20041            dialect,
20042            "INDEX idx_name USING BTREE (c1)",
20043            IndexConstraint {
20044                display_as_key: false,
20045                name: Some(Ident::new("idx_name")),
20046                index_type: Some(IndexType::BTree),
20047                columns: vec![mk_expected_col("c1")],
20048                index_options: vec![],
20049            }
20050            .into()
20051        );
20052
20053        test_parse_table_constraint!(
20054            dialect,
20055            "INDEX idx_name USING HASH (c1)",
20056            IndexConstraint {
20057                display_as_key: false,
20058                name: Some(Ident::new("idx_name")),
20059                index_type: Some(IndexType::Hash),
20060                columns: vec![mk_expected_col("c1")],
20061                index_options: vec![],
20062            }
20063            .into()
20064        );
20065    }
20066
20067    #[test]
20068    fn test_tokenizer_error_loc() {
20069        let sql = "foo '";
20070        let ast = Parser::parse_sql(&GenericDialect, sql);
20071        assert_eq!(
20072            ast,
20073            Err(ParserError::TokenizerError(
20074                "Unterminated string literal at Line: 1, Column: 5".to_string()
20075            ))
20076        );
20077    }
20078
20079    #[test]
20080    fn test_parser_error_loc() {
20081        let sql = "SELECT this is a syntax error";
20082        let ast = Parser::parse_sql(&GenericDialect, sql);
20083        assert_eq!(
20084            ast,
20085            Err(ParserError::ParserError(
20086                "Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS, found: a at Line: 1, Column: 16"
20087                    .to_string()
20088            ))
20089        );
20090    }
20091
20092    #[test]
20093    fn test_nested_explain_error() {
20094        let sql = "EXPLAIN EXPLAIN SELECT 1";
20095        let ast = Parser::parse_sql(&GenericDialect, sql);
20096        assert_eq!(
20097            ast,
20098            Err(ParserError::ParserError(
20099                "Explain must be root of the plan".to_string()
20100            ))
20101        );
20102    }
20103
20104    #[test]
20105    fn test_parse_multipart_identifier_positive() {
20106        let dialect = TestedDialects::new(vec![Box::new(GenericDialect {})]);
20107
20108        // parse multipart with quotes
20109        let expected = vec![
20110            Ident {
20111                value: "CATALOG".to_string(),
20112                quote_style: None,
20113                span: Span::empty(),
20114            },
20115            Ident {
20116                value: "F(o)o. \"bar".to_string(),
20117                quote_style: Some('"'),
20118                span: Span::empty(),
20119            },
20120            Ident {
20121                value: "table".to_string(),
20122                quote_style: None,
20123                span: Span::empty(),
20124            },
20125        ];
20126        dialect.run_parser_method(r#"CATALOG."F(o)o. ""bar".table"#, |parser| {
20127            let actual = parser.parse_multipart_identifier().unwrap();
20128            assert_eq!(expected, actual);
20129        });
20130
20131        // allow whitespace between ident parts
20132        let expected = vec![
20133            Ident {
20134                value: "CATALOG".to_string(),
20135                quote_style: None,
20136                span: Span::empty(),
20137            },
20138            Ident {
20139                value: "table".to_string(),
20140                quote_style: None,
20141                span: Span::empty(),
20142            },
20143        ];
20144        dialect.run_parser_method("CATALOG . table", |parser| {
20145            let actual = parser.parse_multipart_identifier().unwrap();
20146            assert_eq!(expected, actual);
20147        });
20148    }
20149
20150    #[test]
20151    fn test_parse_multipart_identifier_negative() {
20152        macro_rules! test_parse_multipart_identifier_error {
20153            ($input:expr, $expected_err:expr $(,)?) => {{
20154                all_dialects().run_parser_method(&*$input, |parser| {
20155                    let actual_err = parser.parse_multipart_identifier().unwrap_err();
20156                    assert_eq!(actual_err.to_string(), $expected_err);
20157                });
20158            }};
20159        }
20160
20161        test_parse_multipart_identifier_error!(
20162            "",
20163            "sql parser error: Empty input when parsing identifier",
20164        );
20165
20166        test_parse_multipart_identifier_error!(
20167            "*schema.table",
20168            "sql parser error: Unexpected token in identifier: *",
20169        );
20170
20171        test_parse_multipart_identifier_error!(
20172            "schema.table*",
20173            "sql parser error: Unexpected token in identifier: *",
20174        );
20175
20176        test_parse_multipart_identifier_error!(
20177            "schema.table.",
20178            "sql parser error: Trailing period in identifier",
20179        );
20180
20181        test_parse_multipart_identifier_error!(
20182            "schema.*",
20183            "sql parser error: Unexpected token following period in identifier: *",
20184        );
20185    }
20186
20187    #[test]
20188    fn test_mysql_partition_selection() {
20189        let sql = "SELECT * FROM employees PARTITION (p0, p2)";
20190        let expected = vec!["p0", "p2"];
20191
20192        let ast: Vec<Statement> = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
20193        assert_eq!(ast.len(), 1);
20194        if let Statement::Query(v) = &ast[0] {
20195            if let SetExpr::Select(select) = &*v.body {
20196                assert_eq!(select.from.len(), 1);
20197                let from: &TableWithJoins = &select.from[0];
20198                let table_factor = &from.relation;
20199                if let TableFactor::Table { partitions, .. } = table_factor {
20200                    let actual: Vec<&str> = partitions
20201                        .iter()
20202                        .map(|ident| ident.value.as_str())
20203                        .collect();
20204                    assert_eq!(expected, actual);
20205                }
20206            }
20207        } else {
20208            panic!("fail to parse mysql partition selection");
20209        }
20210    }
20211
20212    #[test]
20213    fn test_replace_into_placeholders() {
20214        let sql = "REPLACE INTO t (a) VALUES (&a)";
20215
20216        assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
20217    }
20218
20219    #[test]
20220    fn test_replace_into_set_placeholder() {
20221        let sql = "REPLACE INTO t SET ?";
20222
20223        assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
20224    }
20225
20226    #[test]
20227    fn test_replace_incomplete() {
20228        let sql = r#"REPLACE"#;
20229
20230        assert!(Parser::parse_sql(&MySqlDialect {}, sql).is_err());
20231    }
20232
20233    #[test]
20234    fn test_placeholder_invalid_whitespace() {
20235        for w in ["  ", "/*invalid*/"] {
20236            let sql = format!("\nSELECT\n  :{w}fooBar");
20237            assert!(Parser::parse_sql(&GenericDialect, &sql).is_err());
20238        }
20239    }
20240}