sqlparser/parser/mod.rs

1// Licensed under the Apache License, Version 2.0 (the "License");
2// you may not use this file except in compliance with the License.
3// You may obtain a copy of the License at
4//
5// http://www.apache.org/licenses/LICENSE-2.0
6//
7// Unless required by applicable law or agreed to in writing, software
8// distributed under the License is distributed on an "AS IS" BASIS,
9// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
10// See the License for the specific language governing permissions and
11// limitations under the License.
12
13//! SQL Parser
14
15#[cfg(not(feature = "std"))]
16use alloc::{
17    boxed::Box,
18    format,
19    string::{String, ToString},
20    vec,
21    vec::Vec,
22};
23use core::{
24    fmt::{self, Display},
25    str::FromStr,
26};
27use helpers::attached_token::AttachedToken;
28
29use log::debug;
30
31use recursion::RecursionCounter;
32use IsLateral::*;
33use IsOptional::*;
34
35use crate::ast::helpers::stmt_create_table::{CreateTableBuilder, CreateTableConfiguration};
36use crate::ast::Statement::CreatePolicy;
37use crate::ast::*;
38use crate::dialect::*;
39use crate::keywords::{Keyword, ALL_KEYWORDS};
40use crate::tokenizer::*;
41
42mod alter;
43
44#[derive(Debug, Clone, PartialEq, Eq)]
45pub enum ParserError {
46    TokenizerError(String),
47    ParserError(String),
48    RecursionLimitExceeded,
49}
50
51// Use `Parser::expected` instead, if possible
52macro_rules! parser_err {
53    ($MSG:expr, $loc:expr) => {
54        Err(ParserError::ParserError(format!("{}{}", $MSG, $loc)))
55    };
56}
57
58#[cfg(feature = "std")]
/// Implementation of [`RecursionCounter`] when std is available
60mod recursion {
61    use std::cell::Cell;
62    use std::rc::Rc;
63
64    use super::ParserError;
65
    /// Tracks the remaining recursion depth. The value is decremented on
    /// each call to [`RecursionCounter::try_decrease()`]; when it reaches
    /// 0, an error is returned.
    ///
    /// Note: uses an [`std::rc::Rc`] and [`std::cell::Cell`] to satisfy the
    /// Rust borrow checker, so that the automatic [`DepthGuard`] can hold a
    /// shared reference to the counter and restore the depth on drop.
73    ///
74    /// Note: when "recursive-protection" feature is enabled, this crate uses additional stack overflow protection
75    /// for some of its recursive methods. See [`recursive::recursive`] for more information.
76    pub(crate) struct RecursionCounter {
77        remaining_depth: Rc<Cell<usize>>,
78    }
79
80    impl RecursionCounter {
81        /// Creates a [`RecursionCounter`] with the specified maximum
82        /// depth
83        pub fn new(remaining_depth: usize) -> Self {
84            Self {
85                remaining_depth: Rc::new(remaining_depth.into()),
86            }
87        }
88
89        /// Decreases the remaining depth by 1.
90        ///
91        /// Returns [`Err`] if the remaining depth falls to 0.
92        ///
        /// On success, returns a [`DepthGuard`] which adds 1 back to the
        /// remaining depth when it is dropped.
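        ///
        /// A typical use inside a recursive parse method looks like this
        /// (sketch; the guard restores the depth when it goes out of scope):
        ///
        /// ```text
        /// let _guard = self.recursion_counter.try_decrease()?;
        /// // ... recursive parsing work ...
        /// ```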
95        pub fn try_decrease(&self) -> Result<DepthGuard, ParserError> {
96            let old_value = self.remaining_depth.get();
97            // ran out of space
98            if old_value == 0 {
99                Err(ParserError::RecursionLimitExceeded)
100            } else {
101                self.remaining_depth.set(old_value - 1);
102                Ok(DepthGuard::new(Rc::clone(&self.remaining_depth)))
103            }
104        }
105    }
106
107    /// Guard that increases the remaining depth by 1 on drop
108    pub struct DepthGuard {
109        remaining_depth: Rc<Cell<usize>>,
110    }
111
112    impl DepthGuard {
113        fn new(remaining_depth: Rc<Cell<usize>>) -> Self {
114            Self { remaining_depth }
115        }
116    }
117    impl Drop for DepthGuard {
118        fn drop(&mut self) {
119            let old_value = self.remaining_depth.get();
120            self.remaining_depth.set(old_value + 1);
121        }
122    }
123}
124
125#[cfg(not(feature = "std"))]
126mod recursion {
    /// Implementation of [`RecursionCounter`] when std is NOT available (and
    /// therefore does not guard against stack overflow).
129    ///
130    /// Has the same API as the std [`RecursionCounter`] implementation
131    /// but does not actually limit stack depth.
132    pub(crate) struct RecursionCounter {}
133
134    impl RecursionCounter {
135        pub fn new(_remaining_depth: usize) -> Self {
136            Self {}
137        }
138        pub fn try_decrease(&self) -> Result<DepthGuard, super::ParserError> {
139            Ok(DepthGuard {})
140        }
141    }
142
143    pub struct DepthGuard {}
144}
145
146#[derive(PartialEq, Eq)]
147pub enum IsOptional {
148    Optional,
149    Mandatory,
150}
151
152pub enum IsLateral {
153    Lateral,
154    NotLateral,
155}
156
157pub enum WildcardExpr {
158    Expr(Expr),
159    QualifiedWildcard(ObjectName),
160    Wildcard,
161}
162
163impl From<TokenizerError> for ParserError {
164    fn from(e: TokenizerError) -> Self {
165        ParserError::TokenizerError(e.to_string())
166    }
167}
168
169impl fmt::Display for ParserError {
170    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
171        write!(
172            f,
173            "sql parser error: {}",
174            match self {
175                ParserError::TokenizerError(s) => s,
176                ParserError::ParserError(s) => s,
177                ParserError::RecursionLimitExceeded => "recursion limit exceeded",
178            }
179        )
180    }
181}
182
183#[cfg(feature = "std")]
184impl std::error::Error for ParserError {}
185
// By default, allow expressions to be nested up to this depth before erroring
187const DEFAULT_REMAINING_DEPTH: usize = 50;
188
189// A constant EOF token that can be referenced.
190const EOF_TOKEN: TokenWithSpan = TokenWithSpan {
191    token: Token::EOF,
192    span: Span {
193        start: Location { line: 0, column: 0 },
194        end: Location { line: 0, column: 0 },
195    },
196};
197
/// Composite type declarations using angle-bracket syntax can be arbitrarily
/// nested, so the following declaration is possible:
///      `ARRAY<ARRAY<INT>>`
/// However, the tokenizer recognizes the `>>` as a single ShiftRight token.
/// We work around that limitation when parsing a data type by accepting
/// either a `>` or a `>>` token in such cases, remembering which variant we
/// matched.
/// In the latter case, having matched a `>>`, the parent type will not look
/// for its own closing `>`, since that has already been consumed while
/// parsing the child type.
///
/// See [Parser::parse_data_type] for details
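///
/// For example, in a dialect that accepts angle-bracket type syntax, a
/// declaration like the following parses even though the tokenizer emits a
/// single ShiftRight token for `>>` (illustrative sketch):
///
/// ```sql
/// CREATE TABLE t (c ARRAY<ARRAY<INT>>)
/// ```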
210struct MatchedTrailingBracket(bool);
211
212impl From<bool> for MatchedTrailingBracket {
213    fn from(value: bool) -> Self {
214        Self(value)
215    }
216}
217
218/// Options that control how the [`Parser`] parses SQL text
219#[derive(Debug, Clone, PartialEq, Eq)]
220pub struct ParserOptions {
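    /// Controls whether trailing commas are allowed, e.g. in a projection
    /// list. See [`ParserOptions::with_trailing_commas`] for details.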
221    pub trailing_commas: bool,
222    /// Controls how literal values are unescaped. See
223    /// [`Tokenizer::with_unescape`] for more details.
224    pub unescape: bool,
225}
226
227impl Default for ParserOptions {
228    fn default() -> Self {
229        Self {
230            trailing_commas: false,
231            unescape: true,
232        }
233    }
234}
235
236impl ParserOptions {
237    /// Create a new [`ParserOptions`]
238    pub fn new() -> Self {
239        Default::default()
240    }
241
242    /// Set if trailing commas are allowed.
243    ///
244    /// If this option is `false` (the default), the following SQL will
245    /// not parse. If the option is `true`, the SQL will parse.
246    ///
247    /// ```sql
248    ///  SELECT
249    ///   foo,
250    ///   bar,
251    ///  FROM baz
252    /// ```
253    pub fn with_trailing_commas(mut self, trailing_commas: bool) -> Self {
254        self.trailing_commas = trailing_commas;
255        self
256    }
257
258    /// Set if literal values are unescaped. Defaults to true. See
259    /// [`Tokenizer::with_unescape`] for more details.
260    pub fn with_unescape(mut self, unescape: bool) -> Self {
261        self.unescape = unescape;
262        self
263    }
264}
265
266#[derive(Copy, Clone)]
267enum ParserState {
268    /// The default state of the parser.
269    Normal,
270    /// The state when parsing a CONNECT BY expression. This allows parsing
271    /// PRIOR expressions while still allowing prior as an identifier name
272    /// in other contexts.
273    ConnectBy,
274}
275
276/// A SQL Parser
277///
278/// This struct is the main entry point for parsing SQL queries.
279///
280/// # Functionality:
281/// * Parsing SQL: see examples on [`Parser::new`] and [`Parser::parse_sql`]
282/// * Controlling recursion: See [`Parser::with_recursion_limit`]
283/// * Controlling parser options: See [`Parser::with_options`]
284/// * Providing your own tokens: See [`Parser::with_tokens`]
285///
286/// # Internals
287///
288/// The parser uses a [`Tokenizer`] to tokenize the input SQL string into a
289/// `Vec` of [`TokenWithSpan`]s and maintains an `index` to the current token
290/// being processed. The token vec may contain multiple SQL statements.
291///
292/// * The "current" token is the token at `index - 1`
293/// * The "next" token is the token at `index`
294/// * The "previous" token is the token at `index - 2`
295///
296/// If `index` is equal to the length of the token stream, the 'next' token is
297/// [`Token::EOF`].
298///
/// For example, the SQL string "SELECT * FROM foo" will be tokenized into the
/// following tokens:
301/// ```text
302///  [
303///    "SELECT", // token index 0
304///    " ",      // whitespace
305///    "*",
306///    " ",
307///    "FROM",
308///    " ",
309///    "foo"
310///   ]
311/// ```
312///
313///
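/// A short sketch of the public token accessors (assuming [`GenericDialect`]):
///
/// ```
/// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect, tokenizer::Token};
/// # fn main() -> Result<(), ParserError> {
/// let dialect = GenericDialect {};
/// let mut parser = Parser::new(&dialect).try_with_sql("SELECT 1")?;
/// // `peek_token` returns the next unprocessed token without advancing the index
/// assert_eq!(parser.peek_token().token, Token::make_keyword("SELECT"));
/// // `next_token` consumes the token and advances the index
/// assert_eq!(parser.next_token().token, Token::make_keyword("SELECT"));
/// # Ok(())
/// # }
/// ```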
314pub struct Parser<'a> {
315    /// The tokens
316    tokens: Vec<TokenWithSpan>,
317    /// The index of the first unprocessed token in [`Parser::tokens`].
318    index: usize,
319    /// The current state of the parser.
320    state: ParserState,
321    /// The SQL dialect to use.
322    dialect: &'a dyn Dialect,
    /// Additional options that allow you to mix & match behavior
    /// otherwise constrained to certain dialects (e.g. trailing
    /// commas) and/or control how input is processed (e.g. unescaping).
326    options: ParserOptions,
327    /// Ensures the stack does not overflow by limiting recursion depth.
328    recursion_counter: RecursionCounter,
329}
330
331impl<'a> Parser<'a> {
332    /// Create a parser for a [`Dialect`]
333    ///
334    /// See also [`Parser::parse_sql`]
335    ///
336    /// Example:
337    /// ```
338    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
339    /// # fn main() -> Result<(), ParserError> {
340    /// let dialect = GenericDialect{};
341    /// let statements = Parser::new(&dialect)
342    ///   .try_with_sql("SELECT * FROM foo")?
343    ///   .parse_statements()?;
344    /// # Ok(())
345    /// # }
346    /// ```
347    pub fn new(dialect: &'a dyn Dialect) -> Self {
348        Self {
349            tokens: vec![],
350            index: 0,
351            state: ParserState::Normal,
352            dialect,
353            recursion_counter: RecursionCounter::new(DEFAULT_REMAINING_DEPTH),
354            options: ParserOptions::new().with_trailing_commas(dialect.supports_trailing_commas()),
355        }
356    }
357
358    /// Specify the maximum recursion limit while parsing.
359    ///
360    /// [`Parser`] prevents stack overflows by returning
361    /// [`ParserError::RecursionLimitExceeded`] if the parser exceeds
362    /// this depth while processing the query.
363    ///
364    /// Example:
365    /// ```
366    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
367    /// # fn main() -> Result<(), ParserError> {
368    /// let dialect = GenericDialect{};
369    /// let result = Parser::new(&dialect)
370    ///   .with_recursion_limit(1)
371    ///   .try_with_sql("SELECT * FROM foo WHERE (a OR (b OR (c OR d)))")?
372    ///   .parse_statements();
373    ///   assert_eq!(result, Err(ParserError::RecursionLimitExceeded));
374    /// # Ok(())
375    /// # }
376    /// ```
377    ///
378    /// Note: when "recursive-protection" feature is enabled, this crate uses additional stack overflow protection
    /// for some of its recursive methods. See [`recursive::recursive`] for more information.
380    pub fn with_recursion_limit(mut self, recursion_limit: usize) -> Self {
381        self.recursion_counter = RecursionCounter::new(recursion_limit);
382        self
383    }
384
385    /// Specify additional parser options
386    ///
387    /// [`Parser`] supports additional options ([`ParserOptions`])
388    /// that allow you to mix & match behavior otherwise constrained
389    /// to certain dialects (e.g. trailing commas).
390    ///
391    /// Example:
392    /// ```
393    /// # use sqlparser::{parser::{Parser, ParserError, ParserOptions}, dialect::GenericDialect};
394    /// # fn main() -> Result<(), ParserError> {
395    /// let dialect = GenericDialect{};
396    /// let options = ParserOptions::new()
397    ///    .with_trailing_commas(true)
398    ///    .with_unescape(false);
399    /// let result = Parser::new(&dialect)
400    ///   .with_options(options)
401    ///   .try_with_sql("SELECT a, b, COUNT(*), FROM foo GROUP BY a, b,")?
402    ///   .parse_statements();
403    ///   assert!(matches!(result, Ok(_)));
404    /// # Ok(())
405    /// # }
406    /// ```
407    pub fn with_options(mut self, options: ParserOptions) -> Self {
408        self.options = options;
409        self
410    }
411
412    /// Reset this parser to parse the specified token stream
413    pub fn with_tokens_with_locations(mut self, tokens: Vec<TokenWithSpan>) -> Self {
414        self.tokens = tokens;
415        self.index = 0;
416        self
417    }
418
419    /// Reset this parser state to parse the specified tokens
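    ///
    /// A minimal sketch (this hand-built token stream is equivalent to `SELECT 1`):
    ///
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect, tokenizer::Token};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect {};
    /// let tokens = vec![
    ///     Token::make_keyword("SELECT"),
    ///     Token::Number("1".to_string(), false),
    /// ];
    /// let statements = Parser::new(&dialect)
    ///     .with_tokens(tokens)
    ///     .parse_statements()?;
    /// assert_eq!(statements.len(), 1);
    /// # Ok(())
    /// # }
    /// ```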
420    pub fn with_tokens(self, tokens: Vec<Token>) -> Self {
421        // Put in dummy locations
422        let tokens_with_locations: Vec<TokenWithSpan> = tokens
423            .into_iter()
424            .map(|token| TokenWithSpan {
425                token,
426                span: Span::empty(),
427            })
428            .collect();
429        self.with_tokens_with_locations(tokens_with_locations)
430    }
431
    /// Tokenizes the SQL string and sets this [`Parser`]'s state to
    /// parse the resulting tokens.
434    ///
435    /// Returns an error if there was an error tokenizing the SQL string.
436    ///
437    /// See example on [`Parser::new()`] for an example
438    pub fn try_with_sql(self, sql: &str) -> Result<Self, ParserError> {
439        debug!("Parsing sql '{}'...", sql);
440        let tokens = Tokenizer::new(self.dialect, sql)
441            .with_unescape(self.options.unescape)
442            .tokenize_with_location()?;
443        Ok(self.with_tokens_with_locations(tokens))
444    }
445
446    /// Parse potentially multiple statements
447    ///
448    /// Example
449    /// ```
450    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
451    /// # fn main() -> Result<(), ParserError> {
452    /// let dialect = GenericDialect{};
453    /// let statements = Parser::new(&dialect)
454    ///   // Parse a SQL string with 2 separate statements
455    ///   .try_with_sql("SELECT * FROM foo; SELECT * FROM bar;")?
456    ///   .parse_statements()?;
457    /// assert_eq!(statements.len(), 2);
458    /// # Ok(())
459    /// # }
460    /// ```
461    pub fn parse_statements(&mut self) -> Result<Vec<Statement>, ParserError> {
462        let mut stmts = Vec::new();
463        let mut expecting_statement_delimiter = false;
464        loop {
465            // ignore empty statements (between successive statement delimiters)
466            while self.consume_token(&Token::SemiColon) {
467                expecting_statement_delimiter = false;
468            }
469
470            match self.peek_token().token {
471                Token::EOF => break,
472
473                // end of statement
474                Token::Word(word) => {
475                    if expecting_statement_delimiter && word.keyword == Keyword::END {
476                        break;
477                    }
478                }
479                _ => {}
480            }
481
482            if expecting_statement_delimiter {
483                return self.expected("end of statement", self.peek_token());
484            }
485
486            let statement = self.parse_statement()?;
487            stmts.push(statement);
488            expecting_statement_delimiter = true;
489        }
490        Ok(stmts)
491    }
492
    /// Convenience method to parse a string with one or more SQL
    /// statements into an Abstract Syntax Tree (AST).
495    ///
496    /// Example
497    /// ```
498    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
499    /// # fn main() -> Result<(), ParserError> {
500    /// let dialect = GenericDialect{};
501    /// let statements = Parser::parse_sql(
502    ///   &dialect, "SELECT * FROM foo"
503    /// )?;
504    /// assert_eq!(statements.len(), 1);
505    /// # Ok(())
506    /// # }
507    /// ```
508    pub fn parse_sql(dialect: &dyn Dialect, sql: &str) -> Result<Vec<Statement>, ParserError> {
509        Parser::new(dialect).try_with_sql(sql)?.parse_statements()
510    }
511
512    /// Parse a single top-level statement (such as SELECT, INSERT, CREATE, etc.),
513    /// stopping before the statement separator, if any.
514    pub fn parse_statement(&mut self) -> Result<Statement, ParserError> {
515        let _guard = self.recursion_counter.try_decrease()?;
516
517        // allow the dialect to override statement parsing
518        if let Some(statement) = self.dialect.parse_statement(self) {
519            return statement;
520        }
521
522        let next_token = self.next_token();
523        match &next_token.token {
524            Token::Word(w) => match w.keyword {
525                Keyword::KILL => self.parse_kill(),
526                Keyword::FLUSH => self.parse_flush(),
527                Keyword::DESC => self.parse_explain(DescribeAlias::Desc),
528                Keyword::DESCRIBE => self.parse_explain(DescribeAlias::Describe),
529                Keyword::EXPLAIN => self.parse_explain(DescribeAlias::Explain),
530                Keyword::ANALYZE => self.parse_analyze(),
531                Keyword::SELECT | Keyword::WITH | Keyword::VALUES | Keyword::FROM => {
532                    self.prev_token();
533                    self.parse_query().map(Statement::Query)
534                }
535                Keyword::TRUNCATE => self.parse_truncate(),
536                Keyword::ATTACH => {
537                    if dialect_of!(self is DuckDbDialect) {
538                        self.parse_attach_duckdb_database()
539                    } else {
540                        self.parse_attach_database()
541                    }
542                }
543                Keyword::DETACH if dialect_of!(self is DuckDbDialect | GenericDialect) => {
544                    self.parse_detach_duckdb_database()
545                }
546                Keyword::MSCK => self.parse_msck(),
547                Keyword::CREATE => self.parse_create(),
548                Keyword::CACHE => self.parse_cache_table(),
549                Keyword::DROP => self.parse_drop(),
550                Keyword::DISCARD => self.parse_discard(),
551                Keyword::DECLARE => self.parse_declare(),
552                Keyword::FETCH => self.parse_fetch_statement(),
553                Keyword::DELETE => self.parse_delete(),
554                Keyword::INSERT => self.parse_insert(),
555                Keyword::REPLACE => self.parse_replace(),
556                Keyword::UNCACHE => self.parse_uncache_table(),
557                Keyword::UPDATE => self.parse_update(),
558                Keyword::ALTER => self.parse_alter(),
559                Keyword::CALL => self.parse_call(),
560                Keyword::COPY => self.parse_copy(),
561                Keyword::CLOSE => self.parse_close(),
562                Keyword::SET => self.parse_set(),
563                Keyword::SHOW => self.parse_show(),
564                Keyword::USE => self.parse_use(),
565                Keyword::GRANT => self.parse_grant(),
566                Keyword::REVOKE => self.parse_revoke(),
567                Keyword::START => self.parse_start_transaction(),
568                // `BEGIN` is a nonstandard but common alias for the
569                // standard `START TRANSACTION` statement. It is supported
570                // by at least PostgreSQL and MySQL.
571                Keyword::BEGIN => self.parse_begin(),
572                // `END` is a nonstandard but common alias for the
573                // standard `COMMIT TRANSACTION` statement. It is supported
574                // by PostgreSQL.
575                Keyword::END => self.parse_end(),
576                Keyword::SAVEPOINT => self.parse_savepoint(),
577                Keyword::RELEASE => self.parse_release(),
578                Keyword::COMMIT => self.parse_commit(),
579                Keyword::RAISERROR => Ok(self.parse_raiserror()?),
580                Keyword::ROLLBACK => self.parse_rollback(),
581                Keyword::ASSERT => self.parse_assert(),
                // `PREPARE`, `EXECUTE` and `DEALLOCATE` are Postgres-specific
                // syntax, used for Postgres prepared statements.
584                Keyword::DEALLOCATE => self.parse_deallocate(),
585                Keyword::EXECUTE | Keyword::EXEC => self.parse_execute(),
586                Keyword::PREPARE => self.parse_prepare(),
587                Keyword::MERGE => self.parse_merge(),
                // `LISTEN`, `UNLISTEN` and `NOTIFY` are Postgres-specific
                // syntax, used for asynchronous notification in Postgres.
590                Keyword::LISTEN if self.dialect.supports_listen_notify() => self.parse_listen(),
591                Keyword::UNLISTEN if self.dialect.supports_listen_notify() => self.parse_unlisten(),
592                Keyword::NOTIFY if self.dialect.supports_listen_notify() => self.parse_notify(),
593                // `PRAGMA` is sqlite specific https://www.sqlite.org/pragma.html
594                Keyword::PRAGMA => self.parse_pragma(),
595                Keyword::UNLOAD => self.parse_unload(),
596                Keyword::RENAME => self.parse_rename(),
597                // `INSTALL` is duckdb specific https://duckdb.org/docs/extensions/overview
598                Keyword::INSTALL if dialect_of!(self is DuckDbDialect | GenericDialect) => {
599                    self.parse_install()
600                }
601                Keyword::LOAD => self.parse_load(),
602                // `OPTIMIZE` is clickhouse specific https://clickhouse.tech/docs/en/sql-reference/statements/optimize/
603                Keyword::OPTIMIZE if dialect_of!(self is ClickHouseDialect | GenericDialect) => {
604                    self.parse_optimize_table()
605                }
606                // `COMMENT` is snowflake specific https://docs.snowflake.com/en/sql-reference/sql/comment
607                Keyword::COMMENT if self.dialect.supports_comment_on() => self.parse_comment(),
608                _ => self.expected("an SQL statement", next_token),
609            },
610            Token::LParen => {
611                self.prev_token();
612                self.parse_query().map(Statement::Query)
613            }
614            _ => self.expected("an SQL statement", next_token),
615        }
616    }
617
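    /// Parse a `COMMENT ON ...` statement, e.g. `COMMENT ON TABLE foo IS 'bar'`.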
618    pub fn parse_comment(&mut self) -> Result<Statement, ParserError> {
619        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
620
621        self.expect_keyword_is(Keyword::ON)?;
622        let token = self.next_token();
623
624        let (object_type, object_name) = match token.token {
625            Token::Word(w) if w.keyword == Keyword::COLUMN => {
626                (CommentObject::Column, self.parse_object_name(false)?)
627            }
628            Token::Word(w) if w.keyword == Keyword::TABLE => {
629                (CommentObject::Table, self.parse_object_name(false)?)
630            }
631            Token::Word(w) if w.keyword == Keyword::EXTENSION => {
632                (CommentObject::Extension, self.parse_object_name(false)?)
633            }
634            Token::Word(w) if w.keyword == Keyword::SCHEMA => {
635                (CommentObject::Schema, self.parse_object_name(false)?)
636            }
637            Token::Word(w) if w.keyword == Keyword::DATABASE => {
638                (CommentObject::Database, self.parse_object_name(false)?)
639            }
640            Token::Word(w) if w.keyword == Keyword::USER => {
641                (CommentObject::User, self.parse_object_name(false)?)
642            }
643            Token::Word(w) if w.keyword == Keyword::ROLE => {
644                (CommentObject::Role, self.parse_object_name(false)?)
645            }
646            _ => self.expected("comment object_type", token)?,
647        };
648
649        self.expect_keyword_is(Keyword::IS)?;
650        let comment = if self.parse_keyword(Keyword::NULL) {
651            None
652        } else {
653            Some(self.parse_literal_string()?)
654        };
655        Ok(Statement::Comment {
656            object_type,
657            object_name,
658            comment,
659            if_exists,
660        })
661    }
662
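    /// Parse a MySQL `FLUSH` statement, e.g. `FLUSH TABLES WITH READ LOCK`.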
663    pub fn parse_flush(&mut self) -> Result<Statement, ParserError> {
664        let mut channel = None;
665        let mut tables: Vec<ObjectName> = vec![];
666        let mut read_lock = false;
667        let mut export = false;
668
669        if !dialect_of!(self is MySqlDialect | GenericDialect) {
670            return parser_err!("Unsupported statement FLUSH", self.peek_token().span.start);
671        }
672
673        let location = if self.parse_keyword(Keyword::NO_WRITE_TO_BINLOG) {
674            Some(FlushLocation::NoWriteToBinlog)
675        } else if self.parse_keyword(Keyword::LOCAL) {
676            Some(FlushLocation::Local)
677        } else {
678            None
679        };
680
681        let object_type = if self.parse_keywords(&[Keyword::BINARY, Keyword::LOGS]) {
682            FlushType::BinaryLogs
683        } else if self.parse_keywords(&[Keyword::ENGINE, Keyword::LOGS]) {
684            FlushType::EngineLogs
685        } else if self.parse_keywords(&[Keyword::ERROR, Keyword::LOGS]) {
686            FlushType::ErrorLogs
687        } else if self.parse_keywords(&[Keyword::GENERAL, Keyword::LOGS]) {
688            FlushType::GeneralLogs
689        } else if self.parse_keywords(&[Keyword::HOSTS]) {
690            FlushType::Hosts
691        } else if self.parse_keyword(Keyword::PRIVILEGES) {
692            FlushType::Privileges
693        } else if self.parse_keyword(Keyword::OPTIMIZER_COSTS) {
694            FlushType::OptimizerCosts
695        } else if self.parse_keywords(&[Keyword::RELAY, Keyword::LOGS]) {
696            if self.parse_keywords(&[Keyword::FOR, Keyword::CHANNEL]) {
                channel = Some(self.parse_object_name(false)?.to_string());
698            }
699            FlushType::RelayLogs
700        } else if self.parse_keywords(&[Keyword::SLOW, Keyword::LOGS]) {
701            FlushType::SlowLogs
702        } else if self.parse_keyword(Keyword::STATUS) {
703            FlushType::Status
704        } else if self.parse_keyword(Keyword::USER_RESOURCES) {
705            FlushType::UserResources
706        } else if self.parse_keywords(&[Keyword::LOGS]) {
707            FlushType::Logs
708        } else if self.parse_keywords(&[Keyword::TABLES]) {
709            loop {
710                let next_token = self.next_token();
711                match &next_token.token {
712                    Token::Word(w) => match w.keyword {
713                        Keyword::WITH => {
714                            read_lock = self.parse_keywords(&[Keyword::READ, Keyword::LOCK]);
715                        }
716                        Keyword::FOR => {
717                            export = self.parse_keyword(Keyword::EXPORT);
718                        }
719                        Keyword::NoKeyword => {
720                            self.prev_token();
721                            tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
722                        }
723                        _ => {}
724                    },
725                    _ => {
726                        break;
727                    }
728                }
729            }
730
731            FlushType::Tables
732        } else {
733            return self.expected(
734                "BINARY LOGS, ENGINE LOGS, ERROR LOGS, GENERAL LOGS, HOSTS, LOGS, PRIVILEGES, OPTIMIZER_COSTS,\
735                 RELAY LOGS [FOR CHANNEL channel], SLOW LOGS, STATUS, USER_RESOURCES",
736                self.peek_token(),
737            );
738        };
739
740        Ok(Statement::Flush {
741            object_type,
742            location,
743            channel,
744            read_lock,
745            export,
746            tables,
747        })
748    }
749
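    /// Parse a Hive `MSCK [REPAIR] TABLE` statement.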
750    pub fn parse_msck(&mut self) -> Result<Statement, ParserError> {
751        let repair = self.parse_keyword(Keyword::REPAIR);
752        self.expect_keyword_is(Keyword::TABLE)?;
753        let table_name = self.parse_object_name(false)?;
754        let partition_action = self
755            .maybe_parse(|parser| {
756                let pa = match parser.parse_one_of_keywords(&[
757                    Keyword::ADD,
758                    Keyword::DROP,
759                    Keyword::SYNC,
760                ]) {
761                    Some(Keyword::ADD) => Some(AddDropSync::ADD),
762                    Some(Keyword::DROP) => Some(AddDropSync::DROP),
763                    Some(Keyword::SYNC) => Some(AddDropSync::SYNC),
764                    _ => None,
765                };
766                parser.expect_keyword_is(Keyword::PARTITIONS)?;
767                Ok(pa)
768            })?
769            .unwrap_or_default();
770        Ok(Statement::Msck {
771            repair,
772            table_name,
773            partition_action,
774        })
775    }
776
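    /// Parse a `TRUNCATE [TABLE]` statement, e.g. `TRUNCATE TABLE t RESTART IDENTITY CASCADE`.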
777    pub fn parse_truncate(&mut self) -> Result<Statement, ParserError> {
778        let table = self.parse_keyword(Keyword::TABLE);
779        let only = self.parse_keyword(Keyword::ONLY);
780
781        let table_names = self
782            .parse_comma_separated(|p| p.parse_object_name(false))?
783            .into_iter()
784            .map(|n| TruncateTableTarget { name: n })
785            .collect();
786
787        let mut partitions = None;
788        if self.parse_keyword(Keyword::PARTITION) {
789            self.expect_token(&Token::LParen)?;
790            partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
791            self.expect_token(&Token::RParen)?;
792        }
793
794        let mut identity = None;
795        let mut cascade = None;
796
797        if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
798            identity = if self.parse_keywords(&[Keyword::RESTART, Keyword::IDENTITY]) {
799                Some(TruncateIdentityOption::Restart)
800            } else if self.parse_keywords(&[Keyword::CONTINUE, Keyword::IDENTITY]) {
801                Some(TruncateIdentityOption::Continue)
802            } else {
803                None
804            };
805
806            cascade = self.parse_cascade_option();
807        };
808
809        let on_cluster = self.parse_optional_on_cluster()?;
810
811        Ok(Statement::Truncate {
812            table_names,
813            partitions,
814            table,
815            only,
816            identity,
817            cascade,
818            on_cluster,
819        })
820    }
821
822    fn parse_cascade_option(&mut self) -> Option<CascadeOption> {
823        if self.parse_keyword(Keyword::CASCADE) {
824            Some(CascadeOption::Cascade)
825        } else if self.parse_keyword(Keyword::RESTRICT) {
826            Some(CascadeOption::Restrict)
827        } else {
828            None
829        }
830    }
831
832    pub fn parse_attach_duckdb_database_options(
833        &mut self,
834    ) -> Result<Vec<AttachDuckDBDatabaseOption>, ParserError> {
835        if !self.consume_token(&Token::LParen) {
836            return Ok(vec![]);
837        }
838
839        let mut options = vec![];
840        loop {
841            if self.parse_keyword(Keyword::READ_ONLY) {
842                let boolean = if self.parse_keyword(Keyword::TRUE) {
843                    Some(true)
844                } else if self.parse_keyword(Keyword::FALSE) {
845                    Some(false)
846                } else {
847                    None
848                };
849                options.push(AttachDuckDBDatabaseOption::ReadOnly(boolean));
850            } else if self.parse_keyword(Keyword::TYPE) {
851                let ident = self.parse_identifier()?;
852                options.push(AttachDuckDBDatabaseOption::Type(ident));
853            } else {
854                return self.expected("expected one of: ), READ_ONLY, TYPE", self.peek_token());
855            };
856
857            if self.consume_token(&Token::RParen) {
858                return Ok(options);
859            } else if self.consume_token(&Token::Comma) {
860                continue;
861            } else {
862                return self.expected("expected one of: ')', ','", self.peek_token());
863            }
864        }
865    }
866
867    pub fn parse_attach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
868        let database = self.parse_keyword(Keyword::DATABASE);
869        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
870        let database_path = self.parse_identifier()?;
871        let database_alias = if self.parse_keyword(Keyword::AS) {
872            Some(self.parse_identifier()?)
873        } else {
874            None
875        };
876
877        let attach_options = self.parse_attach_duckdb_database_options()?;
878        Ok(Statement::AttachDuckDBDatabase {
879            if_not_exists,
880            database,
881            database_path,
882            database_alias,
883            attach_options,
884        })
885    }
886
887    pub fn parse_detach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
888        let database = self.parse_keyword(Keyword::DATABASE);
889        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
890        let database_alias = self.parse_identifier()?;
891        Ok(Statement::DetachDuckDBDatabase {
892            if_exists,
893            database,
894            database_alias,
895        })
896    }
897
898    pub fn parse_attach_database(&mut self) -> Result<Statement, ParserError> {
899        let database = self.parse_keyword(Keyword::DATABASE);
900        let database_file_name = self.parse_expr()?;
901        self.expect_keyword_is(Keyword::AS)?;
902        let schema_name = self.parse_identifier()?;
903        Ok(Statement::AttachDatabase {
904            database,
905            schema_name,
906            database_file_name,
907        })
908    }
909
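    /// Parse an `ANALYZE [TABLE]` statement, including Hive/Spark-style options
    /// such as `COMPUTE STATISTICS` and `FOR COLUMNS`.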
910    pub fn parse_analyze(&mut self) -> Result<Statement, ParserError> {
911        let has_table_keyword = self.parse_keyword(Keyword::TABLE);
912        let table_name = self.parse_object_name(false)?;
913        let mut for_columns = false;
914        let mut cache_metadata = false;
915        let mut noscan = false;
916        let mut partitions = None;
917        let mut compute_statistics = false;
918        let mut columns = vec![];
919        loop {
920            match self.parse_one_of_keywords(&[
921                Keyword::PARTITION,
922                Keyword::FOR,
923                Keyword::CACHE,
924                Keyword::NOSCAN,
925                Keyword::COMPUTE,
926            ]) {
927                Some(Keyword::PARTITION) => {
928                    self.expect_token(&Token::LParen)?;
929                    partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
930                    self.expect_token(&Token::RParen)?;
931                }
932                Some(Keyword::NOSCAN) => noscan = true,
933                Some(Keyword::FOR) => {
934                    self.expect_keyword_is(Keyword::COLUMNS)?;
935
936                    columns = self
937                        .maybe_parse(|parser| {
938                            parser.parse_comma_separated(|p| p.parse_identifier())
939                        })?
940                        .unwrap_or_default();
941                    for_columns = true
942                }
943                Some(Keyword::CACHE) => {
944                    self.expect_keyword_is(Keyword::METADATA)?;
945                    cache_metadata = true
946                }
947                Some(Keyword::COMPUTE) => {
948                    self.expect_keyword_is(Keyword::STATISTICS)?;
949                    compute_statistics = true
950                }
951                _ => break,
952            }
953        }
954
955        Ok(Statement::Analyze {
956            has_table_keyword,
957            table_name,
958            for_columns,
959            columns,
960            partitions,
961            cache_metadata,
962            noscan,
963            compute_statistics,
964        })
965    }
966
    /// Parse an expression, including a wildcard (`*`) or a qualified wildcard (`table.*`).
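    ///
    /// A minimal example (sketch, using the [`GenericDialect`]):
    ///
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect {};
    /// let expr = Parser::new(&dialect)
    ///     .try_with_sql("foo.*")?
    ///     .parse_wildcard_expr()?;
    /// assert_eq!(expr.to_string(), "foo.*");
    /// # Ok(())
    /// # }
    /// ```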
968    pub fn parse_wildcard_expr(&mut self) -> Result<Expr, ParserError> {
969        let index = self.index;
970
971        let next_token = self.next_token();
972        match next_token.token {
973            t @ (Token::Word(_) | Token::SingleQuotedString(_)) => {
974                if self.peek_token().token == Token::Period {
975                    let mut id_parts: Vec<Ident> = vec![match t {
976                        Token::Word(w) => w.into_ident(next_token.span),
977                        Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
978                        _ => unreachable!(), // We matched above
979                    }];
980
981                    while self.consume_token(&Token::Period) {
982                        let next_token = self.next_token();
983                        match next_token.token {
984                            Token::Word(w) => id_parts.push(w.into_ident(next_token.span)),
985                            Token::SingleQuotedString(s) => {
986                                // SQLite has single-quoted identifiers
987                                id_parts.push(Ident::with_quote('\'', s))
988                            }
989                            Token::Mul => {
990                                return Ok(Expr::QualifiedWildcard(
991                                    ObjectName::from(id_parts),
992                                    AttachedToken(next_token),
993                                ));
994                            }
995                            _ => {
996                                return self
997                                    .expected("an identifier or a '*' after '.'", next_token);
998                            }
999                        }
1000                    }
1001                }
1002            }
1003            Token::Mul => {
1004                return Ok(Expr::Wildcard(AttachedToken(next_token)));
1005            }
1006            _ => (),
1007        };
1008
1009        self.index = index;
1010        self.parse_expr()
1011    }
1012
1013    /// Parse a new expression.
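    ///
    /// A minimal example (sketch, using the [`GenericDialect`]):
    ///
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect {};
    /// let expr = Parser::new(&dialect)
    ///     .try_with_sql("1 + 2 * 3")?
    ///     .parse_expr()?;
    /// assert_eq!(expr.to_string(), "1 + 2 * 3");
    /// # Ok(())
    /// # }
    /// ```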
1014    pub fn parse_expr(&mut self) -> Result<Expr, ParserError> {
1015        self.parse_subexpr(self.dialect.prec_unknown())
1016    }
1017
1018    /// Parse tokens until the precedence changes.
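    ///
    /// Parses a prefix expression at the current position, then keeps
    /// consuming infix operators for as long as their precedence is greater
    /// than `precedence`. Passing the dialect's "unknown" precedence (as
    /// [`Parser::parse_expr`] does) parses a complete expression.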
1019    pub fn parse_subexpr(&mut self, precedence: u8) -> Result<Expr, ParserError> {
1020        let _guard = self.recursion_counter.try_decrease()?;
1021        debug!("parsing expr");
1022        let mut expr = self.parse_prefix()?;
1023
1024        expr = self.parse_compound_expr(expr, vec![])?;
1025
1026        debug!("prefix: {:?}", expr);
1027        loop {
1028            let next_precedence = self.get_next_precedence()?;
1029            debug!("next precedence: {:?}", next_precedence);
1030
1031            if precedence >= next_precedence {
1032                break;
1033            }
1034
1035            // The period operator is handled exclusively by the
1036            // compound field access parsing.
1037            if Token::Period == self.peek_token_ref().token {
1038                break;
1039            }
1040
1041            expr = self.parse_infix(expr, next_precedence)?;
1042        }
1043        Ok(expr)
1044    }
1045
1046    pub fn parse_assert(&mut self) -> Result<Statement, ParserError> {
1047        let condition = self.parse_expr()?;
1048        let message = if self.parse_keyword(Keyword::AS) {
1049            Some(self.parse_expr()?)
1050        } else {
1051            None
1052        };
1053
1054        Ok(Statement::Assert { condition, message })
1055    }
1056
1057    pub fn parse_savepoint(&mut self) -> Result<Statement, ParserError> {
1058        let name = self.parse_identifier()?;
1059        Ok(Statement::Savepoint { name })
1060    }
1061
1062    pub fn parse_release(&mut self) -> Result<Statement, ParserError> {
1063        let _ = self.parse_keyword(Keyword::SAVEPOINT);
1064        let name = self.parse_identifier()?;
1065
1066        Ok(Statement::ReleaseSavepoint { name })
1067    }
1068
1069    pub fn parse_listen(&mut self) -> Result<Statement, ParserError> {
1070        let channel = self.parse_identifier()?;
1071        Ok(Statement::LISTEN { channel })
1072    }
1073
1074    pub fn parse_unlisten(&mut self) -> Result<Statement, ParserError> {
1075        let channel = if self.consume_token(&Token::Mul) {
1076            Ident::new(Expr::Wildcard(AttachedToken::empty()).to_string())
1077        } else {
1078            match self.parse_identifier() {
1079                Ok(expr) => expr,
1080                _ => {
1081                    self.prev_token();
1082                    return self.expected("wildcard or identifier", self.peek_token());
1083                }
1084            }
1085        };
1086        Ok(Statement::UNLISTEN { channel })
1087    }
1088
1089    pub fn parse_notify(&mut self) -> Result<Statement, ParserError> {
1090        let channel = self.parse_identifier()?;
1091        let payload = if self.consume_token(&Token::Comma) {
1092            Some(self.parse_literal_string()?)
1093        } else {
1094            None
1095        };
1096        Ok(Statement::NOTIFY { channel, payload })
1097    }
1098
1099    /// Parses a `RENAME TABLE` statement. See [Statement::RenameTable]
1100    pub fn parse_rename(&mut self) -> Result<Statement, ParserError> {
1101        if self.peek_keyword(Keyword::TABLE) {
1102            self.expect_keyword(Keyword::TABLE)?;
1103            let rename_tables = self.parse_comma_separated(|parser| {
1104                let old_name = parser.parse_object_name(false)?;
1105                parser.expect_keyword(Keyword::TO)?;
1106                let new_name = parser.parse_object_name(false)?;
1107
1108                Ok(RenameTable { old_name, new_name })
1109            })?;
1110            Ok(Statement::RenameTable(rename_tables))
1111        } else {
1112            self.expected("KEYWORD `TABLE` after RENAME", self.peek_token())
1113        }
1114    }
1115
1116    /// Tries to parse an expression by matching the specified word to known keywords that have a special meaning in the dialect.
    /// Returns `None` if no match is found.
1118    fn parse_expr_prefix_by_reserved_word(
1119        &mut self,
1120        w: &Word,
1121        w_span: Span,
1122    ) -> Result<Option<Expr>, ParserError> {
1123        match w.keyword {
1124            Keyword::TRUE | Keyword::FALSE if self.dialect.supports_boolean_literals() => {
1125                self.prev_token();
1126                Ok(Some(Expr::Value(self.parse_value()?)))
1127            }
1128            Keyword::NULL => {
1129                self.prev_token();
1130                Ok(Some(Expr::Value(self.parse_value()?)))
1131            }
1132            Keyword::CURRENT_CATALOG
1133            | Keyword::CURRENT_USER
1134            | Keyword::SESSION_USER
1135            | Keyword::USER
1136            if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
1137                {
1138                    Ok(Some(Expr::Function(Function {
1139                        name: ObjectName::from(vec![w.clone().into_ident(w_span)]),
1140                        uses_odbc_syntax: false,
1141                        parameters: FunctionArguments::None,
1142                        args: FunctionArguments::None,
1143                        null_treatment: None,
1144                        filter: None,
1145                        over: None,
1146                        within_group: vec![],
1147                    })))
1148                }
1149            Keyword::CURRENT_TIMESTAMP
1150            | Keyword::CURRENT_TIME
1151            | Keyword::CURRENT_DATE
1152            | Keyword::LOCALTIME
1153            | Keyword::LOCALTIMESTAMP => {
1154                Ok(Some(self.parse_time_functions(ObjectName::from(vec![w.clone().into_ident(w_span)]))?))
1155            }
1156            Keyword::CASE => Ok(Some(self.parse_case_expr()?)),
1157            Keyword::CONVERT => Ok(Some(self.parse_convert_expr(false)?)),
1158            Keyword::TRY_CONVERT if self.dialect.supports_try_convert() => Ok(Some(self.parse_convert_expr(true)?)),
1159            Keyword::CAST => Ok(Some(self.parse_cast_expr(CastKind::Cast)?)),
1160            Keyword::TRY_CAST => Ok(Some(self.parse_cast_expr(CastKind::TryCast)?)),
1161            Keyword::SAFE_CAST => Ok(Some(self.parse_cast_expr(CastKind::SafeCast)?)),
1162            Keyword::EXISTS
            // Databricks has a function named `exists`, so only treat `EXISTS`
            // as introducing a subquery when it is followed by `SELECT` or `WITH`.
1164            if !dialect_of!(self is DatabricksDialect)
1165                || matches!(
1166                        self.peek_nth_token_ref(1).token,
1167                        Token::Word(Word {
1168                            keyword: Keyword::SELECT | Keyword::WITH,
1169                            ..
1170                        })
1171                    ) =>
1172                {
1173                    Ok(Some(self.parse_exists_expr(false)?))
1174                }
1175            Keyword::EXTRACT => Ok(Some(self.parse_extract_expr()?)),
1176            Keyword::CEIL => Ok(Some(self.parse_ceil_floor_expr(true)?)),
1177            Keyword::FLOOR => Ok(Some(self.parse_ceil_floor_expr(false)?)),
1178            Keyword::POSITION if self.peek_token_ref().token == Token::LParen => {
1179                Ok(Some(self.parse_position_expr(w.clone().into_ident(w_span))?))
1180            }
1181            Keyword::SUBSTRING => Ok(Some(self.parse_substring_expr()?)),
1182            Keyword::OVERLAY => Ok(Some(self.parse_overlay_expr()?)),
1183            Keyword::TRIM => Ok(Some(self.parse_trim_expr()?)),
1184            Keyword::INTERVAL => Ok(Some(self.parse_interval()?)),
            // Treat ARRAY[1,2,3] as an array literal, otherwise try to parse it as a subquery or a function call
1186            Keyword::ARRAY if *self.peek_token_ref() == Token::LBracket => {
1187                self.expect_token(&Token::LBracket)?;
1188                Ok(Some(self.parse_array_expr(true)?))
1189            }
1190            Keyword::ARRAY
1191            if self.peek_token() == Token::LParen
1192                && !dialect_of!(self is ClickHouseDialect | DatabricksDialect) =>
1193                {
1194                    self.expect_token(&Token::LParen)?;
1195                    let query = self.parse_query()?;
1196                    self.expect_token(&Token::RParen)?;
1197                    Ok(Some(Expr::Function(Function {
1198                        name: ObjectName::from(vec![w.clone().into_ident(w_span)]),
1199                        uses_odbc_syntax: false,
1200                        parameters: FunctionArguments::None,
1201                        args: FunctionArguments::Subquery(query),
1202                        filter: None,
1203                        null_treatment: None,
1204                        over: None,
1205                        within_group: vec![],
1206                    })))
1207                }
1208            Keyword::NOT => Ok(Some(self.parse_not()?)),
1209            Keyword::MATCH if self.dialect.supports_match_against() => {
1210                Ok(Some(self.parse_match_against()?))
1211            }
1212            Keyword::STRUCT if self.dialect.supports_struct_literal() => {
1213                let struct_expr = self.parse_struct_literal()?;
1214                Ok(Some(struct_expr))
1215            }
1216            Keyword::PRIOR if matches!(self.state, ParserState::ConnectBy) => {
1217                let expr = self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?;
1218                Ok(Some(Expr::Prior(Box::new(expr))))
1219            }
1220            Keyword::MAP if *self.peek_token_ref() == Token::LBrace && self.dialect.support_map_literal_syntax() => {
1221                Ok(Some(self.parse_duckdb_map_literal()?))
1222            }
1223            _ if self.dialect.supports_geometric_types() => match w.keyword {
1224                Keyword::CIRCLE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Circle)?)),
1225                Keyword::BOX => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricBox)?)),
1226                Keyword::PATH => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricPath)?)),
1227                Keyword::LINE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Line)?)),
1228                Keyword::LSEG => Ok(Some(self.parse_geometric_type(GeometricTypeKind::LineSegment)?)),
1229                Keyword::POINT => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Point)?)),
1230                Keyword::POLYGON => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Polygon)?)),
1231                _ => Ok(None),
1232            },
1233            _ => Ok(None),
1234        }
1235    }
1236
1237    /// Tries to parse an expression by a word that is not known to have a special meaning in the dialect.
1238    fn parse_expr_prefix_by_unreserved_word(
1239        &mut self,
1240        w: &Word,
1241        w_span: Span,
1242    ) -> Result<Expr, ParserError> {
1243        match self.peek_token().token {
1244            Token::LParen if !self.peek_outer_join_operator() => {
1245                let id_parts = vec![w.clone().into_ident(w_span)];
1246                self.parse_function(ObjectName::from(id_parts))
1247            }
1248            // string introducer https://dev.mysql.com/doc/refman/8.0/en/charset-introducer.html
1249            Token::SingleQuotedString(_)
1250            | Token::DoubleQuotedString(_)
1251            | Token::HexStringLiteral(_)
1252                if w.value.starts_with('_') =>
1253            {
1254                Ok(Expr::IntroducedString {
1255                    introducer: w.value.clone(),
1256                    value: self.parse_introduced_string_value()?,
1257                })
1258            }
1270            Token::Arrow if self.dialect.supports_lambda_functions() => {
1271                self.expect_token(&Token::Arrow)?;
1272                Ok(Expr::Lambda(LambdaFunction {
1273                    params: OneOrManyWithParens::One(w.clone().into_ident(w_span)),
1274                    body: Box::new(self.parse_expr()?),
1275                }))
1276            }
1277            _ => Ok(Expr::Identifier(w.clone().into_ident(w_span))),
1278        }
1279    }
1280
1281    /// Parse an expression prefix.
1282    pub fn parse_prefix(&mut self) -> Result<Expr, ParserError> {
1283        // allow the dialect to override prefix parsing
1284        if let Some(prefix) = self.dialect.parse_prefix(self) {
1285            return prefix;
1286        }
1287
1288        // PostgreSQL allows any string literal to be preceded by a type name, indicating that the
1289        // string literal represents a literal of that type. Some examples:
1290        //
1291        //      DATE '2020-05-20'
1292        //      TIMESTAMP WITH TIME ZONE '2020-05-20 7:43:54'
1293        //      BOOL 'true'
1294        //
1295        // The first two are standard SQL, while the latter is a PostgreSQL extension. Complicating
1296        // matters is the fact that INTERVAL string literals may optionally be followed by special
1297        // keywords, e.g.:
1298        //
1299        //      INTERVAL '7' DAY
1300        //
1301        // Note also that naively `SELECT date` looks like a syntax error because the `date` type
1302        // name is not followed by a string literal, but in fact in PostgreSQL it is a valid
1303        // expression that should parse as the column name "date".
1304        let loc = self.peek_token_ref().span.start;
1305        let opt_expr = self.maybe_parse(|parser| {
1306            match parser.parse_data_type()? {
1307                DataType::Interval => parser.parse_interval(),
1308                // PostgreSQL allows almost any identifier to be used as custom data type name,
1309                // and we support that in `parse_data_type()`. But unlike Postgres we don't
1310                // have a list of globally reserved keywords (since they vary across dialects),
1311                // so given `NOT 'a' LIKE 'b'`, we'd accept `NOT` as a possible custom data type
1312                // name, resulting in `NOT 'a'` being recognized as a `TypedString` instead of
1313                // an unary negation `NOT ('a' LIKE 'b')`. To solve this, we don't accept the
1314                // `type 'string'` syntax for the custom data types at all.
1315                DataType::Custom(..) => parser_err!("dummy", loc),
1316                data_type => Ok(Expr::TypedString {
1317                    data_type,
1318                    value: parser.parse_value()?.value,
1319                }),
1320            }
1321        })?;
1322
1323        if let Some(expr) = opt_expr {
1324            return Ok(expr);
1325        }
1326
1327        // Cache some dialect properties to avoid lifetime issues with the
1328        // next_token reference.
1329
1330        let dialect = self.dialect;
1331
1332        self.advance_token();
1333        let next_token_index = self.get_current_index();
1334        let next_token = self.get_current_token();
1335        let span = next_token.span;
1336        let expr = match &next_token.token {
1337            Token::Word(w) => {
1338                // The word we consumed may fall into one of two cases: it has a special meaning, or not.
1339                // For example, in Snowflake, the word `interval` may have two meanings depending on the context:
1340                // `SELECT CURRENT_DATE() + INTERVAL '1 DAY', MAX(interval) FROM tbl;`
1341                //                          ^^^^^^^^^^^^^^^^      ^^^^^^^^
1342                //                         interval expression   identifier
1343                //
1344                // We first try to parse the word and following tokens as a special expression, and if that fails,
1345                // we rollback and try to parse it as an identifier.
1346                let w = w.clone();
1347                match self.try_parse(|parser| parser.parse_expr_prefix_by_reserved_word(&w, span)) {
1348                    // This word indicated an expression prefix and parsing was successful
1349                    Ok(Some(expr)) => Ok(expr),
1350
1351                    // No expression prefix associated with this word
1352                    Ok(None) => Ok(self.parse_expr_prefix_by_unreserved_word(&w, span)?),
1353
1354                    // If parsing of the word as a special expression failed, we are facing two options:
1355                    // 1. The statement is malformed, e.g. `SELECT INTERVAL '1 DAI` (`DAI` instead of `DAY`)
1356                    // 2. The word is used as an identifier, e.g. `SELECT MAX(interval) FROM tbl`
1357                    // We first try to parse the word as an identifier, and if that fails
1358                    // we roll back and return the parsing error we got from trying to parse a
1359                    // special expression (to maintain backwards compatibility of parsing errors).
1360                    Err(e) => {
1361                        if !self.dialect.is_reserved_for_identifier(w.keyword) {
1362                            if let Ok(Some(expr)) = self.maybe_parse(|parser| {
1363                                parser.parse_expr_prefix_by_unreserved_word(&w, span)
1364                            }) {
1365                                return Ok(expr);
1366                            }
1367                        }
1368                        return Err(e);
1369                    }
1370                }
1371            } // End of Token::Word
1372            // array `[1, 2, 3]`
1373            Token::LBracket => self.parse_array_expr(false),
1374            tok @ Token::Minus | tok @ Token::Plus => {
1375                let op = if *tok == Token::Plus {
1376                    UnaryOperator::Plus
1377                } else {
1378                    UnaryOperator::Minus
1379                };
1380                Ok(Expr::UnaryOp {
1381                    op,
1382                    expr: Box::new(
1383                        self.parse_subexpr(self.dialect.prec_value(Precedence::MulDivModOp))?,
1384                    ),
1385                })
1386            }
1387            Token::ExclamationMark if dialect.supports_bang_not_operator() => Ok(Expr::UnaryOp {
1388                op: UnaryOperator::BangNot,
1389                expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
1390            }),
1391            tok @ Token::DoubleExclamationMark
1392            | tok @ Token::PGSquareRoot
1393            | tok @ Token::PGCubeRoot
1394            | tok @ Token::AtSign
1395            | tok @ Token::Tilde
1396                if dialect_is!(dialect is PostgreSqlDialect) =>
1397            {
1398                let op = match tok {
1399                    Token::DoubleExclamationMark => UnaryOperator::PGPrefixFactorial,
1400                    Token::PGSquareRoot => UnaryOperator::PGSquareRoot,
1401                    Token::PGCubeRoot => UnaryOperator::PGCubeRoot,
1402                    Token::AtSign => UnaryOperator::PGAbs,
1403                    Token::Tilde => UnaryOperator::PGBitwiseNot,
1404                    _ => unreachable!(),
1405                };
1406                Ok(Expr::UnaryOp {
1407                    op,
1408                    expr: Box::new(
1409                        self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1410                    ),
1411                })
1412            }
1413            tok @ Token::Sharp
1414            | tok @ Token::AtDashAt
1415            | tok @ Token::AtAt
1416            | tok @ Token::QuestionMarkDash
1417            | tok @ Token::QuestionPipe
1418                if self.dialect.supports_geometric_types() =>
1419            {
1420                let op = match tok {
1421                    Token::Sharp => UnaryOperator::Hash,
1422                    Token::AtDashAt => UnaryOperator::AtDashAt,
1423                    Token::AtAt => UnaryOperator::DoubleAt,
1424                    Token::QuestionMarkDash => UnaryOperator::QuestionDash,
1425                    Token::QuestionPipe => UnaryOperator::QuestionPipe,
1426                    _ => {
1427                        return Err(ParserError::ParserError(format!(
1428                            "Unexpected token in unary operator parsing: {:?}",
1429                            tok
1430                        )))
1431                    }
1432                };
1433                Ok(Expr::UnaryOp {
1434                    op,
1435                    expr: Box::new(
1436                        self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1437                    ),
1438                })
1439            }
1440            Token::EscapedStringLiteral(_) if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) =>
1441            {
1442                self.prev_token();
1443                Ok(Expr::Value(self.parse_value()?))
1444            }
1445            Token::UnicodeStringLiteral(_) => {
1446                self.prev_token();
1447                Ok(Expr::Value(self.parse_value()?))
1448            }
1449            Token::Number(_, _)
1450            | Token::SingleQuotedString(_)
1451            | Token::DoubleQuotedString(_)
1452            | Token::TripleSingleQuotedString(_)
1453            | Token::TripleDoubleQuotedString(_)
1454            | Token::DollarQuotedString(_)
1455            | Token::SingleQuotedByteStringLiteral(_)
1456            | Token::DoubleQuotedByteStringLiteral(_)
1457            | Token::TripleSingleQuotedByteStringLiteral(_)
1458            | Token::TripleDoubleQuotedByteStringLiteral(_)
1459            | Token::SingleQuotedRawStringLiteral(_)
1460            | Token::DoubleQuotedRawStringLiteral(_)
1461            | Token::TripleSingleQuotedRawStringLiteral(_)
1462            | Token::TripleDoubleQuotedRawStringLiteral(_)
1463            | Token::NationalStringLiteral(_)
1464            | Token::HexStringLiteral(_) => {
1465                self.prev_token();
1466                Ok(Expr::Value(self.parse_value()?))
1467            }
1468            Token::LParen => {
1469                let expr = if let Some(expr) = self.try_parse_expr_sub_query()? {
1470                    expr
1471                } else if let Some(lambda) = self.try_parse_lambda()? {
1472                    return Ok(lambda);
1473                } else {
1474                    let exprs = self.parse_comma_separated(Parser::parse_expr)?;
1475                    match exprs.len() {
1476                        0 => unreachable!(), // parse_comma_separated ensures 1 or more
1477                        1 => Expr::Nested(Box::new(exprs.into_iter().next().unwrap())),
1478                        _ => Expr::Tuple(exprs),
1479                    }
1480                };
1481                self.expect_token(&Token::RParen)?;
1482                Ok(expr)
1483            }
1484            Token::Placeholder(_) | Token::Colon | Token::AtSign => {
1485                self.prev_token();
1486                Ok(Expr::Value(self.parse_value()?))
1487            }
1488            Token::LBrace => {
1489                self.prev_token();
1490                self.parse_lbrace_expr()
1491            }
1492            _ => self.expected_at("an expression", next_token_index),
1493        }?;
1494
1495        if self.parse_keyword(Keyword::COLLATE) {
1496            Ok(Expr::Collate {
1497                expr: Box::new(expr),
1498                collation: self.parse_object_name(false)?,
1499            })
1500        } else {
1501            Ok(expr)
1502        }
1503    }
1504
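        /// Parses the quoted value that follows a geometric type keyword and
        /// wraps it in an [Expr::TypedString]. A minimal sketch of input that
        /// could reach this method, assuming a dialect with geometric type
        /// support (e.g. PostgreSQL-style literals):
        ///
        /// ```sql
        /// SELECT POINT '(1, 2)';
        /// ```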
1505    fn parse_geometric_type(&mut self, kind: GeometricTypeKind) -> Result<Expr, ParserError> {
1506        let value: Value = self.parse_value()?.value;
1507        Ok(Expr::TypedString {
1508            data_type: DataType::GeometricType(kind),
1509            value,
1510        })
1511    }
1512
1513    /// Try to parse an [Expr::CompoundFieldAccess] like `a.b.c` or `a.b[1].c`.
1514    /// If all the fields are `Expr::Identifier`s, return an [Expr::CompoundIdentifier] instead.
1515    /// If only the root exists, return the root.
1516    /// Parses compound expressions which may be delimited by period
1517    /// or bracket notation.
1518    /// For example: `a.b.c`, `a.b[1]`.
1519    pub fn parse_compound_expr(
1520        &mut self,
1521        root: Expr,
1522        mut chain: Vec<AccessExpr>,
1523    ) -> Result<Expr, ParserError> {
1524        let mut ending_wildcard: Option<TokenWithSpan> = None;
1525        loop {
1526            if self.consume_token(&Token::Period) {
1527                let next_token = self.peek_token_ref();
1528                match &next_token.token {
1529                    Token::Mul => {
1530                        // Postgres explicitly allows `funcnm(tablenm.*)`, and functions such as
1531                        // `array_agg` exercise this control flow.
1532                        if dialect_of!(self is PostgreSqlDialect) {
1533                            ending_wildcard = Some(self.next_token());
1534                        } else {
1535                            // Put back the consumed `.` token before exiting.
1536                            // If this expression is being parsed in the
1537                            // context of a projection, then the `.*` could imply
1538                            // a wildcard expansion. For example:
1539                            // `SELECT STRUCT('foo').* FROM T`
1540                            self.prev_token(); // .
1541                        }
1542
1543                        break;
1544                    }
1545                    Token::SingleQuotedString(s) => {
1546                        let expr =
1547                            Expr::Identifier(Ident::with_quote_and_span('\'', next_token.span, s));
1548                        chain.push(AccessExpr::Dot(expr));
1549                        self.advance_token(); // The consumed string
1550                    }
1551                    // Fall back to parsing an arbitrary expression.
1552                    _ => match self.parse_subexpr(self.dialect.prec_value(Precedence::Period))? {
1553                        // If we get back a compound field access or identifier,
1554                        // we flatten the nested expression.
1555                        // For example if the current root is `foo`
1556                        // and we get back a compound identifier expression `bar.baz`
1557                        // The full expression should be `foo.bar.baz` (i.e.
1558                        // a root with an access chain with 2 entries) and not
1559                        // `foo.(bar.baz)` (i.e. a root with an access chain with
1560                        // 1 entry).
1561                        Expr::CompoundFieldAccess { root, access_chain } => {
1562                            chain.push(AccessExpr::Dot(*root));
1563                            chain.extend(access_chain);
1564                        }
1565                        Expr::CompoundIdentifier(parts) => chain
1566                            .extend(parts.into_iter().map(Expr::Identifier).map(AccessExpr::Dot)),
1567                        expr => {
1568                            chain.push(AccessExpr::Dot(expr));
1569                        }
1570                    },
1571                }
1572            } else if !self.dialect.supports_partiql()
1573                && self.peek_token_ref().token == Token::LBracket
1574            {
1575                self.parse_multi_dim_subscript(&mut chain)?;
1576            } else {
1577                break;
1578            }
1579        }
1580
1581        let tok_index = self.get_current_index();
1582        if let Some(wildcard_token) = ending_wildcard {
1583            if !Self::is_all_ident(&root, &chain) {
1584                return self.expected("an identifier or a '*' after '.'", self.peek_token());
1585            };
1586            Ok(Expr::QualifiedWildcard(
1587                ObjectName::from(Self::exprs_to_idents(root, chain)?),
1588                AttachedToken(wildcard_token),
1589            ))
1590        } else if self.maybe_parse_outer_join_operator() {
1591            if !Self::is_all_ident(&root, &chain) {
1592                return self.expected_at("column identifier before (+)", tok_index);
1593            };
1594            let expr = if chain.is_empty() {
1595                root
1596            } else {
1597                Expr::CompoundIdentifier(Self::exprs_to_idents(root, chain)?)
1598            };
1599            Ok(Expr::OuterJoin(expr.into()))
1600        } else {
1601            Self::build_compound_expr(root, chain)
1602        }
1603    }
1604
1605    /// Combines a root expression and an access chain to form
1606    /// a compound expression, which may be an [Expr::CompoundFieldAccess]
1607    /// or another special-cased expression such as [Expr::CompoundIdentifier]
1608    /// or [Expr::OuterJoin].
1609    fn build_compound_expr(
1610        root: Expr,
1611        mut access_chain: Vec<AccessExpr>,
1612    ) -> Result<Expr, ParserError> {
1613        if access_chain.is_empty() {
1614            return Ok(root);
1615        }
1616
1617        if Self::is_all_ident(&root, &access_chain) {
1618            return Ok(Expr::CompoundIdentifier(Self::exprs_to_idents(
1619                root,
1620                access_chain,
1621            )?));
1622        }
1623
1624        // Flatten qualified function calls.
1625        // For example, the expression `a.b.c.foo(1,2,3)` should
1626        // represent a function called `a.b.c.foo`, rather than
1627        // a composite expression.
1628        if matches!(root, Expr::Identifier(_))
1629            && matches!(
1630                access_chain.last(),
1631                Some(AccessExpr::Dot(Expr::Function(_)))
1632            )
1633            && access_chain
1634                .iter()
1635                .rev()
1636                .skip(1) // All except the Function
1637                .all(|access| matches!(access, AccessExpr::Dot(Expr::Identifier(_))))
1638        {
1639            let Some(AccessExpr::Dot(Expr::Function(mut func))) = access_chain.pop() else {
1640                return parser_err!("expected function expression", root.span().start);
1641            };
1642
1643            let compound_func_name = [root]
1644                .into_iter()
1645                .chain(access_chain.into_iter().flat_map(|access| match access {
1646                    AccessExpr::Dot(expr) => Some(expr),
1647                    _ => None,
1648                }))
1649                .flat_map(|expr| match expr {
1650                    Expr::Identifier(ident) => Some(ident),
1651                    _ => None,
1652                })
1653                .map(ObjectNamePart::Identifier)
1654                .chain(func.name.0)
1655                .collect::<Vec<_>>();
1656            func.name = ObjectName(compound_func_name);
1657
1658            return Ok(Expr::Function(func));
1659        }
1660
1661        // Flatten qualified outer join expressions.
1662        // For example, the expression `T.foo(+)` should
1663        // represent an outer join on the column name `T.foo`
1664        // rather than a composite expression.
1665        if access_chain.len() == 1
1666            && matches!(
1667                access_chain.last(),
1668                Some(AccessExpr::Dot(Expr::OuterJoin(_)))
1669            )
1670        {
1671            let Some(AccessExpr::Dot(Expr::OuterJoin(inner_expr))) = access_chain.pop() else {
1672                return parser_err!("expected (+) expression", root.span().start);
1673            };
1674
1675            if !Self::is_all_ident(&root, &[]) {
1676                return parser_err!("column identifier before (+)", root.span().start);
1677            };
1678
1679            let token_start = root.span().start;
1680            let mut idents = Self::exprs_to_idents(root, vec![])?;
1681            match *inner_expr {
1682                Expr::CompoundIdentifier(suffix) => idents.extend(suffix),
1683                Expr::Identifier(suffix) => idents.push(suffix),
1684                _ => {
1685                    return parser_err!("column identifier before (+)", token_start);
1686                }
1687            }
1688
1689            return Ok(Expr::OuterJoin(Expr::CompoundIdentifier(idents).into()));
1690        }
1691
1692        Ok(Expr::CompoundFieldAccess {
1693            root: Box::new(root),
1694            access_chain,
1695        })
1696    }
1697
1698    /// Check if the root is an identifier and all fields are identifiers.
1699    fn is_all_ident(root: &Expr, fields: &[AccessExpr]) -> bool {
1700        if !matches!(root, Expr::Identifier(_)) {
1701            return false;
1702        }
1703        fields
1704            .iter()
1705            .all(|x| matches!(x, AccessExpr::Dot(Expr::Identifier(_))))
1706    }
1707
1708    /// Convert a root and a list of fields to a list of identifiers.
1709    fn exprs_to_idents(root: Expr, fields: Vec<AccessExpr>) -> Result<Vec<Ident>, ParserError> {
1710        let mut idents = vec![];
1711        if let Expr::Identifier(root) = root {
1712            idents.push(root);
1713            for x in fields {
1714                if let AccessExpr::Dot(Expr::Identifier(ident)) = x {
1715                    idents.push(ident);
1716                } else {
1717                    return parser_err!(
1718                        format!("Expected identifier, found: {}", x),
1719                        x.span().start
1720                    );
1721                }
1722            }
1723            Ok(idents)
1724        } else {
1725            parser_err!(
1726                format!("Expected identifier, found: {}", root),
1727                root.span().start
1728            )
1729        }
1730    }
1731
1732    /// Returns true if the next tokens indicate the outer join operator `(+)`.
1733    fn peek_outer_join_operator(&mut self) -> bool {
1734        if !self.dialect.supports_outer_join_operator() {
1735            return false;
1736        }
1737
1738        let [maybe_lparen, maybe_plus, maybe_rparen] = self.peek_tokens_ref();
1739        Token::LParen == maybe_lparen.token
1740            && Token::Plus == maybe_plus.token
1741            && Token::RParen == maybe_rparen.token
1742    }
1743
1744    /// If the next tokens indicate the outer join operator `(+)`, consume
1745    /// the tokens and return true.
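        ///
        /// For illustration, an Oracle-style join predicate of the kind in
        /// which this operator appears (dialect support for `(+)` is assumed):
        ///
        /// ```sql
        /// SELECT * FROM t1, t2 WHERE t1.id = t2.id (+);
        /// ```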
1746    fn maybe_parse_outer_join_operator(&mut self) -> bool {
1747        self.dialect.supports_outer_join_operator()
1748            && self.consume_tokens(&[Token::LParen, Token::Plus, Token::RParen])
1749    }
1750
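        /// Parses a parenthesized, comma-separated list of utility options.
        /// A sketch of the expected shape, using the PostgreSQL-style
        /// `EXPLAIN` option list purely as an illustration (option names are
        /// not validated here):
        ///
        /// ```sql
        /// EXPLAIN (ANALYZE, FORMAT JSON) SELECT * FROM t;
        /// ```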
1751    pub fn parse_utility_options(&mut self) -> Result<Vec<UtilityOption>, ParserError> {
1752        self.expect_token(&Token::LParen)?;
1753        let options = self.parse_comma_separated(Self::parse_utility_option)?;
1754        self.expect_token(&Token::RParen)?;
1755
1756        Ok(options)
1757    }
1758
1759    fn parse_utility_option(&mut self) -> Result<UtilityOption, ParserError> {
1760        let name = self.parse_identifier()?;
1761
1762        let next_token = self.peek_token();
1763        if next_token == Token::Comma || next_token == Token::RParen {
1764            return Ok(UtilityOption { name, arg: None });
1765        }
1766        let arg = self.parse_expr()?;
1767
1768        Ok(UtilityOption {
1769            name,
1770            arg: Some(arg),
1771        })
1772    }
1773
1774    fn try_parse_expr_sub_query(&mut self) -> Result<Option<Expr>, ParserError> {
1775        if !self.peek_sub_query() {
1776            return Ok(None);
1777        }
1778
1779        Ok(Some(Expr::Subquery(self.parse_query()?)))
1780    }
1781
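        /// Tries to parse a lambda whose parameter list was opened by the
        /// already-consumed `(`. A sketch of input that exercises this path in
        /// dialects with lambda support (the function name is illustrative):
        ///
        /// ```sql
        /// SELECT transform(arr, (x, i) -> x + i) FROM t;
        /// ```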
1782    fn try_parse_lambda(&mut self) -> Result<Option<Expr>, ParserError> {
1783        if !self.dialect.supports_lambda_functions() {
1784            return Ok(None);
1785        }
1786        self.maybe_parse(|p| {
1787            let params = p.parse_comma_separated(|p| p.parse_identifier())?;
1788            p.expect_token(&Token::RParen)?;
1789            p.expect_token(&Token::Arrow)?;
1790            let expr = p.parse_expr()?;
1791            Ok(Expr::Lambda(LambdaFunction {
1792                params: OneOrManyWithParens::Many(params),
1793                body: Box::new(expr),
1794            }))
1795        })
1796    }
1797
1798    /// Tries to parse the body of an [ODBC function] call.
1799    /// i.e. without the enclosing braces
1800    ///
1801    /// ```sql
1802    /// fn myfunc(1,2,3)
1803    /// ```
1804    ///
1805    /// [ODBC function]: https://learn.microsoft.com/en-us/sql/odbc/reference/develop-app/scalar-function-calls?view=sql-server-2017
1806    fn maybe_parse_odbc_fn_body(&mut self) -> Result<Option<Expr>, ParserError> {
1807        self.maybe_parse(|p| {
1808            p.expect_keyword(Keyword::FN)?;
1809            let fn_name = p.parse_object_name(false)?;
1810            let mut fn_call = p.parse_function_call(fn_name)?;
1811            fn_call.uses_odbc_syntax = true;
1812            Ok(Expr::Function(fn_call))
1813        })
1814    }
1815
1816    pub fn parse_function(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
1817        self.parse_function_call(name).map(Expr::Function)
1818    }
1819
1820    fn parse_function_call(&mut self, name: ObjectName) -> Result<Function, ParserError> {
1821        self.expect_token(&Token::LParen)?;
1822
1823        // Snowflake permits a subquery to be passed as an argument without
1824        // an enclosing set of parens if it's the only argument.
1825        if dialect_of!(self is SnowflakeDialect) && self.peek_sub_query() {
1826            let subquery = self.parse_query()?;
1827            self.expect_token(&Token::RParen)?;
1828            return Ok(Function {
1829                name,
1830                uses_odbc_syntax: false,
1831                parameters: FunctionArguments::None,
1832                args: FunctionArguments::Subquery(subquery),
1833                filter: None,
1834                null_treatment: None,
1835                over: None,
1836                within_group: vec![],
1837            });
1838        }
1839
1840        let mut args = self.parse_function_argument_list()?;
1841        let mut parameters = FunctionArguments::None;
1842        // ClickHouse aggregations support parametric functions like `HISTOGRAM(0.5, 0.6)(x, y)`,
1843        // in which `(0.5, 0.6)` are the parameters to the function.
1844        if dialect_of!(self is ClickHouseDialect | GenericDialect)
1845            && self.consume_token(&Token::LParen)
1846        {
1847            parameters = FunctionArguments::List(args);
1848            args = self.parse_function_argument_list()?;
1849        }
1850
1851        let within_group = if self.parse_keywords(&[Keyword::WITHIN, Keyword::GROUP]) {
1852            self.expect_token(&Token::LParen)?;
1853            self.expect_keywords(&[Keyword::ORDER, Keyword::BY])?;
1854            let order_by = self.parse_comma_separated(Parser::parse_order_by_expr)?;
1855            self.expect_token(&Token::RParen)?;
1856            order_by
1857        } else {
1858            vec![]
1859        };
1860
1861        let filter = if self.dialect.supports_filter_during_aggregation()
1862            && self.parse_keyword(Keyword::FILTER)
1863            && self.consume_token(&Token::LParen)
1864            && self.parse_keyword(Keyword::WHERE)
1865        {
1866            let filter = Some(Box::new(self.parse_expr()?));
1867            self.expect_token(&Token::RParen)?;
1868            filter
1869        } else {
1870            None
1871        };
1872
1873        // Syntax for null treatment shows up either in the args list
1874        // or after the function call, but not both.
1875        let null_treatment = if args
1876            .clauses
1877            .iter()
1878            .all(|clause| !matches!(clause, FunctionArgumentClause::IgnoreOrRespectNulls(_)))
1879        {
1880            self.parse_null_treatment()?
1881        } else {
1882            None
1883        };
1884
1885        let over = if self.parse_keyword(Keyword::OVER) {
1886            if self.consume_token(&Token::LParen) {
1887                let window_spec = self.parse_window_spec()?;
1888                Some(WindowType::WindowSpec(window_spec))
1889            } else {
1890                Some(WindowType::NamedWindow(self.parse_identifier()?))
1891            }
1892        } else {
1893            None
1894        };
1895
1896        Ok(Function {
1897            name,
1898            uses_odbc_syntax: false,
1899            parameters,
1900            args: FunctionArguments::List(args),
1901            null_treatment,
1902            filter,
1903            over,
1904            within_group,
1905        })
1906    }
1907
1908    /// Optionally parses a null treatment clause.
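        ///
        /// For illustration, the clause as it may appear around a window
        /// function call (placement and support vary by dialect):
        ///
        /// ```sql
        /// SELECT LAG(x) IGNORE NULLS OVER (ORDER BY y) FROM t;
        /// ```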
1909    fn parse_null_treatment(&mut self) -> Result<Option<NullTreatment>, ParserError> {
1910        match self.parse_one_of_keywords(&[Keyword::RESPECT, Keyword::IGNORE]) {
1911            Some(keyword) => {
1912                self.expect_keyword_is(Keyword::NULLS)?;
1913
1914                Ok(match keyword {
1915                    Keyword::RESPECT => Some(NullTreatment::RespectNulls),
1916                    Keyword::IGNORE => Some(NullTreatment::IgnoreNulls),
1917                    _ => None,
1918                })
1919            }
1920            None => Ok(None),
1921        }
1922    }
1923
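        /// Parses a date/time function that may be written with or without an
        /// argument list; both forms below are shown only for illustration:
        ///
        /// ```sql
        /// SELECT CURRENT_TIMESTAMP, CURRENT_TIMESTAMP(3);
        /// ```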
1924    pub fn parse_time_functions(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
1925        let args = if self.consume_token(&Token::LParen) {
1926            FunctionArguments::List(self.parse_function_argument_list()?)
1927        } else {
1928            FunctionArguments::None
1929        };
1930        Ok(Expr::Function(Function {
1931            name,
1932            uses_odbc_syntax: false,
1933            parameters: FunctionArguments::None,
1934            args,
1935            filter: None,
1936            over: None,
1937            null_treatment: None,
1938            within_group: vec![],
1939        }))
1940    }
1941
1942    pub fn parse_window_frame_units(&mut self) -> Result<WindowFrameUnits, ParserError> {
1943        let next_token = self.next_token();
1944        match &next_token.token {
1945            Token::Word(w) => match w.keyword {
1946                Keyword::ROWS => Ok(WindowFrameUnits::Rows),
1947                Keyword::RANGE => Ok(WindowFrameUnits::Range),
1948                Keyword::GROUPS => Ok(WindowFrameUnits::Groups),
1949                _ => self.expected("ROWS, RANGE, GROUPS", next_token)?,
1950            },
1951            _ => self.expected("ROWS, RANGE, GROUPS", next_token),
1952        }
1953    }
1954
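        /// Parses a window frame clause. The example below is only meant to
        /// illustrate the `units [BETWEEN start AND end]` shape handled here:
        ///
        /// ```sql
        /// SUM(x) OVER (ORDER BY y ROWS BETWEEN 1 PRECEDING AND CURRENT ROW)
        /// ```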
1955    pub fn parse_window_frame(&mut self) -> Result<WindowFrame, ParserError> {
1956        let units = self.parse_window_frame_units()?;
1957        let (start_bound, end_bound) = if self.parse_keyword(Keyword::BETWEEN) {
1958            let start_bound = self.parse_window_frame_bound()?;
1959            self.expect_keyword_is(Keyword::AND)?;
1960            let end_bound = Some(self.parse_window_frame_bound()?);
1961            (start_bound, end_bound)
1962        } else {
1963            (self.parse_window_frame_bound()?, None)
1964        };
1965        Ok(WindowFrame {
1966            units,
1967            start_bound,
1968            end_bound,
1969        })
1970    }
1971
1972    /// Parse `CURRENT ROW` or `{ <positive number> | UNBOUNDED } { PRECEDING | FOLLOWING }`
1973    pub fn parse_window_frame_bound(&mut self) -> Result<WindowFrameBound, ParserError> {
1974        if self.parse_keywords(&[Keyword::CURRENT, Keyword::ROW]) {
1975            Ok(WindowFrameBound::CurrentRow)
1976        } else {
1977            let rows = if self.parse_keyword(Keyword::UNBOUNDED) {
1978                None
1979            } else {
1980                Some(Box::new(match self.peek_token().token {
1981                    Token::SingleQuotedString(_) => self.parse_interval()?,
1982                    _ => self.parse_expr()?,
1983                }))
1984            };
1985            if self.parse_keyword(Keyword::PRECEDING) {
1986                Ok(WindowFrameBound::Preceding(rows))
1987            } else if self.parse_keyword(Keyword::FOLLOWING) {
1988                Ok(WindowFrameBound::Following(rows))
1989            } else {
1990                self.expected("PRECEDING or FOLLOWING", self.peek_token())
1991            }
1992        }
1993    }
1994
1995    /// Parse a GROUP BY expression, which can be one of: GROUPING SETS, ROLLUP, CUBE, or a simple expression.
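        ///
        /// Examples of the forms handled here, assuming a dialect that
        /// supports the extended GROUP BY syntax:
        ///
        /// ```sql
        /// GROUP BY GROUPING SETS ((a), (a, b), ())
        /// GROUP BY ROLLUP (a, b)
        /// GROUP BY CUBE (a, b)
        /// ```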
1996    fn parse_group_by_expr(&mut self) -> Result<Expr, ParserError> {
1997        if self.dialect.supports_group_by_expr() {
1998            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
1999                self.expect_token(&Token::LParen)?;
2000                let result = self.parse_comma_separated(|p| p.parse_tuple(false, true))?;
2001                self.expect_token(&Token::RParen)?;
2002                Ok(Expr::GroupingSets(result))
2003            } else if self.parse_keyword(Keyword::CUBE) {
2004                self.expect_token(&Token::LParen)?;
2005                let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2006                self.expect_token(&Token::RParen)?;
2007                Ok(Expr::Cube(result))
2008            } else if self.parse_keyword(Keyword::ROLLUP) {
2009                self.expect_token(&Token::LParen)?;
2010                let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2011                self.expect_token(&Token::RParen)?;
2012                Ok(Expr::Rollup(result))
2013            } else if self.consume_tokens(&[Token::LParen, Token::RParen]) {
2014                // PostgreSQL allows using an empty tuple as a GROUP BY expression,
2015                // e.g. `GROUP BY (), name`. See the GROUP BY clause section in
2016                // [PostgreSQL](https://www.postgresql.org/docs/16/sql-select.html).
2017                Ok(Expr::Tuple(vec![]))
2018            } else {
2019                self.parse_expr()
2020            }
2021        } else {
2022            // TODO parse rollup for other dialects
2023            self.parse_expr()
2024        }
2025    }
2026
2027    /// Parse a tuple delimited by `(` and `)`.
2028    /// If `lift_singleton` is true, a bare expression without parentheses is accepted and lifted to a tuple of length 1; otherwise the parentheses are required.
2029    /// If `allow_empty` is true, then an empty tuple is allowed.
2030    fn parse_tuple(
2031        &mut self,
2032        lift_singleton: bool,
2033        allow_empty: bool,
2034    ) -> Result<Vec<Expr>, ParserError> {
2035        if lift_singleton {
2036            if self.consume_token(&Token::LParen) {
2037                let result = if allow_empty && self.consume_token(&Token::RParen) {
2038                    vec![]
2039                } else {
2040                    let result = self.parse_comma_separated(Parser::parse_expr)?;
2041                    self.expect_token(&Token::RParen)?;
2042                    result
2043                };
2044                Ok(result)
2045            } else {
2046                Ok(vec![self.parse_expr()?])
2047            }
2048        } else {
2049            self.expect_token(&Token::LParen)?;
2050            let result = if allow_empty && self.consume_token(&Token::RParen) {
2051                vec![]
2052            } else {
2053                let result = self.parse_comma_separated(Parser::parse_expr)?;
2054                self.expect_token(&Token::RParen)?;
2055                result
2056            };
2057            Ok(result)
2058        }
2059    }
2060
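        /// Parses a `CASE` expression in either the searched or the simple
        /// form; both are shown below for illustration:
        ///
        /// ```sql
        /// CASE WHEN a > 0 THEN 'pos' ELSE 'non-pos' END
        /// CASE x WHEN 1 THEN 'one' WHEN 2 THEN 'two' END
        /// ```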
2061    pub fn parse_case_expr(&mut self) -> Result<Expr, ParserError> {
2062        let mut operand = None;
2063        if !self.parse_keyword(Keyword::WHEN) {
2064            operand = Some(Box::new(self.parse_expr()?));
2065            self.expect_keyword_is(Keyword::WHEN)?;
2066        }
2067        let mut conditions = vec![];
2068        loop {
2069            let condition = self.parse_expr()?;
2070            self.expect_keyword_is(Keyword::THEN)?;
2071            let result = self.parse_expr()?;
2072            conditions.push(CaseWhen { condition, result });
2073            if !self.parse_keyword(Keyword::WHEN) {
2074                break;
2075            }
2076        }
2077        let else_result = if self.parse_keyword(Keyword::ELSE) {
2078            Some(Box::new(self.parse_expr()?))
2079        } else {
2080            None
2081        };
2082        self.expect_keyword_is(Keyword::END)?;
2083        Ok(Expr::Case {
2084            operand,
2085            conditions,
2086            else_result,
2087        })
2088    }
2089
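        /// Parses an optional `FORMAT` clause of a cast. The BigQuery-style
        /// examples below are a sketch of the accepted shape (dialect support
        /// is assumed):
        ///
        /// ```sql
        /// CAST('11-04-2023' AS DATE FORMAT 'MM-DD-YYYY')
        /// CAST(ts AS STRING FORMAT 'HH24:MI' AT TIME ZONE 'UTC')
        /// ```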
2090    pub fn parse_optional_cast_format(&mut self) -> Result<Option<CastFormat>, ParserError> {
2091        if self.parse_keyword(Keyword::FORMAT) {
2092            let value = self.parse_value()?.value;
2093            match self.parse_optional_time_zone()? {
2094                Some(tz) => Ok(Some(CastFormat::ValueAtTimeZone(value, tz))),
2095                None => Ok(Some(CastFormat::Value(value))),
2096            }
2097        } else {
2098            Ok(None)
2099        }
2100    }
2101
2102    pub fn parse_optional_time_zone(&mut self) -> Result<Option<Value>, ParserError> {
2103        if self.parse_keywords(&[Keyword::AT, Keyword::TIME, Keyword::ZONE]) {
2104            self.parse_value().map(|v| Some(v.value))
2105        } else {
2106            Ok(None)
2107        }
2108    }
2109
2110    /// Parse an MSSQL-style `CONVERT` function, in which the target data type comes before the value.
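        ///
        /// A sketch of the accepted shape, where optional trailing style
        /// arguments follow the value:
        ///
        /// ```sql
        /// CONVERT(VARCHAR(10), GETDATE(), 120)
        /// ```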
2111    fn parse_mssql_convert(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2112        self.expect_token(&Token::LParen)?;
2113        let data_type = self.parse_data_type()?;
2114        self.expect_token(&Token::Comma)?;
2115        let expr = self.parse_expr()?;
2116        let styles = if self.consume_token(&Token::Comma) {
2117            self.parse_comma_separated(Parser::parse_expr)?
2118        } else {
2119            Default::default()
2120        };
2121        self.expect_token(&Token::RParen)?;
2122        Ok(Expr::Convert {
2123            is_try,
2124            expr: Box::new(expr),
2125            data_type: Some(data_type),
2126            charset: None,
2127            target_before_value: true,
2128            styles,
2129        })
2130    }
2131
2132    /// Parse a SQL CONVERT function:
2133    ///  - `CONVERT('héhé' USING utf8mb4)` (MySQL)
2134    ///  - `CONVERT('héhé', CHAR CHARACTER SET utf8mb4)` (MySQL)
2135    ///  - `CONVERT(DECIMAL(10, 5), 42)` (MSSQL) - the type comes first
2136    pub fn parse_convert_expr(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2137        if self.dialect.convert_type_before_value() {
2138            return self.parse_mssql_convert(is_try);
2139        }
2140        self.expect_token(&Token::LParen)?;
2141        let expr = self.parse_expr()?;
2142        if self.parse_keyword(Keyword::USING) {
2143            let charset = self.parse_object_name(false)?;
2144            self.expect_token(&Token::RParen)?;
2145            return Ok(Expr::Convert {
2146                is_try,
2147                expr: Box::new(expr),
2148                data_type: None,
2149                charset: Some(charset),
2150                target_before_value: false,
2151                styles: vec![],
2152            });
2153        }
2154        self.expect_token(&Token::Comma)?;
2155        let data_type = self.parse_data_type()?;
2156        let charset = if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
2157            Some(self.parse_object_name(false)?)
2158        } else {
2159            None
2160        };
2161        self.expect_token(&Token::RParen)?;
2162        Ok(Expr::Convert {
2163            is_try,
2164            expr: Box::new(expr),
2165            data_type: Some(data_type),
2166            charset,
2167            target_before_value: false,
2168            styles: vec![],
2169        })
2170    }
2171
2172    /// Parse a SQL CAST function e.g. `CAST(expr AS FLOAT)`
2173    pub fn parse_cast_expr(&mut self, kind: CastKind) -> Result<Expr, ParserError> {
2174        self.expect_token(&Token::LParen)?;
2175        let expr = self.parse_expr()?;
2176        self.expect_keyword_is(Keyword::AS)?;
2177        let data_type = self.parse_data_type()?;
2178        let format = self.parse_optional_cast_format()?;
2179        self.expect_token(&Token::RParen)?;
2180        Ok(Expr::Cast {
2181            kind,
2182            expr: Box::new(expr),
2183            data_type,
2184            format,
2185        })
2186    }
2187
2188    /// Parse a SQL EXISTS expression e.g. `WHERE EXISTS(SELECT ...)`.
2189    pub fn parse_exists_expr(&mut self, negated: bool) -> Result<Expr, ParserError> {
2190        self.expect_token(&Token::LParen)?;
2191        let exists_node = Expr::Exists {
2192            negated,
2193            subquery: self.parse_query()?,
2194        };
2195        self.expect_token(&Token::RParen)?;
2196        Ok(exists_node)
2197    }
2198
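        /// Parses an `EXTRACT` call in either the standard `FROM` form or the
        /// comma form accepted by some dialects; both are shown for
        /// illustration:
        ///
        /// ```sql
        /// EXTRACT(YEAR FROM order_date)
        /// EXTRACT(YEAR, order_date) -- e.g. Snowflake
        /// ```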
2199    pub fn parse_extract_expr(&mut self) -> Result<Expr, ParserError> {
2200        self.expect_token(&Token::LParen)?;
2201        let field = self.parse_date_time_field()?;
2202
2203        let syntax = if self.parse_keyword(Keyword::FROM) {
2204            ExtractSyntax::From
2205        } else if self.consume_token(&Token::Comma)
2206            && dialect_of!(self is SnowflakeDialect | GenericDialect)
2207        {
2208            ExtractSyntax::Comma
2209        } else {
2210            return Err(ParserError::ParserError(
2211                "Expected 'FROM' or ','".to_string(),
2212            ));
2213        };
2214
2215        let expr = self.parse_expr()?;
2216        self.expect_token(&Token::RParen)?;
2217        Ok(Expr::Extract {
2218            field,
2219            expr: Box::new(expr),
2220            syntax,
2221        })
2222    }
2223
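        /// Parses a `CEIL`/`FLOOR` call. The three shapes handled below are
        /// shown for illustration:
        ///
        /// ```sql
        /// CEIL(x)
        /// CEIL(ts TO DAY)
        /// CEIL(x, 2)
        /// ```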
2224    pub fn parse_ceil_floor_expr(&mut self, is_ceil: bool) -> Result<Expr, ParserError> {
2225        self.expect_token(&Token::LParen)?;
2226        let expr = self.parse_expr()?;
2227        // Parse `CEIL/FLOOR(expr)`
2228        let field = if self.parse_keyword(Keyword::TO) {
2229            // Parse `CEIL/FLOOR(expr TO DateTimeField)`
2230            CeilFloorKind::DateTimeField(self.parse_date_time_field()?)
2231        } else if self.consume_token(&Token::Comma) {
2232            // Parse `CEIL/FLOOR(expr, scale)`
2233            match self.parse_value()?.value {
2234                Value::Number(n, s) => CeilFloorKind::Scale(Value::Number(n, s)),
2235                _ => {
2236                    return Err(ParserError::ParserError(
2237                        "Scale field can only be of number type".to_string(),
2238                    ))
2239                }
2240            }
2241        } else {
2242            CeilFloorKind::DateTimeField(DateTimeField::NoDateTime)
2243        };
2244        self.expect_token(&Token::RParen)?;
2245        if is_ceil {
2246            Ok(Expr::Ceil {
2247                expr: Box::new(expr),
2248                field,
2249            })
2250        } else {
2251            Ok(Expr::Floor {
2252                expr: Box::new(expr),
2253                field,
2254            })
2255        }
2256    }
2257
2258    pub fn parse_position_expr(&mut self, ident: Ident) -> Result<Expr, ParserError> {
2259        let between_prec = self.dialect.prec_value(Precedence::Between);
2260        let position_expr = self.maybe_parse(|p| {
2261            // PARSE SELECT POSITION('@' in field)
2262            p.expect_token(&Token::LParen)?;
2263
2264            // Parse the subexpr till the IN keyword
2265            let expr = p.parse_subexpr(between_prec)?;
2266            p.expect_keyword_is(Keyword::IN)?;
2267            let from = p.parse_expr()?;
2268            p.expect_token(&Token::RParen)?;
2269            Ok(Expr::Position {
2270                expr: Box::new(expr),
2271                r#in: Box::new(from),
2272            })
2273        })?;
2274        match position_expr {
2275            Some(expr) => Ok(expr),
2276            // Snowflake supports `position` as an ordinary function call
2277            // without the special `IN` syntax.
2278            None => self.parse_function(ObjectName::from(vec![ident])),
2279        }
2280    }
2281
2282    pub fn parse_substring_expr(&mut self) -> Result<Expr, ParserError> {
2283        // PARSE SUBSTRING (EXPR [FROM 1] [FOR 3])
2284        self.expect_token(&Token::LParen)?;
2285        let expr = self.parse_expr()?;
2286        let mut from_expr = None;
2287        let special = self.consume_token(&Token::Comma);
2288        if special || self.parse_keyword(Keyword::FROM) {
2289            from_expr = Some(self.parse_expr()?);
2290        }
2291
2292        let mut to_expr = None;
2293        if self.parse_keyword(Keyword::FOR) || self.consume_token(&Token::Comma) {
2294            to_expr = Some(self.parse_expr()?);
2295        }
2296        self.expect_token(&Token::RParen)?;
2297
2298        Ok(Expr::Substring {
2299            expr: Box::new(expr),
2300            substring_from: from_expr.map(Box::new),
2301            substring_for: to_expr.map(Box::new),
2302            special,
2303        })
2304    }
2305
2306    pub fn parse_overlay_expr(&mut self) -> Result<Expr, ParserError> {
2307        // PARSE OVERLAY (EXPR PLACING EXPR FROM 1 [FOR 3])
2308        self.expect_token(&Token::LParen)?;
2309        let expr = self.parse_expr()?;
2310        self.expect_keyword_is(Keyword::PLACING)?;
2311        let what_expr = self.parse_expr()?;
2312        self.expect_keyword_is(Keyword::FROM)?;
2313        let from_expr = self.parse_expr()?;
2314        let mut for_expr = None;
2315        if self.parse_keyword(Keyword::FOR) {
2316            for_expr = Some(self.parse_expr()?);
2317        }
2318        self.expect_token(&Token::RParen)?;
2319
2320        Ok(Expr::Overlay {
2321            expr: Box::new(expr),
2322            overlay_what: Box::new(what_expr),
2323            overlay_from: Box::new(from_expr),
2324            overlay_for: for_expr.map(Box::new),
2325        })
2326    }
2327
2328    /// ```sql
2329    /// TRIM ([WHERE] ['text' FROM] 'text')
2330    /// TRIM ('text')
2331    /// TRIM(<expr> [, characters]) -- only Snowflake or BigQuery
2332    /// ```
2333    pub fn parse_trim_expr(&mut self) -> Result<Expr, ParserError> {
2334        self.expect_token(&Token::LParen)?;
2335        let mut trim_where = None;
2336        if let Token::Word(word) = self.peek_token().token {
2337            if [Keyword::BOTH, Keyword::LEADING, Keyword::TRAILING]
2338                .iter()
2339                .any(|d| word.keyword == *d)
2340            {
2341                trim_where = Some(self.parse_trim_where()?);
2342            }
2343        }
2344        let expr = self.parse_expr()?;
2345        if self.parse_keyword(Keyword::FROM) {
2346            let trim_what = Box::new(expr);
2347            let expr = self.parse_expr()?;
2348            self.expect_token(&Token::RParen)?;
2349            Ok(Expr::Trim {
2350                expr: Box::new(expr),
2351                trim_where,
2352                trim_what: Some(trim_what),
2353                trim_characters: None,
2354            })
2355        } else if self.consume_token(&Token::Comma)
2356            && dialect_of!(self is SnowflakeDialect | BigQueryDialect | GenericDialect)
2357        {
2358            let characters = self.parse_comma_separated(Parser::parse_expr)?;
2359            self.expect_token(&Token::RParen)?;
2360            Ok(Expr::Trim {
2361                expr: Box::new(expr),
2362                trim_where: None,
2363                trim_what: None,
2364                trim_characters: Some(characters),
2365            })
2366        } else {
2367            self.expect_token(&Token::RParen)?;
2368            Ok(Expr::Trim {
2369                expr: Box::new(expr),
2370                trim_where,
2371                trim_what: None,
2372                trim_characters: None,
2373            })
2374        }
2375    }
2376
2377    pub fn parse_trim_where(&mut self) -> Result<TrimWhereField, ParserError> {
2378        let next_token = self.next_token();
2379        match &next_token.token {
2380            Token::Word(w) => match w.keyword {
2381                Keyword::BOTH => Ok(TrimWhereField::Both),
2382                Keyword::LEADING => Ok(TrimWhereField::Leading),
2383                Keyword::TRAILING => Ok(TrimWhereField::Trailing),
2384                _ => self.expected("trim_where field", next_token)?,
2385            },
2386            _ => self.expected("trim_where field", next_token),
2387        }
2388    }
2389
2390    /// Parses an array expression `[ex1, ex2, ..]`
2391    /// if `named` is `true`, the expression came from a form like `ARRAY[ex1, ex2]`
2392    pub fn parse_array_expr(&mut self, named: bool) -> Result<Expr, ParserError> {
2393        let exprs = self.parse_comma_separated0(Parser::parse_expr, Token::RBracket)?;
2394        self.expect_token(&Token::RBracket)?;
2395        Ok(Expr::Array(Array { elem: exprs, named }))
2396    }
2397
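        /// Parses the optional `ON OVERFLOW` clause of `LISTAGG`, shown below
        /// in the context of a full call for illustration:
        ///
        /// ```sql
        /// LISTAGG(name, ',' ON OVERFLOW TRUNCATE '...' WITH COUNT)
        /// ```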
2398    pub fn parse_listagg_on_overflow(&mut self) -> Result<Option<ListAggOnOverflow>, ParserError> {
2399        if self.parse_keywords(&[Keyword::ON, Keyword::OVERFLOW]) {
2400            if self.parse_keyword(Keyword::ERROR) {
2401                Ok(Some(ListAggOnOverflow::Error))
2402            } else {
2403                self.expect_keyword_is(Keyword::TRUNCATE)?;
2404                let filler = match self.peek_token().token {
2405                    Token::Word(w)
2406                        if w.keyword == Keyword::WITH || w.keyword == Keyword::WITHOUT =>
2407                    {
2408                        None
2409                    }
2410                    Token::SingleQuotedString(_)
2411                    | Token::EscapedStringLiteral(_)
2412                    | Token::UnicodeStringLiteral(_)
2413                    | Token::NationalStringLiteral(_)
2414                    | Token::HexStringLiteral(_) => Some(Box::new(self.parse_expr()?)),
2415                    _ => self.expected(
2416                        "either filler, WITH, or WITHOUT in LISTAGG",
2417                        self.peek_token(),
2418                    )?,
2419                };
2420                let with_count = self.parse_keyword(Keyword::WITH);
2421                if !with_count && !self.parse_keyword(Keyword::WITHOUT) {
2422                    self.expected("either WITH or WITHOUT in LISTAGG", self.peek_token())?;
2423                }
2424                self.expect_keyword_is(Keyword::COUNT)?;
2425                Ok(Some(ListAggOnOverflow::Truncate { filler, with_count }))
2426            }
2427        } else {
2428            Ok(None)
2429        }
2430    }
2431
2432    // This function parses date/time fields for the EXTRACT function-like
2433    // operator, interval qualifiers, and the ceil/floor operations.
2434    // EXTRACT supports a wider set of date/time fields than interval qualifiers,
2435    // so this function may need to be split in two.
2436    pub fn parse_date_time_field(&mut self) -> Result<DateTimeField, ParserError> {
2437        let next_token = self.next_token();
2438        match &next_token.token {
2439            Token::Word(w) => match w.keyword {
2440                Keyword::YEAR => Ok(DateTimeField::Year),
2441                Keyword::YEARS => Ok(DateTimeField::Years),
2442                Keyword::MONTH => Ok(DateTimeField::Month),
2443                Keyword::MONTHS => Ok(DateTimeField::Months),
2444                Keyword::WEEK => {
2445                    let week_day = if dialect_of!(self is BigQueryDialect | GenericDialect)
2446                        && self.consume_token(&Token::LParen)
2447                    {
2448                        let week_day = self.parse_identifier()?;
2449                        self.expect_token(&Token::RParen)?;
2450                        Some(week_day)
2451                    } else {
2452                        None
2453                    };
2454                    Ok(DateTimeField::Week(week_day))
2455                }
2456                Keyword::WEEKS => Ok(DateTimeField::Weeks),
2457                Keyword::DAY => Ok(DateTimeField::Day),
2458                Keyword::DAYOFWEEK => Ok(DateTimeField::DayOfWeek),
2459                Keyword::DAYOFYEAR => Ok(DateTimeField::DayOfYear),
2460                Keyword::DAYS => Ok(DateTimeField::Days),
2461                Keyword::DATE => Ok(DateTimeField::Date),
2462                Keyword::DATETIME => Ok(DateTimeField::Datetime),
2463                Keyword::HOUR => Ok(DateTimeField::Hour),
2464                Keyword::HOURS => Ok(DateTimeField::Hours),
2465                Keyword::MINUTE => Ok(DateTimeField::Minute),
2466                Keyword::MINUTES => Ok(DateTimeField::Minutes),
2467                Keyword::SECOND => Ok(DateTimeField::Second),
2468                Keyword::SECONDS => Ok(DateTimeField::Seconds),
2469                Keyword::CENTURY => Ok(DateTimeField::Century),
2470                Keyword::DECADE => Ok(DateTimeField::Decade),
2471                Keyword::DOY => Ok(DateTimeField::Doy),
2472                Keyword::DOW => Ok(DateTimeField::Dow),
2473                Keyword::EPOCH => Ok(DateTimeField::Epoch),
2474                Keyword::ISODOW => Ok(DateTimeField::Isodow),
2475                Keyword::ISOYEAR => Ok(DateTimeField::Isoyear),
2476                Keyword::ISOWEEK => Ok(DateTimeField::IsoWeek),
2477                Keyword::JULIAN => Ok(DateTimeField::Julian),
2478                Keyword::MICROSECOND => Ok(DateTimeField::Microsecond),
2479                Keyword::MICROSECONDS => Ok(DateTimeField::Microseconds),
2480                Keyword::MILLENIUM => Ok(DateTimeField::Millenium),
2481                Keyword::MILLENNIUM => Ok(DateTimeField::Millennium),
2482                Keyword::MILLISECOND => Ok(DateTimeField::Millisecond),
2483                Keyword::MILLISECONDS => Ok(DateTimeField::Milliseconds),
2484                Keyword::NANOSECOND => Ok(DateTimeField::Nanosecond),
2485                Keyword::NANOSECONDS => Ok(DateTimeField::Nanoseconds),
2486                Keyword::QUARTER => Ok(DateTimeField::Quarter),
2487                Keyword::TIME => Ok(DateTimeField::Time),
2488                Keyword::TIMEZONE => Ok(DateTimeField::Timezone),
2489                Keyword::TIMEZONE_ABBR => Ok(DateTimeField::TimezoneAbbr),
2490                Keyword::TIMEZONE_HOUR => Ok(DateTimeField::TimezoneHour),
2491                Keyword::TIMEZONE_MINUTE => Ok(DateTimeField::TimezoneMinute),
2492                Keyword::TIMEZONE_REGION => Ok(DateTimeField::TimezoneRegion),
2493                _ if self.dialect.allow_extract_custom() => {
2494                    self.prev_token();
2495                    let custom = self.parse_identifier()?;
2496                    Ok(DateTimeField::Custom(custom))
2497                }
2498                _ => self.expected("date/time field", next_token),
2499            },
2500            Token::SingleQuotedString(_) if self.dialect.allow_extract_single_quotes() => {
2501                self.prev_token();
2502                let custom = self.parse_identifier()?;
2503                Ok(DateTimeField::Custom(custom))
2504            }
2505            _ => self.expected("date/time field", next_token),
2506        }
2507    }
2508
2509    pub fn parse_not(&mut self) -> Result<Expr, ParserError> {
2510        match self.peek_token().token {
2511            Token::Word(w) => match w.keyword {
2512                Keyword::EXISTS => {
2513                    let negated = true;
2514                    let _ = self.parse_keyword(Keyword::EXISTS);
2515                    self.parse_exists_expr(negated)
2516                }
2517                _ => Ok(Expr::UnaryOp {
2518                    op: UnaryOperator::Not,
2519                    expr: Box::new(
2520                        self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?,
2521                    ),
2522                }),
2523            },
2524            _ => Ok(Expr::UnaryOp {
2525                op: UnaryOperator::Not,
2526                expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
2527            }),
2528        }
2529    }
2530
2531    /// Parse expression types that start with a left brace '{'.
2532    /// Examples:
2533    /// ```sql
2534    /// -- Dictionary expr.
2535    /// {'key1': 'value1', 'key2': 'value2'}
2536    ///
2537    /// -- Function call using the ODBC syntax.
2538    /// { fn CONCAT('foo', 'bar') }
2539    /// ```
2540    fn parse_lbrace_expr(&mut self) -> Result<Expr, ParserError> {
2541        let token = self.expect_token(&Token::LBrace)?;
2542
2543        if let Some(fn_expr) = self.maybe_parse_odbc_fn_body()? {
2544            self.expect_token(&Token::RBrace)?;
2545            return Ok(fn_expr);
2546        }
2547
2548        if self.dialect.supports_dictionary_syntax() {
2549            self.prev_token(); // Put back the '{'
2550            return self.parse_duckdb_struct_literal();
2551        }
2552
2553        self.expected("an expression", token)
2554    }
2555
2556    /// Parses fulltext expressions [`sqlparser::ast::Expr::MatchAgainst`]
2557    ///
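        /// For illustration, a MySQL-style fulltext predicate of the kind this
        /// method parses (the `MATCH` keyword itself is consumed before this
        /// method is called):
        ///
        /// ```sql
        /// MATCH (title, body) AGAINST ('database' IN NATURAL LANGUAGE MODE)
        /// ```
        ///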
2558    /// # Errors
2559    /// This method will raise an error if the column list is empty or contains invalid identifiers,
2560    /// the match expression is not a literal string, or if the search modifier is not valid.
2561    pub fn parse_match_against(&mut self) -> Result<Expr, ParserError> {
2562        let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
2563
2564        self.expect_keyword_is(Keyword::AGAINST)?;
2565
2566        self.expect_token(&Token::LParen)?;
2567
2568        // MySQL is quite permissive about the value, so we can't validate it fully at the syntax level.
2569        let match_value = self.parse_value()?.value;
2570
2571        let in_natural_language_mode_keywords = &[
2572            Keyword::IN,
2573            Keyword::NATURAL,
2574            Keyword::LANGUAGE,
2575            Keyword::MODE,
2576        ];
2577
2578        let with_query_expansion_keywords = &[Keyword::WITH, Keyword::QUERY, Keyword::EXPANSION];
2579
2580        let in_boolean_mode_keywords = &[Keyword::IN, Keyword::BOOLEAN, Keyword::MODE];
2581
2582        let opt_search_modifier = if self.parse_keywords(in_natural_language_mode_keywords) {
2583            if self.parse_keywords(with_query_expansion_keywords) {
2584                Some(SearchModifier::InNaturalLanguageModeWithQueryExpansion)
2585            } else {
2586                Some(SearchModifier::InNaturalLanguageMode)
2587            }
2588        } else if self.parse_keywords(in_boolean_mode_keywords) {
2589            Some(SearchModifier::InBooleanMode)
2590        } else if self.parse_keywords(with_query_expansion_keywords) {
2591            Some(SearchModifier::WithQueryExpansion)
2592        } else {
2593            None
2594        };
2595
2596        self.expect_token(&Token::RParen)?;
2597
2598        Ok(Expr::MatchAgainst {
2599            columns,
2600            match_value,
2601            opt_search_modifier,
2602        })
2603    }
2604
2605    /// Parse an `INTERVAL` expression.
2606    ///
2607    /// Some syntactically valid intervals:
2608    ///
2609    /// ```sql
2610    ///   1. INTERVAL '1' DAY
2611    ///   2. INTERVAL '1-1' YEAR TO MONTH
2612    ///   3. INTERVAL '1' SECOND
2613    ///   4. INTERVAL '1:1:1.1' HOUR (5) TO SECOND (5)
2614    ///   5. INTERVAL '1.1' SECOND (2, 2)
2615    ///   6. INTERVAL '1:1' HOUR (5) TO MINUTE (5)
2616    ///   7. (MySQL & BigQuery only): INTERVAL 1 DAY
2617    /// ```
2618    ///
2619    /// Note that we do not currently attempt to parse the quoted value.
2620    pub fn parse_interval(&mut self) -> Result<Expr, ParserError> {
2621        // The SQL standard allows an optional sign before the value string, but
2622        // it is not clear if any implementations support that syntax, so we
2623        // don't currently try to parse it. (The sign can instead be included
2624        // inside the value string.)
2625
2626        // to match the different flavours of INTERVAL syntax, we only allow expressions
2627        // if the dialect requires an interval qualifier,
2628        // see https://github.com/sqlparser-rs/sqlparser-rs/pull/1398 for more details
2629        let value = if self.dialect.require_interval_qualifier() {
2630            // parse a whole expression so `INTERVAL 1 + 1 DAY` is valid
2631            self.parse_expr()?
2632        } else {
2633            // parse a prefix expression so `INTERVAL 1 DAY` is valid, but `INTERVAL 1 + 1 DAY` is not;
2634            // this also means that `INTERVAL '5 days' > INTERVAL '1 day'` is treated properly
2635            self.parse_prefix()?
2636        };
2637
2638        // Following the string literal is a qualifier which indicates the units
2639        // of the duration specified in the string literal.
2640        //
2641        // Note that PostgreSQL allows omitting the qualifier, so we provide
2642        // this more general implementation.
2643        let leading_field = if self.next_token_is_temporal_unit() {
2644            Some(self.parse_date_time_field()?)
2645        } else if self.dialect.require_interval_qualifier() {
2646            return parser_err!(
2647                "INTERVAL requires a unit after the literal value",
2648                self.peek_token().span.start
2649            );
2650        } else {
2651            None
2652        };
2653
2654        let (leading_precision, last_field, fsec_precision) =
2655            if leading_field == Some(DateTimeField::Second) {
2656                // SQL mandates special syntax for `SECOND TO SECOND` literals.
2657                // Instead of
2658                //     `SECOND [(<leading precision>)] TO SECOND[(<fractional seconds precision>)]`
2659                // one must use the special format:
2660                //     `SECOND [( <leading precision> [ , <fractional seconds precision>] )]`
2661                let last_field = None;
2662                let (leading_precision, fsec_precision) = self.parse_optional_precision_scale()?;
2663                (leading_precision, last_field, fsec_precision)
2664            } else {
2665                let leading_precision = self.parse_optional_precision()?;
2666                if self.parse_keyword(Keyword::TO) {
2667                    let last_field = Some(self.parse_date_time_field()?);
2668                    let fsec_precision = if last_field == Some(DateTimeField::Second) {
2669                        self.parse_optional_precision()?
2670                    } else {
2671                        None
2672                    };
2673                    (leading_precision, last_field, fsec_precision)
2674                } else {
2675                    (leading_precision, None, None)
2676                }
2677            };
2678
2679        Ok(Expr::Interval(Interval {
2680            value: Box::new(value),
2681            leading_field,
2682            leading_precision,
2683            last_field,
2684            fractional_seconds_precision: fsec_precision,
2685        }))
2686    }
2687
2688    /// Peek at the next token and determine if it is a temporal unit
2689    /// like `second`.
2690    pub fn next_token_is_temporal_unit(&mut self) -> bool {
2691        if let Token::Word(word) = self.peek_token().token {
2692            matches!(
2693                word.keyword,
2694                Keyword::YEAR
2695                    | Keyword::YEARS
2696                    | Keyword::MONTH
2697                    | Keyword::MONTHS
2698                    | Keyword::WEEK
2699                    | Keyword::WEEKS
2700                    | Keyword::DAY
2701                    | Keyword::DAYS
2702                    | Keyword::HOUR
2703                    | Keyword::HOURS
2704                    | Keyword::MINUTE
2705                    | Keyword::MINUTES
2706                    | Keyword::SECOND
2707                    | Keyword::SECONDS
2708                    | Keyword::CENTURY
2709                    | Keyword::DECADE
2710                    | Keyword::DOW
2711                    | Keyword::DOY
2712                    | Keyword::EPOCH
2713                    | Keyword::ISODOW
2714                    | Keyword::ISOYEAR
2715                    | Keyword::JULIAN
2716                    | Keyword::MICROSECOND
2717                    | Keyword::MICROSECONDS
2718                    | Keyword::MILLENIUM
2719                    | Keyword::MILLENNIUM
2720                    | Keyword::MILLISECOND
2721                    | Keyword::MILLISECONDS
2722                    | Keyword::NANOSECOND
2723                    | Keyword::NANOSECONDS
2724                    | Keyword::QUARTER
2725                    | Keyword::TIMEZONE
2726                    | Keyword::TIMEZONE_HOUR
2727                    | Keyword::TIMEZONE_MINUTE
2728            )
2729        } else {
2730            false
2731        }
2732    }
2733
2734    /// Syntax
2735    /// ```sql
2736    /// -- typed
2737    /// STRUCT<[field_name] field_type, ...>( expr1 [, ... ])
2738    /// -- typeless
2739    /// STRUCT( expr1 [AS field_name] [, ... ])
2740    /// ```
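    ///
    /// For example (illustrative BigQuery-style literals):
    ///
    /// ```sql
    /// STRUCT<x INT64, y STRING>(1, 'foo')
    /// STRUCT(1 AS x, 'foo' AS y)
    /// ```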
2741    fn parse_struct_literal(&mut self) -> Result<Expr, ParserError> {
2742        // Parse the field definitions if they exist: `<[field_name] field_type, ...>`
2743        self.prev_token();
2744        let (fields, trailing_bracket) =
2745            self.parse_struct_type_def(Self::parse_struct_field_def)?;
2746        if trailing_bracket.0 {
2747            return parser_err!(
2748                "unmatched > in STRUCT literal",
2749                self.peek_token().span.start
2750            );
2751        }
2752
2753        // Parse the struct values `(expr1 [, ... ])`
2754        self.expect_token(&Token::LParen)?;
2755        let values = self
2756            .parse_comma_separated(|parser| parser.parse_struct_field_expr(!fields.is_empty()))?;
2757        self.expect_token(&Token::RParen)?;
2758
2759        Ok(Expr::Struct { values, fields })
2760    }
2761
2762    /// Parse an expression value for a struct literal
2763    /// Syntax
2764    /// ```sql
2765    /// expr [AS name]
2766    /// ```
2767    ///
2768    /// For BigQuery [1], the `typed_syntax` parameter is set to true if the expression
2769    /// is to be parsed as a field expression declared using the typed
2770    /// struct syntax [2], and false if using the typeless struct syntax [3].
2771    ///
2772    /// [1]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#constructing_a_struct
2773    /// [2]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#typed_struct_syntax
2774    /// [3]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#typeless_struct_syntax
2775    fn parse_struct_field_expr(&mut self, typed_syntax: bool) -> Result<Expr, ParserError> {
2776        let expr = self.parse_expr()?;
2777        if self.parse_keyword(Keyword::AS) {
2778            if typed_syntax {
2779                return parser_err!("Typed syntax does not allow AS", {
2780                    self.prev_token();
2781                    self.peek_token().span.start
2782                });
2783            }
2784            let field_name = self.parse_identifier()?;
2785            Ok(Expr::Named {
2786                expr: expr.into(),
2787                name: field_name,
2788            })
2789        } else {
2790            Ok(expr)
2791        }
2792    }
2793
2794    /// Parse a Struct type definition as a sequence of field-value pairs.
2795    /// The syntax of the struct elements differs by dialect, so it is customised
2796    /// via the `elem_parser` argument.
2797    ///
2798    /// Syntax
2799    /// ```sql
2800    /// Hive:
2801    /// STRUCT<field_name: field_type>
2802    ///
2803    /// BigQuery:
2804    /// STRUCT<[field_name] field_type>
2805    /// ```
2806    fn parse_struct_type_def<F>(
2807        &mut self,
2808        mut elem_parser: F,
2809    ) -> Result<(Vec<StructField>, MatchedTrailingBracket), ParserError>
2810    where
2811        F: FnMut(&mut Parser<'a>) -> Result<(StructField, MatchedTrailingBracket), ParserError>,
2812    {
2813        let start_token = self.peek_token();
2814        self.expect_keyword_is(Keyword::STRUCT)?;
2815
2816        // Nothing to do if we have no type information.
2817        if Token::Lt != self.peek_token() {
2818            return Ok((Default::default(), false.into()));
2819        }
2820        self.next_token();
2821
2822        let mut field_defs = vec![];
2823        let trailing_bracket = loop {
2824            let (def, trailing_bracket) = elem_parser(self)?;
2825            field_defs.push(def);
2826            if !self.consume_token(&Token::Comma) {
2827                break trailing_bracket;
2828            }
2829
2830            // Angle brackets are balanced so we only expect the trailing `>>` after
2831            // we've matched all field types for the current struct.
2832            // e.g. this is invalid syntax `STRUCT<STRUCT<INT>>>, INT>(NULL)`
2833            if trailing_bracket.0 {
2834                return parser_err!("unmatched > in STRUCT definition", start_token.span.start);
2835            }
2836        };
2837
2838        Ok((
2839            field_defs,
2840            self.expect_closing_angle_bracket(trailing_bracket)?,
2841        ))
2842    }
2843
2844    /// Duckdb Struct Data Type <https://duckdb.org/docs/sql/data_types/struct.html#retrieving-from-structs>
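    ///
    /// A DuckDB struct type definition looks like the following (illustrative):
    ///
    /// ```sql
    /// STRUCT(name VARCHAR, age INTEGER)
    /// ```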
2845    fn parse_duckdb_struct_type_def(&mut self) -> Result<Vec<StructField>, ParserError> {
2846        self.expect_keyword_is(Keyword::STRUCT)?;
2847        self.expect_token(&Token::LParen)?;
2848        let struct_body = self.parse_comma_separated(|parser| {
2849            let field_name = parser.parse_identifier()?;
2850            let field_type = parser.parse_data_type()?;
2851
2852            Ok(StructField {
2853                field_name: Some(field_name),
2854                field_type,
2855            })
2856        });
2857        self.expect_token(&Token::RParen)?;
2858        struct_body
2859    }
2860
2861    /// Parse a field definition in a [struct] or [tuple].
2862    /// Syntax:
2863    ///
2864    /// ```sql
2865    /// [field_name] field_type
2866    /// ```
2867    ///
2868    /// [struct]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#declaring_a_struct_type
2869    /// [tuple]: https://clickhouse.com/docs/en/sql-reference/data-types/tuple
2870    fn parse_struct_field_def(
2871        &mut self,
2872    ) -> Result<(StructField, MatchedTrailingBracket), ParserError> {
2873        // Look beyond the next item to infer whether both field name
2874        // and type are specified.
2875        let is_anonymous_field = !matches!(
2876            (self.peek_nth_token(0).token, self.peek_nth_token(1).token),
2877            (Token::Word(_), Token::Word(_))
2878        );
2879
2880        let field_name = if is_anonymous_field {
2881            None
2882        } else {
2883            Some(self.parse_identifier()?)
2884        };
2885
2886        let (field_type, trailing_bracket) = self.parse_data_type_helper()?;
2887
2888        Ok((
2889            StructField {
2890                field_name,
2891                field_type,
2892            },
2893            trailing_bracket,
2894        ))
2895    }
2896
2897    /// DuckDB specific: Parse a [union] type definition as a sequence of field-value pairs.
2898    ///
2899    /// Syntax:
2900    ///
2901    /// ```sql
2902    /// UNION(field_name field_type[,...])
2903    /// ```
2904    ///
2905    /// [union]: https://duckdb.org/docs/sql/data_types/union.html
2906    fn parse_union_type_def(&mut self) -> Result<Vec<UnionField>, ParserError> {
2907        self.expect_keyword_is(Keyword::UNION)?;
2908
2909        self.expect_token(&Token::LParen)?;
2910
2911        let fields = self.parse_comma_separated(|p| {
2912            Ok(UnionField {
2913                field_name: p.parse_identifier()?,
2914                field_type: p.parse_data_type()?,
2915            })
2916        })?;
2917
2918        self.expect_token(&Token::RParen)?;
2919
2920        Ok(fields)
2921    }
2922
2923    /// DuckDB specific: Parse a duckdb [dictionary]
2924    ///
2925    /// Syntax:
2926    ///
2927    /// ```sql
2928    /// {'field_name': expr1[, ... ]}
2929    /// ```
2930    ///
2931    /// [dictionary]: https://duckdb.org/docs/sql/data_types/struct#creating-structs
2932    fn parse_duckdb_struct_literal(&mut self) -> Result<Expr, ParserError> {
2933        self.expect_token(&Token::LBrace)?;
2934
2935        let fields =
2936            self.parse_comma_separated0(Self::parse_duckdb_dictionary_field, Token::RBrace)?;
2937
2938        self.expect_token(&Token::RBrace)?;
2939
2940        Ok(Expr::Dictionary(fields))
2941    }
2942
2943    /// Parse a field for a duckdb [dictionary]
2944    ///
2945    /// Syntax
2946    ///
2947    /// ```sql
2948    /// 'name': expr
2949    /// ```
2950    ///
2951    /// [dictionary]: https://duckdb.org/docs/sql/data_types/struct#creating-structs
2952    fn parse_duckdb_dictionary_field(&mut self) -> Result<DictionaryField, ParserError> {
2953        let key = self.parse_identifier()?;
2954
2955        self.expect_token(&Token::Colon)?;
2956
2957        let expr = self.parse_expr()?;
2958
2959        Ok(DictionaryField {
2960            key,
2961            value: Box::new(expr),
2962        })
2963    }
2964
2965    /// DuckDB specific: Parse a duckdb [map]
2966    ///
2967    /// Syntax:
2968    ///
2969    /// ```sql
2970    /// Map {key1: value1[, ... ]}
2971    /// ```
2972    ///
2973    /// [map]: https://duckdb.org/docs/sql/data_types/map.html#creating-maps
2974    fn parse_duckdb_map_literal(&mut self) -> Result<Expr, ParserError> {
2975        self.expect_token(&Token::LBrace)?;
2976        let fields = self.parse_comma_separated0(Self::parse_duckdb_map_field, Token::RBrace)?;
2977        self.expect_token(&Token::RBrace)?;
2978        Ok(Expr::Map(Map { entries: fields }))
2979    }
2980
2981    /// Parse a field for a duckdb [map]
2982    ///
2983    /// Syntax
2984    ///
2985    /// ```sql
2986    /// key: value
2987    /// ```
2988    ///
2989    /// [map]: https://duckdb.org/docs/sql/data_types/map.html#creating-maps
2990    fn parse_duckdb_map_field(&mut self) -> Result<MapEntry, ParserError> {
2991        let key = self.parse_expr()?;
2992
2993        self.expect_token(&Token::Colon)?;
2994
2995        let value = self.parse_expr()?;
2996
2997        Ok(MapEntry {
2998            key: Box::new(key),
2999            value: Box::new(value),
3000        })
3001    }
3002
3003    /// Parse clickhouse [map]
3004    ///
3005    /// Syntax
3006    ///
3007    /// ```sql
3008    /// Map(key_data_type, value_data_type)
3009    /// ```
3010    ///
3011    /// [map]: https://clickhouse.com/docs/en/sql-reference/data-types/map
3012    fn parse_click_house_map_def(&mut self) -> Result<(DataType, DataType), ParserError> {
3013        self.expect_keyword_is(Keyword::MAP)?;
3014        self.expect_token(&Token::LParen)?;
3015        let key_data_type = self.parse_data_type()?;
3016        self.expect_token(&Token::Comma)?;
3017        let value_data_type = self.parse_data_type()?;
3018        self.expect_token(&Token::RParen)?;
3019
3020        Ok((key_data_type, value_data_type))
3021    }
3022
3023    /// Parse clickhouse [tuple]
3024    ///
3025    /// Syntax
3026    ///
3027    /// ```sql
3028    /// Tuple([field_name] field_type, ...)
3029    /// ```
3030    ///
3031    /// [tuple]: https://clickhouse.com/docs/en/sql-reference/data-types/tuple
3032    fn parse_click_house_tuple_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3033        self.expect_keyword_is(Keyword::TUPLE)?;
3034        self.expect_token(&Token::LParen)?;
3035        let mut field_defs = vec![];
3036        loop {
3037            let (def, _) = self.parse_struct_field_def()?;
3038            field_defs.push(def);
3039            if !self.consume_token(&Token::Comma) {
3040                break;
3041            }
3042        }
3043        self.expect_token(&Token::RParen)?;
3044
3045        Ok(field_defs)
3046    }
3047
3048    /// For nested types that use the angle bracket syntax, this matches either
3049    /// `>`, `>>` or nothing depending on which variant is expected (specified by the previously
3050    /// matched `trailing_bracket` argument). It returns whether there is a trailing
3051    /// `>` left to be matched (i.e. whether `>>` was matched).
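    ///
    /// For example (illustrative), when parsing the nested type below, the inner
    /// struct's closing bracket is part of the `>>` token, so one trailing `>`
    /// remains for the outer struct:
    ///
    /// ```sql
    /// STRUCT<a STRUCT<b INT64>>
    /// ```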
3052    fn expect_closing_angle_bracket(
3053        &mut self,
3054        trailing_bracket: MatchedTrailingBracket,
3055    ) -> Result<MatchedTrailingBracket, ParserError> {
3056        let trailing_bracket = if !trailing_bracket.0 {
3057            match self.peek_token().token {
3058                Token::Gt => {
3059                    self.next_token();
3060                    false.into()
3061                }
3062                Token::ShiftRight => {
3063                    self.next_token();
3064                    true.into()
3065                }
3066                _ => return self.expected(">", self.peek_token()),
3067            }
3068        } else {
3069            false.into()
3070        };
3071
3072        Ok(trailing_bracket)
3073    }
3074
3075    /// Parse an operator following an expression
3076    pub fn parse_infix(&mut self, expr: Expr, precedence: u8) -> Result<Expr, ParserError> {
3077        // allow the dialect to override infix parsing
3078        if let Some(infix) = self.dialect.parse_infix(self, &expr, precedence) {
3079            return infix;
3080        }
3081
3082        let dialect = self.dialect;
3083
3084        self.advance_token();
3085        let tok = self.get_current_token();
3086        let tok_index = self.get_current_index();
3087        let span = tok.span;
3088        let regular_binary_operator = match &tok.token {
3089            Token::Spaceship => Some(BinaryOperator::Spaceship),
3090            Token::DoubleEq => Some(BinaryOperator::Eq),
3091            Token::Eq => Some(BinaryOperator::Eq),
3092            Token::Neq => Some(BinaryOperator::NotEq),
3093            Token::Gt => Some(BinaryOperator::Gt),
3094            Token::GtEq => Some(BinaryOperator::GtEq),
3095            Token::Lt => Some(BinaryOperator::Lt),
3096            Token::LtEq => Some(BinaryOperator::LtEq),
3097            Token::Plus => Some(BinaryOperator::Plus),
3098            Token::Minus => Some(BinaryOperator::Minus),
3099            Token::Mul => Some(BinaryOperator::Multiply),
3100            Token::Mod => Some(BinaryOperator::Modulo),
3101            Token::StringConcat => Some(BinaryOperator::StringConcat),
3102            Token::Pipe => Some(BinaryOperator::BitwiseOr),
3103            Token::Caret => {
3104                // In PostgreSQL, ^ stands for the exponentiation operation,
3105                // and # stands for XOR. See https://www.postgresql.org/docs/current/functions-math.html
3106                if dialect_is!(dialect is PostgreSqlDialect) {
3107                    Some(BinaryOperator::PGExp)
3108                } else {
3109                    Some(BinaryOperator::BitwiseXor)
3110                }
3111            }
3112            Token::Ampersand => Some(BinaryOperator::BitwiseAnd),
3113            Token::Div => Some(BinaryOperator::Divide),
3114            Token::DuckIntDiv if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
3115                Some(BinaryOperator::DuckIntegerDivide)
3116            }
3117            Token::ShiftLeft if dialect_is!(dialect is PostgreSqlDialect | DuckDbDialect | GenericDialect | RedshiftSqlDialect) => {
3118                Some(BinaryOperator::PGBitwiseShiftLeft)
3119            }
3120            Token::ShiftRight if dialect_is!(dialect is PostgreSqlDialect | DuckDbDialect | GenericDialect | RedshiftSqlDialect) => {
3121                Some(BinaryOperator::PGBitwiseShiftRight)
3122            }
3123            Token::Sharp if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3124                Some(BinaryOperator::PGBitwiseXor)
3125            }
3126            Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3127                Some(BinaryOperator::PGOverlap)
3128            }
3129            Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3130                Some(BinaryOperator::PGOverlap)
3131            }
3132            Token::CaretAt if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3133                Some(BinaryOperator::PGStartsWith)
3134            }
3135            Token::Tilde => Some(BinaryOperator::PGRegexMatch),
3136            Token::TildeAsterisk => Some(BinaryOperator::PGRegexIMatch),
3137            Token::ExclamationMarkTilde => Some(BinaryOperator::PGRegexNotMatch),
3138            Token::ExclamationMarkTildeAsterisk => Some(BinaryOperator::PGRegexNotIMatch),
3139            Token::DoubleTilde => Some(BinaryOperator::PGLikeMatch),
3140            Token::DoubleTildeAsterisk => Some(BinaryOperator::PGILikeMatch),
3141            Token::ExclamationMarkDoubleTilde => Some(BinaryOperator::PGNotLikeMatch),
3142            Token::ExclamationMarkDoubleTildeAsterisk => Some(BinaryOperator::PGNotILikeMatch),
3143            Token::Arrow => Some(BinaryOperator::Arrow),
3144            Token::LongArrow => Some(BinaryOperator::LongArrow),
3145            Token::HashArrow => Some(BinaryOperator::HashArrow),
3146            Token::HashLongArrow => Some(BinaryOperator::HashLongArrow),
3147            Token::AtArrow => Some(BinaryOperator::AtArrow),
3148            Token::ArrowAt => Some(BinaryOperator::ArrowAt),
3149            Token::HashMinus => Some(BinaryOperator::HashMinus),
3150            Token::AtQuestion => Some(BinaryOperator::AtQuestion),
3151            Token::AtAt => Some(BinaryOperator::AtAt),
3152            Token::Question => Some(BinaryOperator::Question),
3153            Token::QuestionAnd => Some(BinaryOperator::QuestionAnd),
3154            Token::QuestionPipe => Some(BinaryOperator::QuestionPipe),
3155            Token::CustomBinaryOperator(s) => Some(BinaryOperator::Custom(s.clone())),
3156            Token::DoubleSharp if self.dialect.supports_geometric_types() => {
3157                Some(BinaryOperator::DoubleHash)
3158            }
3159
3160            Token::AmpersandLeftAngleBracket if self.dialect.supports_geometric_types() => {
3161                Some(BinaryOperator::AndLt)
3162            }
3163            Token::AmpersandRightAngleBracket if self.dialect.supports_geometric_types() => {
3164                Some(BinaryOperator::AndGt)
3165            }
3166            Token::QuestionMarkDash if self.dialect.supports_geometric_types() => {
3167                Some(BinaryOperator::QuestionDash)
3168            }
3169            Token::AmpersandLeftAngleBracketVerticalBar
3170                if self.dialect.supports_geometric_types() =>
3171            {
3172                Some(BinaryOperator::AndLtPipe)
3173            }
3174            Token::VerticalBarAmpersandRightAngleBracket
3175                if self.dialect.supports_geometric_types() =>
3176            {
3177                Some(BinaryOperator::PipeAndGt)
3178            }
3179            Token::TwoWayArrow if self.dialect.supports_geometric_types() => {
3180                Some(BinaryOperator::LtDashGt)
3181            }
3182            Token::LeftAngleBracketCaret if self.dialect.supports_geometric_types() => {
3183                Some(BinaryOperator::LtCaret)
3184            }
3185            Token::RightAngleBracketCaret if self.dialect.supports_geometric_types() => {
3186                Some(BinaryOperator::GtCaret)
3187            }
3188            Token::QuestionMarkSharp if self.dialect.supports_geometric_types() => {
3189                Some(BinaryOperator::QuestionHash)
3190            }
3191            Token::QuestionMarkDoubleVerticalBar if self.dialect.supports_geometric_types() => {
3192                Some(BinaryOperator::QuestionDoublePipe)
3193            }
3194            Token::QuestionMarkDashVerticalBar if self.dialect.supports_geometric_types() => {
3195                Some(BinaryOperator::QuestionDashPipe)
3196            }
3197            Token::TildeEqual if self.dialect.supports_geometric_types() => {
3198                Some(BinaryOperator::TildeEq)
3199            }
3200            Token::ShiftLeftVerticalBar if self.dialect.supports_geometric_types() => {
3201                Some(BinaryOperator::LtLtPipe)
3202            }
3203            Token::VerticalBarShiftRight if self.dialect.supports_geometric_types() => {
3204                Some(BinaryOperator::PipeGtGt)
3205            }
3206            Token::AtSign if self.dialect.supports_geometric_types() => Some(BinaryOperator::At),
3207
3208            Token::Word(w) => match w.keyword {
3209                Keyword::AND => Some(BinaryOperator::And),
3210                Keyword::OR => Some(BinaryOperator::Or),
3211                Keyword::XOR => Some(BinaryOperator::Xor),
3212                Keyword::OVERLAPS => Some(BinaryOperator::Overlaps),
3213                Keyword::OPERATOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3214                    self.expect_token(&Token::LParen)?;
3215                    // There are special rules for operator names in
3216                    // PostgreSQL, so we cannot use 'parse_object'
3217                    // or similar.
3218                    // See https://www.postgresql.org/docs/current/sql-createoperator.html
3219                    let mut idents = vec![];
3220                    loop {
3221                        self.advance_token();
3222                        idents.push(self.get_current_token().to_string());
3223                        if !self.consume_token(&Token::Period) {
3224                            break;
3225                        }
3226                    }
3227                    self.expect_token(&Token::RParen)?;
3228                    Some(BinaryOperator::PGCustomBinaryOperator(idents))
3229                }
3230                _ => None,
3231            },
3232            _ => None,
3233        };
3234
3235        let tok = self.token_at(tok_index);
3236        if let Some(op) = regular_binary_operator {
3237            if let Some(keyword) =
3238                self.parse_one_of_keywords(&[Keyword::ANY, Keyword::ALL, Keyword::SOME])
3239            {
3240                self.expect_token(&Token::LParen)?;
3241                let right = if self.peek_sub_query() {
3242                    // We have a subquery ahead (SELECT/WITH ...), so we need to rewind and
3243                    // use the parenthesis for parsing the subquery as an expression.
3244                    self.prev_token(); // LParen
3245                    self.parse_subexpr(precedence)?
3246                } else {
3247                    // Non-subquery expression
3248                    let right = self.parse_subexpr(precedence)?;
3249                    self.expect_token(&Token::RParen)?;
3250                    right
3251                };
3252
3253                if !matches!(
3254                    op,
3255                    BinaryOperator::Gt
3256                        | BinaryOperator::Lt
3257                        | BinaryOperator::GtEq
3258                        | BinaryOperator::LtEq
3259                        | BinaryOperator::Eq
3260                        | BinaryOperator::NotEq
3261                ) {
3262                    return parser_err!(
3263                        format!(
3264                        "Expected one of [=, >, <, >=, <=, !=] as comparison operator, found: {op}"
3265                    ),
3266                        span.start
3267                    );
3268                };
3269
3270                Ok(match keyword {
3271                    Keyword::ALL => Expr::AllOp {
3272                        left: Box::new(expr),
3273                        compare_op: op,
3274                        right: Box::new(right),
3275                    },
3276                    Keyword::ANY | Keyword::SOME => Expr::AnyOp {
3277                        left: Box::new(expr),
3278                        compare_op: op,
3279                        right: Box::new(right),
3280                        is_some: keyword == Keyword::SOME,
3281                    },
3282                    _ => unreachable!(),
3283                })
3284            } else {
3285                Ok(Expr::BinaryOp {
3286                    left: Box::new(expr),
3287                    op,
3288                    right: Box::new(self.parse_subexpr(precedence)?),
3289                })
3290            }
3291        } else if let Token::Word(w) = &tok.token {
3292            match w.keyword {
3293                Keyword::IS => {
3294                    if self.parse_keyword(Keyword::NULL) {
3295                        Ok(Expr::IsNull(Box::new(expr)))
3296                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
3297                        Ok(Expr::IsNotNull(Box::new(expr)))
3298                    } else if self.parse_keywords(&[Keyword::TRUE]) {
3299                        Ok(Expr::IsTrue(Box::new(expr)))
3300                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::TRUE]) {
3301                        Ok(Expr::IsNotTrue(Box::new(expr)))
3302                    } else if self.parse_keywords(&[Keyword::FALSE]) {
3303                        Ok(Expr::IsFalse(Box::new(expr)))
3304                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::FALSE]) {
3305                        Ok(Expr::IsNotFalse(Box::new(expr)))
3306                    } else if self.parse_keywords(&[Keyword::UNKNOWN]) {
3307                        Ok(Expr::IsUnknown(Box::new(expr)))
3308                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::UNKNOWN]) {
3309                        Ok(Expr::IsNotUnknown(Box::new(expr)))
3310                    } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::FROM]) {
3311                        let expr2 = self.parse_expr()?;
3312                        Ok(Expr::IsDistinctFrom(Box::new(expr), Box::new(expr2)))
3313                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::DISTINCT, Keyword::FROM])
3314                    {
3315                        let expr2 = self.parse_expr()?;
3316                        Ok(Expr::IsNotDistinctFrom(Box::new(expr), Box::new(expr2)))
3317                    } else if let Ok(is_normalized) = self.parse_unicode_is_normalized(expr) {
3318                        Ok(is_normalized)
3319                    } else {
3320                        self.expected(
3321                            "[NOT] NULL | TRUE | FALSE | UNKNOWN | [NOT] DISTINCT FROM | [form] NORMALIZED after IS",
3322                            self.peek_token(),
3323                        )
3324                    }
3325                }
3326                Keyword::AT => {
3327                    self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
3328                    Ok(Expr::AtTimeZone {
3329                        timestamp: Box::new(expr),
3330                        time_zone: Box::new(self.parse_subexpr(precedence)?),
3331                    })
3332                }
3333                Keyword::NOT
3334                | Keyword::IN
3335                | Keyword::BETWEEN
3336                | Keyword::LIKE
3337                | Keyword::ILIKE
3338                | Keyword::SIMILAR
3339                | Keyword::REGEXP
3340                | Keyword::RLIKE => {
3341                    self.prev_token();
3342                    let negated = self.parse_keyword(Keyword::NOT);
3343                    let regexp = self.parse_keyword(Keyword::REGEXP);
3344                    let rlike = self.parse_keyword(Keyword::RLIKE);
3345                    if regexp || rlike {
3346                        Ok(Expr::RLike {
3347                            negated,
3348                            expr: Box::new(expr),
3349                            pattern: Box::new(
3350                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3351                            ),
3352                            regexp,
3353                        })
3354                    } else if self.parse_keyword(Keyword::IN) {
3355                        self.parse_in(expr, negated)
3356                    } else if self.parse_keyword(Keyword::BETWEEN) {
3357                        self.parse_between(expr, negated)
3358                    } else if self.parse_keyword(Keyword::LIKE) {
3359                        Ok(Expr::Like {
3360                            negated,
3361                            any: self.parse_keyword(Keyword::ANY),
3362                            expr: Box::new(expr),
3363                            pattern: Box::new(
3364                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3365                            ),
3366                            escape_char: self.parse_escape_char()?,
3367                        })
3368                    } else if self.parse_keyword(Keyword::ILIKE) {
3369                        Ok(Expr::ILike {
3370                            negated,
3371                            any: self.parse_keyword(Keyword::ANY),
3372                            expr: Box::new(expr),
3373                            pattern: Box::new(
3374                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3375                            ),
3376                            escape_char: self.parse_escape_char()?,
3377                        })
3378                    } else if self.parse_keywords(&[Keyword::SIMILAR, Keyword::TO]) {
3379                        Ok(Expr::SimilarTo {
3380                            negated,
3381                            expr: Box::new(expr),
3382                            pattern: Box::new(
3383                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3384                            ),
3385                            escape_char: self.parse_escape_char()?,
3386                        })
3387                    } else {
3388                        self.expected("IN or BETWEEN after NOT", self.peek_token())
3389                    }
3390                }
3391                // Can only happen if `get_next_precedence` got out of sync with this function
3392                _ => parser_err!(
3393                    format!("No infix parser for token {:?}", tok.token),
3394                    tok.span.start
3395                ),
3396            }
3397        } else if Token::DoubleColon == *tok {
3398            Ok(Expr::Cast {
3399                kind: CastKind::DoubleColon,
3400                expr: Box::new(expr),
3401                data_type: self.parse_data_type()?,
3402                format: None,
3403            })
3404        } else if Token::ExclamationMark == *tok && self.dialect.supports_factorial_operator() {
3405            Ok(Expr::UnaryOp {
3406                op: UnaryOperator::PGPostfixFactorial,
3407                expr: Box::new(expr),
3408            })
3409        } else if Token::LBracket == *tok && self.dialect.supports_partiql()
3410            || (dialect_of!(self is SnowflakeDialect | GenericDialect) && Token::Colon == *tok)
3411        {
3412            self.prev_token();
3413            self.parse_json_access(expr)
3414        } else {
3415            // Can only happen if `get_next_precedence` got out of sync with this function
3416            parser_err!(
3417                format!("No infix parser for token {:?}", tok.token),
3418                tok.span.start
3419            )
3420        }
3421    }
3422
3423    /// Parse the `ESCAPE CHAR` portion of `LIKE`, `ILIKE`, and `SIMILAR TO`
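    ///
    /// For example (illustrative), this parses the `ESCAPE '!'` part of:
    ///
    /// ```sql
    /// name LIKE '100!%' ESCAPE '!'
    /// ```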
3424    pub fn parse_escape_char(&mut self) -> Result<Option<String>, ParserError> {
3425        if self.parse_keyword(Keyword::ESCAPE) {
3426            Ok(Some(self.parse_literal_string()?))
3427        } else {
3428            Ok(None)
3429        }
3430    }
3431
3432    /// Parses an array subscript like
3433    /// * `[:]`
3434    /// * `[l]`
3435    /// * `[l:]`
3436    /// * `[:u]`
3437    /// * `[l:u]`
3438    /// * `[l:u:s]`
3439    ///
3440    /// Parser is right after `[`
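    ///
    /// For example (illustrative), given the query below this method consumes
    /// the `2:5:2]` portion:
    ///
    /// ```sql
    /// SELECT arr[2:5:2] FROM t
    /// ```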
3441    fn parse_subscript_inner(&mut self) -> Result<Subscript, ParserError> {
3442        // at either `<lower>:(rest)` or `:(rest)]`
3443        let lower_bound = if self.consume_token(&Token::Colon) {
3444            None
3445        } else {
3446            Some(self.parse_expr()?)
3447        };
3448
3449        // check for end
3450        if self.consume_token(&Token::RBracket) {
3451            if let Some(lower_bound) = lower_bound {
3452                return Ok(Subscript::Index { index: lower_bound });
3453            };
3454            return Ok(Subscript::Slice {
3455                lower_bound,
3456                upper_bound: None,
3457                stride: None,
3458            });
3459        }
3460
3461        // consume the `:`
3462        if lower_bound.is_some() {
3463            self.expect_token(&Token::Colon)?;
3464        }
3465
3466        // we are now at either `]` or `<upper>(rest)]`
3467        let upper_bound = if self.consume_token(&Token::RBracket) {
3468            return Ok(Subscript::Slice {
3469                lower_bound,
3470                upper_bound: None,
3471                stride: None,
3472            });
3473        } else {
3474            Some(self.parse_expr()?)
3475        };
3476
3477        // check for end
3478        if self.consume_token(&Token::RBracket) {
3479            return Ok(Subscript::Slice {
3480                lower_bound,
3481                upper_bound,
3482                stride: None,
3483            });
3484        }
3485
3486        // we are now at `:]` or `:stride]`
3487        self.expect_token(&Token::Colon)?;
3488        let stride = if self.consume_token(&Token::RBracket) {
3489            None
3490        } else {
3491            Some(self.parse_expr()?)
3492        };
3493
3494        if stride.is_some() {
3495            self.expect_token(&Token::RBracket)?;
3496        }
3497
3498        Ok(Subscript::Slice {
3499            lower_bound,
3500            upper_bound,
3501            stride,
3502        })
3503    }
3504
3505    /// Parse multi-dimensional array access like `[1:3][1][1]`
3506    pub fn parse_multi_dim_subscript(
3507        &mut self,
3508        chain: &mut Vec<AccessExpr>,
3509    ) -> Result<(), ParserError> {
3510        while self.consume_token(&Token::LBracket) {
3511            self.parse_subscript(chain)?;
3512        }
3513        Ok(())
3514    }
3515
3516    /// Parses an array subscript like `[1:3]`
3517    ///
3518    /// Parser is right after `[`
3519    fn parse_subscript(&mut self, chain: &mut Vec<AccessExpr>) -> Result<(), ParserError> {
3520        let subscript = self.parse_subscript_inner()?;
3521        chain.push(AccessExpr::Subscript(subscript));
3522        Ok(())
3523    }
3524
3525    fn parse_json_path_object_key(&mut self) -> Result<JsonPathElem, ParserError> {
3526        let token = self.next_token();
3527        match token.token {
3528            Token::Word(Word {
3529                value,
3530                // path segments in Snowflake dot notation can be unquoted or double-quoted
3531                quote_style: quote_style @ (Some('"') | None),
3532                // some experimentation suggests that Snowflake permits
3533                // any keyword here unquoted.
3534                keyword: _,
3535            }) => Ok(JsonPathElem::Dot {
3536                key: value,
3537                quoted: quote_style.is_some(),
3538            }),
3539
3540            // This token should never be generated on snowflake or generic
3541            // dialects, but we handle it just in case this is used on future
3542            // dialects.
3543            Token::DoubleQuotedString(key) => Ok(JsonPathElem::Dot { key, quoted: true }),
3544
3545            _ => self.expected("variant object key name", token),
3546        }
3547    }
3548
3549    fn parse_json_access(&mut self, expr: Expr) -> Result<Expr, ParserError> {
3550        let path = self.parse_json_path()?;
3551        Ok(Expr::JsonAccess {
3552            value: Box::new(expr),
3553            path,
3554        })
3555    }
3556
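    /// Parse a JSON path following an expression, e.g. the Snowflake-style
    /// dot/bracket notation shown below (an illustrative sketch):
    ///
    /// ```sql
    /// SELECT v:name.first, v:items[0] FROM t
    /// ```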
3557    fn parse_json_path(&mut self) -> Result<JsonPath, ParserError> {
3558        let mut path = Vec::new();
3559        loop {
3560            match self.next_token().token {
3561                Token::Colon if path.is_empty() => {
3562                    path.push(self.parse_json_path_object_key()?);
3563                }
3564                Token::Period if !path.is_empty() => {
3565                    path.push(self.parse_json_path_object_key()?);
3566                }
3567                Token::LBracket => {
3568                    let key = self.parse_expr()?;
3569                    self.expect_token(&Token::RBracket)?;
3570
3571                    path.push(JsonPathElem::Bracket { key });
3572                }
3573                _ => {
3574                    self.prev_token();
3575                    break;
3576                }
3577            };
3578        }
3579
3580        debug_assert!(!path.is_empty());
3581        Ok(JsonPath { path })
3582    }
3583
3584    /// Parses the parens following the `[ NOT ] IN` operator.
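    ///
    /// Forms handled here include, for example (illustrative):
    ///
    /// ```sql
    /// x IN (1, 2, 3)
    /// x IN (SELECT id FROM t)
    /// x IN UNNEST(array_col) -- BigQuery
    /// ```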
3585    pub fn parse_in(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
3586        // BigQuery allows `IN UNNEST(array_expression)`
3587        // https://cloud.google.com/bigquery/docs/reference/standard-sql/operators#in_operators
3588        if self.parse_keyword(Keyword::UNNEST) {
3589            self.expect_token(&Token::LParen)?;
3590            let array_expr = self.parse_expr()?;
3591            self.expect_token(&Token::RParen)?;
3592            return Ok(Expr::InUnnest {
3593                expr: Box::new(expr),
3594                array_expr: Box::new(array_expr),
3595                negated,
3596            });
3597        }
3598        self.expect_token(&Token::LParen)?;
3599        let in_op = if self.parse_keyword(Keyword::SELECT) || self.parse_keyword(Keyword::WITH) {
3600            self.prev_token();
3601            Expr::InSubquery {
3602                expr: Box::new(expr),
3603                subquery: self.parse_query()?,
3604                negated,
3605            }
3606        } else {
3607            Expr::InList {
3608                expr: Box::new(expr),
3609                list: if self.dialect.supports_in_empty_list() {
3610                    self.parse_comma_separated0(Parser::parse_expr, Token::RParen)?
3611                } else {
3612                    self.parse_comma_separated(Parser::parse_expr)?
3613                },
3614                negated,
3615            }
3616        };
3617        self.expect_token(&Token::RParen)?;
3618        Ok(in_op)
3619    }
3620
3621    /// Parses `BETWEEN <low> AND <high>`, assuming the `BETWEEN` keyword was already consumed.
3622    pub fn parse_between(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
3623        // Stop parsing subexpressions for <low> and <high> on tokens with
3624        // precedence lower than that of `BETWEEN`, such as `AND`, `IS`, etc.
3625        let low = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
3626        self.expect_keyword_is(Keyword::AND)?;
3627        let high = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
3628        Ok(Expr::Between {
3629            expr: Box::new(expr),
3630            negated,
3631            low: Box::new(low),
3632            high: Box::new(high),
3633        })
3634    }
3635
3636    /// Parse a PostgreSQL-style cast, which is of the form `expr::datatype`.
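    ///
    /// For example (illustrative):
    ///
    /// ```sql
    /// '2024-01-01'::DATE
    /// ```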
3637    pub fn parse_pg_cast(&mut self, expr: Expr) -> Result<Expr, ParserError> {
3638        Ok(Expr::Cast {
3639            kind: CastKind::DoubleColon,
3640            expr: Box::new(expr),
3641            data_type: self.parse_data_type()?,
3642            format: None,
3643        })
3644    }
3645
3646    /// Get the precedence of the next token
3647    pub fn get_next_precedence(&self) -> Result<u8, ParserError> {
3648        self.dialect.get_next_precedence_default(self)
3649    }
3650
3651    /// Return the token at the given index, or EOF if the index is beyond
3652    /// the length of the current set of tokens.
3653    pub fn token_at(&self, index: usize) -> &TokenWithSpan {
3654        self.tokens.get(index).unwrap_or(&EOF_TOKEN)
3655    }
3656
3657    /// Return the first non-whitespace token that has not yet been processed
3658    /// or Token::EOF
3659    ///
3660    /// See [`Self::peek_token_ref`] to avoid the copy.
3661    pub fn peek_token(&self) -> TokenWithSpan {
3662        self.peek_nth_token(0)
3663    }
3664
3665    /// Return a reference to the first non-whitespace token that has not yet
3666    /// been processed or Token::EOF
3667    pub fn peek_token_ref(&self) -> &TokenWithSpan {
3668        self.peek_nth_token_ref(0)
3669    }
3670
3671    /// Returns the `N` next non-whitespace tokens that have not yet been
3672    /// processed.
3673    ///
3674    /// Example:
3675    /// ```rust
3676    /// # use sqlparser::dialect::GenericDialect;
3677    /// # use sqlparser::parser::Parser;
3678    /// # use sqlparser::keywords::Keyword;
3679    /// # use sqlparser::tokenizer::{Token, Word};
3680    /// let dialect = GenericDialect {};
3681    /// let mut parser = Parser::new(&dialect).try_with_sql("ORDER BY foo, bar").unwrap();
3682    ///
3683    /// // Note that Rust infers the number of tokens to peek based on the
3684    /// // length of the slice pattern!
3685    /// assert!(matches!(
3686    ///     parser.peek_tokens(),
3687    ///     [
3688    ///         Token::Word(Word { keyword: Keyword::ORDER, .. }),
3689    ///         Token::Word(Word { keyword: Keyword::BY, .. }),
3690    ///     ]
3691    /// ));
3692    /// ```
3693    pub fn peek_tokens<const N: usize>(&self) -> [Token; N] {
3694        self.peek_tokens_with_location()
3695            .map(|with_loc| with_loc.token)
3696    }
3697
3698    /// Returns the `N` next non-whitespace tokens with locations that have not
3699    /// yet been processed.
3700    ///
3701    /// See [`Self::peek_tokens`] for an example.
3702    pub fn peek_tokens_with_location<const N: usize>(&self) -> [TokenWithSpan; N] {
3703        let mut index = self.index;
3704        core::array::from_fn(|_| loop {
3705            let token = self.tokens.get(index);
3706            index += 1;
3707            if let Some(TokenWithSpan {
3708                token: Token::Whitespace(_),
3709                span: _,
3710            }) = token
3711            {
3712                continue;
3713            }
3714            break token.cloned().unwrap_or(TokenWithSpan {
3715                token: Token::EOF,
3716                span: Span::empty(),
3717            });
3718        })
3719    }
3720
3721    /// Returns references to the `N` next non-whitespace tokens
3722    /// that have not yet been processed.
3723    ///
3724    /// See [`Self::peek_tokens`] for an example.
3725    pub fn peek_tokens_ref<const N: usize>(&self) -> [&TokenWithSpan; N] {
3726        let mut index = self.index;
3727        core::array::from_fn(|_| loop {
3728            let token = self.tokens.get(index);
3729            index += 1;
3730            if let Some(TokenWithSpan {
3731                token: Token::Whitespace(_),
3732                span: _,
3733            }) = token
3734            {
3735                continue;
3736            }
3737            break token.unwrap_or(&EOF_TOKEN);
3738        })
3739    }
3740
3741    /// Return nth non-whitespace token that has not yet been processed
3742    pub fn peek_nth_token(&self, n: usize) -> TokenWithSpan {
3743        self.peek_nth_token_ref(n).clone()
3744    }
3745
3746    /// Return nth non-whitespace token that has not yet been processed
3747    pub fn peek_nth_token_ref(&self, mut n: usize) -> &TokenWithSpan {
3748        let mut index = self.index;
3749        loop {
3750            index += 1;
3751            match self.tokens.get(index - 1) {
3752                Some(TokenWithSpan {
3753                    token: Token::Whitespace(_),
3754                    span: _,
3755                }) => continue,
3756                non_whitespace => {
3757                    if n == 0 {
3758                        return non_whitespace.unwrap_or(&EOF_TOKEN);
3759                    }
3760                    n -= 1;
3761                }
3762            }
3763        }
3764    }
3765
3766    /// Return the first token, possibly whitespace, that has not yet been processed
3767    /// (or an EOF token if end-of-file has been reached).
3768    pub fn peek_token_no_skip(&self) -> TokenWithSpan {
3769        self.peek_nth_token_no_skip(0)
3770    }
3771
3772    /// Return nth token, possibly whitespace, that has not yet been processed.
3773    pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithSpan {
3774        self.tokens
3775            .get(self.index + n)
3776            .cloned()
3777            .unwrap_or(TokenWithSpan {
3778                token: Token::EOF,
3779                span: Span::empty(),
3780            })
3781    }
3782
3783    /// Return true if the next tokens exactly match `expected`
3784    ///
3785    /// Does not advance the current token.
3786    fn peek_keywords(&mut self, expected: &[Keyword]) -> bool {
3787        let index = self.index;
3788        let matched = self.parse_keywords(expected);
3789        self.index = index;
3790        matched
3791    }
3792
3793    /// Advances to the next non-whitespace token and returns a copy.
3794    ///
3795    /// Please use [`Self::advance_token`] and [`Self::get_current_token`] to
3796    /// avoid the copy.
3797    pub fn next_token(&mut self) -> TokenWithSpan {
3798        self.advance_token();
3799        self.get_current_token().clone()
3800    }
3801
3802    /// Returns the index of the current token
3803    ///
3804    /// This can be used with APIs that expect an index, such as
3805    /// [`Self::token_at`]
3806    pub fn get_current_index(&self) -> usize {
3807        self.index.saturating_sub(1)
3808    }
3809
3810    /// Return the next unprocessed token, possibly whitespace.
3811    pub fn next_token_no_skip(&mut self) -> Option<&TokenWithSpan> {
3812        self.index += 1;
3813        self.tokens.get(self.index - 1)
3814    }
3815
3816    /// Advances the current token to the next non-whitespace token
3817    ///
3818    /// See [`Self::get_current_token`] to get the current token after advancing
3819    pub fn advance_token(&mut self) {
3820        loop {
3821            self.index += 1;
3822            match self.tokens.get(self.index - 1) {
3823                Some(TokenWithSpan {
3824                    token: Token::Whitespace(_),
3825                    span: _,
3826                }) => continue,
3827                _ => break,
3828            }
3829        }
3830    }
3831
3832    /// Returns a reference to the current token
3833    ///
3834    /// Does not advance the current token.
3835    pub fn get_current_token(&self) -> &TokenWithSpan {
3836        self.token_at(self.index.saturating_sub(1))
3837    }
3838
3839    /// Returns a reference to the previous token
3840    ///
3841    /// Does not advance the current token.
3842    pub fn get_previous_token(&self) -> &TokenWithSpan {
3843        self.token_at(self.index.saturating_sub(2))
3844    }
3845
3846    /// Returns a reference to the next token
3847    ///
3848    /// Does not advance the current token.
3849    pub fn get_next_token(&self) -> &TokenWithSpan {
3850        self.token_at(self.index)
3851    }
3852
3853    /// Seek back to the last non-whitespace token.
3854    ///
3855    /// Must be called after `next_token()`, otherwise it might panic. It is
3856    /// OK to call it after `next_token()` indicates an EOF.
3857    ///
3858    // TODO rename to backup_token and deprecate prev_token?
3859    pub fn prev_token(&mut self) {
3860        loop {
3861            assert!(self.index > 0);
3862            self.index -= 1;
3863            if let Some(TokenWithSpan {
3864                token: Token::Whitespace(_),
3865                span: _,
3866            }) = self.tokens.get(self.index)
3867            {
3868                continue;
3869            }
3870            return;
3871        }
3872    }
3873
3874    /// Report `found` was encountered instead of `expected`
3875    pub fn expected<T>(&self, expected: &str, found: TokenWithSpan) -> Result<T, ParserError> {
3876        parser_err!(
3877            format!("Expected: {expected}, found: {found}"),
3878            found.span.start
3879        )
3880    }
3881
3882    /// Report `found` was encountered instead of `expected`
3883    pub fn expected_ref<T>(&self, expected: &str, found: &TokenWithSpan) -> Result<T, ParserError> {
3884        parser_err!(
3885            format!("Expected: {expected}, found: {found}"),
3886            found.span.start
3887        )
3888    }
3889
3890    /// Report that the token at `index` was found instead of `expected`.
3891    pub fn expected_at<T>(&self, expected: &str, index: usize) -> Result<T, ParserError> {
3892        let found = self.tokens.get(index).unwrap_or(&EOF_TOKEN);
3893        parser_err!(
3894            format!("Expected: {expected}, found: {found}"),
3895            found.span.start
3896        )
3897    }
3898
3899    /// If the current token is the `expected` keyword, consume it and return
3900    /// true. Otherwise, no tokens are consumed and false is returned.
3901    #[must_use]
3902    pub fn parse_keyword(&mut self, expected: Keyword) -> bool {
3903        if self.peek_keyword(expected) {
3904            self.advance_token();
3905            true
3906        } else {
3907            false
3908        }
3909    }
3910
3911    #[must_use]
3912    pub fn peek_keyword(&self, expected: Keyword) -> bool {
3913        matches!(&self.peek_token_ref().token, Token::Word(w) if expected == w.keyword)
3914    }
3915
3916    /// If the current token is the `expected` keyword followed by the
3917    /// specified tokens, consume them and return true.
3918    /// Otherwise, no tokens are consumed and false is returned.
3919    ///
3920    /// Note that if `tokens` is long, this function will not be
3921    /// efficient, as it loops over the tokens with `peek_nth_token`
3922    /// each time.
3923    pub fn parse_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
3924        match &self.peek_token_ref().token {
3925            Token::Word(w) if expected == w.keyword => {
3926                for (idx, token) in tokens.iter().enumerate() {
3927                    if self.peek_nth_token_ref(idx + 1).token != *token {
3928                        return false;
3929                    }
3930                }
3931                // consume all tokens
3932                for _ in 0..(tokens.len() + 1) {
3933                    self.advance_token();
3934                }
3935                true
3936            }
3937            _ => false,
3938        }
3939    }
3940
3941    /// If the current and subsequent tokens exactly match the `keywords`
3942    /// sequence, consume them and return true. Otherwise, no tokens are
3943    /// consumed and false is returned.
3944    #[must_use]
3945    pub fn parse_keywords(&mut self, keywords: &[Keyword]) -> bool {
3946        let index = self.index;
3947        for &keyword in keywords {
3948            if !self.parse_keyword(keyword) {
3949                // println!("parse_keywords aborting .. did not find {:?}", keyword);
3950                // reset index and return immediately
3951                self.index = index;
3952                return false;
3953            }
3954        }
3955        true
3956    }
3957
3958    /// If the current token is one of the given `keywords`, consume the token
3959    /// and return the keyword that matches. Otherwise, no tokens are consumed
3960    /// and [`None`] is returned.
3961    #[must_use]
3962    pub fn parse_one_of_keywords(&mut self, keywords: &[Keyword]) -> Option<Keyword> {
3963        match &self.peek_token_ref().token {
3964            Token::Word(w) => {
3965                keywords
3966                    .iter()
3967                    .find(|keyword| **keyword == w.keyword)
3968                    .map(|keyword| {
3969                        self.advance_token();
3970                        *keyword
3971                    })
3972            }
3973            _ => None,
3974        }
3975    }
3976
3977    /// If the current token is one of the expected keywords, consume the token
3978    /// and return the keyword that matches. Otherwise, return an error.
3979    pub fn expect_one_of_keywords(&mut self, keywords: &[Keyword]) -> Result<Keyword, ParserError> {
3980        if let Some(keyword) = self.parse_one_of_keywords(keywords) {
3981            Ok(keyword)
3982        } else {
3983            let keywords: Vec<String> = keywords.iter().map(|x| format!("{x:?}")).collect();
3984            self.expected_ref(
3985                &format!("one of {}", keywords.join(" or ")),
3986                self.peek_token_ref(),
3987            )
3988        }
3989    }
3990
3991    /// If the current token is the `expected` keyword, consume the token.
3992    /// Otherwise, return an error.
3993    ///
3994    // TODO: deprecate in favor of `expect_keyword_is`
3995    pub fn expect_keyword(&mut self, expected: Keyword) -> Result<TokenWithSpan, ParserError> {
3996        if self.parse_keyword(expected) {
3997            Ok(self.get_current_token().clone())
3998        } else {
3999            self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4000        }
4001    }
4002
4003    /// If the current token is the `expected` keyword, consume the token.
4004    /// Otherwise, return an error.
4005    ///
4006    /// This differs from [`Self::expect_keyword`] only in that the matched
4007    /// keyword token is not returned.
4008    pub fn expect_keyword_is(&mut self, expected: Keyword) -> Result<(), ParserError> {
4009        if self.parse_keyword(expected) {
4010            Ok(())
4011        } else {
4012            self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4013        }
4014    }
4015
4016    /// If the current and subsequent tokens exactly match the `keywords`
4017    /// sequence, consume them and return `Ok(())`. Otherwise, return an error.
4018    pub fn expect_keywords(&mut self, expected: &[Keyword]) -> Result<(), ParserError> {
4019        for &kw in expected {
4020            self.expect_keyword_is(kw)?;
4021        }
4022        Ok(())
4023    }
4024
4025    /// Consume the next token if it matches the expected token, otherwise return false
4026    ///
4027    /// See [Self::advance_token] to consume the token unconditionally
4028    #[must_use]
4029    pub fn consume_token(&mut self, expected: &Token) -> bool {
4030        if self.peek_token_ref() == expected {
4031            self.advance_token();
4032            true
4033        } else {
4034            false
4035        }
4036    }
4037
4038    /// If the current and subsequent tokens exactly match the `tokens`
4039    /// sequence, consume them and return true. Otherwise, no tokens are
4040    /// consumed and false is returned.
4041    #[must_use]
4042    pub fn consume_tokens(&mut self, tokens: &[Token]) -> bool {
4043        let index = self.index;
4044        for token in tokens {
4045            if !self.consume_token(token) {
4046                self.index = index;
4047                return false;
4048            }
4049        }
4050        true
4051    }
4052
4053    /// Bail out if the current token is not the expected token, or consume it if it is
4054    pub fn expect_token(&mut self, expected: &Token) -> Result<TokenWithSpan, ParserError> {
4055        if self.peek_token_ref() == expected {
4056            Ok(self.next_token())
4057        } else {
4058            self.expected_ref(&expected.to_string(), self.peek_token_ref())
4059        }
4060    }
4061
4062    fn parse<T: FromStr>(s: String, loc: Location) -> Result<T, ParserError>
4063    where
4064        <T as FromStr>::Err: Display,
4065    {
4066        s.parse::<T>().map_err(|e| {
4067            ParserError::ParserError(format!(
4068                "Could not parse '{s}' as {}: {e}{loc}",
4069                core::any::type_name::<T>()
4070            ))
4071        })
4072    }
4073
4074    /// Parse a comma-separated list of 1+ SelectItem
4075    pub fn parse_projection(&mut self) -> Result<Vec<SelectItem>, ParserError> {
4076        // BigQuery and Snowflake allow trailing commas, but only in projection lists
4077        // e.g. `SELECT 1, 2, FROM t`
4078        // https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#trailing_commas
4079        // https://docs.snowflake.com/en/release-notes/2024/8_11#select-supports-trailing-commas
4080
4081        let trailing_commas =
4082            self.options.trailing_commas | self.dialect.supports_projection_trailing_commas();
4083
4084        self.parse_comma_separated_with_trailing_commas(
4085            |p| p.parse_select_item(),
4086            trailing_commas,
4087            Self::is_reserved_for_column_alias,
4088        )
4089    }
4090
4091    pub fn parse_actions_list(&mut self) -> Result<Vec<Action>, ParserError> {
4092        let mut values = vec![];
4093        loop {
4094            values.push(self.parse_grant_permission()?);
4095            if !self.consume_token(&Token::Comma) {
4096                break;
4097            } else if self.options.trailing_commas {
4098                match self.peek_token().token {
4099                    Token::Word(kw) if kw.keyword == Keyword::ON => {
4100                        break;
4101                    }
4102                    Token::RParen
4103                    | Token::SemiColon
4104                    | Token::EOF
4105                    | Token::RBracket
4106                    | Token::RBrace => break,
4107                    _ => continue,
4108                }
4109            }
4110        }
4111        Ok(values)
4112    }
4113
4114    /// Parse a list of [TableWithJoins]
4115    fn parse_table_with_joins(&mut self) -> Result<Vec<TableWithJoins>, ParserError> {
4116        let trailing_commas = self.dialect.supports_from_trailing_commas();
4117
4118        self.parse_comma_separated_with_trailing_commas(
4119            Parser::parse_table_and_joins,
4120            trailing_commas,
4121            |kw, parser| {
4122                parser.dialect
4123                    .get_reserved_keywords_for_table_factor()
4124                    .contains(kw)
4125            },
4126        )
4127    }
4128
4129    /// Parse the comma of a comma-separated syntax element.
4130    /// `R` is a predicate that should return true if the next
4131    /// keyword is a reserved keyword.
4132    /// Allows for control over trailing commas.
4133    ///
4134    /// Returns true if there are no more elements (the end of the list).
4135    fn is_parse_comma_separated_end_with_trailing_commas<R>(
4136        &mut self,
4137        trailing_commas: bool,
4138        is_reserved_keyword: &R,
4139    ) -> bool
4140    where
4141        R: Fn(&Keyword, &mut Parser) -> bool,
4142    {
4143        if !self.consume_token(&Token::Comma) {
4144            true
4145        } else if trailing_commas {
4146            let token = self.next_token().token;
4147            let is_end = match token {
4148                Token::Word(ref kw) if is_reserved_keyword(&kw.keyword, self) => true,
4149                Token::RParen | Token::SemiColon | Token::EOF | Token::RBracket | Token::RBrace => {
4150                    true
4151                }
4152                _ => false,
4153            };
4154            self.prev_token();
4155
4156            is_end
4157        } else {
4158            false
4159        }
4160    }
4161
4162    /// Parse the comma of a comma-separated syntax element.
4163    /// Returns true if there are no more elements (the end of the list).
4164    fn is_parse_comma_separated_end(&mut self) -> bool {
4165        self.is_parse_comma_separated_end_with_trailing_commas(
4166            self.options.trailing_commas,
4167            &Self::is_reserved_for_column_alias,
4168        )
4169    }
4170
4171    /// Parse a comma-separated list of 1+ items accepted by `F`
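    ///
    /// Illustrative sketch (not a doctest):
    ///
    /// ```ignore
    /// // Parses `a, b, c` into a Vec of identifiers; trailing-comma handling
    /// // follows `self.options.trailing_commas`.
    /// let idents = parser.parse_comma_separated(|p| p.parse_identifier())?;
    /// ```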
4172    pub fn parse_comma_separated<T, F>(&mut self, f: F) -> Result<Vec<T>, ParserError>
4173    where
4174        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4175    {
4176        self.parse_comma_separated_with_trailing_commas(
4177            f,
4178            self.options.trailing_commas,
4179            Self::is_reserved_for_column_alias,
4180        )
4181    }
4182
4183    /// Parse a comma-separated list of 1+ items accepted by `F`.
4184    /// `R` is a predicate that should return true if the next
4185    /// keyword is a reserved keyword.
4186    /// Allows for control over trailing commas.
4187    fn parse_comma_separated_with_trailing_commas<T, F, R>(
4188        &mut self,
4189        mut f: F,
4190        trailing_commas: bool,
4191        is_reserved_keyword: R,
4192    ) -> Result<Vec<T>, ParserError>
4193    where
4194        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4195        R: Fn(&Keyword, &mut Parser) -> bool,
4196    {
4197        let mut values = vec![];
4198        loop {
4199            values.push(f(self)?);
4200            if self.is_parse_comma_separated_end_with_trailing_commas(
4201                trailing_commas,
4202                &is_reserved_keyword,
4203            ) {
4204                break;
4205            }
4206        }
4207        Ok(values)
4208    }
4209
4210    /// Parse a period-separated list of 1+ items accepted by `F`
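    ///
    /// Illustrative sketch (not a doctest):
    ///
    /// ```ignore
    /// // Parses a dotted name such as `db.schema.table` into its parts.
    /// let parts = parser.parse_period_separated(|p| p.parse_identifier())?;
    /// ```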
4211    fn parse_period_separated<T, F>(&mut self, mut f: F) -> Result<Vec<T>, ParserError>
4212    where
4213        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4214    {
4215        let mut values = vec![];
4216        loop {
4217            values.push(f(self)?);
4218            if !self.consume_token(&Token::Period) {
4219                break;
4220            }
4221        }
4222        Ok(values)
4223    }
4224
4225    /// Parse a keyword-separated list of 1+ items accepted by `F`
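    ///
    /// Illustrative sketch (not a doctest), mirroring how trigger events are
    /// parsed further below:
    ///
    /// ```ignore
    /// // Parses `INSERT OR UPDATE OR DELETE` as a list of trigger events.
    /// let events = parser.parse_keyword_separated(Keyword::OR, Parser::parse_trigger_event)?;
    /// ```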
4226    pub fn parse_keyword_separated<T, F>(
4227        &mut self,
4228        keyword: Keyword,
4229        mut f: F,
4230    ) -> Result<Vec<T>, ParserError>
4231    where
4232        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4233    {
4234        let mut values = vec![];
4235        loop {
4236            values.push(f(self)?);
4237            if !self.parse_keyword(keyword) {
4238                break;
4239            }
4240        }
4241        Ok(values)
4242    }
4243
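    /// Parse a single item accepted by `F` wrapped in parentheses, i.e.
    /// `( <f> )`, consuming both delimiters.
    ///
    /// Illustrative sketch (not a doctest):
    ///
    /// ```ignore
    /// // Parses `(1 + 2)` and returns the inner expression.
    /// let expr = parser.parse_parenthesized(|p| p.parse_expr())?;
    /// ```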
4244    pub fn parse_parenthesized<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
4245    where
4246        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4247    {
4248        self.expect_token(&Token::LParen)?;
4249        let res = f(self)?;
4250        self.expect_token(&Token::RParen)?;
4251        Ok(res)
4252    }
4253
4254    /// Parse a comma-separated list of 0+ items accepted by `F`
4255    /// * `end_token` - expected end token for the closure (e.g. [Token::RParen], [Token::RBrace] ...)
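    ///
    /// Illustrative sketch (not a doctest); note that `end_token` itself is
    /// not consumed:
    ///
    /// ```ignore
    /// // Parses `a, b, c` or an empty list before a closing `)`.
    /// parser.expect_token(&Token::LParen)?;
    /// let cols = parser.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
    /// parser.expect_token(&Token::RParen)?;
    /// ```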
4256    pub fn parse_comma_separated0<T, F>(
4257        &mut self,
4258        f: F,
4259        end_token: Token,
4260    ) -> Result<Vec<T>, ParserError>
4261    where
4262        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4263    {
4264        if self.peek_token().token == end_token {
4265            return Ok(vec![]);
4266        }
4267
4268        if self.options.trailing_commas && self.peek_tokens() == [Token::Comma, end_token] {
4269            let _ = self.consume_token(&Token::Comma);
4270            return Ok(vec![]);
4271        }
4272
4273        self.parse_comma_separated(f)
4274    }
4275
4276    /// Parses 0 or more statements, each followed by a semicolon.
4277    /// If the next token is any of `terminal_keywords` then no more
4278    /// statements will be parsed.
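    ///
    /// Illustrative sketch (not a doctest):
    ///
    /// ```ignore
    /// // Parses `stmt1; stmt2; ...` until an unquoted `END` or `ELSE` is
    /// // seen; the terminal keyword itself is not consumed.
    /// let body = parser.parse_statement_list(&[Keyword::END, Keyword::ELSE])?;
    /// ```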
4279    pub(crate) fn parse_statement_list(
4280        &mut self,
4281        terminal_keywords: &[Keyword],
4282    ) -> Result<Vec<Statement>, ParserError> {
4283        let mut values = vec![];
4284        loop {
4285            if let Token::Word(w) = &self.peek_nth_token_ref(0).token {
4286                if w.quote_style.is_none() && terminal_keywords.contains(&w.keyword) {
4287                    break;
4288                }
4289            }
4290
4291            values.push(self.parse_statement()?);
4292            self.expect_token(&Token::SemiColon)?;
4293        }
4294        Ok(values)
4295    }
4296
4297    /// Default implementation of a predicate that returns true if
4298    /// the specified keyword cannot be used as a column alias.
4299    /// See [Dialect::is_column_alias]
4300    fn is_reserved_for_column_alias(kw: &Keyword, parser: &mut Parser) -> bool {
4301        !parser.dialect.is_column_alias(kw, parser)
4302    }
4303
4304    /// Run a parser method `f`, reverting to the original position if unsuccessful.
4305    /// Returns `Ok(None)` if `f` fails; recursion-limit errors are propagated.
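    ///
    /// Illustrative sketch (not a doctest):
    ///
    /// ```ignore
    /// // Tries to parse a data type; on failure the parser is rewound and
    /// // `Ok(None)` is returned instead of an error.
    /// let maybe_type = parser.maybe_parse(|p| p.parse_data_type())?;
    /// ```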
4306    pub fn maybe_parse<T, F>(&mut self, f: F) -> Result<Option<T>, ParserError>
4307    where
4308        F: FnMut(&mut Parser) -> Result<T, ParserError>,
4309    {
4310        match self.try_parse(f) {
4311            Ok(t) => Ok(Some(t)),
4312            Err(ParserError::RecursionLimitExceeded) => Err(ParserError::RecursionLimitExceeded),
4313            _ => Ok(None),
4314        }
4315    }
4316
4317    /// Run a parser method `f`, reverting to the original position if unsuccessful.
4318    pub fn try_parse<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
4319    where
4320        F: FnMut(&mut Parser) -> Result<T, ParserError>,
4321    {
4322        let index = self.index;
4323        match f(self) {
4324            Ok(t) => Ok(t),
4325            Err(e) => {
4326                // Reset the parser to its original position so no tokens are consumed on error
4327                self.index = index;
4328                Err(e)
4329            }
4330        }
4331    }
4332
4333    /// Parse either `ALL`, `DISTINCT` or `DISTINCT ON (...)`. Returns [`None`] if `ALL` is parsed
4334    /// and results in a [`ParserError`] if both `ALL` and `DISTINCT` are found.
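    ///
    /// For example, `SELECT ALL cols`, `SELECT DISTINCT cols` and
    /// `SELECT DISTINCT ON (a, b) cols` are the accepted forms, while
    /// `SELECT ALL DISTINCT cols` is rejected.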
4335    pub fn parse_all_or_distinct(&mut self) -> Result<Option<Distinct>, ParserError> {
4336        let loc = self.peek_token().span.start;
4337        let all = self.parse_keyword(Keyword::ALL);
4338        let distinct = self.parse_keyword(Keyword::DISTINCT);
4339        if !distinct {
4340            return Ok(None);
4341        }
4342        if all {
4343            return parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc);
4344        }
4345        let on = self.parse_keyword(Keyword::ON);
4346        if !on {
4347            return Ok(Some(Distinct::Distinct));
4348        }
4349
4350        self.expect_token(&Token::LParen)?;
4351        let col_names = if self.consume_token(&Token::RParen) {
4352            self.prev_token();
4353            Vec::new()
4354        } else {
4355            self.parse_comma_separated(Parser::parse_expr)?
4356        };
4357        self.expect_token(&Token::RParen)?;
4358        Ok(Some(Distinct::On(col_names)))
4359    }
4360
4361    /// Parse a SQL CREATE statement
4362    pub fn parse_create(&mut self) -> Result<Statement, ParserError> {
4363        let or_replace = self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]);
4364        let or_alter = self.parse_keywords(&[Keyword::OR, Keyword::ALTER]);
4365        let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
4366        let global = self.parse_one_of_keywords(&[Keyword::GLOBAL]).is_some();
4367        let transient = self.parse_one_of_keywords(&[Keyword::TRANSIENT]).is_some();
4368        let global: Option<bool> = if global {
4369            Some(true)
4370        } else if local {
4371            Some(false)
4372        } else {
4373            None
4374        };
4375        let temporary = self
4376            .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
4377            .is_some();
4378        let persistent = dialect_of!(self is DuckDbDialect)
4379            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
4380        let create_view_params = self.parse_create_view_params()?;
4381        if self.parse_keyword(Keyword::TABLE) {
4382            self.parse_create_table(or_replace, temporary, global, transient)
4383        } else if self.parse_keyword(Keyword::MATERIALIZED) || self.parse_keyword(Keyword::VIEW) {
4384            self.prev_token();
4385            self.parse_create_view(or_replace, temporary, create_view_params)
4386        } else if self.parse_keyword(Keyword::POLICY) {
4387            self.parse_create_policy()
4388        } else if self.parse_keyword(Keyword::EXTERNAL) {
4389            self.parse_create_external_table(or_replace)
4390        } else if self.parse_keyword(Keyword::FUNCTION) {
4391            self.parse_create_function(or_replace, temporary)
4392        } else if self.parse_keyword(Keyword::TRIGGER) {
4393            self.parse_create_trigger(or_replace, false)
4394        } else if self.parse_keywords(&[Keyword::CONSTRAINT, Keyword::TRIGGER]) {
4395            self.parse_create_trigger(or_replace, true)
4396        } else if self.parse_keyword(Keyword::MACRO) {
4397            self.parse_create_macro(or_replace, temporary)
4398        } else if self.parse_keyword(Keyword::SECRET) {
4399            self.parse_create_secret(or_replace, temporary, persistent)
4400        } else if or_replace {
4401            self.expected(
4402                "[EXTERNAL] TABLE or [MATERIALIZED] VIEW or FUNCTION after CREATE OR REPLACE",
4403                self.peek_token(),
4404            )
4405        } else if self.parse_keyword(Keyword::EXTENSION) {
4406            self.parse_create_extension()
4407        } else if self.parse_keyword(Keyword::INDEX) {
4408            self.parse_create_index(false)
4409        } else if self.parse_keywords(&[Keyword::UNIQUE, Keyword::INDEX]) {
4410            self.parse_create_index(true)
4411        } else if self.parse_keyword(Keyword::VIRTUAL) {
4412            self.parse_create_virtual_table()
4413        } else if self.parse_keyword(Keyword::SCHEMA) {
4414            self.parse_create_schema()
4415        } else if self.parse_keyword(Keyword::DATABASE) {
4416            self.parse_create_database()
4417        } else if self.parse_keyword(Keyword::ROLE) {
4418            self.parse_create_role()
4419        } else if self.parse_keyword(Keyword::SEQUENCE) {
4420            self.parse_create_sequence(temporary)
4421        } else if self.parse_keyword(Keyword::TYPE) {
4422            self.parse_create_type()
4423        } else if self.parse_keyword(Keyword::PROCEDURE) {
4424            self.parse_create_procedure(or_alter)
4425        } else if self.parse_keyword(Keyword::CONNECTOR) {
4426            self.parse_create_connector()
4427        } else {
4428            self.expected("an object type after CREATE", self.peek_token())
4429        }
4430    }
4431
4432    /// See [DuckDB Docs](https://duckdb.org/docs/sql/statements/create_secret.html) for more details.
4433    pub fn parse_create_secret(
4434        &mut self,
4435        or_replace: bool,
4436        temporary: bool,
4437        persistent: bool,
4438    ) -> Result<Statement, ParserError> {
4439        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
4440
4441        let mut storage_specifier = None;
4442        let mut name = None;
4443        if self.peek_token() != Token::LParen {
4444            if self.parse_keyword(Keyword::IN) {
4445                storage_specifier = self.parse_identifier().ok()
4446            } else {
4447                name = self.parse_identifier().ok();
4448            }
4449
4450            // Storage specifier may follow the name
4451            if storage_specifier.is_none()
4452                && self.peek_token() != Token::LParen
4453                && self.parse_keyword(Keyword::IN)
4454            {
4455                storage_specifier = self.parse_identifier().ok();
4456            }
4457        }
4458
4459        self.expect_token(&Token::LParen)?;
4460        self.expect_keyword_is(Keyword::TYPE)?;
4461        let secret_type = self.parse_identifier()?;
4462
4463        let mut options = Vec::new();
4464        if self.consume_token(&Token::Comma) {
4465            options.append(&mut self.parse_comma_separated(|p| {
4466                let key = p.parse_identifier()?;
4467                let value = p.parse_identifier()?;
4468                Ok(SecretOption { key, value })
4469            })?);
4470        }
4471        self.expect_token(&Token::RParen)?;
4472
4473        let temp = match (temporary, persistent) {
4474            (true, false) => Some(true),
4475            (false, true) => Some(false),
4476            (false, false) => None,
4477            _ => self.expected("TEMPORARY or PERSISTENT", self.peek_token())?,
4478        };
4479
4480        Ok(Statement::CreateSecret {
4481            or_replace,
4482            temporary: temp,
4483            if_not_exists,
4484            name,
4485            storage_specifier,
4486            secret_type,
4487            options,
4488        })
4489    }
4490
4491    /// Parse a CACHE TABLE statement
4492    pub fn parse_cache_table(&mut self) -> Result<Statement, ParserError> {
4493        let (mut table_flag, mut options, mut has_as, mut query) = (None, vec![], false, None);
4494        if self.parse_keyword(Keyword::TABLE) {
4495            let table_name = self.parse_object_name(false)?;
4496            if self.peek_token().token != Token::EOF {
4497                if let Token::Word(word) = self.peek_token().token {
4498                    if word.keyword == Keyword::OPTIONS {
4499                        options = self.parse_options(Keyword::OPTIONS)?
4500                    }
4501                };
4502
4503                if self.peek_token().token != Token::EOF {
4504                    let (a, q) = self.parse_as_query()?;
4505                    has_as = a;
4506                    query = Some(q);
4507                }
4508
4509                Ok(Statement::Cache {
4510                    table_flag,
4511                    table_name,
4512                    has_as,
4513                    options,
4514                    query,
4515                })
4516            } else {
4517                Ok(Statement::Cache {
4518                    table_flag,
4519                    table_name,
4520                    has_as,
4521                    options,
4522                    query,
4523                })
4524            }
4525        } else {
4526            table_flag = Some(self.parse_object_name(false)?);
4527            if self.parse_keyword(Keyword::TABLE) {
4528                let table_name = self.parse_object_name(false)?;
4529                if self.peek_token() != Token::EOF {
4530                    if let Token::Word(word) = self.peek_token().token {
4531                        if word.keyword == Keyword::OPTIONS {
4532                            options = self.parse_options(Keyword::OPTIONS)?
4533                        }
4534                    };
4535
4536                    if self.peek_token() != Token::EOF {
4537                        let (a, q) = self.parse_as_query()?;
4538                        has_as = a;
4539                        query = Some(q);
4540                    }
4541
4542                    Ok(Statement::Cache {
4543                        table_flag,
4544                        table_name,
4545                        has_as,
4546                        options,
4547                        query,
4548                    })
4549                } else {
4550                    Ok(Statement::Cache {
4551                        table_flag,
4552                        table_name,
4553                        has_as,
4554                        options,
4555                        query,
4556                    })
4557                }
4558            } else {
4559                if self.peek_token() == Token::EOF {
4560                    self.prev_token();
4561                }
4562                self.expected("a `TABLE` keyword", self.peek_token())
4563            }
4564        }
4565    }
4566
4567    /// Parse an optional `AS` before a query, such as `WITH XXX AS SELECT XXX` or `CACHE TABLE AS SELECT XXX`
4568    pub fn parse_as_query(&mut self) -> Result<(bool, Box<Query>), ParserError> {
4569        match self.peek_token().token {
4570            Token::Word(word) => match word.keyword {
4571                Keyword::AS => {
4572                    self.next_token();
4573                    Ok((true, self.parse_query()?))
4574                }
4575                _ => Ok((false, self.parse_query()?)),
4576            },
4577            _ => self.expected("a QUERY statement", self.peek_token()),
4578        }
4579    }
4580
4581    /// Parse an UNCACHE TABLE statement
4582    pub fn parse_uncache_table(&mut self) -> Result<Statement, ParserError> {
4583        self.expect_keyword_is(Keyword::TABLE)?;
4584        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
4585        let table_name = self.parse_object_name(false)?;
4586        Ok(Statement::UNCache {
4587            table_name,
4588            if_exists,
4589        })
4590    }
4591
4592    /// SQLite-specific `CREATE VIRTUAL TABLE`
4593    pub fn parse_create_virtual_table(&mut self) -> Result<Statement, ParserError> {
4594        self.expect_keyword_is(Keyword::TABLE)?;
4595        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
4596        let table_name = self.parse_object_name(false)?;
4597        self.expect_keyword_is(Keyword::USING)?;
4598        let module_name = self.parse_identifier()?;
4599        // SQLite docs note that module "arguments syntax is sufficiently
4600        // general that the arguments can be made to appear as column
4601        // definitions in a traditional CREATE TABLE statement", but
4602        // we don't implement that.
4603        let module_args = self.parse_parenthesized_column_list(Optional, false)?;
4604        Ok(Statement::CreateVirtualTable {
4605            name: table_name,
4606            if_not_exists,
4607            module_name,
4608            module_args,
4609        })
4610    }
4611
4612    pub fn parse_create_schema(&mut self) -> Result<Statement, ParserError> {
4613        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
4614
4615        let schema_name = self.parse_schema_name()?;
4616
4617        Ok(Statement::CreateSchema {
4618            schema_name,
4619            if_not_exists,
4620        })
4621    }
4622
4623    fn parse_schema_name(&mut self) -> Result<SchemaName, ParserError> {
4624        if self.parse_keyword(Keyword::AUTHORIZATION) {
4625            Ok(SchemaName::UnnamedAuthorization(self.parse_identifier()?))
4626        } else {
4627            let name = self.parse_object_name(false)?;
4628
4629            if self.parse_keyword(Keyword::AUTHORIZATION) {
4630                Ok(SchemaName::NamedAuthorization(
4631                    name,
4632                    self.parse_identifier()?,
4633                ))
4634            } else {
4635                Ok(SchemaName::Simple(name))
4636            }
4637        }
4638    }
4639
4640    pub fn parse_create_database(&mut self) -> Result<Statement, ParserError> {
4641        let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
4642        let db_name = self.parse_object_name(false)?;
4643        let mut location = None;
4644        let mut managed_location = None;
4645        loop {
4646            match self.parse_one_of_keywords(&[Keyword::LOCATION, Keyword::MANAGEDLOCATION]) {
4647                Some(Keyword::LOCATION) => location = Some(self.parse_literal_string()?),
4648                Some(Keyword::MANAGEDLOCATION) => {
4649                    managed_location = Some(self.parse_literal_string()?)
4650                }
4651                _ => break,
4652            }
4653        }
4654        Ok(Statement::CreateDatabase {
4655            db_name,
4656            if_not_exists: ine,
4657            location,
4658            managed_location,
4659        })
4660    }
4661
4662    pub fn parse_optional_create_function_using(
4663        &mut self,
4664    ) -> Result<Option<CreateFunctionUsing>, ParserError> {
4665        if !self.parse_keyword(Keyword::USING) {
4666            return Ok(None);
4667        };
4668        let keyword =
4669            self.expect_one_of_keywords(&[Keyword::JAR, Keyword::FILE, Keyword::ARCHIVE])?;
4670
4671        let uri = self.parse_literal_string()?;
4672
4673        match keyword {
4674            Keyword::JAR => Ok(Some(CreateFunctionUsing::Jar(uri))),
4675            Keyword::FILE => Ok(Some(CreateFunctionUsing::File(uri))),
4676            Keyword::ARCHIVE => Ok(Some(CreateFunctionUsing::Archive(uri))),
4677            _ => self.expected(
4678                "JAR, FILE, or ARCHIVE",
4679                TokenWithSpan::wrap(Token::make_keyword(format!("{keyword:?}").as_str())),
4680            ),
4681        }
4682    }
4683
4684    pub fn parse_create_function(
4685        &mut self,
4686        or_replace: bool,
4687        temporary: bool,
4688    ) -> Result<Statement, ParserError> {
4689        if dialect_of!(self is HiveDialect) {
4690            self.parse_hive_create_function(or_replace, temporary)
4691        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
4692            self.parse_postgres_create_function(or_replace, temporary)
4693        } else if dialect_of!(self is DuckDbDialect) {
4694            self.parse_create_macro(or_replace, temporary)
4695        } else if dialect_of!(self is BigQueryDialect) {
4696            self.parse_bigquery_create_function(or_replace, temporary)
4697        } else {
4698            self.prev_token();
4699            self.expected("an object type after CREATE", self.peek_token())
4700        }
4701    }
4702
4703    /// Parse `CREATE FUNCTION` for [Postgres]
4704    ///
4705    /// [Postgres]: https://www.postgresql.org/docs/15/sql-createfunction.html
4706    fn parse_postgres_create_function(
4707        &mut self,
4708        or_replace: bool,
4709        temporary: bool,
4710    ) -> Result<Statement, ParserError> {
4711        let name = self.parse_object_name(false)?;
4712
4713        self.expect_token(&Token::LParen)?;
4714        let args = if Token::RParen != self.peek_token_ref().token {
4715            self.parse_comma_separated(Parser::parse_function_arg)?
4716        } else {
4717            vec![]
4718        };
4719        self.expect_token(&Token::RParen)?;
4720
4721        let return_type = if self.parse_keyword(Keyword::RETURNS) {
4722            Some(self.parse_data_type()?)
4723        } else {
4724            None
4725        };
4726
4727        #[derive(Default)]
4728        struct Body {
4729            language: Option<Ident>,
4730            behavior: Option<FunctionBehavior>,
4731            function_body: Option<CreateFunctionBody>,
4732            called_on_null: Option<FunctionCalledOnNull>,
4733            parallel: Option<FunctionParallel>,
4734        }
4735        let mut body = Body::default();
4736        loop {
4737            fn ensure_not_set<T>(field: &Option<T>, name: &str) -> Result<(), ParserError> {
4738                if field.is_some() {
4739                    return Err(ParserError::ParserError(format!(
4740                        "{name} specified more than once",
4741                    )));
4742                }
4743                Ok(())
4744            }
4745            if self.parse_keyword(Keyword::AS) {
4746                ensure_not_set(&body.function_body, "AS")?;
4747                body.function_body = Some(CreateFunctionBody::AsBeforeOptions(
4748                    self.parse_create_function_body_string()?,
4749                ));
4750            } else if self.parse_keyword(Keyword::LANGUAGE) {
4751                ensure_not_set(&body.language, "LANGUAGE")?;
4752                body.language = Some(self.parse_identifier()?);
4753            } else if self.parse_keyword(Keyword::IMMUTABLE) {
4754                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
4755                body.behavior = Some(FunctionBehavior::Immutable);
4756            } else if self.parse_keyword(Keyword::STABLE) {
4757                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
4758                body.behavior = Some(FunctionBehavior::Stable);
4759            } else if self.parse_keyword(Keyword::VOLATILE) {
4760                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
4761                body.behavior = Some(FunctionBehavior::Volatile);
4762            } else if self.parse_keywords(&[
4763                Keyword::CALLED,
4764                Keyword::ON,
4765                Keyword::NULL,
4766                Keyword::INPUT,
4767            ]) {
4768                ensure_not_set(
4769                    &body.called_on_null,
4770                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
4771                )?;
4772                body.called_on_null = Some(FunctionCalledOnNull::CalledOnNullInput);
4773            } else if self.parse_keywords(&[
4774                Keyword::RETURNS,
4775                Keyword::NULL,
4776                Keyword::ON,
4777                Keyword::NULL,
4778                Keyword::INPUT,
4779            ]) {
4780                ensure_not_set(
4781                    &body.called_on_null,
4782                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
4783                )?;
4784                body.called_on_null = Some(FunctionCalledOnNull::ReturnsNullOnNullInput);
4785            } else if self.parse_keyword(Keyword::STRICT) {
4786                ensure_not_set(
4787                    &body.called_on_null,
4788                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
4789                )?;
4790                body.called_on_null = Some(FunctionCalledOnNull::Strict);
4791            } else if self.parse_keyword(Keyword::PARALLEL) {
4792                ensure_not_set(&body.parallel, "PARALLEL { UNSAFE | RESTRICTED | SAFE }")?;
4793                if self.parse_keyword(Keyword::UNSAFE) {
4794                    body.parallel = Some(FunctionParallel::Unsafe);
4795                } else if self.parse_keyword(Keyword::RESTRICTED) {
4796                    body.parallel = Some(FunctionParallel::Restricted);
4797                } else if self.parse_keyword(Keyword::SAFE) {
4798                    body.parallel = Some(FunctionParallel::Safe);
4799                } else {
4800                    return self.expected("one of UNSAFE | RESTRICTED | SAFE", self.peek_token());
4801                }
4802            } else if self.parse_keyword(Keyword::RETURN) {
4803                ensure_not_set(&body.function_body, "RETURN")?;
4804                body.function_body = Some(CreateFunctionBody::Return(self.parse_expr()?));
4805            } else {
4806                break;
4807            }
4808        }
4809
4810        Ok(Statement::CreateFunction(CreateFunction {
4811            or_replace,
4812            temporary,
4813            name,
4814            args: Some(args),
4815            return_type,
4816            behavior: body.behavior,
4817            called_on_null: body.called_on_null,
4818            parallel: body.parallel,
4819            language: body.language,
4820            function_body: body.function_body,
4821            if_not_exists: false,
4822            using: None,
4823            determinism_specifier: None,
4824            options: None,
4825            remote_connection: None,
4826        }))
4827    }
4828
4829    /// Parse `CREATE FUNCTION` for [Hive]
4830    ///
4831    /// [Hive]: https://cwiki.apache.org/confluence/display/hive/languagemanual+ddl#LanguageManualDDL-Create/Drop/ReloadFunction
4832    fn parse_hive_create_function(
4833        &mut self,
4834        or_replace: bool,
4835        temporary: bool,
4836    ) -> Result<Statement, ParserError> {
4837        let name = self.parse_object_name(false)?;
4838        self.expect_keyword_is(Keyword::AS)?;
4839
4840        let as_ = self.parse_create_function_body_string()?;
4841        let using = self.parse_optional_create_function_using()?;
4842
4843        Ok(Statement::CreateFunction(CreateFunction {
4844            or_replace,
4845            temporary,
4846            name,
4847            function_body: Some(CreateFunctionBody::AsBeforeOptions(as_)),
4848            using,
4849            if_not_exists: false,
4850            args: None,
4851            return_type: None,
4852            behavior: None,
4853            called_on_null: None,
4854            parallel: None,
4855            language: None,
4856            determinism_specifier: None,
4857            options: None,
4858            remote_connection: None,
4859        }))
4860    }
4861
4862    /// Parse `CREATE FUNCTION` for [BigQuery]
4863    ///
4864    /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_function_statement
4865    fn parse_bigquery_create_function(
4866        &mut self,
4867        or_replace: bool,
4868        temporary: bool,
4869    ) -> Result<Statement, ParserError> {
4870        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
4871        let name = self.parse_object_name(false)?;
4872
4873        let parse_function_param =
4874            |parser: &mut Parser| -> Result<OperateFunctionArg, ParserError> {
4875                let name = parser.parse_identifier()?;
4876                let data_type = parser.parse_data_type()?;
4877                Ok(OperateFunctionArg {
4878                    mode: None,
4879                    name: Some(name),
4880                    data_type,
4881                    default_expr: None,
4882                })
4883            };
4884        self.expect_token(&Token::LParen)?;
4885        let args = self.parse_comma_separated0(parse_function_param, Token::RParen)?;
4886        self.expect_token(&Token::RParen)?;
4887
4888        let return_type = if self.parse_keyword(Keyword::RETURNS) {
4889            Some(self.parse_data_type()?)
4890        } else {
4891            None
4892        };
4893
4894        let determinism_specifier = if self.parse_keyword(Keyword::DETERMINISTIC) {
4895            Some(FunctionDeterminismSpecifier::Deterministic)
4896        } else if self.parse_keywords(&[Keyword::NOT, Keyword::DETERMINISTIC]) {
4897            Some(FunctionDeterminismSpecifier::NotDeterministic)
4898        } else {
4899            None
4900        };
4901
4902        let language = if self.parse_keyword(Keyword::LANGUAGE) {
4903            Some(self.parse_identifier()?)
4904        } else {
4905            None
4906        };
4907
4908        let remote_connection =
4909            if self.parse_keywords(&[Keyword::REMOTE, Keyword::WITH, Keyword::CONNECTION]) {
4910                Some(self.parse_object_name(false)?)
4911            } else {
4912                None
4913            };
4914
4915        // `OPTIONS` may come before or after the function body, but
4916        // may be specified at most once.
4917        let mut options = self.maybe_parse_options(Keyword::OPTIONS)?;
4918
4919        let function_body = if remote_connection.is_none() {
4920            self.expect_keyword_is(Keyword::AS)?;
4921            let expr = self.parse_expr()?;
4922            if options.is_none() {
4923                options = self.maybe_parse_options(Keyword::OPTIONS)?;
4924                Some(CreateFunctionBody::AsBeforeOptions(expr))
4925            } else {
4926                Some(CreateFunctionBody::AsAfterOptions(expr))
4927            }
4928        } else {
4929            None
4930        };
4931
4932        Ok(Statement::CreateFunction(CreateFunction {
4933            or_replace,
4934            temporary,
4935            if_not_exists,
4936            name,
4937            args: Some(args),
4938            return_type,
4939            function_body,
4940            language,
4941            determinism_specifier,
4942            options,
4943            remote_connection,
4944            using: None,
4945            behavior: None,
4946            called_on_null: None,
4947            parallel: None,
4948        }))
4949    }
4950
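    /// Parse a single `CREATE FUNCTION` argument of the (roughly Postgres-style)
    /// form `[ IN | OUT | INOUT ] [ argname ] argtype [ DEFAULT expr | = expr ]`,
    /// as implemented below.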
4951    fn parse_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
4952        let mode = if self.parse_keyword(Keyword::IN) {
4953            Some(ArgMode::In)
4954        } else if self.parse_keyword(Keyword::OUT) {
4955            Some(ArgMode::Out)
4956        } else if self.parse_keyword(Keyword::INOUT) {
4957            Some(ArgMode::InOut)
4958        } else {
4959            None
4960        };
4961
4962        // parse: [ argname ] argtype
4963        let mut name = None;
4964        let mut data_type = self.parse_data_type()?;
4965        if let DataType::Custom(n, _) = &data_type {
4966            // the first token is actually a name
4967            match n.0[0].clone() {
4968                ObjectNamePart::Identifier(ident) => name = Some(ident),
4969            }
4970            data_type = self.parse_data_type()?;
4971        }
4972
4973        let default_expr = if self.parse_keyword(Keyword::DEFAULT) || self.consume_token(&Token::Eq)
4974        {
4975            Some(self.parse_expr()?)
4976        } else {
4977            None
4978        };
4979        Ok(OperateFunctionArg {
4980            mode,
4981            name,
4982            data_type,
4983            default_expr,
4984        })
4985    }
4986
4987    /// Parse statements of the DropTrigger type such as:
4988    ///
4989    /// ```sql
4990    /// DROP TRIGGER [ IF EXISTS ] name ON table_name [ CASCADE | RESTRICT ]
4991    /// ```
4992    pub fn parse_drop_trigger(&mut self) -> Result<Statement, ParserError> {
4993        if !dialect_of!(self is PostgreSqlDialect | GenericDialect | MySqlDialect) {
4994            self.prev_token();
4995            return self.expected("an object type after DROP", self.peek_token());
4996        }
4997        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
4998        let trigger_name = self.parse_object_name(false)?;
4999        let table_name = if self.parse_keyword(Keyword::ON) {
5000            Some(self.parse_object_name(false)?)
5001        } else {
5002            None
5003        };
5004        let option = self
5005            .parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT])
5006            .map(|keyword| match keyword {
5007                Keyword::CASCADE => ReferentialAction::Cascade,
5008                Keyword::RESTRICT => ReferentialAction::Restrict,
5009                _ => unreachable!(),
5010            });
5011        Ok(Statement::DropTrigger {
5012            if_exists,
5013            trigger_name,
5014            table_name,
5015            option,
5016        })
5017    }
5018
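    /// Parse a `CREATE [CONSTRAINT] TRIGGER` statement, for example
    /// (PostgreSQL-style, illustrative):
    /// `CREATE TRIGGER check_update BEFORE UPDATE ON accounts FOR EACH ROW EXECUTE FUNCTION check_account_update()`.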
5019    pub fn parse_create_trigger(
5020        &mut self,
5021        or_replace: bool,
5022        is_constraint: bool,
5023    ) -> Result<Statement, ParserError> {
5024        if !dialect_of!(self is PostgreSqlDialect | GenericDialect | MySqlDialect) {
5025            self.prev_token();
5026            return self.expected("an object type after CREATE", self.peek_token());
5027        }
5028
5029        let name = self.parse_object_name(false)?;
5030        let period = self.parse_trigger_period()?;
5031
5032        let events = self.parse_keyword_separated(Keyword::OR, Parser::parse_trigger_event)?;
5033        self.expect_keyword_is(Keyword::ON)?;
5034        let table_name = self.parse_object_name(false)?;
5035
5036        let referenced_table_name = if self.parse_keyword(Keyword::FROM) {
5037            self.parse_object_name(true).ok()
5038        } else {
5039            None
5040        };
5041
5042        let characteristics = self.parse_constraint_characteristics()?;
5043
5044        let mut referencing = vec![];
5045        if self.parse_keyword(Keyword::REFERENCING) {
5046            while let Some(refer) = self.parse_trigger_referencing()? {
5047                referencing.push(refer);
5048            }
5049        }
5050
5051        self.expect_keyword_is(Keyword::FOR)?;
5052        let include_each = self.parse_keyword(Keyword::EACH);
5053        let trigger_object =
5054            match self.expect_one_of_keywords(&[Keyword::ROW, Keyword::STATEMENT])? {
5055                Keyword::ROW => TriggerObject::Row,
5056                Keyword::STATEMENT => TriggerObject::Statement,
5057                _ => unreachable!(),
5058            };
5059
5060        let condition = self
5061            .parse_keyword(Keyword::WHEN)
5062            .then(|| self.parse_expr())
5063            .transpose()?;
5064
5065        self.expect_keyword_is(Keyword::EXECUTE)?;
5066
5067        let exec_body = self.parse_trigger_exec_body()?;
5068
5069        Ok(Statement::CreateTrigger {
5070            or_replace,
5071            is_constraint,
5072            name,
5073            period,
5074            events,
5075            table_name,
5076            referenced_table_name,
5077            referencing,
5078            trigger_object,
5079            include_each,
5080            condition,
5081            exec_body,
5082            characteristics,
5083        })
5084    }
5085
5086    pub fn parse_trigger_period(&mut self) -> Result<TriggerPeriod, ParserError> {
5087        Ok(
5088            match self.expect_one_of_keywords(&[
5089                Keyword::BEFORE,
5090                Keyword::AFTER,
5091                Keyword::INSTEAD,
5092            ])? {
5093                Keyword::BEFORE => TriggerPeriod::Before,
5094                Keyword::AFTER => TriggerPeriod::After,
5095                Keyword::INSTEAD => self
5096                    .expect_keyword_is(Keyword::OF)
5097                    .map(|_| TriggerPeriod::InsteadOf)?,
5098                _ => unreachable!(),
5099            },
5100        )
5101    }
5102
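    /// Parse a single trigger event: `INSERT`, `UPDATE [OF col, ...]`,
    /// `DELETE`, or `TRUNCATE`.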
5103    pub fn parse_trigger_event(&mut self) -> Result<TriggerEvent, ParserError> {
5104        Ok(
5105            match self.expect_one_of_keywords(&[
5106                Keyword::INSERT,
5107                Keyword::UPDATE,
5108                Keyword::DELETE,
5109                Keyword::TRUNCATE,
5110            ])? {
5111                Keyword::INSERT => TriggerEvent::Insert,
5112                Keyword::UPDATE => {
5113                    if self.parse_keyword(Keyword::OF) {
5114                        let cols = self.parse_comma_separated(Parser::parse_identifier)?;
5115                        TriggerEvent::Update(cols)
5116                    } else {
5117                        TriggerEvent::Update(vec![])
5118                    }
5119                }
5120                Keyword::DELETE => TriggerEvent::Delete,
5121                Keyword::TRUNCATE => TriggerEvent::Truncate,
5122                _ => unreachable!(),
5123            },
5124        )
5125    }
5126
5127    pub fn parse_trigger_referencing(&mut self) -> Result<Option<TriggerReferencing>, ParserError> {
5128        let refer_type = match self.parse_one_of_keywords(&[Keyword::OLD, Keyword::NEW]) {
5129            Some(Keyword::OLD) if self.parse_keyword(Keyword::TABLE) => {
5130                TriggerReferencingType::OldTable
5131            }
5132            Some(Keyword::NEW) if self.parse_keyword(Keyword::TABLE) => {
5133                TriggerReferencingType::NewTable
5134            }
5135            _ => {
5136                return Ok(None);
5137            }
5138        };
5139
5140        let is_as = self.parse_keyword(Keyword::AS);
5141        let transition_relation_name = self.parse_object_name(false)?;
5142        Ok(Some(TriggerReferencing {
5143            refer_type,
5144            is_as,
5145            transition_relation_name,
5146        }))
5147    }
5148
5149    pub fn parse_trigger_exec_body(&mut self) -> Result<TriggerExecBody, ParserError> {
5150        Ok(TriggerExecBody {
5151            exec_type: match self
5152                .expect_one_of_keywords(&[Keyword::FUNCTION, Keyword::PROCEDURE])?
5153            {
5154                Keyword::FUNCTION => TriggerExecBodyType::Function,
5155                Keyword::PROCEDURE => TriggerExecBodyType::Procedure,
5156                _ => unreachable!(),
5157            },
5158            func_desc: self.parse_function_desc()?,
5159        })
5160    }
5161
5162    pub fn parse_create_macro(
5163        &mut self,
5164        or_replace: bool,
5165        temporary: bool,
5166    ) -> Result<Statement, ParserError> {
5167        if dialect_of!(self is DuckDbDialect | GenericDialect) {
5168            let name = self.parse_object_name(false)?;
5169            self.expect_token(&Token::LParen)?;
5170            let args = if self.consume_token(&Token::RParen) {
5171                self.prev_token();
5172                None
5173            } else {
5174                Some(self.parse_comma_separated(Parser::parse_macro_arg)?)
5175            };
5176
5177            self.expect_token(&Token::RParen)?;
5178            self.expect_keyword_is(Keyword::AS)?;
5179
5180            Ok(Statement::CreateMacro {
5181                or_replace,
5182                temporary,
5183                name,
5184                args,
5185                definition: if self.parse_keyword(Keyword::TABLE) {
5186                    MacroDefinition::Table(self.parse_query()?)
5187                } else {
5188                    MacroDefinition::Expr(self.parse_expr()?)
5189                },
5190            })
5191        } else {
5192            self.prev_token();
5193            self.expected("an object type after CREATE", self.peek_token())
5194        }
5195    }
5196
5197    fn parse_macro_arg(&mut self) -> Result<MacroArg, ParserError> {
5198        let name = self.parse_identifier()?;
5199
5200        let default_expr =
5201            if self.consume_token(&Token::Assignment) || self.consume_token(&Token::RArrow) {
5202                Some(self.parse_expr()?)
5203            } else {
5204                None
5205            };
5206        Ok(MacroArg { name, default_expr })
5207    }
5208
5209    pub fn parse_create_external_table(
5210        &mut self,
5211        or_replace: bool,
5212    ) -> Result<Statement, ParserError> {
5213        self.expect_keyword_is(Keyword::TABLE)?;
5214        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5215        let table_name = self.parse_object_name(false)?;
5216        let (columns, constraints) = self.parse_columns()?;
5217
5218        let hive_distribution = self.parse_hive_distribution()?;
5219        let hive_formats = self.parse_hive_formats()?;
5220
5221        let file_format = if let Some(ff) = &hive_formats.storage {
5222            match ff {
5223                HiveIOFormat::FileFormat { format } => Some(*format),
5224                _ => None,
5225            }
5226        } else {
5227            None
5228        };
5229        let location = hive_formats.location.clone();
5230        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
5231        Ok(CreateTableBuilder::new(table_name)
5232            .columns(columns)
5233            .constraints(constraints)
5234            .hive_distribution(hive_distribution)
5235            .hive_formats(Some(hive_formats))
5236            .table_properties(table_properties)
5237            .or_replace(or_replace)
5238            .if_not_exists(if_not_exists)
5239            .external(true)
5240            .file_format(file_format)
5241            .location(location)
5242            .build())
5243    }
5244
5245    pub fn parse_file_format(&mut self) -> Result<FileFormat, ParserError> {
5246        let next_token = self.next_token();
5247        match &next_token.token {
5248            Token::Word(w) => match w.keyword {
5249                Keyword::AVRO => Ok(FileFormat::AVRO),
5250                Keyword::JSONFILE => Ok(FileFormat::JSONFILE),
5251                Keyword::ORC => Ok(FileFormat::ORC),
5252                Keyword::PARQUET => Ok(FileFormat::PARQUET),
5253                Keyword::RCFILE => Ok(FileFormat::RCFILE),
5254                Keyword::SEQUENCEFILE => Ok(FileFormat::SEQUENCEFILE),
5255                Keyword::TEXTFILE => Ok(FileFormat::TEXTFILE),
5256                _ => self.expected("fileformat", next_token),
5257            },
5258            _ => self.expected("fileformat", next_token),
5259        }
5260    }
5261
5262    pub fn parse_analyze_format(&mut self) -> Result<AnalyzeFormat, ParserError> {
5263        let next_token = self.next_token();
5264        match &next_token.token {
5265            Token::Word(w) => match w.keyword {
5266                Keyword::TEXT => Ok(AnalyzeFormat::TEXT),
5267                Keyword::GRAPHVIZ => Ok(AnalyzeFormat::GRAPHVIZ),
5268                Keyword::JSON => Ok(AnalyzeFormat::JSON),
5269                _ => self.expected("an analyze format", next_token),
5270            },
5271            _ => self.expected("an analyze format", next_token),
5272        }
5273    }
5274
5275    pub fn parse_create_view(
5276        &mut self,
5277        or_replace: bool,
5278        temporary: bool,
5279        create_view_params: Option<CreateViewParams>,
5280    ) -> Result<Statement, ParserError> {
5281        let materialized = self.parse_keyword(Keyword::MATERIALIZED);
5282        self.expect_keyword_is(Keyword::VIEW)?;
5283        let if_not_exists = dialect_of!(self is BigQueryDialect|SQLiteDialect|GenericDialect)
5284            && self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5285        // Many dialects support `OR ALTER` right after `CREATE`, but we don't (yet).
5286        // ANSI SQL and Postgres support RECURSIVE here, but we don't support it either.
5287        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
5288        let name = self.parse_object_name(allow_unquoted_hyphen)?;
5289        let columns = self.parse_view_columns()?;
5290        let mut options = CreateTableOptions::None;
5291        let with_options = self.parse_options(Keyword::WITH)?;
5292        if !with_options.is_empty() {
5293            options = CreateTableOptions::With(with_options);
5294        }
5295
5296        let cluster_by = if self.parse_keyword(Keyword::CLUSTER) {
5297            self.expect_keyword_is(Keyword::BY)?;
5298            self.parse_parenthesized_column_list(Optional, false)?
5299        } else {
5300            vec![]
5301        };
5302
5303        if dialect_of!(self is BigQueryDialect | GenericDialect) {
5304            if let Some(opts) = self.maybe_parse_options(Keyword::OPTIONS)? {
5305                if !opts.is_empty() {
5306                    options = CreateTableOptions::Options(opts);
5307                }
5308            };
5309        }
5310
5311        let to = if dialect_of!(self is ClickHouseDialect | GenericDialect)
5312            && self.parse_keyword(Keyword::TO)
5313        {
5314            Some(self.parse_object_name(false)?)
5315        } else {
5316            None
5317        };
5318
5319        let comment = if dialect_of!(self is SnowflakeDialect | GenericDialect)
5320            && self.parse_keyword(Keyword::COMMENT)
5321        {
5322            self.expect_token(&Token::Eq)?;
5323            let next_token = self.next_token();
5324            match next_token.token {
5325                Token::SingleQuotedString(str) => Some(str),
5326                _ => self.expected("string literal", next_token)?,
5327            }
5328        } else {
5329            None
5330        };
5331
5332        self.expect_keyword_is(Keyword::AS)?;
5333        let query = self.parse_query()?;
5334        // Optional `WITH [ CASCADED | LOCAL ] CHECK OPTION` is widely supported here.
5335
5336        let with_no_schema_binding = dialect_of!(self is RedshiftSqlDialect | GenericDialect)
5337            && self.parse_keywords(&[
5338                Keyword::WITH,
5339                Keyword::NO,
5340                Keyword::SCHEMA,
5341                Keyword::BINDING,
5342            ]);
5343
5344        Ok(Statement::CreateView {
5345            name,
5346            columns,
5347            query,
5348            materialized,
5349            or_replace,
5350            options,
5351            cluster_by,
5352            comment,
5353            with_no_schema_binding,
5354            if_not_exists,
5355            temporary,
5356            to,
5357            params: create_view_params,
5358        })
5359    }
5360
5361    /// Parse optional parameters for the `CREATE VIEW` statement supported by [MySQL].
5362    ///
5363    /// [MySQL]: https://dev.mysql.com/doc/refman/9.1/en/create-view.html
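    ///
    /// The optional `ALGORITHM = ...`, `DEFINER = ...` and `SQL SECURITY ...`
    /// clauses may appear between `CREATE` and `VIEW`, e.g. (illustrative)
    /// `CREATE ALGORITHM = MERGE SQL SECURITY INVOKER VIEW v AS SELECT 1`.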
5364    fn parse_create_view_params(&mut self) -> Result<Option<CreateViewParams>, ParserError> {
5365        let algorithm = if self.parse_keyword(Keyword::ALGORITHM) {
5366            self.expect_token(&Token::Eq)?;
5367            Some(
5368                match self.expect_one_of_keywords(&[
5369                    Keyword::UNDEFINED,
5370                    Keyword::MERGE,
5371                    Keyword::TEMPTABLE,
5372                ])? {
5373                    Keyword::UNDEFINED => CreateViewAlgorithm::Undefined,
5374                    Keyword::MERGE => CreateViewAlgorithm::Merge,
5375                    Keyword::TEMPTABLE => CreateViewAlgorithm::TempTable,
5376                    _ => {
5377                        self.prev_token();
5378                        let found = self.next_token();
5379                        return self
5380                            .expected("UNDEFINED or MERGE or TEMPTABLE after ALGORITHM =", found);
5381                    }
5382                },
5383            )
5384        } else {
5385            None
5386        };
5387        let definer = if self.parse_keyword(Keyword::DEFINER) {
5388            self.expect_token(&Token::Eq)?;
5389            Some(self.parse_grantee_name()?)
5390        } else {
5391            None
5392        };
5393        let security = if self.parse_keywords(&[Keyword::SQL, Keyword::SECURITY]) {
5394            Some(
5395                match self.expect_one_of_keywords(&[Keyword::DEFINER, Keyword::INVOKER])? {
5396                    Keyword::DEFINER => CreateViewSecurity::Definer,
5397                    Keyword::INVOKER => CreateViewSecurity::Invoker,
5398                    _ => {
5399                        self.prev_token();
5400                        let found = self.next_token();
5401                        return self.expected("DEFINER or INVOKER after SQL SECURITY", found);
5402                    }
5403                },
5404            )
5405        } else {
5406            None
5407        };
5408        if algorithm.is_some() || definer.is_some() || security.is_some() {
5409            Ok(Some(CreateViewParams {
5410                algorithm,
5411                definer,
5412                security,
5413            }))
5414        } else {
5415            Ok(None)
5416        }
5417    }
5418
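    /// Parse a `CREATE ROLE` statement. The set of recognized options depends on the
    /// dialect (`AUTHORIZATION` for MSSQL, the role attributes below for PostgreSQL).
    /// A rough, illustrative sketch of the PostgreSQL form accepted here:
    ///
    /// ```sql
    /// CREATE ROLE [IF NOT EXISTS] name [, ...]
    ///     [WITH]
    ///     [LOGIN | NOLOGIN] [INHERIT | NOINHERIT] [SUPERUSER | NOSUPERUSER]
    ///     [PASSWORD 'secret' | PASSWORD NULL] [CONNECTION LIMIT n] [VALID UNTIL 'timestamp']
    ///     [IN ROLE role_name [, ...]] [ROLE role_name [, ...]] [ADMIN role_name [, ...]]
    /// ```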
5419    pub fn parse_create_role(&mut self) -> Result<Statement, ParserError> {
5420        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5421        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
5422
5423        let _ = self.parse_keyword(Keyword::WITH); // [ WITH ]
5424
5425        let optional_keywords = if dialect_of!(self is MsSqlDialect) {
5426            vec![Keyword::AUTHORIZATION]
5427        } else if dialect_of!(self is PostgreSqlDialect) {
5428            vec![
5429                Keyword::LOGIN,
5430                Keyword::NOLOGIN,
5431                Keyword::INHERIT,
5432                Keyword::NOINHERIT,
5433                Keyword::BYPASSRLS,
5434                Keyword::NOBYPASSRLS,
5435                Keyword::PASSWORD,
5436                Keyword::CREATEDB,
5437                Keyword::NOCREATEDB,
5438                Keyword::CREATEROLE,
5439                Keyword::NOCREATEROLE,
5440                Keyword::SUPERUSER,
5441                Keyword::NOSUPERUSER,
5442                Keyword::REPLICATION,
5443                Keyword::NOREPLICATION,
5444                Keyword::CONNECTION,
5445                Keyword::VALID,
5446                Keyword::IN,
5447                Keyword::ROLE,
5448                Keyword::ADMIN,
5449                Keyword::USER,
5450            ]
5451        } else {
5452            vec![]
5453        };
5454
5455        // MSSQL
5456        let mut authorization_owner = None;
5457        // Postgres
5458        let mut login = None;
5459        let mut inherit = None;
5460        let mut bypassrls = None;
5461        let mut password = None;
5462        let mut create_db = None;
5463        let mut create_role = None;
5464        let mut superuser = None;
5465        let mut replication = None;
5466        let mut connection_limit = None;
5467        let mut valid_until = None;
5468        let mut in_role = vec![];
5469        let mut in_group = vec![];
5470        let mut role = vec![];
5471        let mut user = vec![];
5472        let mut admin = vec![];
5473
5474        while let Some(keyword) = self.parse_one_of_keywords(&optional_keywords) {
5475            let loc = self
5476                .tokens
5477                .get(self.index - 1)
5478                .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
5479            match keyword {
5480                Keyword::AUTHORIZATION => {
5481                    if authorization_owner.is_some() {
5482                        parser_err!("Found multiple AUTHORIZATION", loc)
5483                    } else {
5484                        authorization_owner = Some(self.parse_object_name(false)?);
5485                        Ok(())
5486                    }
5487                }
5488                Keyword::LOGIN | Keyword::NOLOGIN => {
5489                    if login.is_some() {
5490                        parser_err!("Found multiple LOGIN or NOLOGIN", loc)
5491                    } else {
5492                        login = Some(keyword == Keyword::LOGIN);
5493                        Ok(())
5494                    }
5495                }
5496                Keyword::INHERIT | Keyword::NOINHERIT => {
5497                    if inherit.is_some() {
5498                        parser_err!("Found multiple INHERIT or NOINHERIT", loc)
5499                    } else {
5500                        inherit = Some(keyword == Keyword::INHERIT);
5501                        Ok(())
5502                    }
5503                }
5504                Keyword::BYPASSRLS | Keyword::NOBYPASSRLS => {
5505                    if bypassrls.is_some() {
5506                        parser_err!("Found multiple BYPASSRLS or NOBYPASSRLS", loc)
5507                    } else {
5508                        bypassrls = Some(keyword == Keyword::BYPASSRLS);
5509                        Ok(())
5510                    }
5511                }
5512                Keyword::CREATEDB | Keyword::NOCREATEDB => {
5513                    if create_db.is_some() {
5514                        parser_err!("Found multiple CREATEDB or NOCREATEDB", loc)
5515                    } else {
5516                        create_db = Some(keyword == Keyword::CREATEDB);
5517                        Ok(())
5518                    }
5519                }
5520                Keyword::CREATEROLE | Keyword::NOCREATEROLE => {
5521                    if create_role.is_some() {
5522                        parser_err!("Found multiple CREATEROLE or NOCREATEROLE", loc)
5523                    } else {
5524                        create_role = Some(keyword == Keyword::CREATEROLE);
5525                        Ok(())
5526                    }
5527                }
5528                Keyword::SUPERUSER | Keyword::NOSUPERUSER => {
5529                    if superuser.is_some() {
5530                        parser_err!("Found multiple SUPERUSER or NOSUPERUSER", loc)
5531                    } else {
5532                        superuser = Some(keyword == Keyword::SUPERUSER);
5533                        Ok(())
5534                    }
5535                }
5536                Keyword::REPLICATION | Keyword::NOREPLICATION => {
5537                    if replication.is_some() {
5538                        parser_err!("Found multiple REPLICATION or NOREPLICATION", loc)
5539                    } else {
5540                        replication = Some(keyword == Keyword::REPLICATION);
5541                        Ok(())
5542                    }
5543                }
5544                Keyword::PASSWORD => {
5545                    if password.is_some() {
5546                        parser_err!("Found multiple PASSWORD", loc)
5547                    } else {
5548                        password = if self.parse_keyword(Keyword::NULL) {
5549                            Some(Password::NullPassword)
5550                        } else {
5551                            Some(Password::Password(Expr::Value(self.parse_value()?)))
5552                        };
5553                        Ok(())
5554                    }
5555                }
5556                Keyword::CONNECTION => {
5557                    self.expect_keyword_is(Keyword::LIMIT)?;
5558                    if connection_limit.is_some() {
5559                        parser_err!("Found multiple CONNECTION LIMIT", loc)
5560                    } else {
5561                        connection_limit = Some(Expr::Value(self.parse_number_value()?));
5562                        Ok(())
5563                    }
5564                }
5565                Keyword::VALID => {
5566                    self.expect_keyword_is(Keyword::UNTIL)?;
5567                    if valid_until.is_some() {
5568                        parser_err!("Found multiple VALID UNTIL", loc)
5569                    } else {
5570                        valid_until = Some(Expr::Value(self.parse_value()?));
5571                        Ok(())
5572                    }
5573                }
5574                Keyword::IN => {
5575                    if self.parse_keyword(Keyword::ROLE) {
5576                        if !in_role.is_empty() {
5577                            parser_err!("Found multiple IN ROLE", loc)
5578                        } else {
5579                            in_role = self.parse_comma_separated(|p| p.parse_identifier())?;
5580                            Ok(())
5581                        }
5582                    } else if self.parse_keyword(Keyword::GROUP) {
5583                        if !in_group.is_empty() {
5584                            parser_err!("Found multiple IN GROUP", loc)
5585                        } else {
5586                            in_group = self.parse_comma_separated(|p| p.parse_identifier())?;
5587                            Ok(())
5588                        }
5589                    } else {
5590                        self.expected("ROLE or GROUP after IN", self.peek_token())
5591                    }
5592                }
5593                Keyword::ROLE => {
5594                    if !role.is_empty() {
5595                        parser_err!("Found multiple ROLE", loc)
5596                    } else {
5597                        role = self.parse_comma_separated(|p| p.parse_identifier())?;
5598                        Ok(())
5599                    }
5600                }
5601                Keyword::USER => {
5602                    if !user.is_empty() {
5603                        parser_err!("Found multiple USER", loc)
5604                    } else {
5605                        user = self.parse_comma_separated(|p| p.parse_identifier())?;
5606                        Ok(())
5607                    }
5608                }
5609                Keyword::ADMIN => {
5610                    if !admin.is_empty() {
5611                        parser_err!("Found multiple ADMIN", loc)
5612                    } else {
5613                        admin = self.parse_comma_separated(|p| p.parse_identifier())?;
5614                        Ok(())
5615                    }
5616                }
5617                _ => break,
5618            }?
5619        }
5620
5621        Ok(Statement::CreateRole {
5622            names,
5623            if_not_exists,
5624            login,
5625            inherit,
5626            bypassrls,
5627            password,
5628            create_db,
5629            create_role,
5630            replication,
5631            superuser,
5632            connection_limit,
5633            valid_until,
5634            in_role,
5635            in_group,
5636            role,
5637            user,
5638            admin,
5639            authorization_owner,
5640        })
5641    }
5642
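    /// Parse an owner name as used, for example, after `OWNER TO` or in the `TO` list of
    /// `CREATE POLICY`: one of `CURRENT_USER`, `CURRENT_ROLE`, `SESSION_USER`, or a plain
    /// identifier.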
5643    pub fn parse_owner(&mut self) -> Result<Owner, ParserError> {
        let owner = match self.parse_one_of_keywords(&[
            Keyword::CURRENT_USER,
            Keyword::CURRENT_ROLE,
            Keyword::SESSION_USER,
        ]) {
5645            Some(Keyword::CURRENT_USER) => Owner::CurrentUser,
5646            Some(Keyword::CURRENT_ROLE) => Owner::CurrentRole,
5647            Some(Keyword::SESSION_USER) => Owner::SessionUser,
5648            Some(_) => unreachable!(),
5649            None => {
5650                match self.parse_identifier() {
5651                    Ok(ident) => Owner::Ident(ident),
5652                    Err(e) => {
                        return Err(ParserError::ParserError(format!(
                            "Expected: CURRENT_USER, CURRENT_ROLE, SESSION_USER or identifier after OWNER TO. {e}"
                        )))
5654                    }
5655                }
5656            }
5657        };
5658        Ok(owner)
5659    }
5660
5661    /// ```sql
5662    ///     CREATE POLICY name ON table_name [ AS { PERMISSIVE | RESTRICTIVE } ]
5663    ///     [ FOR { ALL | SELECT | INSERT | UPDATE | DELETE } ]
5664    ///     [ TO { role_name | PUBLIC | CURRENT_USER | CURRENT_ROLE | SESSION_USER } [, ...] ]
5665    ///     [ USING ( using_expression ) ]
5666    ///     [ WITH CHECK ( with_check_expression ) ]
5667    /// ```
5668    ///
5669    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createpolicy.html)
5670    pub fn parse_create_policy(&mut self) -> Result<Statement, ParserError> {
5671        let name = self.parse_identifier()?;
5672        self.expect_keyword_is(Keyword::ON)?;
5673        let table_name = self.parse_object_name(false)?;
5674
5675        let policy_type = if self.parse_keyword(Keyword::AS) {
5676            let keyword =
5677                self.expect_one_of_keywords(&[Keyword::PERMISSIVE, Keyword::RESTRICTIVE])?;
5678            Some(match keyword {
5679                Keyword::PERMISSIVE => CreatePolicyType::Permissive,
5680                Keyword::RESTRICTIVE => CreatePolicyType::Restrictive,
5681                _ => unreachable!(),
5682            })
5683        } else {
5684            None
5685        };
5686
5687        let command = if self.parse_keyword(Keyword::FOR) {
5688            let keyword = self.expect_one_of_keywords(&[
5689                Keyword::ALL,
5690                Keyword::SELECT,
5691                Keyword::INSERT,
5692                Keyword::UPDATE,
5693                Keyword::DELETE,
5694            ])?;
5695            Some(match keyword {
5696                Keyword::ALL => CreatePolicyCommand::All,
5697                Keyword::SELECT => CreatePolicyCommand::Select,
5698                Keyword::INSERT => CreatePolicyCommand::Insert,
5699                Keyword::UPDATE => CreatePolicyCommand::Update,
5700                Keyword::DELETE => CreatePolicyCommand::Delete,
5701                _ => unreachable!(),
5702            })
5703        } else {
5704            None
5705        };
5706
5707        let to = if self.parse_keyword(Keyword::TO) {
5708            Some(self.parse_comma_separated(|p| p.parse_owner())?)
5709        } else {
5710            None
5711        };
5712
5713        let using = if self.parse_keyword(Keyword::USING) {
5714            self.expect_token(&Token::LParen)?;
5715            let expr = self.parse_expr()?;
5716            self.expect_token(&Token::RParen)?;
5717            Some(expr)
5718        } else {
5719            None
5720        };
5721
5722        let with_check = if self.parse_keywords(&[Keyword::WITH, Keyword::CHECK]) {
5723            self.expect_token(&Token::LParen)?;
5724            let expr = self.parse_expr()?;
5725            self.expect_token(&Token::RParen)?;
5726            Some(expr)
5727        } else {
5728            None
5729        };
5730
5731        Ok(CreatePolicy {
5732            name,
5733            table_name,
5734            policy_type,
5735            command,
5736            to,
5737            using,
5738            with_check,
5739        })
5740    }
5741
5742    /// ```sql
5743    /// CREATE CONNECTOR [IF NOT EXISTS] connector_name
5744    /// [TYPE datasource_type]
5745    /// [URL datasource_url]
5746    /// [COMMENT connector_comment]
5747    /// [WITH DCPROPERTIES(property_name=property_value, ...)]
5748    /// ```
5749    ///
5750    /// [Hive Documentation](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362034#LanguageManualDDL-CreateDataConnectorCreateConnector)
5751    pub fn parse_create_connector(&mut self) -> Result<Statement, ParserError> {
5752        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5753        let name = self.parse_identifier()?;
5754
5755        let connector_type = if self.parse_keyword(Keyword::TYPE) {
5756            Some(self.parse_literal_string()?)
5757        } else {
5758            None
5759        };
5760
5761        let url = if self.parse_keyword(Keyword::URL) {
5762            Some(self.parse_literal_string()?)
5763        } else {
5764            None
5765        };
5766
5767        let comment = self.parse_optional_inline_comment()?;
5768
5769        let with_dcproperties =
5770            match self.parse_options_with_keywords(&[Keyword::WITH, Keyword::DCPROPERTIES])? {
5771                properties if !properties.is_empty() => Some(properties),
5772                _ => None,
5773            };
5774
5775        Ok(Statement::CreateConnector(CreateConnector {
5776            name,
5777            if_not_exists,
5778            connector_type,
5779            url,
5780            comment,
5781            with_dcproperties,
5782        }))
5783    }
5784
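    /// Parse a `DROP` statement. The object kinds and modifiers accepted depend on the
    /// dialect; an illustrative example of the common form handled here:
    ///
    /// ```sql
    /// DROP TABLE IF EXISTS t1, t2 CASCADE
    /// ```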
5785    pub fn parse_drop(&mut self) -> Result<Statement, ParserError> {
        // The MySQL, DuckDB, and generic dialects support `TEMPORARY`
5787        let temporary = dialect_of!(self is MySqlDialect | GenericDialect | DuckDbDialect)
5788            && self.parse_keyword(Keyword::TEMPORARY);
5789        let persistent = dialect_of!(self is DuckDbDialect)
5790            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
5791
5792        let object_type = if self.parse_keyword(Keyword::TABLE) {
5793            ObjectType::Table
5794        } else if self.parse_keyword(Keyword::VIEW) {
5795            ObjectType::View
5796        } else if self.parse_keyword(Keyword::INDEX) {
5797            ObjectType::Index
5798        } else if self.parse_keyword(Keyword::ROLE) {
5799            ObjectType::Role
5800        } else if self.parse_keyword(Keyword::SCHEMA) {
5801            ObjectType::Schema
5802        } else if self.parse_keyword(Keyword::DATABASE) {
5803            ObjectType::Database
5804        } else if self.parse_keyword(Keyword::SEQUENCE) {
5805            ObjectType::Sequence
5806        } else if self.parse_keyword(Keyword::STAGE) {
5807            ObjectType::Stage
5808        } else if self.parse_keyword(Keyword::TYPE) {
5809            ObjectType::Type
5810        } else if self.parse_keyword(Keyword::FUNCTION) {
5811            return self.parse_drop_function();
5812        } else if self.parse_keyword(Keyword::POLICY) {
5813            return self.parse_drop_policy();
5814        } else if self.parse_keyword(Keyword::CONNECTOR) {
5815            return self.parse_drop_connector();
5816        } else if self.parse_keyword(Keyword::PROCEDURE) {
5817            return self.parse_drop_procedure();
5818        } else if self.parse_keyword(Keyword::SECRET) {
5819            return self.parse_drop_secret(temporary, persistent);
5820        } else if self.parse_keyword(Keyword::TRIGGER) {
5821            return self.parse_drop_trigger();
5822        } else if self.parse_keyword(Keyword::EXTENSION) {
5823            return self.parse_drop_extension();
5824        } else {
5825            return self.expected(
5826                "CONNECTOR, DATABASE, EXTENSION, FUNCTION, INDEX, POLICY, PROCEDURE, ROLE, SCHEMA, SECRET, SEQUENCE, STAGE, TABLE, TRIGGER, TYPE, or VIEW after DROP",
5827                self.peek_token(),
5828            );
5829        };
5830        // Many dialects support the non-standard `IF EXISTS` clause and allow
5831        // specifying multiple objects to delete in a single statement
5832        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5833        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
5834
5835        let loc = self.peek_token().span.start;
5836        let cascade = self.parse_keyword(Keyword::CASCADE);
5837        let restrict = self.parse_keyword(Keyword::RESTRICT);
5838        let purge = self.parse_keyword(Keyword::PURGE);
5839        if cascade && restrict {
5840            return parser_err!("Cannot specify both CASCADE and RESTRICT in DROP", loc);
5841        }
5842        if object_type == ObjectType::Role && (cascade || restrict || purge) {
5843            return parser_err!(
5844                "Cannot specify CASCADE, RESTRICT, or PURGE in DROP ROLE",
5845                loc
5846            );
5847        }
5848        Ok(Statement::Drop {
5849            object_type,
5850            if_exists,
5851            names,
5852            cascade,
5853            restrict,
5854            purge,
5855            temporary,
5856        })
5857    }
5858
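    /// Parse an optional `CASCADE` or `RESTRICT` drop behavior.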
5859    fn parse_optional_drop_behavior(&mut self) -> Option<DropBehavior> {
5860        match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
5861            Some(Keyword::CASCADE) => Some(DropBehavior::Cascade),
5862            Some(Keyword::RESTRICT) => Some(DropBehavior::Restrict),
5863            _ => None,
5864        }
5865    }
5866
5867    /// ```sql
5868    /// DROP FUNCTION [ IF EXISTS ] name [ ( [ [ argmode ] [ argname ] argtype [, ...] ] ) ] [, ...]
5869    /// [ CASCADE | RESTRICT ]
5870    /// ```
5871    fn parse_drop_function(&mut self) -> Result<Statement, ParserError> {
5872        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5873        let func_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
5874        let drop_behavior = self.parse_optional_drop_behavior();
5875        Ok(Statement::DropFunction {
5876            if_exists,
5877            func_desc,
5878            drop_behavior,
5879        })
5880    }
5881
5882    /// ```sql
5883    /// DROP POLICY [ IF EXISTS ] name ON table_name [ CASCADE | RESTRICT ]
5884    /// ```
5885    ///
5886    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-droppolicy.html)
5887    fn parse_drop_policy(&mut self) -> Result<Statement, ParserError> {
5888        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5889        let name = self.parse_identifier()?;
5890        self.expect_keyword_is(Keyword::ON)?;
5891        let table_name = self.parse_object_name(false)?;
5892        let drop_behavior = self.parse_optional_drop_behavior();
5893        Ok(Statement::DropPolicy {
5894            if_exists,
5895            name,
5896            table_name,
5897            drop_behavior,
5898        })
    }

    /// ```sql
5901    /// DROP CONNECTOR [IF EXISTS] name
5902    /// ```
5903    ///
5904    /// See [Hive](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362034#LanguageManualDDL-DropConnector)
5905    fn parse_drop_connector(&mut self) -> Result<Statement, ParserError> {
5906        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5907        let name = self.parse_identifier()?;
5908        Ok(Statement::DropConnector { if_exists, name })
5909    }
5910
5911    /// ```sql
5912    /// DROP PROCEDURE [ IF EXISTS ] name [ ( [ [ argmode ] [ argname ] argtype [, ...] ] ) ] [, ...]
5913    /// [ CASCADE | RESTRICT ]
5914    /// ```
5915    fn parse_drop_procedure(&mut self) -> Result<Statement, ParserError> {
5916        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5917        let proc_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
5918        let drop_behavior = self.parse_optional_drop_behavior();
5919        Ok(Statement::DropProcedure {
5920            if_exists,
5921            proc_desc,
5922            drop_behavior,
5923        })
5924    }
5925
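    /// Parse a function or procedure description as used in `DROP FUNCTION` and
    /// `DROP PROCEDURE`: an object name optionally followed by a parenthesized
    /// argument list.
    ///
    /// ```sql
    /// name [ ( [ [ argmode ] [ argname ] argtype [, ...] ] ) ]
    /// ```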
5926    fn parse_function_desc(&mut self) -> Result<FunctionDesc, ParserError> {
5927        let name = self.parse_object_name(false)?;
5928
5929        let args = if self.consume_token(&Token::LParen) {
5930            if self.consume_token(&Token::RParen) {
5931                None
5932            } else {
5933                let args = self.parse_comma_separated(Parser::parse_function_arg)?;
5934                self.expect_token(&Token::RParen)?;
5935                Some(args)
5936            }
5937        } else {
5938            None
5939        };
5940
5941        Ok(FunctionDesc { name, args })
5942    }
5943
5944    /// See [DuckDB Docs](https://duckdb.org/docs/sql/statements/create_secret.html) for more details.
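    ///
    /// A sketch of the syntax accepted here, based on the keywords handled below:
    ///
    /// ```sql
    /// DROP [ TEMPORARY | PERSISTENT ] SECRET [ IF EXISTS ] name [ FROM storage_specifier ]
    /// ```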
5945    fn parse_drop_secret(
5946        &mut self,
5947        temporary: bool,
5948        persistent: bool,
5949    ) -> Result<Statement, ParserError> {
5950        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5951        let name = self.parse_identifier()?;
5952        let storage_specifier = if self.parse_keyword(Keyword::FROM) {
5953            self.parse_identifier().ok()
5954        } else {
5955            None
5956        };
5957        let temp = match (temporary, persistent) {
5958            (true, false) => Some(true),
5959            (false, true) => Some(false),
5960            (false, false) => None,
5961            _ => self.expected("TEMPORARY or PERSISTENT", self.peek_token())?,
5962        };
5963
5964        Ok(Statement::DropSecret {
5965            if_exists,
5966            temporary: temp,
5967            name,
5968            storage_specifier,
5969        })
5970    }
5971
5972    /// Parse a `DECLARE` statement.
5973    ///
5974    /// ```sql
5975    /// DECLARE name [ BINARY ] [ ASENSITIVE | INSENSITIVE ] [ [ NO ] SCROLL ]
5976    ///     CURSOR [ { WITH | WITHOUT } HOLD ] FOR query
5977    /// ```
5978    ///
    /// The syntax can vary significantly between dialects; in those cases the
    /// grammar is documented on the dialect-specific parsing function, such as
    /// [`Self::parse_snowflake_declare`] or [`Self::parse_mssql_declare`].
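    ///
    /// An illustrative example of the generic cursor form parsed by this method:
    ///
    /// ```sql
    /// DECLARE c INSENSITIVE SCROLL CURSOR WITH HOLD FOR SELECT * FROM t;
    /// ```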
5981    pub fn parse_declare(&mut self) -> Result<Statement, ParserError> {
5982        if dialect_of!(self is BigQueryDialect) {
5983            return self.parse_big_query_declare();
5984        }
5985        if dialect_of!(self is SnowflakeDialect) {
5986            return self.parse_snowflake_declare();
5987        }
5988        if dialect_of!(self is MsSqlDialect) {
5989            return self.parse_mssql_declare();
5990        }
5991
5992        let name = self.parse_identifier()?;
5993
5994        let binary = Some(self.parse_keyword(Keyword::BINARY));
5995        let sensitive = if self.parse_keyword(Keyword::INSENSITIVE) {
5996            Some(true)
5997        } else if self.parse_keyword(Keyword::ASENSITIVE) {
5998            Some(false)
5999        } else {
6000            None
6001        };
6002        let scroll = if self.parse_keyword(Keyword::SCROLL) {
6003            Some(true)
6004        } else if self.parse_keywords(&[Keyword::NO, Keyword::SCROLL]) {
6005            Some(false)
6006        } else {
6007            None
6008        };
6009
6010        self.expect_keyword_is(Keyword::CURSOR)?;
6011        let declare_type = Some(DeclareType::Cursor);
6012
6013        let hold = match self.parse_one_of_keywords(&[Keyword::WITH, Keyword::WITHOUT]) {
6014            Some(keyword) => {
6015                self.expect_keyword_is(Keyword::HOLD)?;
6016
6017                match keyword {
6018                    Keyword::WITH => Some(true),
6019                    Keyword::WITHOUT => Some(false),
6020                    _ => unreachable!(),
6021                }
6022            }
6023            None => None,
6024        };
6025
6026        self.expect_keyword_is(Keyword::FOR)?;
6027
6028        let query = Some(self.parse_query()?);
6029
6030        Ok(Statement::Declare {
6031            stmts: vec![Declare {
6032                names: vec![name],
6033                data_type: None,
6034                assignment: None,
6035                declare_type,
6036                binary,
6037                sensitive,
6038                scroll,
6039                hold,
6040                for_query: query,
6041            }],
6042        })
6043    }
6044
6045    /// Parse a [BigQuery] `DECLARE` statement.
6046    ///
6047    /// Syntax:
6048    /// ```text
6049    /// DECLARE variable_name[, ...] [{ <variable_type> | <DEFAULT expression> }];
6050    /// ```
6051    /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#declare
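    ///
    /// For example (illustrative):
    ///
    /// ```sql
    /// DECLARE x, y INT64 DEFAULT 42;
    /// ```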
6052    pub fn parse_big_query_declare(&mut self) -> Result<Statement, ParserError> {
6053        let names = self.parse_comma_separated(Parser::parse_identifier)?;
6054
6055        let data_type = match self.peek_token().token {
6056            Token::Word(w) if w.keyword == Keyword::DEFAULT => None,
6057            _ => Some(self.parse_data_type()?),
6058        };
6059
6060        let expr = if data_type.is_some() {
6061            if self.parse_keyword(Keyword::DEFAULT) {
6062                Some(self.parse_expr()?)
6063            } else {
6064                None
6065            }
6066        } else {
            // If no variable type is given, a default expression must be specified,
            // per the BigQuery docs; i.e. `DECLARE foo;` is invalid.
6069            self.expect_keyword_is(Keyword::DEFAULT)?;
6070            Some(self.parse_expr()?)
6071        };
6072
6073        Ok(Statement::Declare {
6074            stmts: vec![Declare {
6075                names,
6076                data_type,
6077                assignment: expr.map(|expr| DeclareAssignment::Default(Box::new(expr))),
6078                declare_type: None,
6079                binary: None,
6080                sensitive: None,
6081                scroll: None,
6082                hold: None,
6083                for_query: None,
6084            }],
6085        })
6086    }
6087
6088    /// Parse a [Snowflake] `DECLARE` statement.
6089    ///
6090    /// Syntax:
6091    /// ```text
6092    /// DECLARE
6093    ///   [{ <variable_declaration>
6094    ///      | <cursor_declaration>
6095    ///      | <resultset_declaration>
6096    ///      | <exception_declaration> }; ... ]
6097    ///
6098    /// <variable_declaration>
6099    /// <variable_name> [<type>] [ { DEFAULT | := } <expression>]
6100    ///
6101    /// <cursor_declaration>
6102    /// <cursor_name> CURSOR FOR <query>
6103    ///
6104    /// <resultset_declaration>
6105    /// <resultset_name> RESULTSET [ { DEFAULT | := } ( <query> ) ] ;
6106    ///
6107    /// <exception_declaration>
6108    /// <exception_name> EXCEPTION [ ( <exception_number> , '<exception_message>' ) ] ;
6109    /// ```
6110    ///
6111    /// [Snowflake]: https://docs.snowflake.com/en/sql-reference/snowflake-scripting/declare
6112    pub fn parse_snowflake_declare(&mut self) -> Result<Statement, ParserError> {
6113        let mut stmts = vec![];
6114        loop {
6115            let name = self.parse_identifier()?;
6116            let (declare_type, for_query, assigned_expr, data_type) =
6117                if self.parse_keyword(Keyword::CURSOR) {
6118                    self.expect_keyword_is(Keyword::FOR)?;
6119                    match self.peek_token().token {
6120                        Token::Word(w) if w.keyword == Keyword::SELECT => (
6121                            Some(DeclareType::Cursor),
6122                            Some(self.parse_query()?),
6123                            None,
6124                            None,
6125                        ),
6126                        _ => (
6127                            Some(DeclareType::Cursor),
6128                            None,
6129                            Some(DeclareAssignment::For(Box::new(self.parse_expr()?))),
6130                            None,
6131                        ),
6132                    }
6133                } else if self.parse_keyword(Keyword::RESULTSET) {
6134                    let assigned_expr = if self.peek_token().token != Token::SemiColon {
6135                        self.parse_snowflake_variable_declaration_expression()?
6136                    } else {
6137                        // Nothing more to do. The statement has no further parameters.
6138                        None
6139                    };
6140
6141                    (Some(DeclareType::ResultSet), None, assigned_expr, None)
6142                } else if self.parse_keyword(Keyword::EXCEPTION) {
6143                    let assigned_expr = if self.peek_token().token == Token::LParen {
6144                        Some(DeclareAssignment::Expr(Box::new(self.parse_expr()?)))
6145                    } else {
6146                        // Nothing more to do. The statement has no further parameters.
6147                        None
6148                    };
6149
6150                    (Some(DeclareType::Exception), None, assigned_expr, None)
6151                } else {
6152                    // Without an explicit keyword, the only valid option is variable declaration.
6153                    let (assigned_expr, data_type) = if let Some(assigned_expr) =
6154                        self.parse_snowflake_variable_declaration_expression()?
6155                    {
6156                        (Some(assigned_expr), None)
6157                    } else if let Token::Word(_) = self.peek_token().token {
6158                        let data_type = self.parse_data_type()?;
6159                        (
6160                            self.parse_snowflake_variable_declaration_expression()?,
6161                            Some(data_type),
6162                        )
6163                    } else {
6164                        (None, None)
6165                    };
6166                    (None, None, assigned_expr, data_type)
6167                };
6168            let stmt = Declare {
6169                names: vec![name],
6170                data_type,
6171                assignment: assigned_expr,
6172                declare_type,
6173                binary: None,
6174                sensitive: None,
6175                scroll: None,
6176                hold: None,
6177                for_query,
6178            };
6179
6180            stmts.push(stmt);
6181            if self.consume_token(&Token::SemiColon) {
6182                match self.peek_token().token {
6183                    Token::Word(w)
6184                        if ALL_KEYWORDS
6185                            .binary_search(&w.value.to_uppercase().as_str())
6186                            .is_err() =>
6187                    {
6188                        // Not a keyword - start of a new declaration.
6189                        continue;
6190                    }
6191                    _ => {
6192                        // Put back the semicolon, this is the end of the DECLARE statement.
6193                        self.prev_token();
6194                    }
6195                }
6196            }
6197
6198            break;
6199        }
6200
6201        Ok(Statement::Declare { stmts })
6202    }
6203
6204    /// Parse a [MsSql] `DECLARE` statement.
6205    ///
6206    /// Syntax:
6207    /// ```text
6208    /// DECLARE
    /// {
    ///   { @local_variable [AS] data_type [ = value ] }
    ///   | { @cursor_variable_name CURSOR }
    /// } [ ,...n ]
6213    /// ```
6214    /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/language-elements/declare-local-variable-transact-sql?view=sql-server-ver16
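    ///
    /// For example (illustrative):
    ///
    /// ```sql
    /// DECLARE @x INT = 1, @cur CURSOR;
    /// ```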
6215    pub fn parse_mssql_declare(&mut self) -> Result<Statement, ParserError> {
6216        let stmts = self.parse_comma_separated(Parser::parse_mssql_declare_stmt)?;
6217
6218        Ok(Statement::Declare { stmts })
6219    }
6220
    /// Parse the body of a [MsSql] `DECLARE` statement.
6222    ///
6223    /// Syntax:
6224    /// ```text
    /// {
    ///   { @local_variable [AS] data_type [ = value ] }
    ///   | { @cursor_variable_name CURSOR }
    /// } [ ,...n ]
6229    /// ```
6230    /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/language-elements/declare-local-variable-transact-sql?view=sql-server-ver16
6231    pub fn parse_mssql_declare_stmt(&mut self) -> Result<Declare, ParserError> {
6232        let name = {
6233            let ident = self.parse_identifier()?;
6234            if !ident.value.starts_with('@') {
6235                Err(ParserError::TokenizerError(
6236                    "Invalid MsSql variable declaration.".to_string(),
6237                ))
6238            } else {
6239                Ok(ident)
6240            }
6241        }?;
6242
6243        let (declare_type, data_type) = match self.peek_token().token {
6244            Token::Word(w) => match w.keyword {
6245                Keyword::CURSOR => {
6246                    self.next_token();
6247                    (Some(DeclareType::Cursor), None)
6248                }
6249                Keyword::AS => {
6250                    self.next_token();
6251                    (None, Some(self.parse_data_type()?))
6252                }
6253                _ => (None, Some(self.parse_data_type()?)),
6254            },
6255            _ => (None, Some(self.parse_data_type()?)),
6256        };
6257
6258        let assignment = self.parse_mssql_variable_declaration_expression()?;
6259
6260        Ok(Declare {
6261            names: vec![name],
6262            data_type,
6263            assignment,
6264            declare_type,
6265            binary: None,
6266            sensitive: None,
6267            scroll: None,
6268            hold: None,
6269            for_query: None,
6270        })
6271    }
6272
6273    /// Parses the assigned expression in a variable declaration.
6274    ///
6275    /// Syntax:
6276    /// ```text
6277    /// [ { DEFAULT | := } <expression>]
6278    /// ```
6279    /// <https://docs.snowflake.com/en/sql-reference/snowflake-scripting/declare#variable-declaration-syntax>
6280    pub fn parse_snowflake_variable_declaration_expression(
6281        &mut self,
6282    ) -> Result<Option<DeclareAssignment>, ParserError> {
6283        Ok(match self.peek_token().token {
6284            Token::Word(w) if w.keyword == Keyword::DEFAULT => {
6285                self.next_token(); // Skip `DEFAULT`
6286                Some(DeclareAssignment::Default(Box::new(self.parse_expr()?)))
6287            }
6288            Token::Assignment => {
6289                self.next_token(); // Skip `:=`
6290                Some(DeclareAssignment::DuckAssignment(Box::new(
6291                    self.parse_expr()?,
6292                )))
6293            }
6294            _ => None,
6295        })
6296    }
6297
6298    /// Parses the assigned expression in a variable declaration.
6299    ///
6300    /// Syntax:
6301    /// ```text
6302    /// [ = <expression>]
6303    /// ```
6304    pub fn parse_mssql_variable_declaration_expression(
6305        &mut self,
6306    ) -> Result<Option<DeclareAssignment>, ParserError> {
6307        Ok(match self.peek_token().token {
6308            Token::Eq => {
6309                self.next_token(); // Skip `=`
6310                Some(DeclareAssignment::MsSqlAssignment(Box::new(
6311                    self.parse_expr()?,
6312                )))
6313            }
6314            _ => None,
6315        })
6316    }
6317
    /// Parse a `FETCH` statement.
    ///
    /// ```sql
    /// FETCH <direction> { FROM | IN } <cursor_name> [ INTO <target> ]
    /// ```
6319    pub fn parse_fetch_statement(&mut self) -> Result<Statement, ParserError> {
6320        let direction = if self.parse_keyword(Keyword::NEXT) {
6321            FetchDirection::Next
6322        } else if self.parse_keyword(Keyword::PRIOR) {
6323            FetchDirection::Prior
6324        } else if self.parse_keyword(Keyword::FIRST) {
6325            FetchDirection::First
6326        } else if self.parse_keyword(Keyword::LAST) {
6327            FetchDirection::Last
6328        } else if self.parse_keyword(Keyword::ABSOLUTE) {
6329            FetchDirection::Absolute {
6330                limit: self.parse_number_value()?.value,
6331            }
6332        } else if self.parse_keyword(Keyword::RELATIVE) {
6333            FetchDirection::Relative {
6334                limit: self.parse_number_value()?.value,
6335            }
6336        } else if self.parse_keyword(Keyword::FORWARD) {
6337            if self.parse_keyword(Keyword::ALL) {
6338                FetchDirection::ForwardAll
6339            } else {
6340                FetchDirection::Forward {
                    // TODO: Support an optional count here (the `limit` field is already an `Option`)
6342                    limit: Some(self.parse_number_value()?.value),
6343                }
6344            }
6345        } else if self.parse_keyword(Keyword::BACKWARD) {
6346            if self.parse_keyword(Keyword::ALL) {
6347                FetchDirection::BackwardAll
6348            } else {
6349                FetchDirection::Backward {
                    // TODO: Support an optional count here (the `limit` field is already an `Option`)
6351                    limit: Some(self.parse_number_value()?.value),
6352                }
6353            }
6354        } else if self.parse_keyword(Keyword::ALL) {
6355            FetchDirection::All
6356        } else {
6357            FetchDirection::Count {
6358                limit: self.parse_number_value()?.value,
6359            }
6360        };
6361
6362        self.expect_one_of_keywords(&[Keyword::FROM, Keyword::IN])?;
6363
6364        let name = self.parse_identifier()?;
6365
6366        let into = if self.parse_keyword(Keyword::INTO) {
6367            Some(self.parse_object_name(false)?)
6368        } else {
6369            None
6370        };
6371
6372        Ok(Statement::Fetch {
6373            name,
6374            direction,
6375            into,
6376        })
6377    }
6378
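    /// Parse a `DISCARD` statement (a PostgreSQL command):
    ///
    /// ```sql
    /// DISCARD { ALL | PLANS | SEQUENCES | TEMP | TEMPORARY }
    /// ```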
6379    pub fn parse_discard(&mut self) -> Result<Statement, ParserError> {
6380        let object_type = if self.parse_keyword(Keyword::ALL) {
6381            DiscardObject::ALL
6382        } else if self.parse_keyword(Keyword::PLANS) {
6383            DiscardObject::PLANS
6384        } else if self.parse_keyword(Keyword::SEQUENCES) {
6385            DiscardObject::SEQUENCES
6386        } else if self.parse_keyword(Keyword::TEMP) || self.parse_keyword(Keyword::TEMPORARY) {
6387            DiscardObject::TEMP
6388        } else {
6389            return self.expected(
6390                "ALL, PLANS, SEQUENCES, TEMP or TEMPORARY after DISCARD",
6391                self.peek_token(),
6392            );
6393        };
6394        Ok(Statement::Discard { object_type })
6395    }
6396
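    /// Parse a `CREATE INDEX` statement; the leading `CREATE [UNIQUE] INDEX` keywords are
    /// expected to have been consumed by the caller, which passes `unique` in.
    /// Roughly, the clauses handled below are:
    ///
    /// ```sql
    /// CREATE [UNIQUE] INDEX [CONCURRENTLY] [IF NOT EXISTS] [name] ON table_name
    ///     [USING method] (column_or_expression [ASC | DESC] [, ...])
    ///     [INCLUDE (column [, ...])] [NULLS [NOT] DISTINCT]
    ///     [WITH (parameter [, ...])] [WHERE predicate]
    /// ```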
6397    pub fn parse_create_index(&mut self, unique: bool) -> Result<Statement, ParserError> {
6398        let concurrently = self.parse_keyword(Keyword::CONCURRENTLY);
6399        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6400        let index_name = if if_not_exists || !self.parse_keyword(Keyword::ON) {
6401            let index_name = self.parse_object_name(false)?;
6402            self.expect_keyword_is(Keyword::ON)?;
6403            Some(index_name)
6404        } else {
6405            None
6406        };
6407        let table_name = self.parse_object_name(false)?;
6408        let using = if self.parse_keyword(Keyword::USING) {
6409            Some(self.parse_identifier()?)
6410        } else {
6411            None
6412        };
6413        self.expect_token(&Token::LParen)?;
6414        let columns = self.parse_comma_separated(Parser::parse_order_by_expr)?;
6415        self.expect_token(&Token::RParen)?;
6416
6417        let include = if self.parse_keyword(Keyword::INCLUDE) {
6418            self.expect_token(&Token::LParen)?;
6419            let columns = self.parse_comma_separated(|p| p.parse_identifier())?;
6420            self.expect_token(&Token::RParen)?;
6421            columns
6422        } else {
6423            vec![]
6424        };
6425
6426        let nulls_distinct = if self.parse_keyword(Keyword::NULLS) {
6427            let not = self.parse_keyword(Keyword::NOT);
6428            self.expect_keyword_is(Keyword::DISTINCT)?;
6429            Some(!not)
6430        } else {
6431            None
6432        };
6433
6434        let with = if self.dialect.supports_create_index_with_clause()
6435            && self.parse_keyword(Keyword::WITH)
6436        {
6437            self.expect_token(&Token::LParen)?;
6438            let with_params = self.parse_comma_separated(Parser::parse_expr)?;
6439            self.expect_token(&Token::RParen)?;
6440            with_params
6441        } else {
6442            Vec::new()
6443        };
6444
6445        let predicate = if self.parse_keyword(Keyword::WHERE) {
6446            Some(self.parse_expr()?)
6447        } else {
6448            None
6449        };
6450
6451        Ok(Statement::CreateIndex(CreateIndex {
6452            name: index_name,
6453            table_name,
6454            using,
6455            columns,
6456            unique,
6457            concurrently,
6458            if_not_exists,
6459            include,
6460            nulls_distinct,
6461            with,
6462            predicate,
6463        }))
6464    }
6465
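    /// Parse a PostgreSQL-style `CREATE EXTENSION` statement:
    ///
    /// ```sql
    /// CREATE EXTENSION [IF NOT EXISTS] name [WITH [SCHEMA schema] [VERSION version] [CASCADE]]
    /// ```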
6466    pub fn parse_create_extension(&mut self) -> Result<Statement, ParserError> {
6467        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6468        let name = self.parse_identifier()?;
6469
6470        let (schema, version, cascade) = if self.parse_keyword(Keyword::WITH) {
6471            let schema = if self.parse_keyword(Keyword::SCHEMA) {
6472                Some(self.parse_identifier()?)
6473            } else {
6474                None
6475            };
6476
6477            let version = if self.parse_keyword(Keyword::VERSION) {
6478                Some(self.parse_identifier()?)
6479            } else {
6480                None
6481            };
6482
6483            let cascade = self.parse_keyword(Keyword::CASCADE);
6484
6485            (schema, version, cascade)
6486        } else {
6487            (None, None, false)
6488        };
6489
6490        Ok(Statement::CreateExtension {
6491            name,
6492            if_not_exists,
6493            schema,
6494            version,
6495            cascade,
6496        })
6497    }
6498
6499    /// Parse a PostgreSQL-specific [Statement::DropExtension] statement.
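    ///
    /// A sketch of the accepted syntax:
    ///
    /// ```sql
    /// DROP EXTENSION [IF EXISTS] name [, ...] [CASCADE | RESTRICT]
    /// ```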
6500    pub fn parse_drop_extension(&mut self) -> Result<Statement, ParserError> {
6501        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6502        let names = self.parse_comma_separated(|p| p.parse_identifier())?;
6503        let cascade_or_restrict =
6504            self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]);
6505        Ok(Statement::DropExtension {
6506            names,
6507            if_exists,
6508            cascade_or_restrict: cascade_or_restrict
6509                .map(|k| match k {
6510                    Keyword::CASCADE => Ok(ReferentialAction::Cascade),
6511                    Keyword::RESTRICT => Ok(ReferentialAction::Restrict),
6512                    _ => self.expected("CASCADE or RESTRICT", self.peek_token()),
6513                })
6514                .transpose()?,
6515        })
6516    }
6517
    // TODO: Implement parsing for the Hive `SKEWED BY` clause
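    /// Parse a Hive `PARTITIONED BY` clause if one is present, e.g. (illustrative):
    ///
    /// ```sql
    /// PARTITIONED BY (year INT, month INT)
    /// ```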
6519    pub fn parse_hive_distribution(&mut self) -> Result<HiveDistributionStyle, ParserError> {
6520        if self.parse_keywords(&[Keyword::PARTITIONED, Keyword::BY]) {
6521            self.expect_token(&Token::LParen)?;
6522            let columns = self.parse_comma_separated(Parser::parse_column_def)?;
6523            self.expect_token(&Token::RParen)?;
6524            Ok(HiveDistributionStyle::PARTITIONED { columns })
6525        } else {
6526            Ok(HiveDistributionStyle::NONE)
6527        }
6528    }
6529
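    /// Parse the Hive storage clauses that may follow the column definitions of a
    /// `CREATE TABLE`: `ROW FORMAT ...`, `STORED AS ...`, `LOCATION '...'`, and
    /// `WITH SERDEPROPERTIES (...)`, in any order. An illustrative example:
    ///
    /// ```sql
    /// ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
    /// STORED AS TEXTFILE
    /// LOCATION '/user/hive/warehouse/t'
    /// ```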
6530    pub fn parse_hive_formats(&mut self) -> Result<HiveFormat, ParserError> {
6531        let mut hive_format = HiveFormat::default();
6532        loop {
6533            match self.parse_one_of_keywords(&[
6534                Keyword::ROW,
6535                Keyword::STORED,
6536                Keyword::LOCATION,
6537                Keyword::WITH,
6538            ]) {
6539                Some(Keyword::ROW) => {
6540                    hive_format.row_format = Some(self.parse_row_format()?);
6541                }
6542                Some(Keyword::STORED) => {
6543                    self.expect_keyword_is(Keyword::AS)?;
6544                    if self.parse_keyword(Keyword::INPUTFORMAT) {
6545                        let input_format = self.parse_expr()?;
6546                        self.expect_keyword_is(Keyword::OUTPUTFORMAT)?;
6547                        let output_format = self.parse_expr()?;
6548                        hive_format.storage = Some(HiveIOFormat::IOF {
6549                            input_format,
6550                            output_format,
6551                        });
6552                    } else {
6553                        let format = self.parse_file_format()?;
6554                        hive_format.storage = Some(HiveIOFormat::FileFormat { format });
6555                    }
6556                }
6557                Some(Keyword::LOCATION) => {
6558                    hive_format.location = Some(self.parse_literal_string()?);
6559                }
6560                Some(Keyword::WITH) => {
6561                    self.prev_token();
6562                    let properties = self
6563                        .parse_options_with_keywords(&[Keyword::WITH, Keyword::SERDEPROPERTIES])?;
6564                    if !properties.is_empty() {
6565                        hive_format.serde_properties = Some(properties);
6566                    } else {
6567                        break;
6568                    }
6569                }
6570                None => break,
6571                _ => break,
6572            }
6573        }
6574
6575        Ok(hive_format)
6576    }
6577
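    /// Parse the `FORMAT ...` portion of a Hive `ROW FORMAT` clause (the `ROW` keyword has
    /// already been consumed by the caller): either `FORMAT SERDE 'class'` or
    /// `FORMAT DELIMITED` followed by optional delimiter specifications such as
    /// `FIELDS TERMINATED BY ...` and `LINES TERMINATED BY ...`.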
6578    pub fn parse_row_format(&mut self) -> Result<HiveRowFormat, ParserError> {
6579        self.expect_keyword_is(Keyword::FORMAT)?;
6580        match self.parse_one_of_keywords(&[Keyword::SERDE, Keyword::DELIMITED]) {
6581            Some(Keyword::SERDE) => {
6582                let class = self.parse_literal_string()?;
6583                Ok(HiveRowFormat::SERDE { class })
6584            }
6585            _ => {
6586                let mut row_delimiters = vec![];
6587
6588                loop {
6589                    match self.parse_one_of_keywords(&[
6590                        Keyword::FIELDS,
6591                        Keyword::COLLECTION,
6592                        Keyword::MAP,
6593                        Keyword::LINES,
6594                        Keyword::NULL,
6595                    ]) {
6596                        Some(Keyword::FIELDS) => {
6597                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
6598                                row_delimiters.push(HiveRowDelimiter {
6599                                    delimiter: HiveDelimiter::FieldsTerminatedBy,
6600                                    char: self.parse_identifier()?,
6601                                });
6602
6603                                if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) {
6604                                    row_delimiters.push(HiveRowDelimiter {
6605                                        delimiter: HiveDelimiter::FieldsEscapedBy,
6606                                        char: self.parse_identifier()?,
6607                                    });
6608                                }
6609                            } else {
6610                                break;
6611                            }
6612                        }
6613                        Some(Keyword::COLLECTION) => {
6614                            if self.parse_keywords(&[
6615                                Keyword::ITEMS,
6616                                Keyword::TERMINATED,
6617                                Keyword::BY,
6618                            ]) {
6619                                row_delimiters.push(HiveRowDelimiter {
6620                                    delimiter: HiveDelimiter::CollectionItemsTerminatedBy,
6621                                    char: self.parse_identifier()?,
6622                                });
6623                            } else {
6624                                break;
6625                            }
6626                        }
6627                        Some(Keyword::MAP) => {
6628                            if self.parse_keywords(&[
6629                                Keyword::KEYS,
6630                                Keyword::TERMINATED,
6631                                Keyword::BY,
6632                            ]) {
6633                                row_delimiters.push(HiveRowDelimiter {
6634                                    delimiter: HiveDelimiter::MapKeysTerminatedBy,
6635                                    char: self.parse_identifier()?,
6636                                });
6637                            } else {
6638                                break;
6639                            }
6640                        }
6641                        Some(Keyword::LINES) => {
6642                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
6643                                row_delimiters.push(HiveRowDelimiter {
6644                                    delimiter: HiveDelimiter::LinesTerminatedBy,
6645                                    char: self.parse_identifier()?,
6646                                });
6647                            } else {
6648                                break;
6649                            }
6650                        }
6651                        Some(Keyword::NULL) => {
6652                            if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) {
6653                                row_delimiters.push(HiveRowDelimiter {
6654                                    delimiter: HiveDelimiter::NullDefinedAs,
6655                                    char: self.parse_identifier()?,
6656                                });
6657                            } else {
6658                                break;
6659                            }
6660                        }
6661                        _ => {
6662                            break;
6663                        }
6664                    }
6665                }
6666
6667                Ok(HiveRowFormat::DELIMITED {
6668                    delimiters: row_delimiters,
6669                })
6670            }
6671        }
6672    }
6673
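    /// Parse an optional ClickHouse-style `ON CLUSTER cluster_name` clause.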
6674    fn parse_optional_on_cluster(&mut self) -> Result<Option<Ident>, ParserError> {
6675        if self.parse_keywords(&[Keyword::ON, Keyword::CLUSTER]) {
6676            Ok(Some(self.parse_identifier()?))
6677        } else {
6678            Ok(None)
6679        }
6680    }
6681
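    /// Parse the remainder of a `CREATE TABLE` statement; the leading
    /// `CREATE [OR REPLACE] [GLOBAL | LOCAL] [TEMPORARY] [TRANSIENT] TABLE` keywords are
    /// expected to have been consumed by the caller, which passes their presence in via
    /// the parameters. A minimal illustrative example of input handled here:
    ///
    /// ```sql
    /// CREATE TABLE IF NOT EXISTS t (id INT, name TEXT) WITH (fillfactor = 70)
    /// ```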
6682    pub fn parse_create_table(
6683        &mut self,
6684        or_replace: bool,
6685        temporary: bool,
6686        global: Option<bool>,
6687        transient: bool,
6688    ) -> Result<Statement, ParserError> {
6689        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
6690        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6691        let table_name = self.parse_object_name(allow_unquoted_hyphen)?;
6692
        // ClickHouse has `ON CLUSTER 'cluster'` syntax for DDLs
6694        let on_cluster = self.parse_optional_on_cluster()?;
6695
6696        let like = if self.parse_keyword(Keyword::LIKE) || self.parse_keyword(Keyword::ILIKE) {
6697            self.parse_object_name(allow_unquoted_hyphen).ok()
6698        } else {
6699            None
6700        };
6701
6702        let clone = if self.parse_keyword(Keyword::CLONE) {
6703            self.parse_object_name(allow_unquoted_hyphen).ok()
6704        } else {
6705            None
6706        };
6707
6708        // parse optional column list (schema)
6709        let (columns, constraints) = self.parse_columns()?;
6710        let mut comment = if dialect_of!(self is HiveDialect)
6711            && self.parse_keyword(Keyword::COMMENT)
6712        {
6713            let next_token = self.next_token();
6714            match next_token.token {
6715                Token::SingleQuotedString(str) => Some(CommentDef::AfterColumnDefsWithoutEq(str)),
6716                _ => self.expected("comment", next_token)?,
6717            }
6718        } else {
6719            None
6720        };
6721
6722        // SQLite supports `WITHOUT ROWID` at the end of `CREATE TABLE`
6723        let without_rowid = self.parse_keywords(&[Keyword::WITHOUT, Keyword::ROWID]);
6724
6725        let hive_distribution = self.parse_hive_distribution()?;
6726        let clustered_by = self.parse_optional_clustered_by()?;
6727        let hive_formats = self.parse_hive_formats()?;
6728        // PostgreSQL supports `WITH ( options )`, before `AS`
6729        let with_options = self.parse_options(Keyword::WITH)?;
6730        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
6731
6732        let engine = if self.parse_keyword(Keyword::ENGINE) {
6733            self.expect_token(&Token::Eq)?;
6734            let next_token = self.next_token();
6735            match next_token.token {
6736                Token::Word(w) => {
6737                    let name = w.value;
6738                    let parameters = if self.peek_token() == Token::LParen {
6739                        Some(self.parse_parenthesized_identifiers()?)
6740                    } else {
6741                        None
6742                    };
6743                    Some(TableEngine { name, parameters })
6744                }
6745                _ => self.expected("identifier", next_token)?,
6746            }
6747        } else {
6748            None
6749        };
6750
6751        let auto_increment_offset = if self.parse_keyword(Keyword::AUTO_INCREMENT) {
6752            let _ = self.consume_token(&Token::Eq);
6753            let next_token = self.next_token();
6754            match next_token.token {
6755                Token::Number(s, _) => Some(Self::parse::<u32>(s, next_token.span.start)?),
6756                _ => self.expected("literal int", next_token)?,
6757            }
6758        } else {
6759            None
6760        };
6761
6762        // ClickHouse supports `PRIMARY KEY` before `ORDER BY`
6763        // https://clickhouse.com/docs/en/sql-reference/statements/create/table#primary-key
6764        let primary_key = if dialect_of!(self is ClickHouseDialect | GenericDialect)
6765            && self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY])
6766        {
6767            Some(Box::new(self.parse_expr()?))
6768        } else {
6769            None
6770        };
6771
6772        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
6773            if self.consume_token(&Token::LParen) {
6774                let columns = if self.peek_token() != Token::RParen {
6775                    self.parse_comma_separated(|p| p.parse_expr())?
6776                } else {
6777                    vec![]
6778                };
6779                self.expect_token(&Token::RParen)?;
6780                Some(OneOrManyWithParens::Many(columns))
6781            } else {
6782                Some(OneOrManyWithParens::One(self.parse_expr()?))
6783            }
6784        } else {
6785            None
6786        };
6787
6788        let create_table_config = self.parse_optional_create_table_config()?;
6789
6790        let default_charset = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARSET]) {
6791            self.expect_token(&Token::Eq)?;
6792            let next_token = self.next_token();
6793            match next_token.token {
6794                Token::Word(w) => Some(w.value),
6795                _ => self.expected("identifier", next_token)?,
6796            }
6797        } else {
6798            None
6799        };
6800
6801        let collation = if self.parse_keywords(&[Keyword::COLLATE]) {
6802            self.expect_token(&Token::Eq)?;
6803            let next_token = self.next_token();
6804            match next_token.token {
6805                Token::Word(w) => Some(w.value),
6806                _ => self.expected("identifier", next_token)?,
6807            }
6808        } else {
6809            None
6810        };
6811
6812        let on_commit = if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT]) {
6813            Some(self.parse_create_table_on_commit()?)
6814        } else {
6815            None
6816        };
6817
6818        let strict = self.parse_keyword(Keyword::STRICT);
6819
6820        // Skip the Hive dialect here, since its COMMENT clause has already been handled after the table column definitions.
6821        if !dialect_of!(self is HiveDialect) && self.parse_keyword(Keyword::COMMENT) {
6822            // rewind the COMMENT keyword
6823            self.prev_token();
6824            comment = self.parse_optional_inline_comment()?
6825        };
6826
6827        // Parse optional `AS ( query )`
6828        let query = if self.parse_keyword(Keyword::AS) {
6829            Some(self.parse_query()?)
6830        } else if self.dialect.supports_create_table_select() && self.parse_keyword(Keyword::SELECT)
6831        {
6832            // rewind the SELECT keyword
6833            self.prev_token();
6834            Some(self.parse_query()?)
6835        } else {
6836            None
6837        };
6838
6839        Ok(CreateTableBuilder::new(table_name)
6840            .temporary(temporary)
6841            .columns(columns)
6842            .constraints(constraints)
6843            .with_options(with_options)
6844            .table_properties(table_properties)
6845            .or_replace(or_replace)
6846            .if_not_exists(if_not_exists)
6847            .transient(transient)
6848            .hive_distribution(hive_distribution)
6849            .hive_formats(Some(hive_formats))
6850            .global(global)
6851            .query(query)
6852            .without_rowid(without_rowid)
6853            .like(like)
6854            .clone_clause(clone)
6855            .engine(engine)
6856            .comment(comment)
6857            .auto_increment_offset(auto_increment_offset)
6858            .order_by(order_by)
6859            .default_charset(default_charset)
6860            .collation(collation)
6861            .on_commit(on_commit)
6862            .on_cluster(on_cluster)
6863            .clustered_by(clustered_by)
6864            .partition_by(create_table_config.partition_by)
6865            .cluster_by(create_table_config.cluster_by)
6866            .options(create_table_config.options)
6867            .primary_key(primary_key)
6868            .strict(strict)
6869            .build())
6870    }
6871
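    /// Parse the action that follows `ON COMMIT` in a `CREATE [TEMPORARY] TABLE`
    /// statement: one of `DELETE ROWS`, `PRESERVE ROWS`, or `DROP`,
    /// e.g. `CREATE TEMPORARY TABLE t (a INT) ON COMMIT DELETE ROWS`.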
6872    pub(crate) fn parse_create_table_on_commit(&mut self) -> Result<OnCommit, ParserError> {
6873        if self.parse_keywords(&[Keyword::DELETE, Keyword::ROWS]) {
6874            Ok(OnCommit::DeleteRows)
6875        } else if self.parse_keywords(&[Keyword::PRESERVE, Keyword::ROWS]) {
6876            Ok(OnCommit::PreserveRows)
6877        } else if self.parse_keywords(&[Keyword::DROP]) {
6878            Ok(OnCommit::Drop)
6879        } else {
6880            parser_err!(
6881                "Expecting DELETE ROWS, PRESERVE ROWS or DROP",
6882                self.peek_token()
6883            )
6884        }
6885    }
6886
6887    /// Parse optional configuration for table creation, such as partitioning and clustering information.
6888    ///
6889    /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#syntax_2)
6890    /// [PostgreSQL](https://www.postgresql.org/docs/current/ddl-partitioning.html)
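    ///
    /// Illustrative (non-exhaustive) example of the clauses handled here,
    /// BigQuery-style: `PARTITION BY created_at CLUSTER BY customer_id
    /// OPTIONS(description = 'demo')`.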
6891    fn parse_optional_create_table_config(
6892        &mut self,
6893    ) -> Result<CreateTableConfiguration, ParserError> {
6894        let partition_by = if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
6895            && self.parse_keywords(&[Keyword::PARTITION, Keyword::BY])
6896        {
6897            Some(Box::new(self.parse_expr()?))
6898        } else {
6899            None
6900        };
6901
6902        let mut cluster_by = None;
6903        let mut options = None;
6904        if dialect_of!(self is BigQueryDialect | GenericDialect) {
6905            if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
6906                cluster_by = Some(WrappedCollection::NoWrapping(
6907                    self.parse_comma_separated(|p| p.parse_identifier())?,
6908                ));
6909            };
6910
6911            if let Token::Word(word) = self.peek_token().token {
6912                if word.keyword == Keyword::OPTIONS {
6913                    options = Some(self.parse_options(Keyword::OPTIONS)?);
6914                }
6915            };
6916        }
6917
6918        Ok(CreateTableConfiguration {
6919            partition_by,
6920            cluster_by,
6921            options,
6922        })
6923    }
6924
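    /// Parse an optional `COMMENT [=] '<literal>'` clause, returning
    /// `CommentDef::WithEq` or `CommentDef::WithoutEq` depending on whether the
    /// `=` was present, e.g. `COMMENT = 'user accounts'`.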
6925    pub fn parse_optional_inline_comment(&mut self) -> Result<Option<CommentDef>, ParserError> {
6926        let comment = if self.parse_keyword(Keyword::COMMENT) {
6927            let has_eq = self.consume_token(&Token::Eq);
6928            let next_token = self.next_token();
6929            match next_token.token {
6930                Token::SingleQuotedString(str) => Some(if has_eq {
6931                    CommentDef::WithEq(str)
6932                } else {
6933                    CommentDef::WithoutEq(str)
6934                }),
6935                _ => self.expected("comment", next_token)?,
6936            }
6937        } else {
6938            None
6939        };
6940        Ok(comment)
6941    }
6942
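    /// Parse an optional parenthesized list of procedure parameters, each a
    /// name followed by a data type, e.g. `(customer_id INT, note VARCHAR(100))`.
    /// A missing or empty list yields `Some` of an empty `Vec`.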
6943    pub fn parse_optional_procedure_parameters(
6944        &mut self,
6945    ) -> Result<Option<Vec<ProcedureParam>>, ParserError> {
6946        let mut params = vec![];
6947        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
6948            return Ok(Some(params));
6949        }
6950        loop {
6951            if let Token::Word(_) = self.peek_token().token {
6952                params.push(self.parse_procedure_param()?)
6953            }
6954            let comma = self.consume_token(&Token::Comma);
6955            if self.consume_token(&Token::RParen) {
6956                // allow a trailing comma, even though it is not standard SQL
6957                break;
6958            } else if !comma {
6959                return self.expected("',' or ')' after parameter definition", self.peek_token());
6960            }
6961        }
6962        Ok(Some(params))
6963    }
6964
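    /// Parse the optional parenthesized column and constraint list of a
    /// `CREATE TABLE` statement, for example
    /// `(id INT PRIMARY KEY, name TEXT NOT NULL, CONSTRAINT uq_name UNIQUE (name))`.
    /// If no `(` follows, both returned vectors are empty.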
6965    pub fn parse_columns(&mut self) -> Result<(Vec<ColumnDef>, Vec<TableConstraint>), ParserError> {
6966        let mut columns = vec![];
6967        let mut constraints = vec![];
6968        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
6969            return Ok((columns, constraints));
6970        }
6971
6972        loop {
6973            if let Some(constraint) = self.parse_optional_table_constraint()? {
6974                constraints.push(constraint);
6975            } else if let Token::Word(_) = self.peek_token().token {
6976                columns.push(self.parse_column_def()?);
6977            } else {
6978                return self.expected("column name or constraint definition", self.peek_token());
6979            }
6980
6981            let comma = self.consume_token(&Token::Comma);
6982            let rparen = self.peek_token().token == Token::RParen;
6983
6984            if !comma && !rparen {
6985                return self.expected("',' or ')' after column definition", self.peek_token());
6986            };
6987
6988            if rparen
6989                && (!comma
6990                    || self.dialect.supports_column_definition_trailing_commas()
6991                    || self.options.trailing_commas)
6992            {
6993                let _ = self.consume_token(&Token::RParen);
6994                break;
6995            }
6996        }
6997
6998        Ok((columns, constraints))
6999    }
7000
7001    pub fn parse_procedure_param(&mut self) -> Result<ProcedureParam, ParserError> {
7002        let name = self.parse_identifier()?;
7003        let data_type = self.parse_data_type()?;
7004        Ok(ProcedureParam { name, data_type })
7005    }
7006
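    /// Parse a single column definition: a name, an optional data type
    /// (SQLite allows omitting it), and any number of column options, e.g.
    /// `price DECIMAL(10, 2) NOT NULL DEFAULT 0` or
    /// `name TEXT CONSTRAINT name_not_null NOT NULL`.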
7007    pub fn parse_column_def(&mut self) -> Result<ColumnDef, ParserError> {
7008        let name = self.parse_identifier()?;
7009        let data_type = if self.is_column_type_sqlite_unspecified() {
7010            DataType::Unspecified
7011        } else {
7012            self.parse_data_type()?
7013        };
7014        let mut options = vec![];
7015        loop {
7016            if self.parse_keyword(Keyword::CONSTRAINT) {
7017                let name = Some(self.parse_identifier()?);
7018                if let Some(option) = self.parse_optional_column_option()? {
7019                    options.push(ColumnOptionDef { name, option });
7020                } else {
7021                    return self.expected(
7022                        "constraint details after CONSTRAINT <name>",
7023                        self.peek_token(),
7024                    );
7025                }
7026            } else if let Some(option) = self.parse_optional_column_option()? {
7027                options.push(ColumnOptionDef { name: None, option });
7028            } else {
7029                break;
7030            };
7031        }
7032        Ok(ColumnDef {
7033            name,
7034            data_type,
7035            options,
7036        })
7037    }
7038
7039    fn is_column_type_sqlite_unspecified(&mut self) -> bool {
7040        if dialect_of!(self is SQLiteDialect) {
7041            match self.peek_token().token {
7042                Token::Word(word) => matches!(
7043                    word.keyword,
7044                    Keyword::CONSTRAINT
7045                        | Keyword::PRIMARY
7046                        | Keyword::NOT
7047                        | Keyword::UNIQUE
7048                        | Keyword::CHECK
7049                        | Keyword::DEFAULT
7050                        | Keyword::COLLATE
7051                        | Keyword::REFERENCES
7052                        | Keyword::GENERATED
7053                        | Keyword::AS
7054                ),
7055                _ => true, // e.g. comma immediately after column name
7056            }
7057        } else {
7058            false
7059        }
7060    }
7061
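    /// Parse a single column option, if one is present. Illustrative
    /// (non-exhaustive) examples: `NOT NULL`, `DEFAULT now()`, `PRIMARY KEY`,
    /// `UNIQUE`, `CHECK (qty > 0)`, `COMMENT 'note'`, and
    /// `REFERENCES orders (id) ON DELETE CASCADE`.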
7062    pub fn parse_optional_column_option(&mut self) -> Result<Option<ColumnOption>, ParserError> {
7063        if let Some(option) = self.dialect.parse_column_option(self)? {
7064            return option;
7065        }
7066
7067        if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
7068            Ok(Some(ColumnOption::CharacterSet(
7069                self.parse_object_name(false)?,
7070            )))
7071        } else if self.parse_keywords(&[Keyword::COLLATE]) {
7072            Ok(Some(ColumnOption::Collation(
7073                self.parse_object_name(false)?,
7074            )))
7075        } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
7076            Ok(Some(ColumnOption::NotNull))
7077        } else if self.parse_keywords(&[Keyword::COMMENT]) {
7078            let next_token = self.next_token();
7079            match next_token.token {
7080                Token::SingleQuotedString(value, ..) => Ok(Some(ColumnOption::Comment(value))),
7081                _ => self.expected("string", next_token),
7082            }
7083        } else if self.parse_keyword(Keyword::NULL) {
7084            Ok(Some(ColumnOption::Null))
7085        } else if self.parse_keyword(Keyword::DEFAULT) {
7086            Ok(Some(ColumnOption::Default(self.parse_expr()?)))
7087        } else if dialect_of!(self is ClickHouseDialect | GenericDialect)
7088            && self.parse_keyword(Keyword::MATERIALIZED)
7089        {
7090            Ok(Some(ColumnOption::Materialized(self.parse_expr()?)))
7091        } else if dialect_of!(self is ClickHouseDialect | GenericDialect)
7092            && self.parse_keyword(Keyword::ALIAS)
7093        {
7094            Ok(Some(ColumnOption::Alias(self.parse_expr()?)))
7095        } else if dialect_of!(self is ClickHouseDialect | GenericDialect)
7096            && self.parse_keyword(Keyword::EPHEMERAL)
7097        {
7098            // The expression is optional for the EPHEMERAL syntax, so we need to check
7099            // if the column definition has remaining tokens before parsing the expression.
7100            if matches!(self.peek_token().token, Token::Comma | Token::RParen) {
7101                Ok(Some(ColumnOption::Ephemeral(None)))
7102            } else {
7103                Ok(Some(ColumnOption::Ephemeral(Some(self.parse_expr()?))))
7104            }
7105        } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
7106            let characteristics = self.parse_constraint_characteristics()?;
7107            Ok(Some(ColumnOption::Unique {
7108                is_primary: true,
7109                characteristics,
7110            }))
7111        } else if self.parse_keyword(Keyword::UNIQUE) {
7112            let characteristics = self.parse_constraint_characteristics()?;
7113            Ok(Some(ColumnOption::Unique {
7114                is_primary: false,
7115                characteristics,
7116            }))
7117        } else if self.parse_keyword(Keyword::REFERENCES) {
7118            let foreign_table = self.parse_object_name(false)?;
7119            // PostgreSQL allows omitting the column list and
7120            // uses the primary key column of the foreign table by default
7121            let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
7122            let mut on_delete = None;
7123            let mut on_update = None;
7124            loop {
7125                if on_delete.is_none() && self.parse_keywords(&[Keyword::ON, Keyword::DELETE]) {
7126                    on_delete = Some(self.parse_referential_action()?);
7127                } else if on_update.is_none()
7128                    && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
7129                {
7130                    on_update = Some(self.parse_referential_action()?);
7131                } else {
7132                    break;
7133                }
7134            }
7135            let characteristics = self.parse_constraint_characteristics()?;
7136
7137            Ok(Some(ColumnOption::ForeignKey {
7138                foreign_table,
7139                referred_columns,
7140                on_delete,
7141                on_update,
7142                characteristics,
7143            }))
7144        } else if self.parse_keyword(Keyword::CHECK) {
7145            self.expect_token(&Token::LParen)?;
7146            let expr = self.parse_expr()?;
7147            self.expect_token(&Token::RParen)?;
7148            Ok(Some(ColumnOption::Check(expr)))
7149        } else if self.parse_keyword(Keyword::AUTO_INCREMENT)
7150            && dialect_of!(self is MySqlDialect | GenericDialect)
7151        {
7152            // Support AUTO_INCREMENT for MySQL
7153            Ok(Some(ColumnOption::DialectSpecific(vec![
7154                Token::make_keyword("AUTO_INCREMENT"),
7155            ])))
7156        } else if self.parse_keyword(Keyword::AUTOINCREMENT)
7157            && dialect_of!(self is SQLiteDialect | GenericDialect)
7158        {
7159            // Support AUTOINCREMENT for SQLite
7160            Ok(Some(ColumnOption::DialectSpecific(vec![
7161                Token::make_keyword("AUTOINCREMENT"),
7162            ])))
7163        } else if self.parse_keyword(Keyword::ASC)
7164            && self.dialect.supports_asc_desc_in_column_definition()
7165        {
7166            // Support ASC for SQLite
7167            Ok(Some(ColumnOption::DialectSpecific(vec![
7168                Token::make_keyword("ASC"),
7169            ])))
7170        } else if self.parse_keyword(Keyword::DESC)
7171            && self.dialect.supports_asc_desc_in_column_definition()
7172        {
7173            // Support DESC for SQLite
7174            Ok(Some(ColumnOption::DialectSpecific(vec![
7175                Token::make_keyword("DESC"),
7176            ])))
7177        } else if self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
7178            && dialect_of!(self is MySqlDialect | GenericDialect)
7179        {
7180            let expr = self.parse_expr()?;
7181            Ok(Some(ColumnOption::OnUpdate(expr)))
7182        } else if self.parse_keyword(Keyword::GENERATED) {
7183            self.parse_optional_column_option_generated()
7184        } else if dialect_of!(self is BigQueryDialect | GenericDialect)
7185            && self.parse_keyword(Keyword::OPTIONS)
7186        {
7187            self.prev_token();
7188            Ok(Some(ColumnOption::Options(
7189                self.parse_options(Keyword::OPTIONS)?,
7190            )))
7191        } else if self.parse_keyword(Keyword::AS)
7192            && dialect_of!(self is MySqlDialect | SQLiteDialect | DuckDbDialect | GenericDialect)
7193        {
7194            self.parse_optional_column_option_as()
7195        } else if self.parse_keyword(Keyword::IDENTITY)
7196            && dialect_of!(self is MsSqlDialect | GenericDialect)
7197        {
7198            let parameters = if self.consume_token(&Token::LParen) {
7199                let seed = self.parse_number()?;
7200                self.expect_token(&Token::Comma)?;
7201                let increment = self.parse_number()?;
7202                self.expect_token(&Token::RParen)?;
7203
7204                Some(IdentityPropertyFormatKind::FunctionCall(
7205                    IdentityParameters { seed, increment },
7206                ))
7207            } else {
7208                None
7209            };
7210            Ok(Some(ColumnOption::Identity(
7211                IdentityPropertyKind::Identity(IdentityProperty {
7212                    parameters,
7213                    order: None,
7214                }),
7215            )))
7216        } else if dialect_of!(self is SQLiteDialect | GenericDialect)
7217            && self.parse_keywords(&[Keyword::ON, Keyword::CONFLICT])
7218        {
7219            // Support ON CONFLICT for SQLite
7220            Ok(Some(ColumnOption::OnConflict(
7221                self.expect_one_of_keywords(&[
7222                    Keyword::ROLLBACK,
7223                    Keyword::ABORT,
7224                    Keyword::FAIL,
7225                    Keyword::IGNORE,
7226                    Keyword::REPLACE,
7227                ])?,
7228            )))
7229        } else {
7230            Ok(None)
7231        }
7232    }
7233
7234    pub(crate) fn parse_tag(&mut self) -> Result<Tag, ParserError> {
7235        let name = self.parse_identifier()?;
7236        self.expect_token(&Token::Eq)?;
7237        let value = self.parse_literal_string()?;
7238
7239        Ok(Tag::new(name, value))
7240    }
7241
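    /// Parse the remainder of a `GENERATED ...` column option (the `GENERATED`
    /// keyword has already been consumed), e.g. `ALWAYS AS IDENTITY`,
    /// `BY DEFAULT AS IDENTITY`, or `ALWAYS AS (price * qty) STORED`.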
7242    fn parse_optional_column_option_generated(
7243        &mut self,
7244    ) -> Result<Option<ColumnOption>, ParserError> {
7245        if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS, Keyword::IDENTITY]) {
7246            let mut sequence_options = vec![];
7247            if self.expect_token(&Token::LParen).is_ok() {
7248                sequence_options = self.parse_create_sequence_options()?;
7249                self.expect_token(&Token::RParen)?;
7250            }
7251            Ok(Some(ColumnOption::Generated {
7252                generated_as: GeneratedAs::Always,
7253                sequence_options: Some(sequence_options),
7254                generation_expr: None,
7255                generation_expr_mode: None,
7256                generated_keyword: true,
7257            }))
7258        } else if self.parse_keywords(&[
7259            Keyword::BY,
7260            Keyword::DEFAULT,
7261            Keyword::AS,
7262            Keyword::IDENTITY,
7263        ]) {
7264            let mut sequence_options = vec![];
7265            if self.expect_token(&Token::LParen).is_ok() {
7266                sequence_options = self.parse_create_sequence_options()?;
7267                self.expect_token(&Token::RParen)?;
7268            }
7269            Ok(Some(ColumnOption::Generated {
7270                generated_as: GeneratedAs::ByDefault,
7271                sequence_options: Some(sequence_options),
7272                generation_expr: None,
7273                generation_expr_mode: None,
7274                generated_keyword: true,
7275            }))
7276        } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS]) {
7277            if self.expect_token(&Token::LParen).is_ok() {
7278                let expr = self.parse_expr()?;
7279                self.expect_token(&Token::RParen)?;
7280                let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
7281                    Ok((
7282                        GeneratedAs::ExpStored,
7283                        Some(GeneratedExpressionMode::Stored),
7284                    ))
7285                } else if dialect_of!(self is PostgreSqlDialect) {
7286                    // Postgres's `GENERATED ... AS IDENTITY` branches are handled above; this one requires STORED
7287                    self.expected("STORED", self.peek_token())
7288                } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
7289                    Ok((GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual)))
7290                } else {
7291                    Ok((GeneratedAs::Always, None))
7292                }?;
7293
7294                Ok(Some(ColumnOption::Generated {
7295                    generated_as: gen_as,
7296                    sequence_options: None,
7297                    generation_expr: Some(expr),
7298                    generation_expr_mode: expr_mode,
7299                    generated_keyword: true,
7300                }))
7301            } else {
7302                Ok(None)
7303            }
7304        } else {
7305            Ok(None)
7306        }
7307    }
7308
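    /// Parse the shorthand generated-column form that follows `AS`, e.g.
    /// `(price * qty) STORED` or `(price * qty) VIRTUAL`; without a trailing
    /// keyword the column defaults to `GeneratedAs::Always` with no mode.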
7309    fn parse_optional_column_option_as(&mut self) -> Result<Option<ColumnOption>, ParserError> {
7310        // Some databases allow `AS (expr)` as shorthand for `GENERATED ALWAYS AS (expr)`
7311        self.expect_token(&Token::LParen)?;
7312        let expr = self.parse_expr()?;
7313        self.expect_token(&Token::RParen)?;
7314
7315        let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
7316            (
7317                GeneratedAs::ExpStored,
7318                Some(GeneratedExpressionMode::Stored),
7319            )
7320        } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
7321            (GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual))
7322        } else {
7323            (GeneratedAs::Always, None)
7324        };
7325
7326        Ok(Some(ColumnOption::Generated {
7327            generated_as: gen_as,
7328            sequence_options: None,
7329            generation_expr: Some(expr),
7330            generation_expr_mode: expr_mode,
7331            generated_keyword: false,
7332        }))
7333    }
7334
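    /// Parse an optional Hive-style clustering clause, e.g.
    /// `CLUSTERED BY (user_id) SORTED BY (user_id ASC) INTO 32 BUCKETS`
    /// (the `SORTED BY` part is optional).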
7335    pub fn parse_optional_clustered_by(&mut self) -> Result<Option<ClusteredBy>, ParserError> {
7336        let clustered_by = if dialect_of!(self is HiveDialect|GenericDialect)
7337            && self.parse_keywords(&[Keyword::CLUSTERED, Keyword::BY])
7338        {
7339            let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
7340
7341            let sorted_by = if self.parse_keywords(&[Keyword::SORTED, Keyword::BY]) {
7342                self.expect_token(&Token::LParen)?;
7343                let sorted_by_columns = self.parse_comma_separated(|p| p.parse_order_by_expr())?;
7344                self.expect_token(&Token::RParen)?;
7345                Some(sorted_by_columns)
7346            } else {
7347                None
7348            };
7349
7350            self.expect_keyword_is(Keyword::INTO)?;
7351            let num_buckets = self.parse_number_value()?.value;
7352            self.expect_keyword_is(Keyword::BUCKETS)?;
7353            Some(ClusteredBy {
7354                columns,
7355                sorted_by,
7356                num_buckets,
7357            })
7358        } else {
7359            None
7360        };
7361        Ok(clustered_by)
7362    }
7363
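    /// Parse the referential action that follows `ON DELETE` or `ON UPDATE` in
    /// a foreign key definition: one of `RESTRICT`, `CASCADE`, `SET NULL`,
    /// `NO ACTION`, or `SET DEFAULT`.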
7364    pub fn parse_referential_action(&mut self) -> Result<ReferentialAction, ParserError> {
7365        if self.parse_keyword(Keyword::RESTRICT) {
7366            Ok(ReferentialAction::Restrict)
7367        } else if self.parse_keyword(Keyword::CASCADE) {
7368            Ok(ReferentialAction::Cascade)
7369        } else if self.parse_keywords(&[Keyword::SET, Keyword::NULL]) {
7370            Ok(ReferentialAction::SetNull)
7371        } else if self.parse_keywords(&[Keyword::NO, Keyword::ACTION]) {
7372            Ok(ReferentialAction::NoAction)
7373        } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
7374            Ok(ReferentialAction::SetDefault)
7375        } else {
7376            self.expected(
7377                "one of RESTRICT, CASCADE, SET NULL, NO ACTION or SET DEFAULT",
7378                self.peek_token(),
7379            )
7380        }
7381    }
7382
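    /// Parse optional constraint characteristics such as `[NOT] DEFERRABLE`,
    /// `INITIALLY {DEFERRED | IMMEDIATE}`, and `[NOT] ENFORCED`, in any order;
    /// returns `None` if none are present.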
7383    pub fn parse_constraint_characteristics(
7384        &mut self,
7385    ) -> Result<Option<ConstraintCharacteristics>, ParserError> {
7386        let mut cc = ConstraintCharacteristics::default();
7387
7388        loop {
7389            if cc.deferrable.is_none() && self.parse_keywords(&[Keyword::NOT, Keyword::DEFERRABLE])
7390            {
7391                cc.deferrable = Some(false);
7392            } else if cc.deferrable.is_none() && self.parse_keyword(Keyword::DEFERRABLE) {
7393                cc.deferrable = Some(true);
7394            } else if cc.initially.is_none() && self.parse_keyword(Keyword::INITIALLY) {
7395                if self.parse_keyword(Keyword::DEFERRED) {
7396                    cc.initially = Some(DeferrableInitial::Deferred);
7397                } else if self.parse_keyword(Keyword::IMMEDIATE) {
7398                    cc.initially = Some(DeferrableInitial::Immediate);
7399                } else {
7400                    self.expected("one of DEFERRED or IMMEDIATE", self.peek_token())?;
7401                }
7402            } else if cc.enforced.is_none() && self.parse_keyword(Keyword::ENFORCED) {
7403                cc.enforced = Some(true);
7404            } else if cc.enforced.is_none()
7405                && self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED])
7406            {
7407                cc.enforced = Some(false);
7408            } else {
7409                break;
7410            }
7411        }
7412
7413        if cc.deferrable.is_some() || cc.initially.is_some() || cc.enforced.is_some() {
7414            Ok(Some(cc))
7415        } else {
7416            Ok(None)
7417        }
7418    }
7419
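    /// Parse an optional table-level constraint. Illustrative examples:
    /// `CONSTRAINT pk_id PRIMARY KEY (id)`,
    /// `FOREIGN KEY (order_id) REFERENCES orders (id) ON DELETE CASCADE`,
    /// `CHECK (qty > 0)`, and (MySQL) `KEY idx_name (name)`.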
7420    pub fn parse_optional_table_constraint(
7421        &mut self,
7422    ) -> Result<Option<TableConstraint>, ParserError> {
7423        let name = if self.parse_keyword(Keyword::CONSTRAINT) {
7424            Some(self.parse_identifier()?)
7425        } else {
7426            None
7427        };
7428
7429        let next_token = self.next_token();
7430        match next_token.token {
7431            Token::Word(w) if w.keyword == Keyword::UNIQUE => {
7432                let index_type_display = self.parse_index_type_display();
7433                if !dialect_of!(self is GenericDialect | MySqlDialect)
7434                    && !index_type_display.is_none()
7435                {
7436                    return self
7437                        .expected("`index_name` or `(column_name [, ...])`", self.peek_token());
7438                }
7439
7440                let nulls_distinct = self.parse_optional_nulls_distinct()?;
7441
7442                // optional index name
7443                let index_name = self.parse_optional_indent()?;
7444                let index_type = self.parse_optional_using_then_index_type()?;
7445
7446                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
7447                let index_options = self.parse_index_options()?;
7448                let characteristics = self.parse_constraint_characteristics()?;
7449                Ok(Some(TableConstraint::Unique {
7450                    name,
7451                    index_name,
7452                    index_type_display,
7453                    index_type,
7454                    columns,
7455                    index_options,
7456                    characteristics,
7457                    nulls_distinct,
7458                }))
7459            }
7460            Token::Word(w) if w.keyword == Keyword::PRIMARY => {
7461                // `PRIMARY` must always be followed by `KEY`
7462                self.expect_keyword_is(Keyword::KEY)?;
7463
7464                // optional index name
7465                let index_name = self.parse_optional_indent()?;
7466                let index_type = self.parse_optional_using_then_index_type()?;
7467
7468                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
7469                let index_options = self.parse_index_options()?;
7470                let characteristics = self.parse_constraint_characteristics()?;
7471                Ok(Some(TableConstraint::PrimaryKey {
7472                    name,
7473                    index_name,
7474                    index_type,
7475                    columns,
7476                    index_options,
7477                    characteristics,
7478                }))
7479            }
7480            Token::Word(w) if w.keyword == Keyword::FOREIGN => {
7481                self.expect_keyword_is(Keyword::KEY)?;
7482                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
7483                self.expect_keyword_is(Keyword::REFERENCES)?;
7484                let foreign_table = self.parse_object_name(false)?;
7485                let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
7486                let mut on_delete = None;
7487                let mut on_update = None;
7488                loop {
7489                    if on_delete.is_none() && self.parse_keywords(&[Keyword::ON, Keyword::DELETE]) {
7490                        on_delete = Some(self.parse_referential_action()?);
7491                    } else if on_update.is_none()
7492                        && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
7493                    {
7494                        on_update = Some(self.parse_referential_action()?);
7495                    } else {
7496                        break;
7497                    }
7498                }
7499
7500                let characteristics = self.parse_constraint_characteristics()?;
7501
7502                Ok(Some(TableConstraint::ForeignKey {
7503                    name,
7504                    columns,
7505                    foreign_table,
7506                    referred_columns,
7507                    on_delete,
7508                    on_update,
7509                    characteristics,
7510                }))
7511            }
7512            Token::Word(w) if w.keyword == Keyword::CHECK => {
7513                self.expect_token(&Token::LParen)?;
7514                let expr = Box::new(self.parse_expr()?);
7515                self.expect_token(&Token::RParen)?;
7516                Ok(Some(TableConstraint::Check { name, expr }))
7517            }
7518            Token::Word(w)
7519                if (w.keyword == Keyword::INDEX || w.keyword == Keyword::KEY)
7520                    && dialect_of!(self is GenericDialect | MySqlDialect)
7521                    && name.is_none() =>
7522            {
7523                let display_as_key = w.keyword == Keyword::KEY;
7524
7525                let name = match self.peek_token().token {
7526                    Token::Word(word) if word.keyword == Keyword::USING => None,
7527                    _ => self.parse_optional_indent()?,
7528                };
7529
7530                let index_type = self.parse_optional_using_then_index_type()?;
7531                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
7532
7533                Ok(Some(TableConstraint::Index {
7534                    display_as_key,
7535                    name,
7536                    index_type,
7537                    columns,
7538                }))
7539            }
7540            Token::Word(w)
7541                if (w.keyword == Keyword::FULLTEXT || w.keyword == Keyword::SPATIAL)
7542                    && dialect_of!(self is GenericDialect | MySqlDialect) =>
7543            {
7544                if let Some(name) = name {
7545                    return self.expected(
7546                        "FULLTEXT or SPATIAL option without constraint name",
7547                        TokenWithSpan {
7548                            token: Token::make_keyword(&name.to_string()),
7549                            span: next_token.span,
7550                        },
7551                    );
7552                }
7553
7554                let fulltext = w.keyword == Keyword::FULLTEXT;
7555
7556                let index_type_display = self.parse_index_type_display();
7557
7558                let opt_index_name = self.parse_optional_indent()?;
7559
7560                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
7561
7562                Ok(Some(TableConstraint::FulltextOrSpatial {
7563                    fulltext,
7564                    index_type_display,
7565                    opt_index_name,
7566                    columns,
7567                }))
7568            }
7569            _ => {
7570                if name.is_some() {
7571                    self.expected("PRIMARY, UNIQUE, FOREIGN, or CHECK", next_token)
7572                } else {
7573                    self.prev_token();
7574                    Ok(None)
7575                }
7576            }
7577        }
7578    }
7579
7580    fn parse_optional_nulls_distinct(&mut self) -> Result<NullsDistinctOption, ParserError> {
7581        Ok(if self.parse_keyword(Keyword::NULLS) {
7582            let not = self.parse_keyword(Keyword::NOT);
7583            self.expect_keyword_is(Keyword::DISTINCT)?;
7584            if not {
7585                NullsDistinctOption::NotDistinct
7586            } else {
7587                NullsDistinctOption::Distinct
7588            }
7589        } else {
7590            NullsDistinctOption::None
7591        })
7592    }
7593
7594    pub fn maybe_parse_options(
7595        &mut self,
7596        keyword: Keyword,
7597    ) -> Result<Option<Vec<SqlOption>>, ParserError> {
7598        if let Token::Word(word) = self.peek_token().token {
7599            if word.keyword == keyword {
7600                return Ok(Some(self.parse_options(keyword)?));
7601            }
7602        };
7603        Ok(None)
7604    }
7605
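    /// If the next keyword matches `keyword`, parse a parenthesized,
    /// comma-separated list of options, e.g. `WITH (fillfactor = 70)`;
    /// otherwise return an empty list.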
7606    pub fn parse_options(&mut self, keyword: Keyword) -> Result<Vec<SqlOption>, ParserError> {
7607        if self.parse_keyword(keyword) {
7608            self.expect_token(&Token::LParen)?;
7609            let options = self.parse_comma_separated0(Parser::parse_sql_option, Token::RParen)?;
7610            self.expect_token(&Token::RParen)?;
7611            Ok(options)
7612        } else {
7613            Ok(vec![])
7614        }
7615    }
7616
7617    pub fn parse_options_with_keywords(
7618        &mut self,
7619        keywords: &[Keyword],
7620    ) -> Result<Vec<SqlOption>, ParserError> {
7621        if self.parse_keywords(keywords) {
7622            self.expect_token(&Token::LParen)?;
7623            let options = self.parse_comma_separated(Parser::parse_sql_option)?;
7624            self.expect_token(&Token::RParen)?;
7625            Ok(options)
7626        } else {
7627            Ok(vec![])
7628        }
7629    }
7630
7631    pub fn parse_index_type(&mut self) -> Result<IndexType, ParserError> {
7632        if self.parse_keyword(Keyword::BTREE) {
7633            Ok(IndexType::BTree)
7634        } else if self.parse_keyword(Keyword::HASH) {
7635            Ok(IndexType::Hash)
7636        } else {
7637            self.expected("index type {BTREE | HASH}", self.peek_token())
7638        }
7639    }
7640
7641    /// Parse `[USING {BTREE | HASH}]`
7642    pub fn parse_optional_using_then_index_type(
7643        &mut self,
7644    ) -> Result<Option<IndexType>, ParserError> {
7645        if self.parse_keyword(Keyword::USING) {
7646            Ok(Some(self.parse_index_type()?))
7647        } else {
7648            Ok(None)
7649        }
7650    }
7651
7652    /// Parse an optional identifier, typically a name such as
7653    /// `window_name` or `index_name`.
7654    pub fn parse_optional_indent(&mut self) -> Result<Option<Ident>, ParserError> {
7655        self.maybe_parse(|parser| parser.parse_identifier())
7656    }
7657
7658    #[must_use]
7659    pub fn parse_index_type_display(&mut self) -> KeyOrIndexDisplay {
7660        if self.parse_keyword(Keyword::KEY) {
7661            KeyOrIndexDisplay::Key
7662        } else if self.parse_keyword(Keyword::INDEX) {
7663            KeyOrIndexDisplay::Index
7664        } else {
7665            KeyOrIndexDisplay::None
7666        }
7667    }
7668
7669    pub fn parse_optional_index_option(&mut self) -> Result<Option<IndexOption>, ParserError> {
7670        if let Some(index_type) = self.parse_optional_using_then_index_type()? {
7671            Ok(Some(IndexOption::Using(index_type)))
7672        } else if self.parse_keyword(Keyword::COMMENT) {
7673            let s = self.parse_literal_string()?;
7674            Ok(Some(IndexOption::Comment(s)))
7675        } else {
7676            Ok(None)
7677        }
7678    }
7679
7680    pub fn parse_index_options(&mut self) -> Result<Vec<IndexOption>, ParserError> {
7681        let mut options = Vec::new();
7682
7683        loop {
7684            match self.parse_optional_index_option()? {
7685                Some(index_option) => options.push(index_option),
7686                None => return Ok(options),
7687            }
7688        }
7689    }
7690
7691    pub fn parse_sql_option(&mut self) -> Result<SqlOption, ParserError> {
7692        let is_mssql = dialect_of!(self is MsSqlDialect|GenericDialect);
7693
7694        match self.peek_token().token {
7695            Token::Word(w) if w.keyword == Keyword::HEAP && is_mssql => {
7696                Ok(SqlOption::Ident(self.parse_identifier()?))
7697            }
7698            Token::Word(w) if w.keyword == Keyword::PARTITION && is_mssql => {
7699                self.parse_option_partition()
7700            }
7701            Token::Word(w) if w.keyword == Keyword::CLUSTERED && is_mssql => {
7702                self.parse_option_clustered()
7703            }
7704            _ => {
7705                let name = self.parse_identifier()?;
7706                self.expect_token(&Token::Eq)?;
7707                let value = self.parse_expr()?;
7708
7709                Ok(SqlOption::KeyValue { key: name, value })
7710            }
7711        }
7712    }
7713
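    /// Parse an MSSQL-style `CLUSTERED ...` table option, one of
    /// `CLUSTERED COLUMNSTORE INDEX`, `CLUSTERED COLUMNSTORE INDEX ORDER (a, b)`,
    /// or `CLUSTERED INDEX (a ASC, b DESC)`.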
7714    pub fn parse_option_clustered(&mut self) -> Result<SqlOption, ParserError> {
7715        if self.parse_keywords(&[
7716            Keyword::CLUSTERED,
7717            Keyword::COLUMNSTORE,
7718            Keyword::INDEX,
7719            Keyword::ORDER,
7720        ]) {
7721            Ok(SqlOption::Clustered(
7722                TableOptionsClustered::ColumnstoreIndexOrder(
7723                    self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
7724                ),
7725            ))
7726        } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::COLUMNSTORE, Keyword::INDEX]) {
7727            Ok(SqlOption::Clustered(
7728                TableOptionsClustered::ColumnstoreIndex,
7729            ))
7730        } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::INDEX]) {
7731            self.expect_token(&Token::LParen)?;
7732
7733            let columns = self.parse_comma_separated(|p| {
7734                let name = p.parse_identifier()?;
7735                let asc = p.parse_asc_desc();
7736
7737                Ok(ClusteredIndex { name, asc })
7738            })?;
7739
7740            self.expect_token(&Token::RParen)?;
7741
7742            Ok(SqlOption::Clustered(TableOptionsClustered::Index(columns)))
7743        } else {
7744            Err(ParserError::ParserError(
7745                "invalid CLUSTERED sequence".to_string(),
7746            ))
7747        }
7748    }
7749
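    /// Parse an MSSQL-style `PARTITION` table option, e.g.
    /// `PARTITION (order_date RANGE RIGHT FOR VALUES ('2023-01-01', '2024-01-01'))`.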
7750    pub fn parse_option_partition(&mut self) -> Result<SqlOption, ParserError> {
7751        self.expect_keyword_is(Keyword::PARTITION)?;
7752        self.expect_token(&Token::LParen)?;
7753        let column_name = self.parse_identifier()?;
7754
7755        self.expect_keyword_is(Keyword::RANGE)?;
7756        let range_direction = if self.parse_keyword(Keyword::LEFT) {
7757            Some(PartitionRangeDirection::Left)
7758        } else if self.parse_keyword(Keyword::RIGHT) {
7759            Some(PartitionRangeDirection::Right)
7760        } else {
7761            None
7762        };
7763
7764        self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;
7765        self.expect_token(&Token::LParen)?;
7766
7767        let for_values = self.parse_comma_separated(Parser::parse_expr)?;
7768
7769        self.expect_token(&Token::RParen)?;
7770        self.expect_token(&Token::RParen)?;
7771
7772        Ok(SqlOption::Partition {
7773            column_name,
7774            range_direction,
7775            for_values,
7776        })
7777    }
7778
7779    pub fn parse_partition(&mut self) -> Result<Partition, ParserError> {
7780        self.expect_token(&Token::LParen)?;
7781        let partitions = self.parse_comma_separated(Parser::parse_expr)?;
7782        self.expect_token(&Token::RParen)?;
7783        Ok(Partition::Partitions(partitions))
7784    }
7785
7786    pub fn parse_projection_select(&mut self) -> Result<ProjectionSelect, ParserError> {
7787        self.expect_token(&Token::LParen)?;
7788        self.expect_keyword_is(Keyword::SELECT)?;
7789        let projection = self.parse_projection()?;
7790        let group_by = self.parse_optional_group_by()?;
7791        let order_by = self.parse_optional_order_by()?;
7792        self.expect_token(&Token::RParen)?;
7793        Ok(ProjectionSelect {
7794            projection,
7795            group_by,
7796            order_by,
7797        })
7798    }

7799    pub fn parse_alter_table_add_projection(&mut self) -> Result<AlterTableOperation, ParserError> {
7800        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7801        let name = self.parse_identifier()?;
7802        let query = self.parse_projection_select()?;
7803        Ok(AlterTableOperation::AddProjection {
7804            if_not_exists,
7805            name,
7806            select: query,
7807        })
7808    }
7809
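    /// Parse a single `ALTER TABLE` operation (the `ALTER TABLE <name>` prefix
    /// has already been consumed by the caller).
    ///
    /// A minimal sketch of a full statement exercising this path, assuming the
    /// crate's public `Parser::parse_sql` entry point (illustrative only):
    ///
    /// ```rust,ignore
    /// let sql = "ALTER TABLE users ADD COLUMN IF NOT EXISTS score INT";
    /// let statements = Parser::parse_sql(&GenericDialect {}, sql).unwrap();
    /// assert_eq!(statements.len(), 1);
    /// ```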
7810    pub fn parse_alter_table_operation(&mut self) -> Result<AlterTableOperation, ParserError> {
7811        let operation = if self.parse_keyword(Keyword::ADD) {
7812            if let Some(constraint) = self.parse_optional_table_constraint()? {
7813                AlterTableOperation::AddConstraint(constraint)
7814            } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
7815                && self.parse_keyword(Keyword::PROJECTION)
7816            {
7817                return self.parse_alter_table_add_projection();
7818            } else {
7819                let if_not_exists =
7820                    self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7821                let mut new_partitions = vec![];
7822                loop {
7823                    if self.parse_keyword(Keyword::PARTITION) {
7824                        new_partitions.push(self.parse_partition()?);
7825                    } else {
7826                        break;
7827                    }
7828                }
7829                if !new_partitions.is_empty() {
7830                    AlterTableOperation::AddPartitions {
7831                        if_not_exists,
7832                        new_partitions,
7833                    }
7834                } else {
7835                    let column_keyword = self.parse_keyword(Keyword::COLUMN);
7836
7837                    let if_not_exists = if dialect_of!(self is PostgreSqlDialect | BigQueryDialect | DuckDbDialect | GenericDialect)
7838                    {
7839                        self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS])
7840                            || if_not_exists
7841                    } else {
7842                        false
7843                    };
7844
7845                    let column_def = self.parse_column_def()?;
7846
7847                    let column_position = self.parse_column_position()?;
7848
7849                    AlterTableOperation::AddColumn {
7850                        column_keyword,
7851                        if_not_exists,
7852                        column_def,
7853                        column_position,
7854                    }
7855                }
7856            }
7857        } else if self.parse_keyword(Keyword::RENAME) {
7858            if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::CONSTRAINT) {
7859                let old_name = self.parse_identifier()?;
7860                self.expect_keyword_is(Keyword::TO)?;
7861                let new_name = self.parse_identifier()?;
7862                AlterTableOperation::RenameConstraint { old_name, new_name }
7863            } else if self.parse_keyword(Keyword::TO) {
7864                let table_name = self.parse_object_name(false)?;
7865                AlterTableOperation::RenameTable { table_name }
7866            } else {
7867                let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
7868                let old_column_name = self.parse_identifier()?;
7869                self.expect_keyword_is(Keyword::TO)?;
7870                let new_column_name = self.parse_identifier()?;
7871                AlterTableOperation::RenameColumn {
7872                    old_column_name,
7873                    new_column_name,
7874                }
7875            }
7876        } else if self.parse_keyword(Keyword::DISABLE) {
7877            if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
7878                AlterTableOperation::DisableRowLevelSecurity {}
7879            } else if self.parse_keyword(Keyword::RULE) {
7880                let name = self.parse_identifier()?;
7881                AlterTableOperation::DisableRule { name }
7882            } else if self.parse_keyword(Keyword::TRIGGER) {
7883                let name = self.parse_identifier()?;
7884                AlterTableOperation::DisableTrigger { name }
7885            } else {
7886                return self.expected(
7887                    "ROW LEVEL SECURITY, RULE, or TRIGGER after DISABLE",
7888                    self.peek_token(),
7889                );
7890            }
7891        } else if self.parse_keyword(Keyword::ENABLE) {
7892            if self.parse_keywords(&[Keyword::ALWAYS, Keyword::RULE]) {
7893                let name = self.parse_identifier()?;
7894                AlterTableOperation::EnableAlwaysRule { name }
7895            } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::TRIGGER]) {
7896                let name = self.parse_identifier()?;
7897                AlterTableOperation::EnableAlwaysTrigger { name }
7898            } else if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
7899                AlterTableOperation::EnableRowLevelSecurity {}
7900            } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::RULE]) {
7901                let name = self.parse_identifier()?;
7902                AlterTableOperation::EnableReplicaRule { name }
7903            } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::TRIGGER]) {
7904                let name = self.parse_identifier()?;
7905                AlterTableOperation::EnableReplicaTrigger { name }
7906            } else if self.parse_keyword(Keyword::RULE) {
7907                let name = self.parse_identifier()?;
7908                AlterTableOperation::EnableRule { name }
7909            } else if self.parse_keyword(Keyword::TRIGGER) {
7910                let name = self.parse_identifier()?;
7911                AlterTableOperation::EnableTrigger { name }
7912            } else {
7913                return self.expected(
7914                    "ALWAYS, REPLICA, ROW LEVEL SECURITY, RULE, or TRIGGER after ENABLE",
7915                    self.peek_token(),
7916                );
7917            }
7918        } else if self.parse_keywords(&[Keyword::CLEAR, Keyword::PROJECTION])
7919            && dialect_of!(self is ClickHouseDialect|GenericDialect)
7920        {
7921            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7922            let name = self.parse_identifier()?;
7923            let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
7924                Some(self.parse_identifier()?)
7925            } else {
7926                None
7927            };
7928            AlterTableOperation::ClearProjection {
7929                if_exists,
7930                name,
7931                partition,
7932            }
7933        } else if self.parse_keywords(&[Keyword::MATERIALIZE, Keyword::PROJECTION])
7934            && dialect_of!(self is ClickHouseDialect|GenericDialect)
7935        {
7936            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7937            let name = self.parse_identifier()?;
7938            let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
7939                Some(self.parse_identifier()?)
7940            } else {
7941                None
7942            };
7943            AlterTableOperation::MaterializeProjection {
7944                if_exists,
7945                name,
7946                partition,
7947            }
7948        } else if self.parse_keyword(Keyword::DROP) {
7949            if self.parse_keywords(&[Keyword::IF, Keyword::EXISTS, Keyword::PARTITION]) {
7950                self.expect_token(&Token::LParen)?;
7951                let partitions = self.parse_comma_separated(Parser::parse_expr)?;
7952                self.expect_token(&Token::RParen)?;
7953                AlterTableOperation::DropPartitions {
7954                    partitions,
7955                    if_exists: true,
7956                }
7957            } else if self.parse_keyword(Keyword::PARTITION) {
7958                self.expect_token(&Token::LParen)?;
7959                let partitions = self.parse_comma_separated(Parser::parse_expr)?;
7960                self.expect_token(&Token::RParen)?;
7961                AlterTableOperation::DropPartitions {
7962                    partitions,
7963                    if_exists: false,
7964                }
7965            } else if self.parse_keyword(Keyword::CONSTRAINT) {
7966                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7967                let name = self.parse_identifier()?;
7968                let drop_behavior = self.parse_optional_drop_behavior();
7969                AlterTableOperation::DropConstraint {
7970                    if_exists,
7971                    name,
7972                    drop_behavior,
7973                }
7974            } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY])
7975                && dialect_of!(self is MySqlDialect | GenericDialect)
7976            {
7977                AlterTableOperation::DropPrimaryKey
7978            } else if self.parse_keyword(Keyword::PROJECTION)
7979                && dialect_of!(self is ClickHouseDialect|GenericDialect)
7980            {
7981                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7982                let name = self.parse_identifier()?;
7983                AlterTableOperation::DropProjection { if_exists, name }
7984            } else if self.parse_keywords(&[Keyword::CLUSTERING, Keyword::KEY]) {
7985                AlterTableOperation::DropClusteringKey
7986            } else {
7987                let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
7988                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7989                let column_name = self.parse_identifier()?;
7990                let drop_behavior = self.parse_optional_drop_behavior();
7991                AlterTableOperation::DropColumn {
7992                    column_name,
7993                    if_exists,
7994                    drop_behavior,
7995                }
7996            }
7997        } else if self.parse_keyword(Keyword::PARTITION) {
7998            self.expect_token(&Token::LParen)?;
7999            let before = self.parse_comma_separated(Parser::parse_expr)?;
8000            self.expect_token(&Token::RParen)?;
8001            self.expect_keyword_is(Keyword::RENAME)?;
8002            self.expect_keywords(&[Keyword::TO, Keyword::PARTITION])?;
8003            self.expect_token(&Token::LParen)?;
8004            let renames = self.parse_comma_separated(Parser::parse_expr)?;
8005            self.expect_token(&Token::RParen)?;
8006            AlterTableOperation::RenamePartitions {
8007                old_partitions: before,
8008                new_partitions: renames,
8009            }
8010        } else if self.parse_keyword(Keyword::CHANGE) {
8011            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
8012            let old_name = self.parse_identifier()?;
8013            let new_name = self.parse_identifier()?;
8014            let data_type = self.parse_data_type()?;
8015            let mut options = vec![];
8016            while let Some(option) = self.parse_optional_column_option()? {
8017                options.push(option);
8018            }
8019
8020            let column_position = self.parse_column_position()?;
8021
8022            AlterTableOperation::ChangeColumn {
8023                old_name,
8024                new_name,
8025                data_type,
8026                options,
8027                column_position,
8028            }
8029        } else if self.parse_keyword(Keyword::MODIFY) {
8030            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
8031            let col_name = self.parse_identifier()?;
8032            let data_type = self.parse_data_type()?;
8033            let mut options = vec![];
8034            while let Some(option) = self.parse_optional_column_option()? {
8035                options.push(option);
8036            }
8037
8038            let column_position = self.parse_column_position()?;
8039
8040            AlterTableOperation::ModifyColumn {
8041                col_name,
8042                data_type,
8043                options,
8044                column_position,
8045            }
8046        } else if self.parse_keyword(Keyword::ALTER) {
8047            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
8048            let column_name = self.parse_identifier()?;
8049            let is_postgresql = dialect_of!(self is PostgreSqlDialect);
8050
8051            let op: AlterColumnOperation = if self.parse_keywords(&[
8052                Keyword::SET,
8053                Keyword::NOT,
8054                Keyword::NULL,
8055            ]) {
8056                AlterColumnOperation::SetNotNull {}
8057            } else if self.parse_keywords(&[Keyword::DROP, Keyword::NOT, Keyword::NULL]) {
8058                AlterColumnOperation::DropNotNull {}
8059            } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
8060                AlterColumnOperation::SetDefault {
8061                    value: self.parse_expr()?,
8062                }
8063            } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
8064                AlterColumnOperation::DropDefault {}
8065            } else if self.parse_keywords(&[Keyword::SET, Keyword::DATA, Keyword::TYPE])
8066                || (is_postgresql && self.parse_keyword(Keyword::TYPE))
8067            {
8068                let data_type = self.parse_data_type()?;
8069                let using = if is_postgresql && self.parse_keyword(Keyword::USING) {
8070                    Some(self.parse_expr()?)
8071                } else {
8072                    None
8073                };
8074                AlterColumnOperation::SetDataType { data_type, using }
8075            } else if self.parse_keywords(&[Keyword::ADD, Keyword::GENERATED]) {
8076                let generated_as = if self.parse_keyword(Keyword::ALWAYS) {
8077                    Some(GeneratedAs::Always)
8078                } else if self.parse_keywords(&[Keyword::BY, Keyword::DEFAULT]) {
8079                    Some(GeneratedAs::ByDefault)
8080                } else {
8081                    None
8082                };
8083
8084                self.expect_keywords(&[Keyword::AS, Keyword::IDENTITY])?;
8085
8086                let mut sequence_options: Option<Vec<SequenceOptions>> = None;
8087
8088                if self.peek_token().token == Token::LParen {
8089                    self.expect_token(&Token::LParen)?;
8090                    sequence_options = Some(self.parse_create_sequence_options()?);
8091                    self.expect_token(&Token::RParen)?;
8092                }
8093
8094                AlterColumnOperation::AddGenerated {
8095                    generated_as,
8096                    sequence_options,
8097                }
8098            } else {
8099                let message = if is_postgresql {
8100                    "SET/DROP NOT NULL, SET DEFAULT, SET DATA TYPE, or ADD GENERATED after ALTER COLUMN"
8101                } else {
8102                    "SET/DROP NOT NULL, SET DEFAULT, or SET DATA TYPE after ALTER COLUMN"
8103                };
8104
8105                return self.expected(message, self.peek_token());
8106            };
8107            AlterTableOperation::AlterColumn { column_name, op }
8108        } else if self.parse_keyword(Keyword::SWAP) {
8109            self.expect_keyword_is(Keyword::WITH)?;
8110            let table_name = self.parse_object_name(false)?;
8111            AlterTableOperation::SwapWith { table_name }
8112        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect)
8113            && self.parse_keywords(&[Keyword::OWNER, Keyword::TO])
8114        {
8115            let new_owner = self.parse_owner()?;
8116            AlterTableOperation::OwnerTo { new_owner }
8117        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
8118            && self.parse_keyword(Keyword::ATTACH)
8119        {
8120            AlterTableOperation::AttachPartition {
8121                partition: self.parse_part_or_partition()?,
8122            }
8123        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
8124            && self.parse_keyword(Keyword::DETACH)
8125        {
8126            AlterTableOperation::DetachPartition {
8127                partition: self.parse_part_or_partition()?,
8128            }
8129        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
8130            && self.parse_keyword(Keyword::FREEZE)
8131        {
8132            let partition = self.parse_part_or_partition()?;
8133            let with_name = if self.parse_keyword(Keyword::WITH) {
8134                self.expect_keyword_is(Keyword::NAME)?;
8135                Some(self.parse_identifier()?)
8136            } else {
8137                None
8138            };
8139            AlterTableOperation::FreezePartition {
8140                partition,
8141                with_name,
8142            }
8143        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
8144            && self.parse_keyword(Keyword::UNFREEZE)
8145        {
8146            let partition = self.parse_part_or_partition()?;
8147            let with_name = if self.parse_keyword(Keyword::WITH) {
8148                self.expect_keyword_is(Keyword::NAME)?;
8149                Some(self.parse_identifier()?)
8150            } else {
8151                None
8152            };
8153            AlterTableOperation::UnfreezePartition {
8154                partition,
8155                with_name,
8156            }
8157        } else if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
8158            self.expect_token(&Token::LParen)?;
8159            let exprs = self.parse_comma_separated(|parser| parser.parse_expr())?;
8160            self.expect_token(&Token::RParen)?;
8161            AlterTableOperation::ClusterBy { exprs }
8162        } else if self.parse_keywords(&[Keyword::SUSPEND, Keyword::RECLUSTER]) {
8163            AlterTableOperation::SuspendRecluster
8164        } else if self.parse_keywords(&[Keyword::RESUME, Keyword::RECLUSTER]) {
8165            AlterTableOperation::ResumeRecluster
8166        } else if self.parse_keyword(Keyword::ALGORITHM) {
8167            let equals = self.consume_token(&Token::Eq);
8168            let algorithm = match self.parse_one_of_keywords(&[
8169                Keyword::DEFAULT,
8170                Keyword::INSTANT,
8171                Keyword::INPLACE,
8172                Keyword::COPY,
8173            ]) {
8174                Some(Keyword::DEFAULT) => AlterTableAlgorithm::Default,
8175                Some(Keyword::INSTANT) => AlterTableAlgorithm::Instant,
8176                Some(Keyword::INPLACE) => AlterTableAlgorithm::Inplace,
8177                Some(Keyword::COPY) => AlterTableAlgorithm::Copy,
8178                _ => self.expected(
8179                    "DEFAULT, INSTANT, INPLACE, or COPY after ALGORITHM [=]",
8180                    self.peek_token(),
8181                )?,
8182            };
8183            AlterTableOperation::Algorithm { equals, algorithm }
8184        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
8185            let equals = self.consume_token(&Token::Eq);
8186            let value = self.parse_number_value()?;
8187            AlterTableOperation::AutoIncrement { equals, value }
8188        } else {
8189            let options: Vec<SqlOption> =
8190                self.parse_options_with_keywords(&[Keyword::SET, Keyword::TBLPROPERTIES])?;
8191            if !options.is_empty() {
8192                AlterTableOperation::SetTblProperties {
8193                    table_properties: options,
8194                }
8195            } else {
8196                return self.expected(
8197                    "ADD, RENAME, PARTITION, SWAP, DROP, or SET TBLPROPERTIES after ALTER TABLE",
8198                    self.peek_token(),
8199                );
8200            }
8201        };
8202        Ok(operation)
8203    }
8204
8205    fn parse_part_or_partition(&mut self) -> Result<Partition, ParserError> {
8206        let keyword = self.expect_one_of_keywords(&[Keyword::PART, Keyword::PARTITION])?;
8207        match keyword {
8208            Keyword::PART => Ok(Partition::Part(self.parse_expr()?)),
8209            Keyword::PARTITION => Ok(Partition::Expr(self.parse_expr()?)),
8210            // unreachable because expect_one_of_keywords used above
8211            _ => unreachable!(),
8212        }
8213    }
8214
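    /// Parse an `ALTER` statement: `ALTER TABLE`, `ALTER VIEW`, `ALTER TYPE`,
    /// `ALTER INDEX`, `ALTER ROLE`, `ALTER POLICY`, or `ALTER CONNECTOR`.
    ///
    /// # Example
    ///
    /// A minimal usage sketch with illustrative SQL (assuming the crate's
    /// `GenericDialect` and the top-level `Parser::parse_sql` entry point):
    ///
    /// ```
    /// # use sqlparser::ast::Statement;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let sql = "ALTER TABLE tab DROP COLUMN IF EXISTS col1";
    /// let statements = Parser::parse_sql(&GenericDialect {}, sql).unwrap();
    /// assert!(matches!(statements[0], Statement::AlterTable { .. }));
    /// ```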
8215    pub fn parse_alter(&mut self) -> Result<Statement, ParserError> {
8216        let object_type = self.expect_one_of_keywords(&[
8217            Keyword::VIEW,
8218            Keyword::TYPE,
8219            Keyword::TABLE,
8220            Keyword::INDEX,
8221            Keyword::ROLE,
8222            Keyword::POLICY,
8223            Keyword::CONNECTOR,
8224        ])?;
8225        match object_type {
8226            Keyword::VIEW => self.parse_alter_view(),
8227            Keyword::TYPE => self.parse_alter_type(),
8228            Keyword::TABLE => {
8229                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8230                let only = self.parse_keyword(Keyword::ONLY); // [ ONLY ]
8231                let table_name = self.parse_object_name(false)?;
8232                let on_cluster = self.parse_optional_on_cluster()?;
8233                let operations = self.parse_comma_separated(Parser::parse_alter_table_operation)?;
8234
8235                let mut location = None;
8236                if self.parse_keyword(Keyword::LOCATION) {
8237                    location = Some(HiveSetLocation {
8238                        has_set: false,
8239                        location: self.parse_identifier()?,
8240                    });
8241                } else if self.parse_keywords(&[Keyword::SET, Keyword::LOCATION]) {
8242                    location = Some(HiveSetLocation {
8243                        has_set: true,
8244                        location: self.parse_identifier()?,
8245                    });
8246                }
8247
8248                Ok(Statement::AlterTable {
8249                    name: table_name,
8250                    if_exists,
8251                    only,
8252                    operations,
8253                    location,
8254                    on_cluster,
8255                })
8256            }
8257            Keyword::INDEX => {
8258                let index_name = self.parse_object_name(false)?;
8259                let operation = if self.parse_keyword(Keyword::RENAME) {
8260                    if self.parse_keyword(Keyword::TO) {
8261                        let index_name = self.parse_object_name(false)?;
8262                        AlterIndexOperation::RenameIndex { index_name }
8263                    } else {
8264                        return self.expected("TO after RENAME", self.peek_token());
8265                    }
8266                } else {
8267                    return self.expected("RENAME after ALTER INDEX", self.peek_token());
8268                };
8269
8270                Ok(Statement::AlterIndex {
8271                    name: index_name,
8272                    operation,
8273                })
8274            }
8275            Keyword::ROLE => self.parse_alter_role(),
8276            Keyword::POLICY => self.parse_alter_policy(),
8277            Keyword::CONNECTOR => self.parse_alter_connector(),
8278            // unreachable because expect_one_of_keywords used above
8279            _ => unreachable!(),
8280        }
8281    }
8282
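    /// Parse an `ALTER VIEW` statement, e.g. `ALTER VIEW v AS SELECT 1`.
    ///
    /// # Example
    ///
    /// A minimal usage sketch (assuming `GenericDialect` and `Parser::parse_sql`):
    ///
    /// ```
    /// # use sqlparser::ast::Statement;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let sql = "ALTER VIEW v AS SELECT 1";
    /// let statements = Parser::parse_sql(&GenericDialect {}, sql).unwrap();
    /// assert!(matches!(statements[0], Statement::AlterView { .. }));
    /// ```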
8283    pub fn parse_alter_view(&mut self) -> Result<Statement, ParserError> {
8284        let name = self.parse_object_name(false)?;
8285        let columns = self.parse_parenthesized_column_list(Optional, false)?;
8286
8287        let with_options = self.parse_options(Keyword::WITH)?;
8288
8289        self.expect_keyword_is(Keyword::AS)?;
8290        let query = self.parse_query()?;
8291
8292        Ok(Statement::AlterView {
8293            name,
8294            columns,
8295            query,
8296            with_options,
8297        })
8298    }
8299
8300    /// Parse a [Statement::AlterType]
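    ///
    /// # Example
    ///
    /// A minimal usage sketch with illustrative SQL (assuming `GenericDialect`
    /// and `Parser::parse_sql`):
    ///
    /// ```
    /// # use sqlparser::ast::Statement;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let sql = "ALTER TYPE mood ADD VALUE IF NOT EXISTS 'curious' AFTER 'happy'";
    /// let statements = Parser::parse_sql(&GenericDialect {}, sql).unwrap();
    /// assert!(matches!(statements[0], Statement::AlterType(_)));
    /// ```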
8301    pub fn parse_alter_type(&mut self) -> Result<Statement, ParserError> {
8302        let name = self.parse_object_name(false)?;
8303
8304        if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
8305            let new_name = self.parse_identifier()?;
8306            Ok(Statement::AlterType(AlterType {
8307                name,
8308                operation: AlterTypeOperation::Rename(AlterTypeRename { new_name }),
8309            }))
8310        } else if self.parse_keywords(&[Keyword::ADD, Keyword::VALUE]) {
8311            let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
8312            let new_enum_value = self.parse_identifier()?;
8313            let position = if self.parse_keyword(Keyword::BEFORE) {
8314                Some(AlterTypeAddValuePosition::Before(self.parse_identifier()?))
8315            } else if self.parse_keyword(Keyword::AFTER) {
8316                Some(AlterTypeAddValuePosition::After(self.parse_identifier()?))
8317            } else {
8318                None
8319            };
8320
8321            Ok(Statement::AlterType(AlterType {
8322                name,
8323                operation: AlterTypeOperation::AddValue(AlterTypeAddValue {
8324                    if_not_exists,
8325                    value: new_enum_value,
8326                    position,
8327                }),
8328            }))
8329        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::VALUE]) {
8330            let existing_enum_value = self.parse_identifier()?;
8331            self.expect_keyword(Keyword::TO)?;
8332            let new_enum_value = self.parse_identifier()?;
8333
8334            Ok(Statement::AlterType(AlterType {
8335                name,
8336                operation: AlterTypeOperation::RenameValue(AlterTypeRenameValue {
8337                    from: existing_enum_value,
8338                    to: new_enum_value,
8339                }),
8340            }))
8341        } else {
8342            return self.expected_ref(
8343                "{RENAME TO | { RENAME | ADD } VALUE}",
8344                self.peek_token_ref(),
8345            );
8346        }
8347    }
8348
8349    /// Parse a `CALL procedure_name(arg1, arg2, ...)`
8350    /// or `CALL procedure_name` statement
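    ///
    /// # Example
    ///
    /// A minimal usage sketch with an illustrative procedure name (assuming
    /// `GenericDialect` and `Parser::parse_sql`):
    ///
    /// ```
    /// # use sqlparser::ast::Statement;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let sql = "CALL my_procedure(1, 'foo')";
    /// let statements = Parser::parse_sql(&GenericDialect {}, sql).unwrap();
    /// assert!(matches!(statements[0], Statement::Call(_)));
    /// ```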
8351    pub fn parse_call(&mut self) -> Result<Statement, ParserError> {
8352        let object_name = self.parse_object_name(false)?;
8353        if self.peek_token().token == Token::LParen {
8354            match self.parse_function(object_name)? {
8355                Expr::Function(f) => Ok(Statement::Call(f)),
8356                other => parser_err!(
8357                    format!("Expected a simple procedure call but found: {other}"),
8358                    self.peek_token().span.start
8359                ),
8360            }
8361        } else {
8362            Ok(Statement::Call(Function {
8363                name: object_name,
8364                uses_odbc_syntax: false,
8365                parameters: FunctionArguments::None,
8366                args: FunctionArguments::None,
8367                over: None,
8368                filter: None,
8369                null_treatment: None,
8370                within_group: vec![],
8371            }))
8372        }
8373    }
8374
8375    /// Parse a `COPY` statement
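    ///
    /// # Example
    ///
    /// A minimal usage sketch with illustrative SQL (assuming `GenericDialect`
    /// and `Parser::parse_sql`):
    ///
    /// ```
    /// # use sqlparser::ast::Statement;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let sql = "COPY users (id, name) TO STDOUT";
    /// let statements = Parser::parse_sql(&GenericDialect {}, sql).unwrap();
    /// assert!(matches!(statements[0], Statement::Copy { .. }));
    /// ```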
8376    pub fn parse_copy(&mut self) -> Result<Statement, ParserError> {
8377        let source;
8378        if self.consume_token(&Token::LParen) {
8379            source = CopySource::Query(self.parse_query()?);
8380            self.expect_token(&Token::RParen)?;
8381        } else {
8382            let table_name = self.parse_object_name(false)?;
8383            let columns = self.parse_parenthesized_column_list(Optional, false)?;
8384            source = CopySource::Table {
8385                table_name,
8386                columns,
8387            };
8388        }
8389        let to = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::TO]) {
8390            Some(Keyword::FROM) => false,
8391            Some(Keyword::TO) => true,
8392            _ => self.expected("FROM or TO", self.peek_token())?,
8393        };
8394        if !to {
8395            // Use a separate if statement to prevent Rust compiler from complaining about
8396            // "if statement in this position is unstable: https://github.com/rust-lang/rust/issues/53667"
8397            if let CopySource::Query(_) = source {
8398                return Err(ParserError::ParserError(
8399                    "COPY ... FROM does not support query as a source".to_string(),
8400                ));
8401            }
8402        }
8403        let target = if self.parse_keyword(Keyword::STDIN) {
8404            CopyTarget::Stdin
8405        } else if self.parse_keyword(Keyword::STDOUT) {
8406            CopyTarget::Stdout
8407        } else if self.parse_keyword(Keyword::PROGRAM) {
8408            CopyTarget::Program {
8409                command: self.parse_literal_string()?,
8410            }
8411        } else {
8412            CopyTarget::File {
8413                filename: self.parse_literal_string()?,
8414            }
8415        };
8416        let _ = self.parse_keyword(Keyword::WITH); // [ WITH ]
8417        let mut options = vec![];
8418        if self.consume_token(&Token::LParen) {
8419            options = self.parse_comma_separated(Parser::parse_copy_option)?;
8420            self.expect_token(&Token::RParen)?;
8421        }
8422        let mut legacy_options = vec![];
8423        while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
8424            legacy_options.push(opt);
8425        }
8426        let values = if let CopyTarget::Stdin = target {
8427            self.expect_token(&Token::SemiColon)?;
8428            self.parse_tsv()
8429        } else {
8430            vec![]
8431        };
8432        Ok(Statement::Copy {
8433            source,
8434            to,
8435            target,
8436            options,
8437            legacy_options,
8438            values,
8439        })
8440    }
8441
8442    pub fn parse_close(&mut self) -> Result<Statement, ParserError> {
8443        let cursor = if self.parse_keyword(Keyword::ALL) {
8444            CloseCursor::All
8445        } else {
8446            let name = self.parse_identifier()?;
8447
8448            CloseCursor::Specific { name }
8449        };
8450
8451        Ok(Statement::Close { cursor })
8452    }
8453
8454    fn parse_copy_option(&mut self) -> Result<CopyOption, ParserError> {
8455        let ret = match self.parse_one_of_keywords(&[
8456            Keyword::FORMAT,
8457            Keyword::FREEZE,
8458            Keyword::DELIMITER,
8459            Keyword::NULL,
8460            Keyword::HEADER,
8461            Keyword::QUOTE,
8462            Keyword::ESCAPE,
8463            Keyword::FORCE_QUOTE,
8464            Keyword::FORCE_NOT_NULL,
8465            Keyword::FORCE_NULL,
8466            Keyword::ENCODING,
8467        ]) {
8468            Some(Keyword::FORMAT) => CopyOption::Format(self.parse_identifier()?),
8469            Some(Keyword::FREEZE) => CopyOption::Freeze(!matches!(
8470                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
8471                Some(Keyword::FALSE)
8472            )),
8473            Some(Keyword::DELIMITER) => CopyOption::Delimiter(self.parse_literal_char()?),
8474            Some(Keyword::NULL) => CopyOption::Null(self.parse_literal_string()?),
8475            Some(Keyword::HEADER) => CopyOption::Header(!matches!(
8476                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
8477                Some(Keyword::FALSE)
8478            )),
8479            Some(Keyword::QUOTE) => CopyOption::Quote(self.parse_literal_char()?),
8480            Some(Keyword::ESCAPE) => CopyOption::Escape(self.parse_literal_char()?),
8481            Some(Keyword::FORCE_QUOTE) => {
8482                CopyOption::ForceQuote(self.parse_parenthesized_column_list(Mandatory, false)?)
8483            }
8484            Some(Keyword::FORCE_NOT_NULL) => {
8485                CopyOption::ForceNotNull(self.parse_parenthesized_column_list(Mandatory, false)?)
8486            }
8487            Some(Keyword::FORCE_NULL) => {
8488                CopyOption::ForceNull(self.parse_parenthesized_column_list(Mandatory, false)?)
8489            }
8490            Some(Keyword::ENCODING) => CopyOption::Encoding(self.parse_literal_string()?),
8491            _ => self.expected("option", self.peek_token())?,
8492        };
8493        Ok(ret)
8494    }
8495
8496    fn parse_copy_legacy_option(&mut self) -> Result<CopyLegacyOption, ParserError> {
8497        let ret = match self.parse_one_of_keywords(&[
8498            Keyword::BINARY,
8499            Keyword::DELIMITER,
8500            Keyword::NULL,
8501            Keyword::CSV,
8502        ]) {
8503            Some(Keyword::BINARY) => CopyLegacyOption::Binary,
8504            Some(Keyword::DELIMITER) => {
8505                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
8506                CopyLegacyOption::Delimiter(self.parse_literal_char()?)
8507            }
8508            Some(Keyword::NULL) => {
8509                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
8510                CopyLegacyOption::Null(self.parse_literal_string()?)
8511            }
8512            Some(Keyword::CSV) => CopyLegacyOption::Csv({
8513                let mut opts = vec![];
8514                while let Some(opt) =
8515                    self.maybe_parse(|parser| parser.parse_copy_legacy_csv_option())?
8516                {
8517                    opts.push(opt);
8518                }
8519                opts
8520            }),
8521            _ => self.expected("option", self.peek_token())?,
8522        };
8523        Ok(ret)
8524    }
8525
8526    fn parse_copy_legacy_csv_option(&mut self) -> Result<CopyLegacyCsvOption, ParserError> {
8527        let ret = match self.parse_one_of_keywords(&[
8528            Keyword::HEADER,
8529            Keyword::QUOTE,
8530            Keyword::ESCAPE,
8531            Keyword::FORCE,
8532        ]) {
8533            Some(Keyword::HEADER) => CopyLegacyCsvOption::Header,
8534            Some(Keyword::QUOTE) => {
8535                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
8536                CopyLegacyCsvOption::Quote(self.parse_literal_char()?)
8537            }
8538            Some(Keyword::ESCAPE) => {
8539                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
8540                CopyLegacyCsvOption::Escape(self.parse_literal_char()?)
8541            }
8542            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) => {
8543                CopyLegacyCsvOption::ForceNotNull(
8544                    self.parse_comma_separated(|p| p.parse_identifier())?,
8545                )
8546            }
8547            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::QUOTE]) => {
8548                CopyLegacyCsvOption::ForceQuote(
8549                    self.parse_comma_separated(|p| p.parse_identifier())?,
8550                )
8551            }
8552            _ => self.expected("csv option", self.peek_token())?,
8553        };
8554        Ok(ret)
8555    }
8556
8557    fn parse_literal_char(&mut self) -> Result<char, ParserError> {
8558        let s = self.parse_literal_string()?;
8559        if s.len() != 1 {
8560            let loc = self
8561                .tokens
8562                .get(self.index - 1)
8563                .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
8564            return parser_err!(format!("Expected a char, found {s:?}"), loc);
8565        }
8566        Ok(s.chars().next().unwrap())
8567    }
8568
8569    /// Parse tab-separated values in a
8570    /// COPY payload
8571    pub fn parse_tsv(&mut self) -> Vec<Option<String>> {
8572        self.parse_tab_value()
8573    }
8574
8575    pub fn parse_tab_value(&mut self) -> Vec<Option<String>> {
8576        let mut values = vec![];
8577        let mut content = String::from("");
8578        while let Some(t) = self.next_token_no_skip().map(|t| &t.token) {
8579            match t {
8580                Token::Whitespace(Whitespace::Tab) => {
8581                    values.push(Some(content.to_string()));
8582                    content.clear();
8583                }
8584                Token::Whitespace(Whitespace::Newline) => {
8585                    values.push(Some(content.to_string()));
8586                    content.clear();
8587                }
8588                Token::Backslash => {
8589                    if self.consume_token(&Token::Period) {
8590                        return values;
8591                    }
8592                    if let Token::Word(w) = self.next_token().token {
8593                        if w.value == "N" {
8594                            values.push(None);
8595                        }
8596                    }
8597                }
8598                _ => {
8599                    content.push_str(&t.to_string());
8600                }
8601            }
8602        }
8603        values
8604    }
8605
8606    /// Parse a literal value (numbers, strings, date/time, booleans)
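    ///
    /// # Example
    ///
    /// A minimal usage sketch (assuming `GenericDialect` and the
    /// `Parser::new(..).try_with_sql(..)` entry point):
    ///
    /// ```
    /// # use sqlparser::ast::Value;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("42").unwrap();
    /// let value = parser.parse_value().unwrap();
    /// assert!(matches!(value.value, Value::Number(_, _)));
    /// ```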
8607    pub fn parse_value(&mut self) -> Result<ValueWithSpan, ParserError> {
8608        let next_token = self.next_token();
8609        let span = next_token.span;
8610        let ok_value = |value: Value| Ok(value.with_span(span));
8611        match next_token.token {
8612            Token::Word(w) => match w.keyword {
8613                Keyword::TRUE if self.dialect.supports_boolean_literals() => {
8614                    ok_value(Value::Boolean(true))
8615                }
8616                Keyword::FALSE if self.dialect.supports_boolean_literals() => {
8617                    ok_value(Value::Boolean(false))
8618                }
8619                Keyword::NULL => ok_value(Value::Null),
8620                Keyword::NoKeyword if w.quote_style.is_some() => match w.quote_style {
8621                    Some('"') => ok_value(Value::DoubleQuotedString(w.value)),
8622                    Some('\'') => ok_value(Value::SingleQuotedString(w.value)),
8623                    _ => self.expected(
8624                        "a value",
8625                        TokenWithSpan {
8626                            token: Token::Word(w),
8627                            span,
8628                        },
8629                    )?,
8630                },
8631                _ => self.expected(
8632                    "a concrete value",
8633                    TokenWithSpan {
8634                        token: Token::Word(w),
8635                        span,
8636                    },
8637                ),
8638            },
8639            // The call to n.parse() returns a bigdecimal when the
8640            // bigdecimal feature is enabled, and is otherwise a no-op
8641            // (i.e., it returns the input string).
8642            Token::Number(n, l) => ok_value(Value::Number(Self::parse(n, span.start)?, l)),
8643            Token::SingleQuotedString(ref s) => ok_value(Value::SingleQuotedString(s.to_string())),
8644            Token::DoubleQuotedString(ref s) => ok_value(Value::DoubleQuotedString(s.to_string())),
8645            Token::TripleSingleQuotedString(ref s) => {
8646                ok_value(Value::TripleSingleQuotedString(s.to_string()))
8647            }
8648            Token::TripleDoubleQuotedString(ref s) => {
8649                ok_value(Value::TripleDoubleQuotedString(s.to_string()))
8650            }
8651            Token::DollarQuotedString(ref s) => ok_value(Value::DollarQuotedString(s.clone())),
8652            Token::SingleQuotedByteStringLiteral(ref s) => {
8653                ok_value(Value::SingleQuotedByteStringLiteral(s.clone()))
8654            }
8655            Token::DoubleQuotedByteStringLiteral(ref s) => {
8656                ok_value(Value::DoubleQuotedByteStringLiteral(s.clone()))
8657            }
8658            Token::TripleSingleQuotedByteStringLiteral(ref s) => {
8659                ok_value(Value::TripleSingleQuotedByteStringLiteral(s.clone()))
8660            }
8661            Token::TripleDoubleQuotedByteStringLiteral(ref s) => {
8662                ok_value(Value::TripleDoubleQuotedByteStringLiteral(s.clone()))
8663            }
8664            Token::SingleQuotedRawStringLiteral(ref s) => {
8665                ok_value(Value::SingleQuotedRawStringLiteral(s.clone()))
8666            }
8667            Token::DoubleQuotedRawStringLiteral(ref s) => {
8668                ok_value(Value::DoubleQuotedRawStringLiteral(s.clone()))
8669            }
8670            Token::TripleSingleQuotedRawStringLiteral(ref s) => {
8671                ok_value(Value::TripleSingleQuotedRawStringLiteral(s.clone()))
8672            }
8673            Token::TripleDoubleQuotedRawStringLiteral(ref s) => {
8674                ok_value(Value::TripleDoubleQuotedRawStringLiteral(s.clone()))
8675            }
8676            Token::NationalStringLiteral(ref s) => {
8677                ok_value(Value::NationalStringLiteral(s.to_string()))
8678            }
8679            Token::EscapedStringLiteral(ref s) => {
8680                ok_value(Value::EscapedStringLiteral(s.to_string()))
8681            }
8682            Token::UnicodeStringLiteral(ref s) => {
8683                ok_value(Value::UnicodeStringLiteral(s.to_string()))
8684            }
8685            Token::HexStringLiteral(ref s) => ok_value(Value::HexStringLiteral(s.to_string())),
8686            Token::Placeholder(ref s) => ok_value(Value::Placeholder(s.to_string())),
8687            tok @ Token::Colon | tok @ Token::AtSign => {
8688                // Not calling self.parse_identifier(false)? because only for placeholders do we want to accept numbers as identifiers.
8689                // This is because Snowflake allows numbers as placeholders.
8690                let next_token = self.next_token();
8691                let ident = match next_token.token {
8692                    Token::Word(w) => Ok(w.into_ident(next_token.span)),
8693                    Token::Number(w, false) => Ok(Ident::new(w)),
8694                    _ => self.expected("placeholder", next_token),
8695                }?;
8696                let placeholder = tok.to_string() + &ident.value;
8697                ok_value(Value::Placeholder(placeholder))
8698            }
8699            unexpected => self.expected(
8700                "a value",
8701                TokenWithSpan {
8702                    token: unexpected,
8703                    span,
8704                },
8705            ),
8706        }
8707    }
8708
8709    /// Parse an unsigned numeric literal
8710    pub fn parse_number_value(&mut self) -> Result<ValueWithSpan, ParserError> {
8711        let value_wrapper = self.parse_value()?;
8712        match &value_wrapper.value {
8713            Value::Number(_, _) => Ok(value_wrapper),
8714            Value::Placeholder(_) => Ok(value_wrapper),
8715            _ => {
8716                self.prev_token();
8717                self.expected("literal number", self.peek_token())
8718            }
8719        }
8720    }
8721
8722    /// Parse a numeric literal as an expression. Returns an [`Expr::UnaryOp`] if the number is signed,
8723    /// otherwise returns an [`Expr::Value`].
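    ///
    /// # Example
    ///
    /// A minimal usage sketch (assuming `GenericDialect` and
    /// `Parser::new(..).try_with_sql(..)`):
    ///
    /// ```
    /// # use sqlparser::ast::Expr;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("-10").unwrap();
    /// let expr = parser.parse_number().unwrap();
    /// assert!(matches!(expr, Expr::UnaryOp { .. }));
    /// ```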
8724    pub fn parse_number(&mut self) -> Result<Expr, ParserError> {
8725        let next_token = self.next_token();
8726        match next_token.token {
8727            Token::Plus => Ok(Expr::UnaryOp {
8728                op: UnaryOperator::Plus,
8729                expr: Box::new(Expr::Value(self.parse_number_value()?)),
8730            }),
8731            Token::Minus => Ok(Expr::UnaryOp {
8732                op: UnaryOperator::Minus,
8733                expr: Box::new(Expr::Value(self.parse_number_value()?)),
8734            }),
8735            _ => {
8736                self.prev_token();
8737                Ok(Expr::Value(self.parse_number_value()?))
8738            }
8739        }
8740    }
8741
8742    fn parse_introduced_string_value(&mut self) -> Result<Value, ParserError> {
8743        let next_token = self.next_token();
8744        let span = next_token.span;
8745        match next_token.token {
8746            Token::SingleQuotedString(ref s) => Ok(Value::SingleQuotedString(s.to_string())),
8747            Token::DoubleQuotedString(ref s) => Ok(Value::DoubleQuotedString(s.to_string())),
8748            Token::HexStringLiteral(ref s) => Ok(Value::HexStringLiteral(s.to_string())),
8749            unexpected => self.expected(
8750                "a string value",
8751                TokenWithSpan {
8752                    token: unexpected,
8753                    span,
8754                },
8755            ),
8756        }
8757    }
8758
8759    /// Parse an unsigned literal integer/long
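    ///
    /// # Example
    ///
    /// A small sketch (assuming `GenericDialect` and
    /// `Parser::new(..).try_with_sql(..)`):
    ///
    /// ```
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("42").unwrap();
    /// assert_eq!(parser.parse_literal_uint().unwrap(), 42);
    /// ```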
8760    pub fn parse_literal_uint(&mut self) -> Result<u64, ParserError> {
8761        let next_token = self.next_token();
8762        match next_token.token {
8763            Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start),
8764            _ => self.expected("literal int", next_token),
8765        }
8766    }
8767
8768    /// Parse the body of a `CREATE FUNCTION` specified as a string.
8769    /// e.g. `CREATE FUNCTION ... AS $$ body $$`.
8770    fn parse_create_function_body_string(&mut self) -> Result<Expr, ParserError> {
8771        let peek_token = self.peek_token();
8772        let span = peek_token.span;
8773        match peek_token.token {
8774            Token::DollarQuotedString(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
8775            {
8776                self.next_token();
8777                Ok(Expr::Value(Value::DollarQuotedString(s).with_span(span)))
8778            }
8779            _ => Ok(Expr::Value(
8780                Value::SingleQuotedString(self.parse_literal_string()?).with_span(span),
8781            )),
8782        }
8783    }
8784
8785    /// Parse a literal string
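    ///
    /// # Example
    ///
    /// A small sketch (assuming `GenericDialect` and
    /// `Parser::new(..).try_with_sql(..)`):
    ///
    /// ```
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("'hello'").unwrap();
    /// assert_eq!(parser.parse_literal_string().unwrap(), "hello");
    /// ```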
8786    pub fn parse_literal_string(&mut self) -> Result<String, ParserError> {
8787        let next_token = self.next_token();
8788        match next_token.token {
8789            Token::Word(Word {
8790                value,
8791                keyword: Keyword::NoKeyword,
8792                ..
8793            }) => Ok(value),
8794            Token::SingleQuotedString(s) => Ok(s),
8795            Token::DoubleQuotedString(s) => Ok(s),
8796            Token::EscapedStringLiteral(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
8797                Ok(s)
8798            }
8799            Token::UnicodeStringLiteral(s) => Ok(s),
8800            _ => self.expected("literal string", next_token),
8801        }
8802    }
8803
8804    /// Parse a Unicode normalization clause: `IS [NOT] [NFC | NFD | NFKC | NFKD] NORMALIZED`
8805    pub fn parse_unicode_is_normalized(&mut self, expr: Expr) -> Result<Expr, ParserError> {
8806        let neg = self.parse_keyword(Keyword::NOT);
8807        let normalized_form = self.maybe_parse(|parser| {
8808            match parser.parse_one_of_keywords(&[
8809                Keyword::NFC,
8810                Keyword::NFD,
8811                Keyword::NFKC,
8812                Keyword::NFKD,
8813            ]) {
8814                Some(Keyword::NFC) => Ok(NormalizationForm::NFC),
8815                Some(Keyword::NFD) => Ok(NormalizationForm::NFD),
8816                Some(Keyword::NFKC) => Ok(NormalizationForm::NFKC),
8817                Some(Keyword::NFKD) => Ok(NormalizationForm::NFKD),
8818                _ => parser.expected("unicode normalization form", parser.peek_token()),
8819            }
8820        })?;
8821        if self.parse_keyword(Keyword::NORMALIZED) {
8822            return Ok(Expr::IsNormalized {
8823                expr: Box::new(expr),
8824                form: normalized_form,
8825                negated: neg,
8826            });
8827        }
8828        self.expected("unicode normalization form", self.peek_token())
8829    }
8830
8831    pub fn parse_enum_values(&mut self) -> Result<Vec<EnumMember>, ParserError> {
8832        self.expect_token(&Token::LParen)?;
8833        let values = self.parse_comma_separated(|parser| {
8834            let name = parser.parse_literal_string()?;
8835            let e = if parser.consume_token(&Token::Eq) {
8836                let value = parser.parse_number()?;
8837                EnumMember::NamedValue(name, value)
8838            } else {
8839                EnumMember::Name(name)
8840            };
8841            Ok(e)
8842        })?;
8843        self.expect_token(&Token::RParen)?;
8844
8845        Ok(values)
8846    }
8847
8848    /// Parse a SQL datatype (in the context of a CREATE TABLE statement for example)
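    ///
    /// # Example
    ///
    /// A minimal usage sketch (assuming `GenericDialect` and
    /// `Parser::new(..).try_with_sql(..)`):
    ///
    /// ```
    /// # use sqlparser::ast::DataType;
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("NUMERIC(10, 2)").unwrap();
    /// let data_type = parser.parse_data_type().unwrap();
    /// assert!(matches!(data_type, DataType::Numeric(_)));
    /// ```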
8849    pub fn parse_data_type(&mut self) -> Result<DataType, ParserError> {
8850        let (ty, trailing_bracket) = self.parse_data_type_helper()?;
8851        if trailing_bracket.0 {
8852            return parser_err!(
8853                format!("unmatched > after parsing data type {ty}"),
8854                self.peek_token()
8855            );
8856        }
8857
8858        Ok(ty)
8859    }
8860
8861    fn parse_data_type_helper(
8862        &mut self,
8863    ) -> Result<(DataType, MatchedTrailingBracket), ParserError> {
8864        let dialect = self.dialect;
8865        self.advance_token();
8866        let next_token = self.get_current_token();
8867        let next_token_index = self.get_current_index();
8868
8869        let mut trailing_bracket: MatchedTrailingBracket = false.into();
8870        let mut data = match &next_token.token {
8871            Token::Word(w) => match w.keyword {
8872                Keyword::BOOLEAN => Ok(DataType::Boolean),
8873                Keyword::BOOL => Ok(DataType::Bool),
8874                Keyword::FLOAT => Ok(DataType::Float(self.parse_optional_precision()?)),
8875                Keyword::REAL => Ok(DataType::Real),
8876                Keyword::FLOAT4 => Ok(DataType::Float4),
8877                Keyword::FLOAT32 => Ok(DataType::Float32),
8878                Keyword::FLOAT64 => Ok(DataType::Float64),
8879                Keyword::FLOAT8 => Ok(DataType::Float8),
8880                Keyword::DOUBLE => {
8881                    if self.parse_keyword(Keyword::PRECISION) {
8882                        Ok(DataType::DoublePrecision)
8883                    } else {
8884                        Ok(DataType::Double(
8885                            self.parse_exact_number_optional_precision_scale()?,
8886                        ))
8887                    }
8888                }
8889                Keyword::TINYINT => {
8890                    let optional_precision = self.parse_optional_precision();
8891                    if self.parse_keyword(Keyword::UNSIGNED) {
8892                        Ok(DataType::TinyIntUnsigned(optional_precision?))
8893                    } else {
8894                        Ok(DataType::TinyInt(optional_precision?))
8895                    }
8896                }
8897                Keyword::INT2 => {
8898                    let optional_precision = self.parse_optional_precision();
8899                    if self.parse_keyword(Keyword::UNSIGNED) {
8900                        Ok(DataType::Int2Unsigned(optional_precision?))
8901                    } else {
8902                        Ok(DataType::Int2(optional_precision?))
8903                    }
8904                }
8905                Keyword::SMALLINT => {
8906                    let optional_precision = self.parse_optional_precision();
8907                    if self.parse_keyword(Keyword::UNSIGNED) {
8908                        Ok(DataType::SmallIntUnsigned(optional_precision?))
8909                    } else {
8910                        Ok(DataType::SmallInt(optional_precision?))
8911                    }
8912                }
8913                Keyword::MEDIUMINT => {
8914                    let optional_precision = self.parse_optional_precision();
8915                    if self.parse_keyword(Keyword::UNSIGNED) {
8916                        Ok(DataType::MediumIntUnsigned(optional_precision?))
8917                    } else {
8918                        Ok(DataType::MediumInt(optional_precision?))
8919                    }
8920                }
8921                Keyword::INT => {
8922                    let optional_precision = self.parse_optional_precision();
8923                    if self.parse_keyword(Keyword::UNSIGNED) {
8924                        Ok(DataType::IntUnsigned(optional_precision?))
8925                    } else {
8926                        Ok(DataType::Int(optional_precision?))
8927                    }
8928                }
8929                Keyword::INT4 => {
8930                    let optional_precision = self.parse_optional_precision();
8931                    if self.parse_keyword(Keyword::UNSIGNED) {
8932                        Ok(DataType::Int4Unsigned(optional_precision?))
8933                    } else {
8934                        Ok(DataType::Int4(optional_precision?))
8935                    }
8936                }
8937                Keyword::INT8 => {
8938                    let optional_precision = self.parse_optional_precision();
8939                    if self.parse_keyword(Keyword::UNSIGNED) {
8940                        Ok(DataType::Int8Unsigned(optional_precision?))
8941                    } else {
8942                        Ok(DataType::Int8(optional_precision?))
8943                    }
8944                }
8945                Keyword::INT16 => Ok(DataType::Int16),
8946                Keyword::INT32 => Ok(DataType::Int32),
8947                Keyword::INT64 => Ok(DataType::Int64),
8948                Keyword::INT128 => Ok(DataType::Int128),
8949                Keyword::INT256 => Ok(DataType::Int256),
8950                Keyword::INTEGER => {
8951                    let optional_precision = self.parse_optional_precision();
8952                    if self.parse_keyword(Keyword::UNSIGNED) {
8953                        Ok(DataType::IntegerUnsigned(optional_precision?))
8954                    } else {
8955                        Ok(DataType::Integer(optional_precision?))
8956                    }
8957                }
8958                Keyword::BIGINT => {
8959                    let optional_precision = self.parse_optional_precision();
8960                    if self.parse_keyword(Keyword::UNSIGNED) {
8961                        Ok(DataType::BigIntUnsigned(optional_precision?))
8962                    } else {
8963                        Ok(DataType::BigInt(optional_precision?))
8964                    }
8965                }
8966                Keyword::UINT8 => Ok(DataType::UInt8),
8967                Keyword::UINT16 => Ok(DataType::UInt16),
8968                Keyword::UINT32 => Ok(DataType::UInt32),
8969                Keyword::UINT64 => Ok(DataType::UInt64),
8970                Keyword::UINT128 => Ok(DataType::UInt128),
8971                Keyword::UINT256 => Ok(DataType::UInt256),
8972                Keyword::VARCHAR => Ok(DataType::Varchar(self.parse_optional_character_length()?)),
8973                Keyword::NVARCHAR => {
8974                    Ok(DataType::Nvarchar(self.parse_optional_character_length()?))
8975                }
8976                Keyword::CHARACTER => {
8977                    if self.parse_keyword(Keyword::VARYING) {
8978                        Ok(DataType::CharacterVarying(
8979                            self.parse_optional_character_length()?,
8980                        ))
8981                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
8982                        Ok(DataType::CharacterLargeObject(
8983                            self.parse_optional_precision()?,
8984                        ))
8985                    } else {
8986                        Ok(DataType::Character(self.parse_optional_character_length()?))
8987                    }
8988                }
8989                Keyword::CHAR => {
8990                    if self.parse_keyword(Keyword::VARYING) {
8991                        Ok(DataType::CharVarying(
8992                            self.parse_optional_character_length()?,
8993                        ))
8994                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
8995                        Ok(DataType::CharLargeObject(self.parse_optional_precision()?))
8996                    } else {
8997                        Ok(DataType::Char(self.parse_optional_character_length()?))
8998                    }
8999                }
9000                Keyword::CLOB => Ok(DataType::Clob(self.parse_optional_precision()?)),
9001                Keyword::BINARY => Ok(DataType::Binary(self.parse_optional_precision()?)),
9002                Keyword::VARBINARY => Ok(DataType::Varbinary(self.parse_optional_binary_length()?)),
9003                Keyword::BLOB => Ok(DataType::Blob(self.parse_optional_precision()?)),
9004                Keyword::TINYBLOB => Ok(DataType::TinyBlob),
9005                Keyword::MEDIUMBLOB => Ok(DataType::MediumBlob),
9006                Keyword::LONGBLOB => Ok(DataType::LongBlob),
9007                Keyword::BYTES => Ok(DataType::Bytes(self.parse_optional_precision()?)),
9008                Keyword::BIT => {
9009                    if self.parse_keyword(Keyword::VARYING) {
9010                        Ok(DataType::BitVarying(self.parse_optional_precision()?))
9011                    } else {
9012                        Ok(DataType::Bit(self.parse_optional_precision()?))
9013                    }
9014                }
9015                Keyword::VARBIT => Ok(DataType::VarBit(self.parse_optional_precision()?)),
9016                Keyword::UUID => Ok(DataType::Uuid),
9017                Keyword::DATE => Ok(DataType::Date),
9018                Keyword::DATE32 => Ok(DataType::Date32),
9019                Keyword::DATETIME => Ok(DataType::Datetime(self.parse_optional_precision()?)),
9020                Keyword::DATETIME64 => {
9021                    self.prev_token();
9022                    let (precision, time_zone) = self.parse_datetime_64()?;
9023                    Ok(DataType::Datetime64(precision, time_zone))
9024                }
9025                Keyword::TIMESTAMP => {
9026                    let precision = self.parse_optional_precision()?;
9027                    let tz = if self.parse_keyword(Keyword::WITH) {
9028                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
9029                        TimezoneInfo::WithTimeZone
9030                    } else if self.parse_keyword(Keyword::WITHOUT) {
9031                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
9032                        TimezoneInfo::WithoutTimeZone
9033                    } else {
9034                        TimezoneInfo::None
9035                    };
9036                    Ok(DataType::Timestamp(precision, tz))
9037                }
9038                Keyword::TIMESTAMPTZ => Ok(DataType::Timestamp(
9039                    self.parse_optional_precision()?,
9040                    TimezoneInfo::Tz,
9041                )),
9042                Keyword::TIME => {
9043                    let precision = self.parse_optional_precision()?;
9044                    let tz = if self.parse_keyword(Keyword::WITH) {
9045                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
9046                        TimezoneInfo::WithTimeZone
9047                    } else if self.parse_keyword(Keyword::WITHOUT) {
9048                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
9049                        TimezoneInfo::WithoutTimeZone
9050                    } else {
9051                        TimezoneInfo::None
9052                    };
9053                    Ok(DataType::Time(precision, tz))
9054                }
9055                Keyword::TIMETZ => Ok(DataType::Time(
9056                    self.parse_optional_precision()?,
9057                    TimezoneInfo::Tz,
9058                )),
9059                // Interval types can be followed by a complicated interval
9060                // qualifier that we don't currently support. See
9061                // parse_interval for a taste.
9062                Keyword::INTERVAL => Ok(DataType::Interval),
9063                Keyword::JSON => Ok(DataType::JSON),
9064                Keyword::JSONB => Ok(DataType::JSONB),
9065                Keyword::REGCLASS => Ok(DataType::Regclass),
9066                Keyword::STRING => Ok(DataType::String(self.parse_optional_precision()?)),
9067                Keyword::FIXEDSTRING => {
9068                    self.expect_token(&Token::LParen)?;
9069                    let character_length = self.parse_literal_uint()?;
9070                    self.expect_token(&Token::RParen)?;
9071                    Ok(DataType::FixedString(character_length))
9072                }
9073                Keyword::TEXT => Ok(DataType::Text),
9074                Keyword::TINYTEXT => Ok(DataType::TinyText),
9075                Keyword::MEDIUMTEXT => Ok(DataType::MediumText),
9076                Keyword::LONGTEXT => Ok(DataType::LongText),
9077                Keyword::BYTEA => Ok(DataType::Bytea),
9078                Keyword::NUMERIC => Ok(DataType::Numeric(
9079                    self.parse_exact_number_optional_precision_scale()?,
9080                )),
9081                Keyword::DECIMAL => Ok(DataType::Decimal(
9082                    self.parse_exact_number_optional_precision_scale()?,
9083                )),
9084                Keyword::DEC => Ok(DataType::Dec(
9085                    self.parse_exact_number_optional_precision_scale()?,
9086                )),
9087                Keyword::BIGNUMERIC => Ok(DataType::BigNumeric(
9088                    self.parse_exact_number_optional_precision_scale()?,
9089                )),
9090                Keyword::BIGDECIMAL => Ok(DataType::BigDecimal(
9091                    self.parse_exact_number_optional_precision_scale()?,
9092                )),
9093                Keyword::ENUM => Ok(DataType::Enum(self.parse_enum_values()?, None)),
9094                Keyword::ENUM8 => Ok(DataType::Enum(self.parse_enum_values()?, Some(8))),
9095                Keyword::ENUM16 => Ok(DataType::Enum(self.parse_enum_values()?, Some(16))),
9096                Keyword::SET => Ok(DataType::Set(self.parse_string_values()?)),
9097                Keyword::ARRAY => {
9098                    if dialect_of!(self is SnowflakeDialect) {
9099                        Ok(DataType::Array(ArrayElemTypeDef::None))
9100                    } else if dialect_of!(self is ClickHouseDialect) {
9101                        Ok(self.parse_sub_type(|internal_type| {
9102                            DataType::Array(ArrayElemTypeDef::Parenthesis(internal_type))
9103                        })?)
9104                    } else {
9105                        self.expect_token(&Token::Lt)?;
9106                        let (inside_type, _trailing_bracket) = self.parse_data_type_helper()?;
9107                        trailing_bracket = self.expect_closing_angle_bracket(_trailing_bracket)?;
9108                        Ok(DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
9109                            inside_type,
9110                        ))))
9111                    }
9112                }
9113                Keyword::STRUCT if dialect_is!(dialect is DuckDbDialect) => {
9114                    self.prev_token();
9115                    let field_defs = self.parse_duckdb_struct_type_def()?;
9116                    Ok(DataType::Struct(field_defs, StructBracketKind::Parentheses))
9117                }
9118                Keyword::STRUCT if dialect_is!(dialect is BigQueryDialect | GenericDialect) => {
9119                    self.prev_token();
9120                    let (field_defs, _trailing_bracket) =
9121                        self.parse_struct_type_def(Self::parse_struct_field_def)?;
9122                    trailing_bracket = _trailing_bracket;
9123                    Ok(DataType::Struct(
9124                        field_defs,
9125                        StructBracketKind::AngleBrackets,
9126                    ))
9127                }
9128                Keyword::UNION if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
9129                    self.prev_token();
9130                    let fields = self.parse_union_type_def()?;
9131                    Ok(DataType::Union(fields))
9132                }
9133                Keyword::NULLABLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
9134                    Ok(self.parse_sub_type(DataType::Nullable)?)
9135                }
9136                Keyword::LOWCARDINALITY if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
9137                    Ok(self.parse_sub_type(DataType::LowCardinality)?)
9138                }
9139                Keyword::MAP if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
9140                    self.prev_token();
9141                    let (key_data_type, value_data_type) = self.parse_click_house_map_def()?;
9142                    Ok(DataType::Map(
9143                        Box::new(key_data_type),
9144                        Box::new(value_data_type),
9145                    ))
9146                }
9147                Keyword::NESTED if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
9148                    self.expect_token(&Token::LParen)?;
9149                    let field_defs = self.parse_comma_separated(Parser::parse_column_def)?;
9150                    self.expect_token(&Token::RParen)?;
9151                    Ok(DataType::Nested(field_defs))
9152                }
9153                Keyword::TUPLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
9154                    self.prev_token();
9155                    let field_defs = self.parse_click_house_tuple_def()?;
9156                    Ok(DataType::Tuple(field_defs))
9157                }
9158                Keyword::TRIGGER => Ok(DataType::Trigger),
9159                Keyword::ANY if self.peek_keyword(Keyword::TYPE) => {
9160                    let _ = self.parse_keyword(Keyword::TYPE);
9161                    Ok(DataType::AnyType)
9162                }
9163                Keyword::TABLE => {
9164                    let columns = self.parse_returns_table_columns()?;
9165                    Ok(DataType::Table(columns))
9166                }
9167                Keyword::SIGNED => {
9168                    if self.parse_keyword(Keyword::INTEGER) {
9169                        Ok(DataType::SignedInteger)
9170                    } else {
9171                        Ok(DataType::Signed)
9172                    }
9173                }
9174                Keyword::UNSIGNED => {
9175                    if self.parse_keyword(Keyword::INTEGER) {
9176                        Ok(DataType::UnsignedInteger)
9177                    } else {
9178                        Ok(DataType::Unsigned)
9179                    }
9180                }
9181                _ => {
9182                    self.prev_token();
9183                    let type_name = self.parse_object_name(false)?;
9184                    if let Some(modifiers) = self.parse_optional_type_modifiers()? {
9185                        Ok(DataType::Custom(type_name, modifiers))
9186                    } else {
9187                        Ok(DataType::Custom(type_name, vec![]))
9188                    }
9189                }
9190            },
9191            _ => self.expected_at("a data type name", next_token_index),
9192        }?;
9193
9194        if self.dialect.supports_array_typedef_with_brackets() {
9195            while self.consume_token(&Token::LBracket) {
9196                // Parse optional array data type size
9197                let size = self.maybe_parse(|p| p.parse_literal_uint())?;
9198                self.expect_token(&Token::RBracket)?;
9199                data = DataType::Array(ArrayElemTypeDef::SquareBracket(Box::new(data), size))
9200            }
9201        }
9202        Ok((data, trailing_bracket))
9203    }
9204
9205    fn parse_returns_table_column(&mut self) -> Result<ColumnDef, ParserError> {
9206        let name = self.parse_identifier()?;
9207        let data_type = self.parse_data_type()?;
9208        Ok(ColumnDef {
9209            name,
9210            data_type,
9211            options: Vec::new(), // No constraints expected here
9212        })
9213    }
9214
9215    fn parse_returns_table_columns(&mut self) -> Result<Vec<ColumnDef>, ParserError> {
9216        self.expect_token(&Token::LParen)?;
9217        let columns = self.parse_comma_separated(Parser::parse_returns_table_column)?;
9218        self.expect_token(&Token::RParen)?;
9219        Ok(columns)
9220    }
9221
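    /// Parses a parenthesized, comma-separated list of single-quoted strings,
    /// e.g. `('a', 'b')`, returning the unquoted values.
    ///
    /// Example (an illustrative sketch using `GenericDialect`):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("('a', 'b')").unwrap();
    /// let values = parser.parse_string_values().unwrap();
    /// assert_eq!(values, vec!["a".to_string(), "b".to_string()]);
    /// ```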
9222    pub fn parse_string_values(&mut self) -> Result<Vec<String>, ParserError> {
9223        self.expect_token(&Token::LParen)?;
9224        let mut values = Vec::new();
9225        loop {
9226            let next_token = self.next_token();
9227            match next_token.token {
9228                Token::SingleQuotedString(value) => values.push(value),
9229                _ => self.expected("a string", next_token)?,
9230            }
9231            let next_token = self.next_token();
9232            match next_token.token {
9233                Token::Comma => (),
9234                Token::RParen => break,
9235                _ => self.expected(", or )", next_token)?,
9236            }
9237        }
9238        Ok(values)
9239    }
9240
9241    /// Strictly parse `identifier AS identifier`
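    ///
    /// Example (an illustrative sketch using `GenericDialect`):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("amount AS total").unwrap();
    /// let aliased = parser.parse_identifier_with_alias().unwrap();
    /// assert_eq!(aliased.ident.value, "amount");
    /// assert_eq!(aliased.alias.value, "total");
    /// ```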
9242    pub fn parse_identifier_with_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
9243        let ident = self.parse_identifier()?;
9244        self.expect_keyword_is(Keyword::AS)?;
9245        let alias = self.parse_identifier()?;
9246        Ok(IdentWithAlias { ident, alias })
9247    }
9248
9249    /// Optionally parses an alias for a select list item
9250    fn maybe_parse_select_item_alias(&mut self) -> Result<Option<Ident>, ParserError> {
9251        fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
9252            parser.dialect.is_select_item_alias(explicit, kw, parser)
9253        }
9254        self.parse_optional_alias_inner(None, validator)
9255    }
9256
9257    /// Optionally parses an alias for a table, as in `... FROM generate_series(1, 10) AS t (col)`.
9258    /// In this case, the alias may also name the columns of the table, in addition to
9259    /// the table itself.
9260    pub fn maybe_parse_table_alias(&mut self) -> Result<Option<TableAlias>, ParserError> {
9261        fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
9262            parser.dialect.is_table_factor_alias(explicit, kw, parser)
9263        }
9264        match self.parse_optional_alias_inner(None, validator)? {
9265            Some(name) => {
9266                let columns = self.parse_table_alias_column_defs()?;
9267                Ok(Some(TableAlias { name, columns }))
9268            }
9269            None => Ok(None),
9270        }
9271    }
9272
9273    fn parse_table_index_hints(&mut self) -> Result<Vec<TableIndexHints>, ParserError> {
9274        let mut hints = vec![];
9275        while let Some(hint_type) =
9276            self.parse_one_of_keywords(&[Keyword::USE, Keyword::IGNORE, Keyword::FORCE])
9277        {
9278            let hint_type = match hint_type {
9279                Keyword::USE => TableIndexHintType::Use,
9280                Keyword::IGNORE => TableIndexHintType::Ignore,
9281                Keyword::FORCE => TableIndexHintType::Force,
9282                _ => {
9283                    return self.expected(
9284                        "expected to match USE/IGNORE/FORCE keyword",
9285                        self.peek_token(),
9286                    )
9287                }
9288            };
9289            let index_type = match self.parse_one_of_keywords(&[Keyword::INDEX, Keyword::KEY]) {
9290                Some(Keyword::INDEX) => TableIndexType::Index,
9291                Some(Keyword::KEY) => TableIndexType::Key,
9292                _ => {
9293                    return self.expected("expected to match INDEX/KEY keyword", self.peek_token())
9294                }
9295            };
9296            let for_clause = if self.parse_keyword(Keyword::FOR) {
9297                let clause = if self.parse_keyword(Keyword::JOIN) {
9298                    TableIndexHintForClause::Join
9299                } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
9300                    TableIndexHintForClause::OrderBy
9301                } else if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
9302                    TableIndexHintForClause::GroupBy
9303                } else {
9304                    return self.expected(
9305                        "expected to match JOIN/ORDER BY/GROUP BY table hint in for clause",
9306                        self.peek_token(),
9307                    );
9308                };
9309                Some(clause)
9310            } else {
9311                None
9312            };
9313
9314            self.expect_token(&Token::LParen)?;
9315            let index_names = if self.peek_token().token != Token::RParen {
9316                self.parse_comma_separated(Parser::parse_identifier)?
9317            } else {
9318                vec![]
9319            };
9320            self.expect_token(&Token::RParen)?;
9321            hints.push(TableIndexHints {
9322                hint_type,
9323                index_type,
9324                for_clause,
9325                index_names,
9326            });
9327        }
9328        Ok(hints)
9329    }
9330
9331    /// Wrapper around `parse_optional_alias_inner`, kept for backwards compatibility;
9332    /// new code should use the context-specific methods such as `maybe_parse_select_item_alias`
9333    /// and `maybe_parse_table_alias`.
9334    pub fn parse_optional_alias(
9335        &mut self,
9336        reserved_kwds: &[Keyword],
9337    ) -> Result<Option<Ident>, ParserError> {
9338        fn validator(_explicit: bool, _kw: &Keyword, _parser: &mut Parser) -> bool {
9339            false
9340        }
9341        self.parse_optional_alias_inner(Some(reserved_kwds), validator)
9342    }
9343
9344    /// Parses an optional alias after a SQL element such as a select list item
9345    /// or a table name.
9346    ///
9347    /// This method accepts an optional list of reserved keywords and a validator
9348    /// function used to decide whether a keyword should be parsed as an alias,
9349    /// allowing callers to customize the parsing logic based on their context.
9350    fn parse_optional_alias_inner<F>(
9351        &mut self,
9352        reserved_kwds: Option<&[Keyword]>,
9353        validator: F,
9354    ) -> Result<Option<Ident>, ParserError>
9355    where
9356        F: Fn(bool, &Keyword, &mut Parser) -> bool,
9357    {
9358        let after_as = self.parse_keyword(Keyword::AS);
9359
9360        let next_token = self.next_token();
9361        match next_token.token {
9362            // By default, any word following the `AS` keyword is treated as an alias;
9363            // without `AS`, a word is an alias only if it is not in the provided reserved keyword list.
9364            Token::Word(w)
9365                if after_as || reserved_kwds.is_some_and(|x| !x.contains(&w.keyword)) =>
9366            {
9367                Ok(Some(w.into_ident(next_token.span)))
9368            }
9369            // This pattern allows customizing whether a word is accepted as an alias based on the caller's
9370            // context, such as which SQL element the word might alias (a select item, a table name,
9371            // etc.), or on dialect-specific logic that goes beyond a simple list of reserved keywords.
9372            Token::Word(w) if validator(after_as, &w.keyword, self) => {
9373                Ok(Some(w.into_ident(next_token.span)))
9374            }
9375            // For backwards-compatibility, we accept quoted strings as aliases regardless of the context.
9376            Token::SingleQuotedString(s) => Ok(Some(Ident::with_quote('\'', s))),
9377            Token::DoubleQuotedString(s) => Ok(Some(Ident::with_quote('\"', s))),
9378            _ => {
9379                if after_as {
9380                    return self.expected("an identifier after AS", next_token);
9381                }
9382                self.prev_token();
9383                Ok(None) // no alias found
9384            }
9385        }
9386    }
9387
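    /// Parses an optional `GROUP BY` clause, returning `None` if the next
    /// tokens do not start a `GROUP BY` clause.
    ///
    /// Example (an illustrative sketch using `GenericDialect`):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("GROUP BY a, b").unwrap();
    /// let group_by = parser.parse_optional_group_by().unwrap();
    /// assert!(group_by.is_some());
    /// ```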
9388    pub fn parse_optional_group_by(&mut self) -> Result<Option<GroupByExpr>, ParserError> {
9389        if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
9390            let expressions = if self.parse_keyword(Keyword::ALL) {
9391                None
9392            } else {
9393                Some(self.parse_comma_separated(Parser::parse_group_by_expr)?)
9394            };
9395
9396            let mut modifiers = vec![];
9397            if self.dialect.supports_group_by_with_modifier() {
9398                loop {
9399                    if !self.parse_keyword(Keyword::WITH) {
9400                        break;
9401                    }
9402                    let keyword = self.expect_one_of_keywords(&[
9403                        Keyword::ROLLUP,
9404                        Keyword::CUBE,
9405                        Keyword::TOTALS,
9406                    ])?;
9407                    modifiers.push(match keyword {
9408                        Keyword::ROLLUP => GroupByWithModifier::Rollup,
9409                        Keyword::CUBE => GroupByWithModifier::Cube,
9410                        Keyword::TOTALS => GroupByWithModifier::Totals,
9411                        _ => {
9412                            return parser_err!(
9413                                "BUG: expected to match GroupBy modifier keyword",
9414                                self.peek_token().span.start
9415                            )
9416                        }
9417                    });
9418                }
9419            }
9420            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
9421                self.expect_token(&Token::LParen)?;
9422                let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
9423                self.expect_token(&Token::RParen)?;
9424                modifiers.push(GroupByWithModifier::GroupingSets(Expr::GroupingSets(
9425                    result,
9426                )));
9427            };
9428            let group_by = match expressions {
9429                None => GroupByExpr::All(modifiers),
9430                Some(exprs) => GroupByExpr::Expressions(exprs, modifiers),
9431            };
9432            Ok(Some(group_by))
9433        } else {
9434            Ok(None)
9435        }
9436    }
9437
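    /// Parses an optional `ORDER BY` clause, returning `None` if the next
    /// tokens do not start an `ORDER BY` clause.
    ///
    /// Example (an illustrative sketch using `GenericDialect`):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("ORDER BY a DESC").unwrap();
    /// let order_by = parser.parse_optional_order_by().unwrap();
    /// assert!(order_by.is_some());
    /// ```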
9438    pub fn parse_optional_order_by(&mut self) -> Result<Option<OrderBy>, ParserError> {
9439        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
9440            let order_by =
9441                if self.dialect.supports_order_by_all() && self.parse_keyword(Keyword::ALL) {
9442                    let order_by_options = self.parse_order_by_options()?;
9443                    OrderBy {
9444                        kind: OrderByKind::All(order_by_options),
9445                        interpolate: None,
9446                    }
9447                } else {
9448                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
9449                    let interpolate = if dialect_of!(self is ClickHouseDialect | GenericDialect) {
9450                        self.parse_interpolations()?
9451                    } else {
9452                        None
9453                    };
9454                    OrderBy {
9455                        kind: OrderByKind::Expressions(exprs),
9456                        interpolate,
9457                    }
9458                };
9459            Ok(Some(order_by))
9460        } else {
9461            Ok(None)
9462        }
9463    }
9464
9465    /// Parse a table object for insertion
9466    /// e.g. `some_database.some_table` or `FUNCTION some_table_func(...)`
9467    pub fn parse_table_object(&mut self) -> Result<TableObject, ParserError> {
9468        if self.dialect.supports_insert_table_function() && self.parse_keyword(Keyword::FUNCTION) {
9469            let fn_name = self.parse_object_name(false)?;
9470            self.parse_function_call(fn_name)
9471                .map(TableObject::TableFunction)
9472        } else {
9473            self.parse_object_name(false).map(TableObject::TableName)
9474        }
9475    }
9476
9477    /// Parse a possibly qualified, possibly quoted identifier, optionally allowing for wildcards,
9478    /// e.g. *, *.*, `foo`.*, or "foo"."bar"
9479    fn parse_object_name_with_wildcards(
9480        &mut self,
9481        in_table_clause: bool,
9482        allow_wildcards: bool,
9483    ) -> Result<ObjectName, ParserError> {
9484        let mut idents = vec![];
9485
9486        if dialect_of!(self is BigQueryDialect) && in_table_clause {
9487            loop {
9488                let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
9489                idents.push(ident);
9490                if !self.consume_token(&Token::Period) && !end_with_period {
9491                    break;
9492                }
9493            }
9494        } else {
9495            loop {
9496                let ident = if allow_wildcards && self.peek_token().token == Token::Mul {
9497                    let span = self.next_token().span;
9498                    Ident {
9499                        value: Token::Mul.to_string(),
9500                        quote_style: None,
9501                        span,
9502                    }
9503                } else {
9504                    if self.dialect.supports_object_name_double_dot_notation()
9505                        && idents.len() == 1
9506                        && self.consume_token(&Token::Period)
9507                    {
9508                        // Empty string here means default schema
9509                        idents.push(Ident::new(""));
9510                    }
9511                    self.parse_identifier()?
9512                };
9513                idents.push(ident);
9514                if !self.consume_token(&Token::Period) {
9515                    break;
9516                }
9517            }
9518        }
9519        Ok(ObjectName::from(idents))
9520    }
9521
9522    /// Parse a possibly qualified, possibly quoted identifier, e.g.
9523    /// `foo` or `myschema."table"`
9524    ///
9525    /// The `in_table_clause` parameter indicates whether the object name is a table in a FROM, JOIN,
9526    /// or similar table clause. Currently, this is used only to support unquoted hyphenated identifiers
9527    /// in this context on BigQuery.
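    ///
    /// Example (an illustrative sketch using `GenericDialect`):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("myschema.\"table\"").unwrap();
    /// let name = parser.parse_object_name(false).unwrap();
    /// assert_eq!(name.to_string(), "myschema.\"table\"");
    /// ```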
9528    pub fn parse_object_name(&mut self, in_table_clause: bool) -> Result<ObjectName, ParserError> {
9529        let ObjectName(mut idents) =
9530            self.parse_object_name_with_wildcards(in_table_clause, false)?;
9531
9532        // BigQuery allows quoted identifiers of a table name to themselves contain dots.
9533        // https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#quoted_identifiers
9534        if dialect_of!(self is BigQueryDialect)
9535            && idents.iter().any(|part| {
9536                part.as_ident()
9537                    .is_some_and(|ident| ident.value.contains('.'))
9538            })
9539        {
9540            idents = idents
9541                .into_iter()
9542                .flat_map(|part| match part.as_ident() {
9543                    Some(ident) => ident
9544                        .value
9545                        .split('.')
9546                        .map(|value| {
9547                            ObjectNamePart::Identifier(Ident {
9548                                value: value.into(),
9549                                quote_style: ident.quote_style,
9550                                span: ident.span,
9551                            })
9552                        })
9553                        .collect::<Vec<_>>(),
9554                    None => vec![part],
9555                })
9556                .collect()
9557        }
9558
9559        Ok(ObjectName(idents))
9560    }
9561
9562    /// Parse identifiers
9563    pub fn parse_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
9564        let mut idents = vec![];
9565        loop {
9566            match &self.peek_token_ref().token {
9567                Token::Word(w) => {
9568                    idents.push(w.clone().into_ident(self.peek_token_ref().span));
9569                }
9570                Token::EOF | Token::Eq => break,
9571                _ => {}
9572            }
9573            self.advance_token();
9574        }
9575        Ok(idents)
9576    }
9577
9578    /// Parse identifiers of form ident1[.identN]*
9579    ///
9580    /// Similar in functionality to [parse_identifiers], but much stricter: it parses
9581    /// only a valid multipart identifier and fails if any extraneous tokens are
9582    /// encountered.
9583    ///
9584    /// For example:
9585    ///
9586    /// ```rust
9587    /// use sqlparser::ast::Ident;
9588    /// use sqlparser::dialect::GenericDialect;
9589    /// use sqlparser::parser::Parser;
9590    ///
9591    /// let dialect = GenericDialect {};
9592    /// let expected = vec![Ident::new("one"), Ident::new("two")];
9593    ///
9594    /// // expected usage
9595    /// let sql = "one.two";
9596    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
9597    /// let actual = parser.parse_multipart_identifier().unwrap();
9598    /// assert_eq!(&actual, &expected);
9599    ///
9600    /// // parse_identifiers is looser about what it allows, parsing successfully
9601    /// let sql = "one + two";
9602    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
9603    /// let actual = parser.parse_identifiers().unwrap();
9604    /// assert_eq!(&actual, &expected);
9605    ///
9606    /// // expected to strictly fail due to + separator
9607    /// let sql = "one + two";
9608    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
9609    /// let actual = parser.parse_multipart_identifier().unwrap_err();
9610    /// assert_eq!(
9611    ///     actual.to_string(),
9612    ///     "sql parser error: Unexpected token in identifier: +"
9613    /// );
9614    /// ```
9615    ///
9616    /// [parse_identifiers]: Parser::parse_identifiers
9617    pub fn parse_multipart_identifier(&mut self) -> Result<Vec<Ident>, ParserError> {
9618        let mut idents = vec![];
9619
9620        // expecting at least one word for identifier
9621        let next_token = self.next_token();
9622        match next_token.token {
9623            Token::Word(w) => idents.push(w.into_ident(next_token.span)),
9624            Token::EOF => {
9625                return Err(ParserError::ParserError(
9626                    "Empty input when parsing identifier".to_string(),
9627                ))?
9628            }
9629            token => {
9630                return Err(ParserError::ParserError(format!(
9631                    "Unexpected token in identifier: {token}"
9632                )))?
9633            }
9634        };
9635
9636        // parse the optional subsequent parts, if any
9637        loop {
9638            match self.next_token().token {
9639                // ensure that optional period is succeeded by another identifier
9640                Token::Period => {
9641                    let next_token = self.next_token();
9642                    match next_token.token {
9643                        Token::Word(w) => idents.push(w.into_ident(next_token.span)),
9644                        Token::EOF => {
9645                            return Err(ParserError::ParserError(
9646                                "Trailing period in identifier".to_string(),
9647                            ))?
9648                        }
9649                        token => {
9650                            return Err(ParserError::ParserError(format!(
9651                                "Unexpected token following period in identifier: {token}"
9652                            )))?
9653                        }
9654                    }
9655                }
9656                Token::EOF => break,
9657                token => {
9658                    return Err(ParserError::ParserError(format!(
9659                        "Unexpected token in identifier: {token}"
9660                    )))?
9661                }
9662            }
9663        }
9664
9665        Ok(idents)
9666    }
9667
9668    /// Parse a simple one-word identifier (possibly quoted, possibly a keyword)
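    ///
    /// Example (an illustrative sketch using `GenericDialect`):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("price").unwrap();
    /// let ident = parser.parse_identifier().unwrap();
    /// assert_eq!(ident.value, "price");
    /// assert_eq!(ident.quote_style, None);
    /// ```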
9669    pub fn parse_identifier(&mut self) -> Result<Ident, ParserError> {
9670        let next_token = self.next_token();
9671        match next_token.token {
9672            Token::Word(w) => Ok(w.into_ident(next_token.span)),
9673            Token::SingleQuotedString(s) => Ok(Ident::with_quote('\'', s)),
9674            Token::DoubleQuotedString(s) => Ok(Ident::with_quote('\"', s)),
9675            _ => self.expected("identifier", next_token),
9676        }
9677    }
9678
9679    /// On BigQuery, hyphens are permitted in unquoted identifiers inside a FROM or
9680    /// TABLE clause.
9681    ///
9682    /// The first segment must be an ordinary unquoted identifier, e.g. it must not start
9683    /// with a digit. Subsequent segments must be either valid identifiers or
9684    /// integers, e.g. foo-123 is allowed, but foo-123a is not.
9685    ///
9686    /// [BigQuery-lexical](https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical)
9687    ///
9688    /// Return a tuple of the identifier and a boolean indicating it ends with a period.
9689    fn parse_unquoted_hyphenated_identifier(&mut self) -> Result<(Ident, bool), ParserError> {
9690        match self.peek_token().token {
9691            Token::Word(w) => {
9692                let quote_style_is_none = w.quote_style.is_none();
9693                let mut requires_whitespace = false;
9694                let mut ident = w.into_ident(self.next_token().span);
9695                if quote_style_is_none {
9696                    while matches!(self.peek_token_no_skip().token, Token::Minus) {
9697                        self.next_token();
9698                        ident.value.push('-');
9699
9700                        let token = self
9701                            .next_token_no_skip()
9702                            .cloned()
9703                            .unwrap_or(TokenWithSpan::wrap(Token::EOF));
9704                        requires_whitespace = match token.token {
9705                            Token::Word(next_word) if next_word.quote_style.is_none() => {
9706                                ident.value.push_str(&next_word.value);
9707                                false
9708                            }
9709                            Token::Number(s, false) => {
9710                                // A number token can represent a decimal value ending with a period, e.g., `Number('123.')`.
9711                                // However, for an [ObjectName], it is part of a hyphenated identifier, e.g., `foo-123.bar`.
9712                                //
9713                                // If a number token is followed by a period, it is part of an [ObjectName].
9714                                // Return the identifier with `true` if the number token is followed by a period, indicating that
9715                                // parsing should continue for the next part of the hyphenated identifier.
9716                                if s.ends_with('.') {
9717                                    let Some(s) = s.split('.').next().filter(|s| {
9718                                        !s.is_empty() && s.chars().all(|c| c.is_ascii_digit())
9719                                    }) else {
9720                                        return self.expected(
9721                                            "continuation of hyphenated identifier",
9722                                            TokenWithSpan::new(Token::Number(s, false), token.span),
9723                                        );
9724                                    };
9725                                    ident.value.push_str(s);
9726                                    return Ok((ident, true));
9727                                } else {
9728                                    ident.value.push_str(&s);
9729                                }
9730                                // If next token is period, then it is part of an ObjectName and we don't expect whitespace
9731                                // after the number.
9732                                !matches!(self.peek_token().token, Token::Period)
9733                            }
9734                            _ => {
9735                                return self
9736                                    .expected("continuation of hyphenated identifier", token);
9737                            }
9738                        }
9739                    }
9740
9741                    // If the last segment was a number, we must check that it's followed by whitespace,
9742                    // otherwise foo-123a will be parsed as `foo-123` with the alias `a`.
9743                    if requires_whitespace {
9744                        let token = self.next_token();
9745                        if !matches!(token.token, Token::EOF | Token::Whitespace(_)) {
9746                            return self
9747                                .expected("whitespace following hyphenated identifier", token);
9748                        }
9749                    }
9750                }
9751                Ok((ident, false))
9752            }
9753            _ => Ok((self.parse_identifier()?, false)),
9754        }
9755    }
9756
9757    /// Parses a parenthesized, comma-separated list of column definitions within a view.
9758    fn parse_view_columns(&mut self) -> Result<Vec<ViewColumnDef>, ParserError> {
9759        if self.consume_token(&Token::LParen) {
9760            if self.peek_token().token == Token::RParen {
9761                self.next_token();
9762                Ok(vec![])
9763            } else {
9764                let cols = self.parse_comma_separated_with_trailing_commas(
9765                    Parser::parse_view_column,
9766                    self.dialect.supports_column_definition_trailing_commas(),
9767                    Self::is_reserved_for_column_alias,
9768                )?;
9769                self.expect_token(&Token::RParen)?;
9770                Ok(cols)
9771            }
9772        } else {
9773            Ok(vec![])
9774        }
9775    }
9776
9777    /// Parses a column definition within a view.
9778    fn parse_view_column(&mut self) -> Result<ViewColumnDef, ParserError> {
9779        let name = self.parse_identifier()?;
9780        let options = if (dialect_of!(self is BigQueryDialect | GenericDialect)
9781            && self.parse_keyword(Keyword::OPTIONS))
9782            || (dialect_of!(self is SnowflakeDialect | GenericDialect)
9783                && self.parse_keyword(Keyword::COMMENT))
9784        {
9785            self.prev_token();
9786            self.parse_optional_column_option()?
9787                .map(|option| vec![option])
9788        } else {
9789            None
9790        };
9791        let data_type = if dialect_of!(self is ClickHouseDialect) {
9792            Some(self.parse_data_type()?)
9793        } else {
9794            None
9795        };
9796        Ok(ViewColumnDef {
9797            name,
9798            data_type,
9799            options,
9800        })
9801    }
9802
9803    /// Parses a parenthesized comma-separated list of unqualified, possibly quoted identifiers.
9804    /// For example: `(col1, "col 2", ...)`
9805    pub fn parse_parenthesized_column_list(
9806        &mut self,
9807        optional: IsOptional,
9808        allow_empty: bool,
9809    ) -> Result<Vec<Ident>, ParserError> {
9810        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| p.parse_identifier())
9811    }
9812
9813    /// Parses a parenthesized comma-separated list of qualified, possibly quoted identifiers.
9814    /// For example: `(db1.sc1.tbl1.col1, db1.sc1.tbl1."col 2", ...)`
9815    pub fn parse_parenthesized_qualified_column_list(
9816        &mut self,
9817        optional: IsOptional,
9818        allow_empty: bool,
9819    ) -> Result<Vec<ObjectName>, ParserError> {
9820        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
9821            p.parse_object_name(true)
9822        })
9823    }
9824
9825    /// Parses a parenthesized comma-separated list of columns using
9826    /// the provided function to parse each element.
9827    fn parse_parenthesized_column_list_inner<F, T>(
9828        &mut self,
9829        optional: IsOptional,
9830        allow_empty: bool,
9831        mut f: F,
9832    ) -> Result<Vec<T>, ParserError>
9833    where
9834        F: FnMut(&mut Parser) -> Result<T, ParserError>,
9835    {
9836        if self.consume_token(&Token::LParen) {
9837            if allow_empty && self.peek_token().token == Token::RParen {
9838                self.next_token();
9839                Ok(vec![])
9840            } else {
9841                let cols = self.parse_comma_separated(|p| f(p))?;
9842                self.expect_token(&Token::RParen)?;
9843                Ok(cols)
9844            }
9845        } else if optional == Optional {
9846            Ok(vec![])
9847        } else {
9848            self.expected("a list of columns in parentheses", self.peek_token())
9849        }
9850    }
9851
9852    /// Parses a parenthesized comma-separated list of table alias column definitions.
9853    fn parse_table_alias_column_defs(&mut self) -> Result<Vec<TableAliasColumnDef>, ParserError> {
9854        if self.consume_token(&Token::LParen) {
9855            let cols = self.parse_comma_separated(|p| {
9856                let name = p.parse_identifier()?;
9857                let data_type = p.maybe_parse(|p| p.parse_data_type())?;
9858                Ok(TableAliasColumnDef { name, data_type })
9859            })?;
9860            self.expect_token(&Token::RParen)?;
9861            Ok(cols)
9862        } else {
9863            Ok(vec![])
9864        }
9865    }
9866
9867    pub fn parse_precision(&mut self) -> Result<u64, ParserError> {
9868        self.expect_token(&Token::LParen)?;
9869        let n = self.parse_literal_uint()?;
9870        self.expect_token(&Token::RParen)?;
9871        Ok(n)
9872    }
9873
9874    pub fn parse_optional_precision(&mut self) -> Result<Option<u64>, ParserError> {
9875        if self.consume_token(&Token::LParen) {
9876            let n = self.parse_literal_uint()?;
9877            self.expect_token(&Token::RParen)?;
9878            Ok(Some(n))
9879        } else {
9880            Ok(None)
9881        }
9882    }
9883
9884    /// Parse a ClickHouse `DateTime64` data type [1].
9885    /// Syntax:
9886    /// ```sql
9887    /// DateTime64(precision[, timezone])
9888    /// ```
9889    ///
9890    /// [1]: https://clickhouse.com/docs/en/sql-reference/data-types/datetime64
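    ///
    /// Example (an illustrative sketch using `ClickHouseDialect`):
    ///
    /// ```rust
    /// use sqlparser::dialect::ClickHouseDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = ClickHouseDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("DateTime64(3, 'UTC')").unwrap();
    /// let (precision, time_zone) = parser.parse_datetime_64().unwrap();
    /// assert_eq!(precision, 3);
    /// assert_eq!(time_zone, Some("UTC".to_string()));
    /// ```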
9891    pub fn parse_datetime_64(&mut self) -> Result<(u64, Option<String>), ParserError> {
9892        self.expect_keyword_is(Keyword::DATETIME64)?;
9893        self.expect_token(&Token::LParen)?;
9894        let precision = self.parse_literal_uint()?;
9895        let time_zone = if self.consume_token(&Token::Comma) {
9896            Some(self.parse_literal_string()?)
9897        } else {
9898            None
9899        };
9900        self.expect_token(&Token::RParen)?;
9901        Ok((precision, time_zone))
9902    }
9903
9904    pub fn parse_optional_character_length(
9905        &mut self,
9906    ) -> Result<Option<CharacterLength>, ParserError> {
9907        if self.consume_token(&Token::LParen) {
9908            let character_length = self.parse_character_length()?;
9909            self.expect_token(&Token::RParen)?;
9910            Ok(Some(character_length))
9911        } else {
9912            Ok(None)
9913        }
9914    }
9915
9916    pub fn parse_optional_binary_length(&mut self) -> Result<Option<BinaryLength>, ParserError> {
9917        if self.consume_token(&Token::LParen) {
9918            let binary_length = self.parse_binary_length()?;
9919            self.expect_token(&Token::RParen)?;
9920            Ok(Some(binary_length))
9921        } else {
9922            Ok(None)
9923        }
9924    }
9925
9926    pub fn parse_character_length(&mut self) -> Result<CharacterLength, ParserError> {
9927        if self.parse_keyword(Keyword::MAX) {
9928            return Ok(CharacterLength::Max);
9929        }
9930        let length = self.parse_literal_uint()?;
9931        let unit = if self.parse_keyword(Keyword::CHARACTERS) {
9932            Some(CharLengthUnits::Characters)
9933        } else if self.parse_keyword(Keyword::OCTETS) {
9934            Some(CharLengthUnits::Octets)
9935        } else {
9936            None
9937        };
9938        Ok(CharacterLength::IntegerLength { length, unit })
9939    }
9940
9941    pub fn parse_binary_length(&mut self) -> Result<BinaryLength, ParserError> {
9942        if self.parse_keyword(Keyword::MAX) {
9943            return Ok(BinaryLength::Max);
9944        }
9945        let length = self.parse_literal_uint()?;
9946        Ok(BinaryLength::IntegerLength { length })
9947    }
9948
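    /// Parses an optional parenthesized `(precision [, scale])` pair, such as the
    /// `(10, 2)` in `DECIMAL(10, 2)`, returning `(None, None)` if no opening parenthesis follows.
    ///
    /// Example (an illustrative sketch using `GenericDialect`):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("(10, 2)").unwrap();
    /// assert_eq!(parser.parse_optional_precision_scale().unwrap(), (Some(10), Some(2)));
    /// ```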
9949    pub fn parse_optional_precision_scale(
9950        &mut self,
9951    ) -> Result<(Option<u64>, Option<u64>), ParserError> {
9952        if self.consume_token(&Token::LParen) {
9953            let n = self.parse_literal_uint()?;
9954            let scale = if self.consume_token(&Token::Comma) {
9955                Some(self.parse_literal_uint()?)
9956            } else {
9957                None
9958            };
9959            self.expect_token(&Token::RParen)?;
9960            Ok((Some(n), scale))
9961        } else {
9962            Ok((None, None))
9963        }
9964    }
9965
9966    pub fn parse_exact_number_optional_precision_scale(
9967        &mut self,
9968    ) -> Result<ExactNumberInfo, ParserError> {
9969        if self.consume_token(&Token::LParen) {
9970            let precision = self.parse_literal_uint()?;
9971            let scale = if self.consume_token(&Token::Comma) {
9972                Some(self.parse_literal_uint()?)
9973            } else {
9974                None
9975            };
9976
9977            self.expect_token(&Token::RParen)?;
9978
9979            match scale {
9980                None => Ok(ExactNumberInfo::Precision(precision)),
9981                Some(scale) => Ok(ExactNumberInfo::PrecisionAndScale(precision, scale)),
9982            }
9983        } else {
9984            Ok(ExactNumberInfo::None)
9985        }
9986    }
9987
9988    pub fn parse_optional_type_modifiers(&mut self) -> Result<Option<Vec<String>>, ParserError> {
9989        if self.consume_token(&Token::LParen) {
9990            let mut modifiers = Vec::new();
9991            loop {
9992                let next_token = self.next_token();
9993                match next_token.token {
9994                    Token::Word(w) => modifiers.push(w.to_string()),
9995                    Token::Number(n, _) => modifiers.push(n),
9996                    Token::SingleQuotedString(s) => modifiers.push(s),
9997
9998                    Token::Comma => {
9999                        continue;
10000                    }
10001                    Token::RParen => {
10002                        break;
10003                    }
10004                    _ => self.expected("type modifiers", next_token)?,
10005                }
10006            }
10007
10008            Ok(Some(modifiers))
10009        } else {
10010            Ok(None)
10011        }
10012    }
10013
10014    /// Parse a parenthesized sub data type
10015    fn parse_sub_type<F>(&mut self, parent_type: F) -> Result<DataType, ParserError>
10016    where
10017        F: FnOnce(Box<DataType>) -> DataType,
10018    {
10019        self.expect_token(&Token::LParen)?;
10020        let inside_type = self.parse_data_type()?;
10021        self.expect_token(&Token::RParen)?;
10022        Ok(parent_type(inside_type.into()))
10023    }
10024
10025    pub fn parse_delete(&mut self) -> Result<Statement, ParserError> {
10026        let (tables, with_from_keyword) = if !self.parse_keyword(Keyword::FROM) {
10027            // `FROM` keyword is optional in BigQuery SQL.
10028            // https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax#delete_statement
10029            if dialect_of!(self is BigQueryDialect | GenericDialect) {
10030                (vec![], false)
10031            } else {
10032                let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
10033                self.expect_keyword_is(Keyword::FROM)?;
10034                (tables, true)
10035            }
10036        } else {
10037            (vec![], true)
10038        };
10039
10040        let from = self.parse_comma_separated(Parser::parse_table_and_joins)?;
10041        let using = if self.parse_keyword(Keyword::USING) {
10042            Some(self.parse_comma_separated(Parser::parse_table_and_joins)?)
10043        } else {
10044            None
10045        };
10046        let selection = if self.parse_keyword(Keyword::WHERE) {
10047            Some(self.parse_expr()?)
10048        } else {
10049            None
10050        };
10051        let returning = if self.parse_keyword(Keyword::RETURNING) {
10052            Some(self.parse_comma_separated(Parser::parse_select_item)?)
10053        } else {
10054            None
10055        };
10056        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
10057            self.parse_comma_separated(Parser::parse_order_by_expr)?
10058        } else {
10059            vec![]
10060        };
10061        let limit = if self.parse_keyword(Keyword::LIMIT) {
10062            self.parse_limit()?
10063        } else {
10064            None
10065        };
10066
10067        Ok(Statement::Delete(Delete {
10068            tables,
10069            from: if with_from_keyword {
10070                FromTable::WithFromKeyword(from)
10071            } else {
10072                FromTable::WithoutKeyword(from)
10073            },
10074            using,
10075            selection,
10076            returning,
10077            order_by,
10078            limit,
10079        }))
10080    }
10081
10082    /// Parse a `KILL [CONNECTION | QUERY | MUTATION] processlist_id` statement.
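    ///
    /// Note that the leading `KILL` keyword is expected to have already been
    /// consumed by the caller.
    ///
    /// Example (an illustrative sketch using `GenericDialect`):
    ///
    /// ```rust
    /// use sqlparser::ast::{KillType, Statement};
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// // `KILL` itself has already been consumed, so only the remainder is parsed here.
    /// let mut parser = Parser::new(&dialect).try_with_sql("CONNECTION 123").unwrap();
    /// let stmt = parser.parse_kill().unwrap();
    /// assert!(matches!(
    ///     stmt,
    ///     Statement::Kill {
    ///         modifier: Some(KillType::Connection),
    ///         id: 123
    ///     }
    /// ));
    /// ```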
10083    pub fn parse_kill(&mut self) -> Result<Statement, ParserError> {
10084        let modifier_keyword =
10085            self.parse_one_of_keywords(&[Keyword::CONNECTION, Keyword::QUERY, Keyword::MUTATION]);
10086
10087        let id = self.parse_literal_uint()?;
10088
10089        let modifier = match modifier_keyword {
10090            Some(Keyword::CONNECTION) => Some(KillType::Connection),
10091            Some(Keyword::QUERY) => Some(KillType::Query),
10092            Some(Keyword::MUTATION) => {
10093                if dialect_of!(self is ClickHouseDialect | GenericDialect) {
10094                    Some(KillType::Mutation)
10095                } else {
10096                    self.expected(
10097                        "Unsupported type for KILL, allowed: CONNECTION | QUERY",
10098                        self.peek_token(),
10099                    )?
10100                }
10101            }
10102            _ => None,
10103        };
10104
10105        Ok(Statement::Kill { modifier, id })
10106    }
10107
10108    pub fn parse_explain(
10109        &mut self,
10110        describe_alias: DescribeAlias,
10111    ) -> Result<Statement, ParserError> {
10112        let mut analyze = false;
10113        let mut verbose = false;
10114        let mut query_plan = false;
10115        let mut estimate = false;
10116        let mut format = None;
10117        let mut options = None;
10118
10119        // Note: DuckDB is compatible with PostgreSQL syntax for this statement,
10120        // although not all features may be implemented.
10121        if describe_alias == DescribeAlias::Explain
10122            && self.dialect.supports_explain_with_utility_options()
10123            && self.peek_token().token == Token::LParen
10124        {
10125            options = Some(self.parse_utility_options()?)
10126        } else if self.parse_keywords(&[Keyword::QUERY, Keyword::PLAN]) {
10127            query_plan = true;
10128        } else if self.parse_keyword(Keyword::ESTIMATE) {
10129            estimate = true;
10130        } else {
10131            analyze = self.parse_keyword(Keyword::ANALYZE);
10132            verbose = self.parse_keyword(Keyword::VERBOSE);
10133            if self.parse_keyword(Keyword::FORMAT) {
10134                format = Some(self.parse_analyze_format()?);
10135            }
10136        }
10137
10138        match self.maybe_parse(|parser| parser.parse_statement())? {
10139            Some(Statement::Explain { .. }) | Some(Statement::ExplainTable { .. }) => Err(
10140                ParserError::ParserError("Explain must be root of the plan".to_string()),
10141            ),
10142            Some(statement) => Ok(Statement::Explain {
10143                describe_alias,
10144                analyze,
10145                verbose,
10146                query_plan,
10147                estimate,
10148                statement: Box::new(statement),
10149                format,
10150                options,
10151            }),
10152            _ => {
10153                let hive_format =
10154                    match self.parse_one_of_keywords(&[Keyword::EXTENDED, Keyword::FORMATTED]) {
10155                        Some(Keyword::EXTENDED) => Some(HiveDescribeFormat::Extended),
10156                        Some(Keyword::FORMATTED) => Some(HiveDescribeFormat::Formatted),
10157                        _ => None,
10158                    };
10159
10160                let has_table_keyword = if self.dialect.describe_requires_table_keyword() {
10161                    // only allow the TABLE keyword for DESC|DESCRIBE statements
10162                    self.parse_keyword(Keyword::TABLE)
10163                } else {
10164                    false
10165                };
10166
10167                let table_name = self.parse_object_name(false)?;
10168                Ok(Statement::ExplainTable {
10169                    describe_alias,
10170                    hive_format,
10171                    has_table_keyword,
10172                    table_name,
10173                })
10174            }
10175        }
10176    }
10177
10178    /// Parse a query expression, i.e. a `SELECT` statement optionally
10179    /// preceded by some `WITH` CTE declarations and optionally followed
10180    /// by `ORDER BY`. Unlike some other parse_... methods, this one doesn't
10181    /// expect the initial keyword to have been consumed already.
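    ///
    /// Example (an illustrative sketch using `GenericDialect`):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let sql = "SELECT a FROM t ORDER BY a LIMIT 10";
    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
    /// let query = parser.parse_query().unwrap();
    /// assert_eq!(query.to_string(), sql);
    /// ```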
10182    pub fn parse_query(&mut self) -> Result<Box<Query>, ParserError> {
10183        let _guard = self.recursion_counter.try_decrease()?;
10184        let with = if self.parse_keyword(Keyword::WITH) {
10185            let with_token = self.get_current_token();
10186            Some(With {
10187                with_token: with_token.clone().into(),
10188                recursive: self.parse_keyword(Keyword::RECURSIVE),
10189                cte_tables: self.parse_comma_separated(Parser::parse_cte)?,
10190            })
10191        } else {
10192            None
10193        };
10194        if self.parse_keyword(Keyword::INSERT) {
10195            Ok(Query {
10196                with,
10197                body: self.parse_insert_setexpr_boxed()?,
10198                limit: None,
10199                limit_by: vec![],
10200                order_by: None,
10201                offset: None,
10202                fetch: None,
10203                locks: vec![],
10204                for_clause: None,
10205                settings: None,
10206                format_clause: None,
10207            }
10208            .into())
10209        } else if self.parse_keyword(Keyword::UPDATE) {
10210            Ok(Query {
10211                with,
10212                body: self.parse_update_setexpr_boxed()?,
10213                limit: None,
10214                limit_by: vec![],
10215                order_by: None,
10216                offset: None,
10217                fetch: None,
10218                locks: vec![],
10219                for_clause: None,
10220                settings: None,
10221                format_clause: None,
10222            }
10223            .into())
10224        } else {
10225            let body = self.parse_query_body(self.dialect.prec_unknown())?;
10226
10227            let order_by = self.parse_optional_order_by()?;
10228
10229            let mut limit = None;
10230            let mut offset = None;
10231
10232            for _x in 0..2 {
10233                if limit.is_none() && self.parse_keyword(Keyword::LIMIT) {
10234                    limit = self.parse_limit()?
10235                }
10236
10237                if offset.is_none() && self.parse_keyword(Keyword::OFFSET) {
10238                    offset = Some(self.parse_offset()?)
10239                }
10240
10241                if self.dialect.supports_limit_comma()
10242                    && limit.is_some()
10243                    && offset.is_none()
10244                    && self.consume_token(&Token::Comma)
10245                {
10246                    // MySQL style LIMIT x,y => LIMIT y OFFSET x.
10247                    // Check <https://dev.mysql.com/doc/refman/8.0/en/select.html> for more details.
10248                    offset = Some(Offset {
10249                        value: limit.unwrap(),
10250                        rows: OffsetRows::None,
10251                    });
10252                    limit = Some(self.parse_expr()?);
10253                }
10254            }
10255
10256            let limit_by = if dialect_of!(self is ClickHouseDialect | GenericDialect)
10257                && self.parse_keyword(Keyword::BY)
10258            {
10259                self.parse_comma_separated(Parser::parse_expr)?
10260            } else {
10261                vec![]
10262            };
10263
10264            let settings = self.parse_settings()?;
10265
10266            let fetch = if self.parse_keyword(Keyword::FETCH) {
10267                Some(self.parse_fetch()?)
10268            } else {
10269                None
10270            };
10271
10272            let mut for_clause = None;
10273            let mut locks = Vec::new();
10274            while self.parse_keyword(Keyword::FOR) {
10275                if let Some(parsed_for_clause) = self.parse_for_clause()? {
10276                    for_clause = Some(parsed_for_clause);
10277                    break;
10278                } else {
10279                    locks.push(self.parse_lock()?);
10280                }
10281            }
10282            let format_clause = if dialect_of!(self is ClickHouseDialect | GenericDialect)
10283                && self.parse_keyword(Keyword::FORMAT)
10284            {
10285                if self.parse_keyword(Keyword::NULL) {
10286                    Some(FormatClause::Null)
10287                } else {
10288                    let ident = self.parse_identifier()?;
10289                    Some(FormatClause::Identifier(ident))
10290                }
10291            } else {
10292                None
10293            };
10294
10295            Ok(Query {
10296                with,
10297                body,
10298                order_by,
10299                limit,
10300                limit_by,
10301                offset,
10302                fetch,
10303                locks,
10304                for_clause,
10305                settings,
10306                format_clause,
10307            }
10308            .into())
10309        }
10310    }
10311
10312    fn parse_settings(&mut self) -> Result<Option<Vec<Setting>>, ParserError> {
10313        let settings = if dialect_of!(self is ClickHouseDialect|GenericDialect)
10314            && self.parse_keyword(Keyword::SETTINGS)
10315        {
10316            let key_values = self.parse_comma_separated(|p| {
10317                let key = p.parse_identifier()?;
10318                p.expect_token(&Token::Eq)?;
10319                let value = p.parse_value()?.value;
10320                Ok(Setting { key, value })
10321            })?;
10322            Some(key_values)
10323        } else {
10324            None
10325        };
10326        Ok(settings)
10327    }
10328
10329    /// Parse an MSSQL `FOR [XML | JSON | BROWSE]` clause
10330    pub fn parse_for_clause(&mut self) -> Result<Option<ForClause>, ParserError> {
10331        if self.parse_keyword(Keyword::XML) {
10332            Ok(Some(self.parse_for_xml()?))
10333        } else if self.parse_keyword(Keyword::JSON) {
10334            Ok(Some(self.parse_for_json()?))
10335        } else if self.parse_keyword(Keyword::BROWSE) {
10336            Ok(Some(ForClause::Browse))
10337        } else {
10338            Ok(None)
10339        }
10340    }
10341
10342    /// Parse an MSSQL `FOR XML` clause
10343    pub fn parse_for_xml(&mut self) -> Result<ForClause, ParserError> {
10344        let for_xml = if self.parse_keyword(Keyword::RAW) {
10345            let mut element_name = None;
10346            if self.peek_token().token == Token::LParen {
10347                self.expect_token(&Token::LParen)?;
10348                element_name = Some(self.parse_literal_string()?);
10349                self.expect_token(&Token::RParen)?;
10350            }
10351            ForXml::Raw(element_name)
10352        } else if self.parse_keyword(Keyword::AUTO) {
10353            ForXml::Auto
10354        } else if self.parse_keyword(Keyword::EXPLICIT) {
10355            ForXml::Explicit
10356        } else if self.parse_keyword(Keyword::PATH) {
10357            let mut element_name = None;
10358            if self.peek_token().token == Token::LParen {
10359                self.expect_token(&Token::LParen)?;
10360                element_name = Some(self.parse_literal_string()?);
10361                self.expect_token(&Token::RParen)?;
10362            }
10363            ForXml::Path(element_name)
10364        } else {
10365            return Err(ParserError::ParserError(
10366                "Expected FOR XML [RAW | AUTO | EXPLICIT | PATH ]".to_string(),
10367            ));
10368        };
10369        let mut elements = false;
10370        let mut binary_base64 = false;
10371        let mut root = None;
10372        let mut r#type = false;
10373        while self.peek_token().token == Token::Comma {
10374            self.next_token();
10375            if self.parse_keyword(Keyword::ELEMENTS) {
10376                elements = true;
10377            } else if self.parse_keyword(Keyword::BINARY) {
10378                self.expect_keyword_is(Keyword::BASE64)?;
10379                binary_base64 = true;
10380            } else if self.parse_keyword(Keyword::ROOT) {
10381                self.expect_token(&Token::LParen)?;
10382                root = Some(self.parse_literal_string()?);
10383                self.expect_token(&Token::RParen)?;
10384            } else if self.parse_keyword(Keyword::TYPE) {
10385                r#type = true;
10386            }
10387        }
10388        Ok(ForClause::Xml {
10389            for_xml,
10390            elements,
10391            binary_base64,
10392            root,
10393            r#type,
10394        })
10395    }
10396
10397    /// Parse an MSSQL `FOR JSON` clause
10398    pub fn parse_for_json(&mut self) -> Result<ForClause, ParserError> {
10399        let for_json = if self.parse_keyword(Keyword::AUTO) {
10400            ForJson::Auto
10401        } else if self.parse_keyword(Keyword::PATH) {
10402            ForJson::Path
10403        } else {
10404            return Err(ParserError::ParserError(
10405                "Expected FOR JSON [AUTO | PATH ]".to_string(),
10406            ));
10407        };
10408        let mut root = None;
10409        let mut include_null_values = false;
10410        let mut without_array_wrapper = false;
10411        while self.peek_token().token == Token::Comma {
10412            self.next_token();
10413            if self.parse_keyword(Keyword::ROOT) {
10414                self.expect_token(&Token::LParen)?;
10415                root = Some(self.parse_literal_string()?);
10416                self.expect_token(&Token::RParen)?;
10417            } else if self.parse_keyword(Keyword::INCLUDE_NULL_VALUES) {
10418                include_null_values = true;
10419            } else if self.parse_keyword(Keyword::WITHOUT_ARRAY_WRAPPER) {
10420                without_array_wrapper = true;
10421            }
10422        }
10423        Ok(ForClause::Json {
10424            for_json,
10425            root,
10426            include_null_values,
10427            without_array_wrapper,
10428        })
10429    }
10430
10431    /// Parse a CTE (`alias [( col1, col2, ... )] AS (subquery)`)
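    ///
    /// Example (an illustrative sketch using `GenericDialect`):
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("nums AS (SELECT 1)").unwrap();
    /// let cte = parser.parse_cte().unwrap();
    /// assert_eq!(cte.alias.name.value, "nums");
    /// assert_eq!(cte.query.to_string(), "SELECT 1");
    /// ```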
10432    pub fn parse_cte(&mut self) -> Result<Cte, ParserError> {
10433        let name = self.parse_identifier()?;
10434
10435        let mut cte = if self.parse_keyword(Keyword::AS) {
10436            let mut is_materialized = None;
10437            if dialect_of!(self is PostgreSqlDialect) {
10438                if self.parse_keyword(Keyword::MATERIALIZED) {
10439                    is_materialized = Some(CteAsMaterialized::Materialized);
10440                } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
10441                    is_materialized = Some(CteAsMaterialized::NotMaterialized);
10442                }
10443            }
10444            self.expect_token(&Token::LParen)?;
10445
10446            let query = self.parse_query()?;
10447            let closing_paren_token = self.expect_token(&Token::RParen)?;
10448
10449            let alias = TableAlias {
10450                name,
10451                columns: vec![],
10452            };
10453            Cte {
10454                alias,
10455                query,
10456                from: None,
10457                materialized: is_materialized,
10458                closing_paren_token: closing_paren_token.into(),
10459            }
10460        } else {
10461            let columns = self.parse_table_alias_column_defs()?;
10462            self.expect_keyword_is(Keyword::AS)?;
10463            let mut is_materialized = None;
10464            if dialect_of!(self is PostgreSqlDialect) {
10465                if self.parse_keyword(Keyword::MATERIALIZED) {
10466                    is_materialized = Some(CteAsMaterialized::Materialized);
10467                } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
10468                    is_materialized = Some(CteAsMaterialized::NotMaterialized);
10469                }
10470            }
10471            self.expect_token(&Token::LParen)?;
10472
10473            let query = self.parse_query()?;
10474            let closing_paren_token = self.expect_token(&Token::RParen)?;
10475
10476            let alias = TableAlias { name, columns };
10477            Cte {
10478                alias,
10479                query,
10480                from: None,
10481                materialized: is_materialized,
10482                closing_paren_token: closing_paren_token.into(),
10483            }
10484        };
10485        if self.parse_keyword(Keyword::FROM) {
10486            cte.from = Some(self.parse_identifier()?);
10487        }
10488        Ok(cte)
10489    }
10490
10491    /// Parse a "query body", which is an expression with roughly the
10492    /// following grammar:
10493    /// ```sql
10494    ///   query_body ::= restricted_select | '(' subquery ')' | set_operation
10495    ///   restricted_select ::= 'SELECT' [expr_list] [ from ] [ where ] [ groupby_having ]
10496    ///   subquery ::= query_body [ order_by_limit ]
10497    ///   set_operation ::= query_body { 'UNION' | 'EXCEPT' | 'INTERSECT' } [ 'ALL' ] query_body
10498    /// ```
10499    pub fn parse_query_body(&mut self, precedence: u8) -> Result<Box<SetExpr>, ParserError> {
10500        // We parse the expression using a Pratt parser, as in `parse_expr()`.
10501        // Start by parsing a restricted SELECT or a `(subquery)`:
10502        let expr = if self.peek_keyword(Keyword::SELECT)
10503            || (self.peek_keyword(Keyword::FROM) && self.dialect.supports_from_first_select())
10504        {
10505            SetExpr::Select(self.parse_select().map(Box::new)?)
10506        } else if self.consume_token(&Token::LParen) {
10507            // CTEs are not allowed here, but the parser currently accepts them
10508            let subquery = self.parse_query()?;
10509            self.expect_token(&Token::RParen)?;
10510            SetExpr::Query(subquery)
10511        } else if self.parse_keyword(Keyword::VALUES) {
10512            let is_mysql = dialect_of!(self is MySqlDialect);
10513            SetExpr::Values(self.parse_values(is_mysql)?)
10514        } else if self.parse_keyword(Keyword::TABLE) {
10515            SetExpr::Table(Box::new(self.parse_as_table()?))
10516        } else {
10517            return self.expected(
10518                "SELECT, VALUES, or a subquery in the query body",
10519                self.peek_token(),
10520            );
10521        };
10522
10523        self.parse_remaining_set_exprs(expr, precedence)
10524    }
10525
10526    /// Parse any extra set expressions that may be present in a query body
10527    ///
10528    /// (this is its own function to reduce required stack size in debug builds)
10529    fn parse_remaining_set_exprs(
10530        &mut self,
10531        mut expr: SetExpr,
10532        precedence: u8,
10533    ) -> Result<Box<SetExpr>, ParserError> {
10534        loop {
10535            // The query can be optionally followed by a set operator:
10536            let op = self.parse_set_operator(&self.peek_token().token);
10537            let next_precedence = match op {
10538                // UNION, EXCEPT and MINUS have the same binding power and evaluate left-to-right
10539                Some(SetOperator::Union) | Some(SetOperator::Except) | Some(SetOperator::Minus) => {
10540                    10
10541                }
10542                // INTERSECT has higher precedence than UNION/EXCEPT
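                // e.g. `a UNION b INTERSECT c` is parsed as `a UNION (b INTERSECT c)`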
10543                Some(SetOperator::Intersect) => 20,
10544                // Unexpected token or EOF => stop parsing the query body
10545                None => break,
10546            };
10547            if precedence >= next_precedence {
10548                break;
10549            }
10550            self.next_token(); // skip past the set operator
10551            let set_quantifier = self.parse_set_quantifier(&op);
10552            expr = SetExpr::SetOperation {
10553                left: Box::new(expr),
10554                op: op.unwrap(),
10555                set_quantifier,
10556                right: self.parse_query_body(next_precedence)?,
10557            };
10558        }
10559
10560        Ok(expr.into())
10561    }
10562
10563    pub fn parse_set_operator(&mut self, token: &Token) -> Option<SetOperator> {
10564        match token {
10565            Token::Word(w) if w.keyword == Keyword::UNION => Some(SetOperator::Union),
10566            Token::Word(w) if w.keyword == Keyword::EXCEPT => Some(SetOperator::Except),
10567            Token::Word(w) if w.keyword == Keyword::INTERSECT => Some(SetOperator::Intersect),
10568            Token::Word(w) if w.keyword == Keyword::MINUS => Some(SetOperator::Minus),
10569            _ => None,
10570        }
10571    }
10572
10573    pub fn parse_set_quantifier(&mut self, op: &Option<SetOperator>) -> SetQuantifier {
10574        match op {
10575            Some(
10576                SetOperator::Except
10577                | SetOperator::Intersect
10578                | SetOperator::Union
10579                | SetOperator::Minus,
10580            ) => {
10581                if self.parse_keywords(&[Keyword::DISTINCT, Keyword::BY, Keyword::NAME]) {
10582                    SetQuantifier::DistinctByName
10583                } else if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
10584                    SetQuantifier::ByName
10585                } else if self.parse_keyword(Keyword::ALL) {
10586                    if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
10587                        SetQuantifier::AllByName
10588                    } else {
10589                        SetQuantifier::All
10590                    }
10591                } else if self.parse_keyword(Keyword::DISTINCT) {
10592                    SetQuantifier::Distinct
10593                } else {
10594                    SetQuantifier::None
10595                }
10596            }
10597            _ => SetQuantifier::None,
10598        }
10599    }
10600
10601    /// Parse a restricted `SELECT` statement (no CTEs / `UNION` / `ORDER BY`)
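    ///
    /// For example, this parses the `SELECT a, b FROM t WHERE c > 1 GROUP BY a`
    /// core of a query, leaving any trailing `ORDER BY`/`LIMIT` to the caller.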
10602    pub fn parse_select(&mut self) -> Result<Select, ParserError> {
10603        let mut from_first = None;
10604
10605        if self.dialect.supports_from_first_select() && self.peek_keyword(Keyword::FROM) {
10606            let from_token = self.expect_keyword(Keyword::FROM)?;
10607            let from = self.parse_table_with_joins()?;
10608            if !self.peek_keyword(Keyword::SELECT) {
10609                return Ok(Select {
10610                    select_token: AttachedToken(from_token),
10611                    distinct: None,
10612                    top: None,
10613                    top_before_distinct: false,
10614                    projection: vec![],
10615                    into: None,
10616                    from,
10617                    lateral_views: vec![],
10618                    prewhere: None,
10619                    selection: None,
10620                    group_by: GroupByExpr::Expressions(vec![], vec![]),
10621                    cluster_by: vec![],
10622                    distribute_by: vec![],
10623                    sort_by: vec![],
10624                    having: None,
10625                    named_window: vec![],
10626                    window_before_qualify: false,
10627                    qualify: None,
10628                    value_table_mode: None,
10629                    connect_by: None,
10630                    flavor: SelectFlavor::FromFirstNoSelect,
10631                });
10632            }
10633            from_first = Some(from);
10634        }
10635
10636        let select_token = self.expect_keyword(Keyword::SELECT)?;
10637        let value_table_mode =
10638            if dialect_of!(self is BigQueryDialect) && self.parse_keyword(Keyword::AS) {
10639                if self.parse_keyword(Keyword::VALUE) {
10640                    Some(ValueTableMode::AsValue)
10641                } else if self.parse_keyword(Keyword::STRUCT) {
10642                    Some(ValueTableMode::AsStruct)
10643                } else {
10644                    self.expected("VALUE or STRUCT", self.peek_token())?
10645                }
10646            } else {
10647                None
10648            };
10649
10650        let mut top_before_distinct = false;
10651        let mut top = None;
10652        if self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
10653            top = Some(self.parse_top()?);
10654            top_before_distinct = true;
10655        }
10656        let distinct = self.parse_all_or_distinct()?;
10657        if !self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
10658            top = Some(self.parse_top()?);
10659        }
10660
10661        let projection =
10662            if self.dialect.supports_empty_projections() && self.peek_keyword(Keyword::FROM) {
10663                vec![]
10664            } else {
10665                self.parse_projection()?
10666            };
10667
10668        let into = if self.parse_keyword(Keyword::INTO) {
10669            let temporary = self
10670                .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
10671                .is_some();
10672            let unlogged = self.parse_keyword(Keyword::UNLOGGED);
10673            let table = self.parse_keyword(Keyword::TABLE);
10674            let name = self.parse_object_name(false)?;
10675            Some(SelectInto {
10676                temporary,
10677                unlogged,
10678                table,
10679                name,
10680            })
10681        } else {
10682            None
10683        };
10684
10685        // Note that for keywords to be properly handled here, they need to be
10686        // added to `RESERVED_FOR_COLUMN_ALIAS` / `RESERVED_FOR_TABLE_ALIAS`,
10687        // otherwise they may be parsed as an alias as part of the `projection`
10688        // or `from`.
10689
10690        let (from, from_first) = if let Some(from) = from_first.take() {
10691            (from, true)
10692        } else if self.parse_keyword(Keyword::FROM) {
10693            (self.parse_table_with_joins()?, false)
10694        } else {
10695            (vec![], false)
10696        };
10697
10698        let mut lateral_views = vec![];
10699        loop {
10700            if self.parse_keywords(&[Keyword::LATERAL, Keyword::VIEW]) {
10701                let outer = self.parse_keyword(Keyword::OUTER);
10702                let lateral_view = self.parse_expr()?;
10703                let lateral_view_name = self.parse_object_name(false)?;
10704                let lateral_col_alias = self
10705                    .parse_comma_separated(|parser| {
10706                        parser.parse_optional_alias(&[
10707                            Keyword::WHERE,
10708                            Keyword::GROUP,
10709                            Keyword::CLUSTER,
10710                            Keyword::HAVING,
10711                            Keyword::LATERAL,
10712                        ]) // reserved keywords that must not be taken as a column alias here
10713                    })?
10714                    .into_iter()
10715                    .flatten()
10716                    .collect();
10717
10718                lateral_views.push(LateralView {
10719                    lateral_view,
10720                    lateral_view_name,
10721                    lateral_col_alias,
10722                    outer,
10723                });
10724            } else {
10725                break;
10726            }
10727        }
10728
10729        let prewhere = if dialect_of!(self is ClickHouseDialect|GenericDialect)
10730            && self.parse_keyword(Keyword::PREWHERE)
10731        {
10732            Some(self.parse_expr()?)
10733        } else {
10734            None
10735        };
10736
10737        let selection = if self.parse_keyword(Keyword::WHERE) {
10738            Some(self.parse_expr()?)
10739        } else {
10740            None
10741        };
10742
10743        let group_by = self
10744            .parse_optional_group_by()?
10745            .unwrap_or_else(|| GroupByExpr::Expressions(vec![], vec![]));
10746
10747        let cluster_by = if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
10748            self.parse_comma_separated(Parser::parse_expr)?
10749        } else {
10750            vec![]
10751        };
10752
10753        let distribute_by = if self.parse_keywords(&[Keyword::DISTRIBUTE, Keyword::BY]) {
10754            self.parse_comma_separated(Parser::parse_expr)?
10755        } else {
10756            vec![]
10757        };
10758
10759        let sort_by = if self.parse_keywords(&[Keyword::SORT, Keyword::BY]) {
10760            self.parse_comma_separated(Parser::parse_expr)?
10761        } else {
10762            vec![]
10763        };
10764
10765        let having = if self.parse_keyword(Keyword::HAVING) {
10766            Some(self.parse_expr()?)
10767        } else {
10768            None
10769        };
10770
10771        // Accept QUALIFY and WINDOW in any order and flag accordingly.
10772        let (named_windows, qualify, window_before_qualify) = if self.parse_keyword(Keyword::WINDOW)
10773        {
10774            let named_windows = self.parse_comma_separated(Parser::parse_named_window)?;
10775            if self.parse_keyword(Keyword::QUALIFY) {
10776                (named_windows, Some(self.parse_expr()?), true)
10777            } else {
10778                (named_windows, None, true)
10779            }
10780        } else if self.parse_keyword(Keyword::QUALIFY) {
10781            let qualify = Some(self.parse_expr()?);
10782            if self.parse_keyword(Keyword::WINDOW) {
10783                (
10784                    self.parse_comma_separated(Parser::parse_named_window)?,
10785                    qualify,
10786                    false,
10787                )
10788            } else {
10789                (Default::default(), qualify, false)
10790            }
10791        } else {
10792            Default::default()
10793        };
10794
10795        let connect_by = if self.dialect.supports_connect_by()
10796            && self
10797                .parse_one_of_keywords(&[Keyword::START, Keyword::CONNECT])
10798                .is_some()
10799        {
10800            self.prev_token();
10801            Some(self.parse_connect_by()?)
10802        } else {
10803            None
10804        };
10805
10806        Ok(Select {
10807            select_token: AttachedToken(select_token),
10808            distinct,
10809            top,
10810            top_before_distinct,
10811            projection,
10812            into,
10813            from,
10814            lateral_views,
10815            prewhere,
10816            selection,
10817            group_by,
10818            cluster_by,
10819            distribute_by,
10820            sort_by,
10821            having,
10822            named_window: named_windows,
10823            window_before_qualify,
10824            qualify,
10825            value_table_mode,
10826            connect_by,
10827            flavor: if from_first {
10828                SelectFlavor::FromFirst
10829            } else {
10830                SelectFlavor::Standard
10831            },
10832        })
10833    }
10834
10835    /// Invoke `f` after first setting the parser's `ParserState` to `state`.
10836    ///
10837    /// The parser's previous state is restored before this method returns.
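    ///
    /// For example, `parse_connect_by` below uses this to parse the `CONNECT BY`
    /// relationship list while `ParserState::ConnectBy` is in effect.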
10838    fn with_state<T, F>(&mut self, state: ParserState, mut f: F) -> Result<T, ParserError>
10839    where
10840        F: FnMut(&mut Parser) -> Result<T, ParserError>,
10841    {
10842        let current_state = self.state;
10843        self.state = state;
10844        let res = f(self);
10845        self.state = current_state;
10846        res
10847    }
10848
10849    pub fn parse_connect_by(&mut self) -> Result<ConnectBy, ParserError> {
10850        let (condition, relationships) = if self.parse_keywords(&[Keyword::CONNECT, Keyword::BY]) {
10851            let relationships = self.with_state(ParserState::ConnectBy, |parser| {
10852                parser.parse_comma_separated(Parser::parse_expr)
10853            })?;
10854            self.expect_keywords(&[Keyword::START, Keyword::WITH])?;
10855            let condition = self.parse_expr()?;
10856            (condition, relationships)
10857        } else {
10858            self.expect_keywords(&[Keyword::START, Keyword::WITH])?;
10859            let condition = self.parse_expr()?;
10860            self.expect_keywords(&[Keyword::CONNECT, Keyword::BY])?;
10861            let relationships = self.with_state(ParserState::ConnectBy, |parser| {
10862                parser.parse_comma_separated(Parser::parse_expr)
10863            })?;
10864            (condition, relationships)
10865        };
10866        Ok(ConnectBy {
10867            condition,
10868            relationships,
10869        })
10870    }
10871
10872    /// Parse `CREATE TABLE x AS TABLE y`
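    ///
    /// For example, in `CREATE TABLE t2 AS TABLE db1.t1` this parses `db1.t1`
    /// into a schema name of `db1` and a table name of `t1`.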
10873    pub fn parse_as_table(&mut self) -> Result<Table, ParserError> {
10874        let token1 = self.next_token();
10875        let token2 = self.next_token();
10876        let token3 = self.next_token();
10877
10878        let table_name;
10879        let schema_name;
10880        if token2 == Token::Period {
10881            match token1.token {
10882                Token::Word(w) => {
10883                    schema_name = w.value;
10884                }
10885                _ => {
10886                    return self.expected("Schema name", token1);
10887                }
10888            }
10889            match token3.token {
10890                Token::Word(w) => {
10891                    table_name = w.value;
10892                }
10893                _ => {
10894                    return self.expected("Table name", token3);
10895                }
10896            }
10897            Ok(Table {
10898                table_name: Some(table_name),
10899                schema_name: Some(schema_name),
10900            })
10901        } else {
10902            match token1.token {
10903                Token::Word(w) => {
10904                    table_name = w.value;
10905                }
10906                _ => {
10907                    return self.expected("Table name", token1);
10908                }
10909            }
10910            Ok(Table {
10911                table_name: Some(table_name),
10912                schema_name: None,
10913            })
10914        }
10915    }
10916
10917    /// Parse a `SET ROLE` statement. Expects the `SET` keyword to have already been consumed.
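    /// e.g. `SET ROLE my_role`, `SET LOCAL ROLE my_role`, or `SET ROLE NONE`.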
10918    fn parse_set_role(&mut self, modifier: Option<Keyword>) -> Result<Statement, ParserError> {
10919        self.expect_keyword_is(Keyword::ROLE)?;
10920        let context_modifier = match modifier {
10921            Some(Keyword::LOCAL) => ContextModifier::Local,
10922            Some(Keyword::SESSION) => ContextModifier::Session,
10923            _ => ContextModifier::None,
10924        };
10925
10926        let role_name = if self.parse_keyword(Keyword::NONE) {
10927            None
10928        } else {
10929            Some(self.parse_identifier()?)
10930        };
10931        Ok(Statement::SetRole {
10932            context_modifier,
10933            role_name,
10934        })
10935    }
10936
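    /// Parse a `SET ...` statement, e.g. a variable assignment (`SET x = 1`),
    /// `SET ROLE ...`, `SET NAMES ...`, `SET TIME ZONE ...`, or
    /// `SET TRANSACTION ...`, depending on what follows.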
10937    pub fn parse_set(&mut self) -> Result<Statement, ParserError> {
10938        let modifier =
10939            self.parse_one_of_keywords(&[Keyword::SESSION, Keyword::LOCAL, Keyword::HIVEVAR]);
10940        if let Some(Keyword::HIVEVAR) = modifier {
10941            self.expect_token(&Token::Colon)?;
10942        } else if let Some(set_role_stmt) =
10943            self.maybe_parse(|parser| parser.parse_set_role(modifier))?
10944        {
10945            return Ok(set_role_stmt);
10946        }
10947
10948        let variables = if self.parse_keywords(&[Keyword::TIME, Keyword::ZONE]) {
10949            OneOrManyWithParens::One(ObjectName::from(vec!["TIMEZONE".into()]))
10950        } else if self.dialect.supports_parenthesized_set_variables()
10951            && self.consume_token(&Token::LParen)
10952        {
10953            let variables = OneOrManyWithParens::Many(
10954                self.parse_comma_separated(|parser: &mut Parser<'a>| parser.parse_identifier())?
10955                    .into_iter()
10956                    .map(|ident| ObjectName::from(vec![ident]))
10957                    .collect(),
10958            );
10959            self.expect_token(&Token::RParen)?;
10960            variables
10961        } else {
10962            OneOrManyWithParens::One(self.parse_object_name(false)?)
10963        };
10964
10965        if matches!(&variables, OneOrManyWithParens::One(variable) if variable.to_string().eq_ignore_ascii_case("NAMES")
10966            && dialect_of!(self is MySqlDialect | GenericDialect))
10967        {
10968            if self.parse_keyword(Keyword::DEFAULT) {
10969                return Ok(Statement::SetNamesDefault {});
10970            }
10971
10972            let charset_name = self.parse_literal_string()?;
10973            let collation_name = if self.parse_one_of_keywords(&[Keyword::COLLATE]).is_some() {
10974                Some(self.parse_literal_string()?)
10975            } else {
10976                None
10977            };
10978
10979            return Ok(Statement::SetNames {
10980                charset_name,
10981                collation_name,
10982            });
10983        }
10984
10985        let parenthesized_assignment = matches!(&variables, OneOrManyWithParens::Many(_));
10986
10987        if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
10988            if parenthesized_assignment {
10989                self.expect_token(&Token::LParen)?;
10990            }
10991
10992            let mut values = vec![];
10993            loop {
10994                let value = if let Some(expr) = self.try_parse_expr_sub_query()? {
10995                    expr
10996                } else if let Ok(expr) = self.parse_expr() {
10997                    expr
10998                } else {
10999                    self.expected("variable value", self.peek_token())?
11000                };
11001
11002                values.push(value);
11003                if self.consume_token(&Token::Comma) {
11004                    continue;
11005                }
11006
11007                if parenthesized_assignment {
11008                    self.expect_token(&Token::RParen)?;
11009                }
11010                return Ok(Statement::SetVariable {
11011                    local: modifier == Some(Keyword::LOCAL),
11012                    hivevar: modifier == Some(Keyword::HIVEVAR),
11013                    variables,
11014                    value: values,
11015                });
11016            }
11017        }
11018
11019        let OneOrManyWithParens::One(variable) = variables else {
11020            return self.expected("set variable", self.peek_token());
11021        };
11022
11023        if variable.to_string().eq_ignore_ascii_case("TIMEZONE") {
11024            // For some databases (e.g. PostgreSQL), `SET TIME ZONE <value>` is an alias for `SET TIMEZONE [TO|=] <value>`.
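            // e.g. `SET TIME ZONE 'UTC'` and `SET TIMEZONE = 'UTC'` both
            // produce `Statement::SetTimeZone`.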
11025            match self.parse_expr() {
11026                Ok(expr) => Ok(Statement::SetTimeZone {
11027                    local: modifier == Some(Keyword::LOCAL),
11028                    value: expr,
11029                }),
11030                _ => self.expected("timezone value", self.peek_token())?,
11031            }
11032        } else if variable.to_string() == "CHARACTERISTICS" {
11033            self.expect_keywords(&[Keyword::AS, Keyword::TRANSACTION])?;
11034            Ok(Statement::SetTransaction {
11035                modes: self.parse_transaction_modes()?,
11036                snapshot: None,
11037                session: true,
11038            })
11039        } else if variable.to_string() == "TRANSACTION" && modifier.is_none() {
11040            if self.parse_keyword(Keyword::SNAPSHOT) {
11041                let snapshot_id = self.parse_value()?.value;
11042                return Ok(Statement::SetTransaction {
11043                    modes: vec![],
11044                    snapshot: Some(snapshot_id),
11045                    session: false,
11046                });
11047            }
11048            Ok(Statement::SetTransaction {
11049                modes: self.parse_transaction_modes()?,
11050                snapshot: None,
11051                session: false,
11052            })
11053        } else if self.dialect.supports_set_stmt_without_operator() {
11054            self.prev_token();
11055            self.parse_set_session_params()
11056        } else {
11057            self.expected("equals sign or TO", self.peek_token())
11058        }
11059    }
11060
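    /// Parse session parameter settings such as `SET STATISTICS IO ON`,
    /// `SET IDENTITY_INSERT <table> ON`, or a generic `SET <param> <value>`.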
11061    pub fn parse_set_session_params(&mut self) -> Result<Statement, ParserError> {
11062        if self.parse_keyword(Keyword::STATISTICS) {
11063            let topic = match self.parse_one_of_keywords(&[
11064                Keyword::IO,
11065                Keyword::PROFILE,
11066                Keyword::TIME,
11067                Keyword::XML,
11068            ]) {
11069                Some(Keyword::IO) => SessionParamStatsTopic::IO,
11070                Some(Keyword::PROFILE) => SessionParamStatsTopic::Profile,
11071                Some(Keyword::TIME) => SessionParamStatsTopic::Time,
11072                Some(Keyword::XML) => SessionParamStatsTopic::Xml,
11073                _ => return self.expected("IO, PROFILE, TIME or XML", self.peek_token()),
11074            };
11075            let value = self.parse_session_param_value()?;
11076            Ok(Statement::SetSessionParam(SetSessionParamKind::Statistics(
11077                SetSessionParamStatistics { topic, value },
11078            )))
11079        } else if self.parse_keyword(Keyword::IDENTITY_INSERT) {
11080            let obj = self.parse_object_name(false)?;
11081            let value = self.parse_session_param_value()?;
11082            Ok(Statement::SetSessionParam(
11083                SetSessionParamKind::IdentityInsert(SetSessionParamIdentityInsert { obj, value }),
11084            ))
11085        } else if self.parse_keyword(Keyword::OFFSETS) {
11086            let keywords = self.parse_comma_separated(|parser| {
11087                let next_token = parser.next_token();
11088                match &next_token.token {
11089                    Token::Word(w) => Ok(w.to_string()),
11090                    _ => parser.expected("SQL keyword", next_token),
11091                }
11092            })?;
11093            let value = self.parse_session_param_value()?;
11094            Ok(Statement::SetSessionParam(SetSessionParamKind::Offsets(
11095                SetSessionParamOffsets { keywords, value },
11096            )))
11097        } else {
11098            let names = self.parse_comma_separated(|parser| {
11099                let next_token = parser.next_token();
11100                match next_token.token {
11101                    Token::Word(w) => Ok(w.to_string()),
11102                    _ => parser.expected("Session param name", next_token),
11103                }
11104            })?;
11105            let value = self.parse_expr()?.to_string();
11106            Ok(Statement::SetSessionParam(SetSessionParamKind::Generic(
11107                SetSessionParamGeneric { names, value },
11108            )))
11109        }
11110    }
11111
11112    fn parse_session_param_value(&mut self) -> Result<SessionParamValue, ParserError> {
11113        if self.parse_keyword(Keyword::ON) {
11114            Ok(SessionParamValue::On)
11115        } else if self.parse_keyword(Keyword::OFF) {
11116            Ok(SessionParamValue::Off)
11117        } else {
11118            self.expected("ON or OFF", self.peek_token())
11119        }
11120    }
11121
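    /// Parse a `SHOW ...` statement, dispatching on the object keyword,
    /// e.g. `SHOW TABLES`, `SHOW COLUMNS FROM t`, or `SHOW VARIABLES` on MySQL.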
11122    pub fn parse_show(&mut self) -> Result<Statement, ParserError> {
11123        let terse = self.parse_keyword(Keyword::TERSE);
11124        let extended = self.parse_keyword(Keyword::EXTENDED);
11125        let full = self.parse_keyword(Keyword::FULL);
11126        let session = self.parse_keyword(Keyword::SESSION);
11127        let global = self.parse_keyword(Keyword::GLOBAL);
11128        let external = self.parse_keyword(Keyword::EXTERNAL);
11129        if self
11130            .parse_one_of_keywords(&[Keyword::COLUMNS, Keyword::FIELDS])
11131            .is_some()
11132        {
11133            Ok(self.parse_show_columns(extended, full)?)
11134        } else if self.parse_keyword(Keyword::TABLES) {
11135            Ok(self.parse_show_tables(terse, extended, full, external)?)
11136        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEWS]) {
11137            Ok(self.parse_show_views(terse, true)?)
11138        } else if self.parse_keyword(Keyword::VIEWS) {
11139            Ok(self.parse_show_views(terse, false)?)
11140        } else if self.parse_keyword(Keyword::FUNCTIONS) {
11141            Ok(self.parse_show_functions()?)
11142        } else if extended || full {
11143            Err(ParserError::ParserError(
11144                "EXTENDED/FULL are not supported with this type of SHOW query".to_string(),
11145            ))
11146        } else if self.parse_one_of_keywords(&[Keyword::CREATE]).is_some() {
11147            Ok(self.parse_show_create()?)
11148        } else if self.parse_keyword(Keyword::COLLATION) {
11149            Ok(self.parse_show_collation()?)
11150        } else if self.parse_keyword(Keyword::VARIABLES)
11151            && dialect_of!(self is MySqlDialect | GenericDialect)
11152        {
11153            Ok(Statement::ShowVariables {
11154                filter: self.parse_show_statement_filter()?,
11155                session,
11156                global,
11157            })
11158        } else if self.parse_keyword(Keyword::STATUS)
11159            && dialect_of!(self is MySqlDialect | GenericDialect)
11160        {
11161            Ok(Statement::ShowStatus {
11162                filter: self.parse_show_statement_filter()?,
11163                session,
11164                global,
11165            })
11166        } else if self.parse_keyword(Keyword::DATABASES) {
11167            self.parse_show_databases(terse)
11168        } else if self.parse_keyword(Keyword::SCHEMAS) {
11169            self.parse_show_schemas(terse)
11170        } else {
11171            Ok(Statement::ShowVariable {
11172                variable: self.parse_identifiers()?,
11173            })
11174        }
11175    }
11176
11177    fn parse_show_databases(&mut self, terse: bool) -> Result<Statement, ParserError> {
11178        let history = self.parse_keyword(Keyword::HISTORY);
11179        let show_options = self.parse_show_stmt_options()?;
11180        Ok(Statement::ShowDatabases {
11181            terse,
11182            history,
11183            show_options,
11184        })
11185    }
11186
11187    fn parse_show_schemas(&mut self, terse: bool) -> Result<Statement, ParserError> {
11188        let history = self.parse_keyword(Keyword::HISTORY);
11189        let show_options = self.parse_show_stmt_options()?;
11190        Ok(Statement::ShowSchemas {
11191            terse,
11192            history,
11193            show_options,
11194        })
11195    }
11196
11197    pub fn parse_show_create(&mut self) -> Result<Statement, ParserError> {
11198        let obj_type = match self.expect_one_of_keywords(&[
11199            Keyword::TABLE,
11200            Keyword::TRIGGER,
11201            Keyword::FUNCTION,
11202            Keyword::PROCEDURE,
11203            Keyword::EVENT,
11204            Keyword::VIEW,
11205        ])? {
11206            Keyword::TABLE => Ok(ShowCreateObject::Table),
11207            Keyword::TRIGGER => Ok(ShowCreateObject::Trigger),
11208            Keyword::FUNCTION => Ok(ShowCreateObject::Function),
11209            Keyword::PROCEDURE => Ok(ShowCreateObject::Procedure),
11210            Keyword::EVENT => Ok(ShowCreateObject::Event),
11211            Keyword::VIEW => Ok(ShowCreateObject::View),
11212            keyword => Err(ParserError::ParserError(format!(
11213                "Unable to map keyword to ShowCreateObject: {keyword:?}"
11214            ))),
11215        }?;
11216
11217        let obj_name = self.parse_object_name(false)?;
11218
11219        Ok(Statement::ShowCreate { obj_type, obj_name })
11220    }
11221
11222    pub fn parse_show_columns(
11223        &mut self,
11224        extended: bool,
11225        full: bool,
11226    ) -> Result<Statement, ParserError> {
11227        let show_options = self.parse_show_stmt_options()?;
11228        Ok(Statement::ShowColumns {
11229            extended,
11230            full,
11231            show_options,
11232        })
11233    }
11234
11235    fn parse_show_tables(
11236        &mut self,
11237        terse: bool,
11238        extended: bool,
11239        full: bool,
11240        external: bool,
11241    ) -> Result<Statement, ParserError> {
11242        let history = !external && self.parse_keyword(Keyword::HISTORY);
11243        let show_options = self.parse_show_stmt_options()?;
11244        Ok(Statement::ShowTables {
11245            terse,
11246            history,
11247            extended,
11248            full,
11249            external,
11250            show_options,
11251        })
11252    }
11253
11254    fn parse_show_views(
11255        &mut self,
11256        terse: bool,
11257        materialized: bool,
11258    ) -> Result<Statement, ParserError> {
11259        let show_options = self.parse_show_stmt_options()?;
11260        Ok(Statement::ShowViews {
11261            materialized,
11262            terse,
11263            show_options,
11264        })
11265    }
11266
11267    pub fn parse_show_functions(&mut self) -> Result<Statement, ParserError> {
11268        let filter = self.parse_show_statement_filter()?;
11269        Ok(Statement::ShowFunctions { filter })
11270    }
11271
11272    pub fn parse_show_collation(&mut self) -> Result<Statement, ParserError> {
11273        let filter = self.parse_show_statement_filter()?;
11274        Ok(Statement::ShowCollation { filter })
11275    }
11276
11277    pub fn parse_show_statement_filter(
11278        &mut self,
11279    ) -> Result<Option<ShowStatementFilter>, ParserError> {
11280        if self.parse_keyword(Keyword::LIKE) {
11281            Ok(Some(ShowStatementFilter::Like(
11282                self.parse_literal_string()?,
11283            )))
11284        } else if self.parse_keyword(Keyword::ILIKE) {
11285            Ok(Some(ShowStatementFilter::ILike(
11286                self.parse_literal_string()?,
11287            )))
11288        } else if self.parse_keyword(Keyword::WHERE) {
11289            Ok(Some(ShowStatementFilter::Where(self.parse_expr()?)))
11290        } else {
11291            self.maybe_parse(|parser| -> Result<String, ParserError> {
11292                parser.parse_literal_string()
11293            })?
11294            .map_or(Ok(None), |filter| {
11295                Ok(Some(ShowStatementFilter::NoKeyword(filter)))
11296            })
11297        }
11298    }
11299
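    /// Parse a `USE ...` statement, e.g. `USE my_db`, or dialect-specific forms
    /// such as `USE WAREHOUSE wh` (Snowflake) or `USE CATALOG c` (Databricks).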
11300    pub fn parse_use(&mut self) -> Result<Statement, ParserError> {
11301        // Determine which keywords are recognized by the current dialect
11302        let parsed_keyword = if dialect_of!(self is HiveDialect) {
11303            // HiveDialect accepts `USE DEFAULT;` without any database specified
11304            if self.parse_keyword(Keyword::DEFAULT) {
11305                return Ok(Statement::Use(Use::Default));
11306            }
11307            None // HiveDialect doesn't expect any other specific keyword after `USE`
11308        } else if dialect_of!(self is DatabricksDialect) {
11309            self.parse_one_of_keywords(&[Keyword::CATALOG, Keyword::DATABASE, Keyword::SCHEMA])
11310        } else if dialect_of!(self is SnowflakeDialect) {
11311            self.parse_one_of_keywords(&[
11312                Keyword::DATABASE,
11313                Keyword::SCHEMA,
11314                Keyword::WAREHOUSE,
11315                Keyword::ROLE,
11316                Keyword::SECONDARY,
11317            ])
11318        } else {
11319            None // No specific keywords for other dialects, including GenericDialect
11320        };
11321
11322        let result = if matches!(parsed_keyword, Some(Keyword::SECONDARY)) {
11323            self.parse_secondary_roles()?
11324        } else {
11325            let obj_name = self.parse_object_name(false)?;
11326            match parsed_keyword {
11327                Some(Keyword::CATALOG) => Use::Catalog(obj_name),
11328                Some(Keyword::DATABASE) => Use::Database(obj_name),
11329                Some(Keyword::SCHEMA) => Use::Schema(obj_name),
11330                Some(Keyword::WAREHOUSE) => Use::Warehouse(obj_name),
11331                Some(Keyword::ROLE) => Use::Role(obj_name),
11332                _ => Use::Object(obj_name),
11333            }
11334        };
11335
11336        Ok(Statement::Use(result))
11337    }
11338
11339    fn parse_secondary_roles(&mut self) -> Result<Use, ParserError> {
11340        self.expect_one_of_keywords(&[Keyword::ROLES, Keyword::ROLE])?;
11341        if self.parse_keyword(Keyword::NONE) {
11342            Ok(Use::SecondaryRoles(SecondaryRoles::None))
11343        } else if self.parse_keyword(Keyword::ALL) {
11344            Ok(Use::SecondaryRoles(SecondaryRoles::All))
11345        } else {
11346            let roles = self.parse_comma_separated(|parser| parser.parse_identifier())?;
11347            Ok(Use::SecondaryRoles(SecondaryRoles::List(roles)))
11348        }
11349    }
11350
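    /// Parse a table factor optionally followed by a sequence of joins,
    /// e.g. `t1 JOIN t2 ON t1.id = t2.id CROSS JOIN t3`.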
11351    pub fn parse_table_and_joins(&mut self) -> Result<TableWithJoins, ParserError> {
11352        let relation = self.parse_table_factor()?;
11353        // Note that for keywords to be properly handled here, they need to be
11354        // added to `RESERVED_FOR_TABLE_ALIAS`, otherwise they may be parsed as
11355        // a table alias.
11356        let mut joins = vec![];
11357        loop {
11358            let global = self.parse_keyword(Keyword::GLOBAL);
11359            let join = if self.parse_keyword(Keyword::CROSS) {
11360                let join_operator = if self.parse_keyword(Keyword::JOIN) {
11361                    JoinOperator::CrossJoin
11362                } else if self.parse_keyword(Keyword::APPLY) {
11363                    // MSSQL extension, similar to CROSS JOIN LATERAL
11364                    JoinOperator::CrossApply
11365                } else {
11366                    return self.expected("JOIN or APPLY after CROSS", self.peek_token());
11367                };
11368                Join {
11369                    relation: self.parse_table_factor()?,
11370                    global,
11371                    join_operator,
11372                }
11373            } else if self.parse_keyword(Keyword::OUTER) {
11374                // MSSQL extension, similar to LEFT JOIN LATERAL .. ON 1=1
11375                self.expect_keyword_is(Keyword::APPLY)?;
11376                Join {
11377                    relation: self.parse_table_factor()?,
11378                    global,
11379                    join_operator: JoinOperator::OuterApply,
11380                }
11381            } else if self.parse_keyword(Keyword::ASOF) {
11382                self.expect_keyword_is(Keyword::JOIN)?;
11383                let relation = self.parse_table_factor()?;
11384                self.expect_keyword_is(Keyword::MATCH_CONDITION)?;
11385                let match_condition = self.parse_parenthesized(Self::parse_expr)?;
11386                Join {
11387                    relation,
11388                    global,
11389                    join_operator: JoinOperator::AsOf {
11390                        match_condition,
11391                        constraint: self.parse_join_constraint(false)?,
11392                    },
11393                }
11394            } else {
11395                let natural = self.parse_keyword(Keyword::NATURAL);
11396                let peek_keyword = if let Token::Word(w) = self.peek_token().token {
11397                    w.keyword
11398                } else {
11399                    Keyword::NoKeyword
11400                };
11401
11402                let join_operator_type = match peek_keyword {
11403                    Keyword::INNER | Keyword::JOIN => {
11404                        let inner = self.parse_keyword(Keyword::INNER); // [ INNER ]
11405                        self.expect_keyword_is(Keyword::JOIN)?;
11406                        if inner {
11407                            JoinOperator::Inner
11408                        } else {
11409                            JoinOperator::Join
11410                        }
11411                    }
11412                    kw @ Keyword::LEFT | kw @ Keyword::RIGHT => {
11413                        let _ = self.next_token(); // consume LEFT/RIGHT
11414                        let is_left = kw == Keyword::LEFT;
11415                        let join_type = self.parse_one_of_keywords(&[
11416                            Keyword::OUTER,
11417                            Keyword::SEMI,
11418                            Keyword::ANTI,
11419                            Keyword::JOIN,
11420                        ]);
11421                        match join_type {
11422                            Some(Keyword::OUTER) => {
11423                                self.expect_keyword_is(Keyword::JOIN)?;
11424                                if is_left {
11425                                    JoinOperator::LeftOuter
11426                                } else {
11427                                    JoinOperator::RightOuter
11428                                }
11429                            }
11430                            Some(Keyword::SEMI) => {
11431                                self.expect_keyword_is(Keyword::JOIN)?;
11432                                if is_left {
11433                                    JoinOperator::LeftSemi
11434                                } else {
11435                                    JoinOperator::RightSemi
11436                                }
11437                            }
11438                            Some(Keyword::ANTI) => {
11439                                self.expect_keyword_is(Keyword::JOIN)?;
11440                                if is_left {
11441                                    JoinOperator::LeftAnti
11442                                } else {
11443                                    JoinOperator::RightAnti
11444                                }
11445                            }
11446                            Some(Keyword::JOIN) => {
11447                                if is_left {
11448                                    JoinOperator::Left
11449                                } else {
11450                                    JoinOperator::Right
11451                                }
11452                            }
11453                            _ => {
11454                                return Err(ParserError::ParserError(format!(
11455                                    "expected OUTER, SEMI, ANTI or JOIN after {kw:?}"
11456                                )))
11457                            }
11458                        }
11459                    }
11460                    Keyword::ANTI => {
11461                        let _ = self.next_token(); // consume ANTI
11462                        self.expect_keyword_is(Keyword::JOIN)?;
11463                        JoinOperator::Anti
11464                    }
11465                    Keyword::SEMI => {
11466                        let _ = self.next_token(); // consume SEMI
11467                        self.expect_keyword_is(Keyword::JOIN)?;
11468                        JoinOperator::Semi
11469                    }
11470                    Keyword::FULL => {
11471                        let _ = self.next_token(); // consume FULL
11472                        let _ = self.parse_keyword(Keyword::OUTER); // [ OUTER ]
11473                        self.expect_keyword_is(Keyword::JOIN)?;
11474                        JoinOperator::FullOuter
11475                    }
11476                    Keyword::OUTER => {
11477                        return self.expected("LEFT, RIGHT, or FULL", self.peek_token());
11478                    }
11479                    _ if natural => {
11480                        return self.expected("a join type after NATURAL", self.peek_token());
11481                    }
11482                    _ => break,
11483                };
11484                let relation = self.parse_table_factor()?;
11485                let join_constraint = self.parse_join_constraint(natural)?;
11486                Join {
11487                    relation,
11488                    global,
11489                    join_operator: join_operator_type(join_constraint),
11490                }
11491            };
11492            joins.push(join);
11493        }
11494        Ok(TableWithJoins { relation, joins })
11495    }
11496
11497    /// A table name or a parenthesized subquery, followed by optional `[AS] alias`
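    ///
    /// For example: `my_table AS t`, `(SELECT 1) AS sub`, or a table function
    /// such as `UNNEST(...)` in dialects that support it.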
11498    pub fn parse_table_factor(&mut self) -> Result<TableFactor, ParserError> {
11499        if self.parse_keyword(Keyword::LATERAL) {
11500            // LATERAL must always be followed by a subquery or table function.
11501            if self.consume_token(&Token::LParen) {
11502                self.parse_derived_table_factor(Lateral)
11503            } else {
11504                let name = self.parse_object_name(false)?;
11505                self.expect_token(&Token::LParen)?;
11506                let args = self.parse_optional_args()?;
11507                let alias = self.maybe_parse_table_alias()?;
11508                Ok(TableFactor::Function {
11509                    lateral: true,
11510                    name,
11511                    args,
11512                    alias,
11513                })
11514            }
11515        } else if self.parse_keyword(Keyword::TABLE) {
11516            // Parse a table function: `SELECT * FROM TABLE (<expr>) [ AS <alias> ]`
11517            self.expect_token(&Token::LParen)?;
11518            let expr = self.parse_expr()?;
11519            self.expect_token(&Token::RParen)?;
11520            let alias = self.maybe_parse_table_alias()?;
11521            Ok(TableFactor::TableFunction { expr, alias })
11522        } else if self.consume_token(&Token::LParen) {
11523            // A left paren introduces either a derived table (i.e., a subquery)
11524            // or a nested join. It's nearly impossible to determine ahead of
11525            // time which it is... so we just try to parse both.
11526            //
11527            // Here's an example that demonstrates the complexity:
11528            //                     /-------------------------------------------------------\
11529            //                     | /-----------------------------------\                 |
11530            //     SELECT * FROM ( ( ( (SELECT 1) UNION (SELECT 2) ) AS t1 NATURAL JOIN t2 ) )
11531            //                   ^ ^ ^ ^
11532            //                   | | | |
11533            //                   | | | |
11534            //                   | | | (4) belongs to a SetExpr::Query inside the subquery
11535            //                   | | (3) starts a derived table (subquery)
11536            //                   | (2) starts a nested join
11537            //                   (1) an additional set of parens around a nested join
11538            //
11539
11540            // If the recently consumed '(' starts a derived table, the call to
11541            // `parse_derived_table_factor` below will return success after parsing the
11542            // subquery, followed by the closing ')', and the alias of the derived table.
11543            // In the example above this is case (3).
11544            if let Some(mut table) =
11545                self.maybe_parse(|parser| parser.parse_derived_table_factor(NotLateral))?
11546            {
11547                while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT])
11548                {
11549                    table = match kw {
11550                        Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
11551                        Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
11552                        _ => unreachable!(),
11553                    }
11554                }
11555                return Ok(table);
11556            }
11557
11558            // A parsing error from `parse_derived_table_factor` indicates that the '(' we've
11559            // recently consumed does not start a derived table (cases 1, 2, or 4).
11560            // `maybe_parse` will ignore such an error and rewind to be after the opening '('.
11561
11562            // Inside the parentheses we expect to find an (A) table factor
11563            // followed by some joins or (B) another level of nesting.
11564            let mut table_and_joins = self.parse_table_and_joins()?;
11565
11566            #[allow(clippy::if_same_then_else)]
11567            if !table_and_joins.joins.is_empty() {
11568                self.expect_token(&Token::RParen)?;
11569                let alias = self.maybe_parse_table_alias()?;
11570                Ok(TableFactor::NestedJoin {
11571                    table_with_joins: Box::new(table_and_joins),
11572                    alias,
11573                }) // (A)
11574            } else if let TableFactor::NestedJoin {
11575                table_with_joins: _,
11576                alias: _,
11577            } = &table_and_joins.relation
11578            {
11579                // (B): `table_and_joins` (what we found inside the parentheses)
11580                // is a nested join `(foo JOIN bar)`, not followed by other joins.
11581                self.expect_token(&Token::RParen)?;
11582                let alias = self.maybe_parse_table_alias()?;
11583                Ok(TableFactor::NestedJoin {
11584                    table_with_joins: Box::new(table_and_joins),
11585                    alias,
11586                })
11587            } else if dialect_of!(self is SnowflakeDialect | GenericDialect) {
11588                // Dialect-specific behavior: Snowflake diverges from the
11589                // standard and from most of the other implementations by
11590                // allowing extra parentheses not only around a join (B), but
11591                // around lone table names (e.g. `FROM (mytable [AS alias])`)
11592                // and around derived tables (e.g. `FROM ((SELECT ...)
11593                // [AS alias])`) as well.
11594                self.expect_token(&Token::RParen)?;
11595
11596                if let Some(outer_alias) = self.maybe_parse_table_alias()? {
11597                    // Snowflake also allows specifying an alias *after* parens
11598                    // e.g. `FROM (mytable) AS alias`
11599                    match &mut table_and_joins.relation {
11600                        TableFactor::Derived { alias, .. }
11601                        | TableFactor::Table { alias, .. }
11602                        | TableFactor::Function { alias, .. }
11603                        | TableFactor::UNNEST { alias, .. }
11604                        | TableFactor::JsonTable { alias, .. }
11605                        | TableFactor::OpenJsonTable { alias, .. }
11606                        | TableFactor::TableFunction { alias, .. }
11607                        | TableFactor::Pivot { alias, .. }
11608                        | TableFactor::Unpivot { alias, .. }
11609                        | TableFactor::MatchRecognize { alias, .. }
11610                        | TableFactor::NestedJoin { alias, .. } => {
11611                            // but not `FROM (mytable AS alias1) AS alias2`.
11612                            if let Some(inner_alias) = alias {
11613                                return Err(ParserError::ParserError(format!(
11614                                    "duplicate alias {inner_alias}"
11615                                )));
11616                            }
11617                            // Act as if the alias was specified normally next
11618                            // to the table name: `(mytable) AS alias` ->
11619                            // `(mytable AS alias)`
11620                            alias.replace(outer_alias);
11621                        }
11622                    };
11623                }
11624                // Do not store the extra set of parens in the AST
11625                Ok(table_and_joins.relation)
11626            } else {
11627                // The SQL spec prohibits derived tables and bare tables from
11628                // appearing alone in parentheses (e.g. `FROM (mytable)`)
11629                self.expected("joined table", self.peek_token())
11630            }
11631        } else if dialect_of!(self is SnowflakeDialect | DatabricksDialect | GenericDialect)
11632            && matches!(
11633                self.peek_tokens(),
11634                [
11635                    Token::Word(Word {
11636                        keyword: Keyword::VALUES,
11637                        ..
11638                    }),
11639                    Token::LParen
11640                ]
11641            )
11642        {
11643            self.expect_keyword_is(Keyword::VALUES)?;
11644
11645            // Snowflake and Databricks allow syntax like below:
11646            // SELECT * FROM VALUES (1, 'a'), (2, 'b') AS t (col1, col2)
11647            // where there are no parentheses around the VALUES clause.
11648            let values = SetExpr::Values(self.parse_values(false)?);
11649            let alias = self.maybe_parse_table_alias()?;
11650            Ok(TableFactor::Derived {
11651                lateral: false,
11652                subquery: Box::new(Query {
11653                    with: None,
11654                    body: Box::new(values),
11655                    order_by: None,
11656                    limit: None,
11657                    limit_by: vec![],
11658                    offset: None,
11659                    fetch: None,
11660                    locks: vec![],
11661                    for_clause: None,
11662                    settings: None,
11663                    format_clause: None,
11664                }),
11665                alias,
11666            })
11667        } else if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
11668            && self.parse_keyword(Keyword::UNNEST)
11669        {
11670            self.expect_token(&Token::LParen)?;
11671            let array_exprs = self.parse_comma_separated(Parser::parse_expr)?;
11672            self.expect_token(&Token::RParen)?;
11673
11674            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
11675            let alias = self.maybe_parse_table_alias()?;
11680
11681            let with_offset = self.expect_keywords(&[Keyword::WITH, Keyword::OFFSET]).is_ok();
11685
11686            let with_offset_alias = if with_offset {
11687                self.parse_optional_alias(keywords::RESERVED_FOR_COLUMN_ALIAS)?
11688            } else {
11689                None
11690            };
11695
11696            Ok(TableFactor::UNNEST {
11697                alias,
11698                array_exprs,
11699                with_offset,
11700                with_offset_alias,
11701                with_ordinality,
11702            })
11703        } else if self.parse_keyword_with_tokens(Keyword::JSON_TABLE, &[Token::LParen]) {
11704            let json_expr = self.parse_expr()?;
11705            self.expect_token(&Token::Comma)?;
11706            let json_path = self.parse_value()?.value;
11707            self.expect_keyword_is(Keyword::COLUMNS)?;
11708            self.expect_token(&Token::LParen)?;
11709            let columns = self.parse_comma_separated(Parser::parse_json_table_column_def)?;
11710            self.expect_token(&Token::RParen)?;
11711            self.expect_token(&Token::RParen)?;
11712            let alias = self.maybe_parse_table_alias()?;
11713            Ok(TableFactor::JsonTable {
11714                json_expr,
11715                json_path,
11716                columns,
11717                alias,
11718            })
11719        } else if self.parse_keyword_with_tokens(Keyword::OPENJSON, &[Token::LParen]) {
11720            self.prev_token();
11721            self.parse_open_json_table_factor()
11722        } else {
11723            let name = self.parse_object_name(true)?;
11724
11725            let json_path = match self.peek_token().token {
11726                Token::LBracket if self.dialect.supports_partiql() => Some(self.parse_json_path()?),
11727                _ => None,
11728            };
11729
11730            let partitions: Vec<Ident> = if dialect_of!(self is MySqlDialect | GenericDialect)
11731                && self.parse_keyword(Keyword::PARTITION)
11732            {
11733                self.parse_parenthesized_identifiers()?
11734            } else {
11735                vec![]
11736            };
11737
11738            // Parse potential version qualifier
11739            let version = self.maybe_parse_table_version()?;
11740
11741            // Postgres, MSSQL, ClickHouse: table-valued functions:
11742            let args = if self.consume_token(&Token::LParen) {
11743                Some(self.parse_table_function_args()?)
11744            } else {
11745                None
11746            };
11747
11748            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
11749
11750            let mut sample = None;
11751            if self.dialect.supports_table_sample_before_alias() {
11752                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
11753                    sample = Some(TableSampleKind::BeforeTableAlias(parsed_sample));
11754                }
11755            }
11756
11757            let alias = self.maybe_parse_table_alias()?;
11758
            // MySQL-specific table hints:
11760            let index_hints = if self.dialect.supports_table_hints() {
11761                self.maybe_parse(|p| p.parse_table_index_hints())?
11762                    .unwrap_or(vec![])
11763            } else {
11764                vec![]
11765            };
11766
11767            // MSSQL-specific table hints:
11768            let mut with_hints = vec![];
11769            if self.parse_keyword(Keyword::WITH) {
11770                if self.consume_token(&Token::LParen) {
11771                    with_hints = self.parse_comma_separated(Parser::parse_expr)?;
11772                    self.expect_token(&Token::RParen)?;
11773                } else {
11774                    // rewind, as WITH may belong to the next statement's CTE
11775                    self.prev_token();
11776                }
11777            };
11778
11779            if !self.dialect.supports_table_sample_before_alias() {
11780                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
11781                    sample = Some(TableSampleKind::AfterTableAlias(parsed_sample));
11782                }
11783            }
11784
11785            let mut table = TableFactor::Table {
11786                name,
11787                alias,
11788                args,
11789                with_hints,
11790                version,
11791                partitions,
11792                with_ordinality,
11793                json_path,
11794                sample,
11795                index_hints,
11796            };
11797
11798            while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT]) {
11799                table = match kw {
11800                    Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
11801                    Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
11802                    _ => unreachable!(),
11803                }
11804            }
11805
11806            if self.dialect.supports_match_recognize()
11807                && self.parse_keyword(Keyword::MATCH_RECOGNIZE)
11808            {
11809                table = self.parse_match_recognize(table)?;
11810            }
11811
11812            Ok(table)
11813        }
11814    }
11815
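    /// Parses an optional `TABLESAMPLE`/`SAMPLE` clause following a table factor,
    /// for example (illustrative PostgreSQL- and Hive-style syntax):
    ///
    /// ```sql
    /// TABLESAMPLE BERNOULLI (10) REPEATABLE (123)
    /// ```
    /// ```sql
    /// TABLESAMPLE (BUCKET 3 OUT OF 16 ON id)
    /// ```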
11816    fn maybe_parse_table_sample(&mut self) -> Result<Option<Box<TableSample>>, ParserError> {
11817        let modifier = if self.parse_keyword(Keyword::TABLESAMPLE) {
11818            TableSampleModifier::TableSample
11819        } else if self.parse_keyword(Keyword::SAMPLE) {
11820            TableSampleModifier::Sample
11821        } else {
11822            return Ok(None);
11823        };
11824
11825        let name = match self.parse_one_of_keywords(&[
11826            Keyword::BERNOULLI,
11827            Keyword::ROW,
11828            Keyword::SYSTEM,
11829            Keyword::BLOCK,
11830        ]) {
11831            Some(Keyword::BERNOULLI) => Some(TableSampleMethod::Bernoulli),
11832            Some(Keyword::ROW) => Some(TableSampleMethod::Row),
11833            Some(Keyword::SYSTEM) => Some(TableSampleMethod::System),
11834            Some(Keyword::BLOCK) => Some(TableSampleMethod::Block),
11835            _ => None,
11836        };
11837
11838        let parenthesized = self.consume_token(&Token::LParen);
11839
11840        let (quantity, bucket) = if parenthesized && self.parse_keyword(Keyword::BUCKET) {
11841            let selected_bucket = self.parse_number_value()?.value;
11842            self.expect_keywords(&[Keyword::OUT, Keyword::OF])?;
11843            let total = self.parse_number_value()?.value;
11844            let on = if self.parse_keyword(Keyword::ON) {
11845                Some(self.parse_expr()?)
11846            } else {
11847                None
11848            };
11849            (
11850                None,
11851                Some(TableSampleBucket {
11852                    bucket: selected_bucket,
11853                    total,
11854                    on,
11855                }),
11856            )
11857        } else {
11858            let value = match self.maybe_parse(|p| p.parse_expr())? {
11859                Some(num) => num,
11860                None => {
11861                    let next_token = self.next_token();
11862                    if let Token::Word(w) = next_token.token {
11863                        Expr::Value(Value::Placeholder(w.value).with_span(next_token.span))
11864                    } else {
11865                        return parser_err!(
11866                            "Expecting number or byte length e.g. 100M",
11867                            self.peek_token().span.start
11868                        );
11869                    }
11870                }
11871            };
11872            let unit = if self.parse_keyword(Keyword::ROWS) {
11873                Some(TableSampleUnit::Rows)
11874            } else if self.parse_keyword(Keyword::PERCENT) {
11875                Some(TableSampleUnit::Percent)
11876            } else {
11877                None
11878            };
11879            (
11880                Some(TableSampleQuantity {
11881                    parenthesized,
11882                    value,
11883                    unit,
11884                }),
11885                None,
11886            )
11887        };
11888        if parenthesized {
11889            self.expect_token(&Token::RParen)?;
11890        }
11891
11892        let seed = if self.parse_keyword(Keyword::REPEATABLE) {
11893            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Repeatable)?)
11894        } else if self.parse_keyword(Keyword::SEED) {
11895            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Seed)?)
11896        } else {
11897            None
11898        };
11899
11900        let offset = if self.parse_keyword(Keyword::OFFSET) {
11901            Some(self.parse_expr()?)
11902        } else {
11903            None
11904        };
11905
11906        Ok(Some(Box::new(TableSample {
11907            modifier,
11908            name,
11909            quantity,
11910            seed,
11911            bucket,
11912            offset,
11913        })))
11914    }
11915
11916    fn parse_table_sample_seed(
11917        &mut self,
11918        modifier: TableSampleSeedModifier,
11919    ) -> Result<TableSampleSeed, ParserError> {
11920        self.expect_token(&Token::LParen)?;
11921        let value = self.parse_number_value()?.value;
11922        self.expect_token(&Token::RParen)?;
11923        Ok(TableSampleSeed { modifier, value })
11924    }
11925
    /// Parses an `OPENJSON( jsonExpression [ , path ] ) [ <with_clause> ]` clause,
11927    /// assuming the `OPENJSON` keyword was already consumed.
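    ///
    /// For example (illustrative MSSQL-style syntax):
    ///
    /// ```sql
    /// OPENJSON(@json, '$.info') WITH (id INT '$.id', name NVARCHAR(50) '$.name')
    /// ```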
11928    fn parse_open_json_table_factor(&mut self) -> Result<TableFactor, ParserError> {
11929        self.expect_token(&Token::LParen)?;
11930        let json_expr = self.parse_expr()?;
11931        let json_path = if self.consume_token(&Token::Comma) {
11932            Some(self.parse_value()?.value)
11933        } else {
11934            None
11935        };
11936        self.expect_token(&Token::RParen)?;
11937        let columns = if self.parse_keyword(Keyword::WITH) {
11938            self.expect_token(&Token::LParen)?;
11939            let columns = self.parse_comma_separated(Parser::parse_openjson_table_column_def)?;
11940            self.expect_token(&Token::RParen)?;
11941            columns
11942        } else {
11943            Vec::new()
11944        };
11945        let alias = self.maybe_parse_table_alias()?;
11946        Ok(TableFactor::OpenJsonTable {
11947            json_expr,
11948            json_path,
11949            columns,
11950            alias,
11951        })
11952    }
11953
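    /// Parses the parenthesized body of a `MATCH_RECOGNIZE` clause, assuming the
    /// `MATCH_RECOGNIZE` keyword has already been consumed.
    ///
    /// For example (illustrative Snowflake-style syntax):
    ///
    /// ```sql
    /// MATCH_RECOGNIZE (
    ///     PARTITION BY symbol
    ///     ORDER BY order_date
    ///     MEASURES MATCH_NUMBER() AS match_number
    ///     ONE ROW PER MATCH
    ///     AFTER MATCH SKIP PAST LAST ROW
    ///     PATTERN (A B+)
    ///     DEFINE A AS price > 10, B AS price <= 10
    /// )
    /// ```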
11954    fn parse_match_recognize(&mut self, table: TableFactor) -> Result<TableFactor, ParserError> {
11955        self.expect_token(&Token::LParen)?;
11956
11957        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
11958            self.parse_comma_separated(Parser::parse_expr)?
11959        } else {
11960            vec![]
11961        };
11962
11963        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
11964            self.parse_comma_separated(Parser::parse_order_by_expr)?
11965        } else {
11966            vec![]
11967        };
11968
11969        let measures = if self.parse_keyword(Keyword::MEASURES) {
11970            self.parse_comma_separated(|p| {
11971                let expr = p.parse_expr()?;
11972                let _ = p.parse_keyword(Keyword::AS);
11973                let alias = p.parse_identifier()?;
11974                Ok(Measure { expr, alias })
11975            })?
11976        } else {
11977            vec![]
11978        };
11979
11980        let rows_per_match =
11981            if self.parse_keywords(&[Keyword::ONE, Keyword::ROW, Keyword::PER, Keyword::MATCH]) {
11982                Some(RowsPerMatch::OneRow)
11983            } else if self.parse_keywords(&[
11984                Keyword::ALL,
11985                Keyword::ROWS,
11986                Keyword::PER,
11987                Keyword::MATCH,
11988            ]) {
11989                Some(RowsPerMatch::AllRows(
11990                    if self.parse_keywords(&[Keyword::SHOW, Keyword::EMPTY, Keyword::MATCHES]) {
11991                        Some(EmptyMatchesMode::Show)
11992                    } else if self.parse_keywords(&[
11993                        Keyword::OMIT,
11994                        Keyword::EMPTY,
11995                        Keyword::MATCHES,
11996                    ]) {
11997                        Some(EmptyMatchesMode::Omit)
11998                    } else if self.parse_keywords(&[
11999                        Keyword::WITH,
12000                        Keyword::UNMATCHED,
12001                        Keyword::ROWS,
12002                    ]) {
12003                        Some(EmptyMatchesMode::WithUnmatched)
12004                    } else {
12005                        None
12006                    },
12007                ))
12008            } else {
12009                None
12010            };
12011
12012        let after_match_skip =
12013            if self.parse_keywords(&[Keyword::AFTER, Keyword::MATCH, Keyword::SKIP]) {
12014                if self.parse_keywords(&[Keyword::PAST, Keyword::LAST, Keyword::ROW]) {
12015                    Some(AfterMatchSkip::PastLastRow)
12016                } else if self.parse_keywords(&[Keyword::TO, Keyword::NEXT, Keyword::ROW]) {
12017                    Some(AfterMatchSkip::ToNextRow)
12018                } else if self.parse_keywords(&[Keyword::TO, Keyword::FIRST]) {
12019                    Some(AfterMatchSkip::ToFirst(self.parse_identifier()?))
12020                } else if self.parse_keywords(&[Keyword::TO, Keyword::LAST]) {
12021                    Some(AfterMatchSkip::ToLast(self.parse_identifier()?))
12022                } else {
12023                    let found = self.next_token();
12024                    return self.expected("after match skip option", found);
12025                }
12026            } else {
12027                None
12028            };
12029
12030        self.expect_keyword_is(Keyword::PATTERN)?;
12031        let pattern = self.parse_parenthesized(Self::parse_pattern)?;
12032
12033        self.expect_keyword_is(Keyword::DEFINE)?;
12034
12035        let symbols = self.parse_comma_separated(|p| {
12036            let symbol = p.parse_identifier()?;
12037            p.expect_keyword_is(Keyword::AS)?;
12038            let definition = p.parse_expr()?;
12039            Ok(SymbolDefinition { symbol, definition })
12040        })?;
12041
12042        self.expect_token(&Token::RParen)?;
12043
12044        let alias = self.maybe_parse_table_alias()?;
12045
12046        Ok(TableFactor::MatchRecognize {
12047            table: Box::new(table),
12048            partition_by,
12049            order_by,
12050            measures,
12051            rows_per_match,
12052            after_match_skip,
12053            pattern,
12054            symbols,
12055            alias,
12056        })
12057    }
12058
12059    fn parse_base_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
12060        match self.next_token().token {
12061            Token::Caret => Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::Start)),
12062            Token::Placeholder(s) if s == "$" => {
12063                Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::End))
12064            }
12065            Token::LBrace => {
12066                self.expect_token(&Token::Minus)?;
12067                let symbol = self.parse_identifier().map(MatchRecognizeSymbol::Named)?;
12068                self.expect_token(&Token::Minus)?;
12069                self.expect_token(&Token::RBrace)?;
12070                Ok(MatchRecognizePattern::Exclude(symbol))
12071            }
12072            Token::Word(Word {
12073                value,
12074                quote_style: None,
12075                ..
12076            }) if value == "PERMUTE" => {
12077                self.expect_token(&Token::LParen)?;
12078                let symbols = self.parse_comma_separated(|p| {
12079                    p.parse_identifier().map(MatchRecognizeSymbol::Named)
12080                })?;
12081                self.expect_token(&Token::RParen)?;
12082                Ok(MatchRecognizePattern::Permute(symbols))
12083            }
12084            Token::LParen => {
12085                let pattern = self.parse_pattern()?;
12086                self.expect_token(&Token::RParen)?;
12087                Ok(MatchRecognizePattern::Group(Box::new(pattern)))
12088            }
12089            _ => {
12090                self.prev_token();
12091                self.parse_identifier()
12092                    .map(MatchRecognizeSymbol::Named)
12093                    .map(MatchRecognizePattern::Symbol)
12094            }
12095        }
12096    }
12097
12098    fn parse_repetition_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
12099        let mut pattern = self.parse_base_pattern()?;
12100        loop {
12101            let token = self.next_token();
12102            let quantifier = match token.token {
12103                Token::Mul => RepetitionQuantifier::ZeroOrMore,
12104                Token::Plus => RepetitionQuantifier::OneOrMore,
12105                Token::Placeholder(s) if s == "?" => RepetitionQuantifier::AtMostOne,
12106                Token::LBrace => {
12107                    // quantifier is a range like {n} or {n,} or {,m} or {n,m}
12108                    let token = self.next_token();
12109                    match token.token {
12110                        Token::Comma => {
12111                            let next_token = self.next_token();
12112                            let Token::Number(n, _) = next_token.token else {
12113                                return self.expected("literal number", next_token);
12114                            };
12115                            self.expect_token(&Token::RBrace)?;
12116                            RepetitionQuantifier::AtMost(Self::parse(n, token.span.start)?)
12117                        }
12118                        Token::Number(n, _) if self.consume_token(&Token::Comma) => {
12119                            let next_token = self.next_token();
12120                            match next_token.token {
12121                                Token::Number(m, _) => {
12122                                    self.expect_token(&Token::RBrace)?;
12123                                    RepetitionQuantifier::Range(
12124                                        Self::parse(n, token.span.start)?,
12125                                        Self::parse(m, token.span.start)?,
12126                                    )
12127                                }
12128                                Token::RBrace => {
12129                                    RepetitionQuantifier::AtLeast(Self::parse(n, token.span.start)?)
12130                                }
12131                                _ => {
12132                                    return self.expected("} or upper bound", next_token);
12133                                }
12134                            }
12135                        }
12136                        Token::Number(n, _) => {
12137                            self.expect_token(&Token::RBrace)?;
12138                            RepetitionQuantifier::Exactly(Self::parse(n, token.span.start)?)
12139                        }
12140                        _ => return self.expected("quantifier range", token),
12141                    }
12142                }
12143                _ => {
12144                    self.prev_token();
12145                    break;
12146                }
12147            };
12148            pattern = MatchRecognizePattern::Repetition(Box::new(pattern), quantifier);
12149        }
12150        Ok(pattern)
12151    }
12152
12153    fn parse_concat_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
12154        let mut patterns = vec![self.parse_repetition_pattern()?];
12155        while !matches!(self.peek_token().token, Token::RParen | Token::Pipe) {
12156            patterns.push(self.parse_repetition_pattern()?);
12157        }
12158        match <[MatchRecognizePattern; 1]>::try_from(patterns) {
12159            Ok([pattern]) => Ok(pattern),
12160            Err(patterns) => Ok(MatchRecognizePattern::Concat(patterns)),
12161        }
12162    }
12163
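    /// Parses a complete `MATCH_RECOGNIZE` row pattern such as `A (B | C)+`
    /// (illustrative), treating alternation (`|`) as the lowest-precedence operator.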
12164    fn parse_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
12165        let pattern = self.parse_concat_pattern()?;
12166        if self.consume_token(&Token::Pipe) {
12167            match self.parse_pattern()? {
12168                // flatten nested alternations
12169                MatchRecognizePattern::Alternation(mut patterns) => {
12170                    patterns.insert(0, pattern);
12171                    Ok(MatchRecognizePattern::Alternation(patterns))
12172                }
12173                next => Ok(MatchRecognizePattern::Alternation(vec![pattern, next])),
12174            }
12175        } else {
12176            Ok(pattern)
12177        }
12178    }
12179
    /// Parses the timestamp version specifier, if present (i.e. for querying historical data)
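    ///
    /// For example (illustrative MSSQL-style temporal syntax):
    ///
    /// ```sql
    /// SELECT * FROM t FOR SYSTEM_TIME AS OF '2019-01-29 00:00:00'
    /// ```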
12181    pub fn maybe_parse_table_version(&mut self) -> Result<Option<TableVersion>, ParserError> {
12182        if self.dialect.supports_timestamp_versioning() {
12183            if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
12184            {
12185                let expr = self.parse_expr()?;
12186                return Ok(Some(TableVersion::ForSystemTimeAsOf(expr)));
12187            } else if self.peek_keyword(Keyword::AT) || self.peek_keyword(Keyword::BEFORE) {
12188                let func_name = self.parse_object_name(true)?;
12189                let func = self.parse_function(func_name)?;
12190                return Ok(Some(TableVersion::Function(func)));
12191            }
12192        }
12193        Ok(None)
12194    }
12195
12196    /// Parses MySQL's JSON_TABLE column definition.
12197    /// For example: `id INT EXISTS PATH '$' DEFAULT '0' ON EMPTY ERROR ON ERROR`
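    /// A nested column definition is also accepted, for example (illustrative):
    /// `NESTED PATH '$.items[*]' COLUMNS (price DECIMAL(10,2) PATH '$.price')`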
12198    pub fn parse_json_table_column_def(&mut self) -> Result<JsonTableColumn, ParserError> {
12199        if self.parse_keyword(Keyword::NESTED) {
12200            let _has_path_keyword = self.parse_keyword(Keyword::PATH);
12201            let path = self.parse_value()?.value;
12202            self.expect_keyword_is(Keyword::COLUMNS)?;
12203            let columns = self.parse_parenthesized(|p| {
12204                p.parse_comma_separated(Self::parse_json_table_column_def)
12205            })?;
12206            return Ok(JsonTableColumn::Nested(JsonTableNestedColumn {
12207                path,
12208                columns,
12209            }));
12210        }
12211        let name = self.parse_identifier()?;
12212        if self.parse_keyword(Keyword::FOR) {
12213            self.expect_keyword_is(Keyword::ORDINALITY)?;
12214            return Ok(JsonTableColumn::ForOrdinality(name));
12215        }
12216        let r#type = self.parse_data_type()?;
12217        let exists = self.parse_keyword(Keyword::EXISTS);
12218        self.expect_keyword_is(Keyword::PATH)?;
12219        let path = self.parse_value()?.value;
12220        let mut on_empty = None;
12221        let mut on_error = None;
12222        while let Some(error_handling) = self.parse_json_table_column_error_handling()? {
12223            if self.parse_keyword(Keyword::EMPTY) {
12224                on_empty = Some(error_handling);
12225            } else {
12226                self.expect_keyword_is(Keyword::ERROR)?;
12227                on_error = Some(error_handling);
12228            }
12229        }
12230        Ok(JsonTableColumn::Named(JsonTableNamedColumn {
12231            name,
12232            r#type,
12233            path,
12234            exists,
12235            on_empty,
12236            on_error,
12237        }))
12238    }
12239
12240    /// Parses MSSQL's `OPENJSON WITH` column definition.
12241    ///
12242    /// ```sql
12243    /// colName type [ column_path ] [ AS JSON ]
12244    /// ```
12245    ///
12246    /// Reference: <https://learn.microsoft.com/en-us/sql/t-sql/functions/openjson-transact-sql?view=sql-server-ver16#syntax>
12247    pub fn parse_openjson_table_column_def(&mut self) -> Result<OpenJsonTableColumn, ParserError> {
12248        let name = self.parse_identifier()?;
12249        let r#type = self.parse_data_type()?;
12250        let path = if let Token::SingleQuotedString(path) = self.peek_token().token {
12251            self.next_token();
12252            Some(path)
12253        } else {
12254            None
12255        };
12256        let as_json = self.parse_keyword(Keyword::AS);
12257        if as_json {
12258            self.expect_keyword_is(Keyword::JSON)?;
12259        }
12260        Ok(OpenJsonTableColumn {
12261            name,
12262            r#type,
12263            path,
12264            as_json,
12265        })
12266    }
12267
12268    fn parse_json_table_column_error_handling(
12269        &mut self,
12270    ) -> Result<Option<JsonTableColumnErrorHandling>, ParserError> {
12271        let res = if self.parse_keyword(Keyword::NULL) {
12272            JsonTableColumnErrorHandling::Null
12273        } else if self.parse_keyword(Keyword::ERROR) {
12274            JsonTableColumnErrorHandling::Error
12275        } else if self.parse_keyword(Keyword::DEFAULT) {
12276            JsonTableColumnErrorHandling::Default(self.parse_value()?.value)
12277        } else {
12278            return Ok(None);
12279        };
12280        self.expect_keyword_is(Keyword::ON)?;
12281        Ok(Some(res))
12282    }
12283
12284    pub fn parse_derived_table_factor(
12285        &mut self,
12286        lateral: IsLateral,
12287    ) -> Result<TableFactor, ParserError> {
12288        let subquery = self.parse_query()?;
12289        self.expect_token(&Token::RParen)?;
12290        let alias = self.maybe_parse_table_alias()?;
12291        Ok(TableFactor::Derived {
12292            lateral: match lateral {
12293                Lateral => true,
12294                NotLateral => false,
12295            },
12296            subquery,
12297            alias,
12298        })
12299    }
12300
12301    fn parse_aliased_function_call(&mut self) -> Result<ExprWithAlias, ParserError> {
12302        let function_name = match self.next_token().token {
12303            Token::Word(w) => Ok(w.value),
12304            _ => self.expected("a function identifier", self.peek_token()),
12305        }?;
12306        let expr = self.parse_function(ObjectName::from(vec![Ident::new(function_name)]))?;
12307        let alias = if self.parse_keyword(Keyword::AS) {
12308            Some(self.parse_identifier()?)
12309        } else {
12310            None
12311        };
12312
12313        Ok(ExprWithAlias { expr, alias })
    }

12315    /// Parses an expression with an optional alias
12316    ///
12317    /// Examples:
12318    ///
12319    /// ```sql
12320    /// SUM(price) AS total_price
12321    /// ```
12322    /// ```sql
12323    /// SUM(price)
12324    /// ```
12325    ///
12326    /// Example
12327    /// ```
12328    /// # use sqlparser::parser::{Parser, ParserError};
12329    /// # use sqlparser::dialect::GenericDialect;
    /// # fn main() -> Result<(), ParserError> {
12331    /// let sql = r#"SUM("a") as "b""#;
12332    /// let mut parser = Parser::new(&GenericDialect).try_with_sql(sql)?;
12333    /// let expr_with_alias = parser.parse_expr_with_alias()?;
12334    /// assert_eq!(Some("b".to_string()), expr_with_alias.alias.map(|x|x.value));
12335    /// # Ok(())
    /// # }
    /// ```
12337    pub fn parse_expr_with_alias(&mut self) -> Result<ExprWithAlias, ParserError> {
12338        let expr = self.parse_expr()?;
12339        let alias = if self.parse_keyword(Keyword::AS) {
12340            Some(self.parse_identifier()?)
12341        } else {
12342            None
12343        };
12344
12345        Ok(ExprWithAlias { expr, alias })
12346    }
12347
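    /// Parses a `PIVOT` clause applied to a table factor, assuming the `PIVOT`
    /// keyword has already been consumed. For example (illustrative):
    ///
    /// ```sql
    /// PIVOT(SUM(amount) FOR month IN ('JAN', 'FEB')) AS p
    /// ```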
12348    pub fn parse_pivot_table_factor(
12349        &mut self,
12350        table: TableFactor,
12351    ) -> Result<TableFactor, ParserError> {
12352        self.expect_token(&Token::LParen)?;
12353        let aggregate_functions = self.parse_comma_separated(Self::parse_aliased_function_call)?;
12354        self.expect_keyword_is(Keyword::FOR)?;
12355        let value_column = self.parse_period_separated(|p| p.parse_identifier())?;
12356        self.expect_keyword_is(Keyword::IN)?;
12357
12358        self.expect_token(&Token::LParen)?;
12359        let value_source = if self.parse_keyword(Keyword::ANY) {
12360            let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
12361                self.parse_comma_separated(Parser::parse_order_by_expr)?
12362            } else {
12363                vec![]
12364            };
12365            PivotValueSource::Any(order_by)
12366        } else if self.peek_sub_query() {
12367            PivotValueSource::Subquery(self.parse_query()?)
12368        } else {
12369            PivotValueSource::List(self.parse_comma_separated(Self::parse_expr_with_alias)?)
12370        };
12371        self.expect_token(&Token::RParen)?;
12372
12373        let default_on_null =
12374            if self.parse_keywords(&[Keyword::DEFAULT, Keyword::ON, Keyword::NULL]) {
12375                self.expect_token(&Token::LParen)?;
12376                let expr = self.parse_expr()?;
12377                self.expect_token(&Token::RParen)?;
12378                Some(expr)
12379            } else {
12380                None
12381            };
12382
12383        self.expect_token(&Token::RParen)?;
12384        let alias = self.maybe_parse_table_alias()?;
12385        Ok(TableFactor::Pivot {
12386            table: Box::new(table),
12387            aggregate_functions,
12388            value_column,
12389            value_source,
12390            default_on_null,
12391            alias,
12392        })
12393    }
12394
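    /// Parses an `UNPIVOT` clause applied to a table factor, assuming the `UNPIVOT`
    /// keyword has already been consumed. For example (illustrative):
    ///
    /// ```sql
    /// UNPIVOT(quantity FOR month IN (jan, feb, mar)) AS u
    /// ```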
12395    pub fn parse_unpivot_table_factor(
12396        &mut self,
12397        table: TableFactor,
12398    ) -> Result<TableFactor, ParserError> {
12399        self.expect_token(&Token::LParen)?;
12400        let value = self.parse_identifier()?;
12401        self.expect_keyword_is(Keyword::FOR)?;
12402        let name = self.parse_identifier()?;
12403        self.expect_keyword_is(Keyword::IN)?;
12404        let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
12405        self.expect_token(&Token::RParen)?;
12406        let alias = self.maybe_parse_table_alias()?;
12407        Ok(TableFactor::Unpivot {
12408            table: Box::new(table),
12409            value,
12410            name,
12411            columns,
12412            alias,
12413        })
12414    }
12415
12416    pub fn parse_join_constraint(&mut self, natural: bool) -> Result<JoinConstraint, ParserError> {
12417        if natural {
12418            Ok(JoinConstraint::Natural)
12419        } else if self.parse_keyword(Keyword::ON) {
12420            let constraint = self.parse_expr()?;
12421            Ok(JoinConstraint::On(constraint))
12422        } else if self.parse_keyword(Keyword::USING) {
12423            let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
12424            Ok(JoinConstraint::Using(columns))
12425        } else {
12426            Ok(JoinConstraint::None)
12427            //self.expected("ON, or USING after JOIN", self.peek_token())
12428        }
12429    }
12430
12431    /// Parse a GRANT statement.
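    ///
    /// For example (illustrative):
    ///
    /// ```sql
    /// GRANT SELECT, INSERT ON mytable TO myuser WITH GRANT OPTION
    /// ```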
12432    pub fn parse_grant(&mut self) -> Result<Statement, ParserError> {
12433        let (privileges, objects) = self.parse_grant_revoke_privileges_objects()?;
12434
12435        self.expect_keyword_is(Keyword::TO)?;
12436        let grantees = self.parse_grantees()?;
12437
12438        let with_grant_option =
12439            self.parse_keywords(&[Keyword::WITH, Keyword::GRANT, Keyword::OPTION]);
12440
        let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
            Some(self.parse_identifier()?)
        } else {
            None
        };
12444
12445        Ok(Statement::Grant {
12446            privileges,
12447            objects,
12448            grantees,
12449            with_grant_option,
12450            granted_by,
12451        })
12452    }
12453
12454    fn parse_grantees(&mut self) -> Result<Vec<Grantee>, ParserError> {
12455        let mut values = vec![];
12456        let mut grantee_type = GranteesType::None;
12457        loop {
12458            grantee_type = if self.parse_keyword(Keyword::ROLE) {
12459                GranteesType::Role
12460            } else if self.parse_keyword(Keyword::USER) {
12461                GranteesType::User
12462            } else if self.parse_keyword(Keyword::SHARE) {
12463                GranteesType::Share
12464            } else if self.parse_keyword(Keyword::GROUP) {
12465                GranteesType::Group
12466            } else if self.parse_keyword(Keyword::PUBLIC) {
12467                GranteesType::Public
12468            } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
12469                GranteesType::DatabaseRole
12470            } else if self.parse_keywords(&[Keyword::APPLICATION, Keyword::ROLE]) {
12471                GranteesType::ApplicationRole
12472            } else if self.parse_keyword(Keyword::APPLICATION) {
12473                GranteesType::Application
12474            } else {
                grantee_type // keep the value from the previous iteration if not specified
12476            };
12477
12478            let grantee = if grantee_type == GranteesType::Public {
12479                Grantee {
12480                    grantee_type: grantee_type.clone(),
12481                    name: None,
12482                }
12483            } else {
12484                let mut name = self.parse_grantee_name()?;
12485                if self.consume_token(&Token::Colon) {
12486                    // Redshift supports namespace prefix for external users and groups:
12487                    // <Namespace>:<GroupName> or <Namespace>:<UserName>
12488                    // https://docs.aws.amazon.com/redshift/latest/mgmt/redshift-iam-access-control-native-idp.html
12489                    let ident = self.parse_identifier()?;
12490                    if let GranteeName::ObjectName(namespace) = name {
12491                        name = GranteeName::ObjectName(ObjectName::from(vec![Ident::new(
12492                            format!("{}:{}", namespace, ident),
12493                        )]));
12494                    };
12495                }
12496                Grantee {
12497                    grantee_type: grantee_type.clone(),
12498                    name: Some(name),
12499                }
12500            };
12501
12502            values.push(grantee);
12503
12504            if !self.consume_token(&Token::Comma) {
12505                break;
12506            }
12507        }
12508
12509        Ok(values)
12510    }
12511
12512    pub fn parse_grant_revoke_privileges_objects(
12513        &mut self,
12514    ) -> Result<(Privileges, Option<GrantObjects>), ParserError> {
12515        let privileges = if self.parse_keyword(Keyword::ALL) {
12516            Privileges::All {
12517                with_privileges_keyword: self.parse_keyword(Keyword::PRIVILEGES),
12518            }
12519        } else {
12520            let actions = self.parse_actions_list()?;
12521            Privileges::Actions(actions)
12522        };
12523
12524        let objects = if self.parse_keyword(Keyword::ON) {
12525            if self.parse_keywords(&[Keyword::ALL, Keyword::TABLES, Keyword::IN, Keyword::SCHEMA]) {
12526                Some(GrantObjects::AllTablesInSchema {
12527                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
12528                })
12529            } else if self.parse_keywords(&[
12530                Keyword::ALL,
12531                Keyword::SEQUENCES,
12532                Keyword::IN,
12533                Keyword::SCHEMA,
12534            ]) {
12535                Some(GrantObjects::AllSequencesInSchema {
12536                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
12537                })
12538            } else {
                let object_type = self.parse_one_of_keywords(&[
                    Keyword::SEQUENCE,
                    Keyword::DATABASE,
                    Keyword::SCHEMA,
                    Keyword::TABLE,
                    Keyword::VIEW,
                    Keyword::WAREHOUSE,
                    Keyword::INTEGRATION,
                ]);
12552                let objects =
12553                    self.parse_comma_separated(|p| p.parse_object_name_with_wildcards(false, true));
12554                match object_type {
12555                    Some(Keyword::DATABASE) => Some(GrantObjects::Databases(objects?)),
12556                    Some(Keyword::SCHEMA) => Some(GrantObjects::Schemas(objects?)),
12557                    Some(Keyword::SEQUENCE) => Some(GrantObjects::Sequences(objects?)),
12558                    Some(Keyword::WAREHOUSE) => Some(GrantObjects::Warehouses(objects?)),
12559                    Some(Keyword::INTEGRATION) => Some(GrantObjects::Integrations(objects?)),
12560                    Some(Keyword::VIEW) => Some(GrantObjects::Views(objects?)),
12561                    Some(Keyword::TABLE) | None => Some(GrantObjects::Tables(objects?)),
12562                    _ => unreachable!(),
12563                }
12564            }
12565        } else {
12566            None
12567        };
12568
12569        Ok((privileges, objects))
12570    }
12571
12572    pub fn parse_grant_permission(&mut self) -> Result<Action, ParserError> {
12573        fn parse_columns(parser: &mut Parser) -> Result<Option<Vec<Ident>>, ParserError> {
12574            let columns = parser.parse_parenthesized_column_list(Optional, false)?;
12575            if columns.is_empty() {
12576                Ok(None)
12577            } else {
12578                Ok(Some(columns))
12579            }
12580        }
12581
12582        // Multi-word privileges
12583        if self.parse_keywords(&[Keyword::IMPORTED, Keyword::PRIVILEGES]) {
12584            Ok(Action::ImportedPrivileges)
12585        } else if self.parse_keywords(&[Keyword::ADD, Keyword::SEARCH, Keyword::OPTIMIZATION]) {
12586            Ok(Action::AddSearchOptimization)
12587        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::LISTING]) {
12588            Ok(Action::AttachListing)
12589        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::POLICY]) {
12590            Ok(Action::AttachPolicy)
12591        } else if self.parse_keywords(&[Keyword::BIND, Keyword::SERVICE, Keyword::ENDPOINT]) {
12592            Ok(Action::BindServiceEndpoint)
12593        } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
12594            let role = self.parse_object_name(false)?;
12595            Ok(Action::DatabaseRole { role })
12596        } else if self.parse_keywords(&[Keyword::EVOLVE, Keyword::SCHEMA]) {
12597            Ok(Action::EvolveSchema)
12598        } else if self.parse_keywords(&[Keyword::IMPORT, Keyword::SHARE]) {
12599            Ok(Action::ImportShare)
12600        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::VERSIONS]) {
12601            Ok(Action::ManageVersions)
12602        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::RELEASES]) {
12603            Ok(Action::ManageReleases)
12604        } else if self.parse_keywords(&[Keyword::OVERRIDE, Keyword::SHARE, Keyword::RESTRICTIONS]) {
12605            Ok(Action::OverrideShareRestrictions)
12606        } else if self.parse_keywords(&[
12607            Keyword::PURCHASE,
12608            Keyword::DATA,
12609            Keyword::EXCHANGE,
12610            Keyword::LISTING,
12611        ]) {
12612            Ok(Action::PurchaseDataExchangeListing)
12613        } else if self.parse_keywords(&[Keyword::RESOLVE, Keyword::ALL]) {
12614            Ok(Action::ResolveAll)
12615        } else if self.parse_keywords(&[Keyword::READ, Keyword::SESSION]) {
12616            Ok(Action::ReadSession)
12617
12618        // Single-word privileges
12619        } else if self.parse_keyword(Keyword::APPLY) {
12620            let apply_type = self.parse_action_apply_type()?;
12621            Ok(Action::Apply { apply_type })
12622        } else if self.parse_keyword(Keyword::APPLYBUDGET) {
12623            Ok(Action::ApplyBudget)
12624        } else if self.parse_keyword(Keyword::AUDIT) {
12625            Ok(Action::Audit)
12626        } else if self.parse_keyword(Keyword::CONNECT) {
12627            Ok(Action::Connect)
12628        } else if self.parse_keyword(Keyword::CREATE) {
12629            let obj_type = self.maybe_parse_action_create_object_type();
12630            Ok(Action::Create { obj_type })
12631        } else if self.parse_keyword(Keyword::DELETE) {
12632            Ok(Action::Delete)
12633        } else if self.parse_keyword(Keyword::EXECUTE) {
12634            let obj_type = self.maybe_parse_action_execute_obj_type();
12635            Ok(Action::Execute { obj_type })
12636        } else if self.parse_keyword(Keyword::FAILOVER) {
12637            Ok(Action::Failover)
12638        } else if self.parse_keyword(Keyword::INSERT) {
12639            Ok(Action::Insert {
12640                columns: parse_columns(self)?,
12641            })
12642        } else if self.parse_keyword(Keyword::MANAGE) {
12643            let manage_type = self.parse_action_manage_type()?;
12644            Ok(Action::Manage { manage_type })
12645        } else if self.parse_keyword(Keyword::MODIFY) {
12646            let modify_type = self.parse_action_modify_type()?;
12647            Ok(Action::Modify { modify_type })
12648        } else if self.parse_keyword(Keyword::MONITOR) {
12649            let monitor_type = self.parse_action_monitor_type()?;
12650            Ok(Action::Monitor { monitor_type })
12651        } else if self.parse_keyword(Keyword::OPERATE) {
12652            Ok(Action::Operate)
12653        } else if self.parse_keyword(Keyword::REFERENCES) {
12654            Ok(Action::References {
12655                columns: parse_columns(self)?,
12656            })
12657        } else if self.parse_keyword(Keyword::READ) {
12658            Ok(Action::Read)
12659        } else if self.parse_keyword(Keyword::REPLICATE) {
12660            Ok(Action::Replicate)
12661        } else if self.parse_keyword(Keyword::ROLE) {
12662            let role = self.parse_identifier()?;
12663            Ok(Action::Role { role })
12664        } else if self.parse_keyword(Keyword::SELECT) {
12665            Ok(Action::Select {
12666                columns: parse_columns(self)?,
12667            })
12668        } else if self.parse_keyword(Keyword::TEMPORARY) {
12669            Ok(Action::Temporary)
12670        } else if self.parse_keyword(Keyword::TRIGGER) {
12671            Ok(Action::Trigger)
12672        } else if self.parse_keyword(Keyword::TRUNCATE) {
12673            Ok(Action::Truncate)
12674        } else if self.parse_keyword(Keyword::UPDATE) {
12675            Ok(Action::Update {
12676                columns: parse_columns(self)?,
12677            })
12678        } else if self.parse_keyword(Keyword::USAGE) {
12679            Ok(Action::Usage)
12680        } else if self.parse_keyword(Keyword::OWNERSHIP) {
12681            Ok(Action::Ownership)
12682        } else {
12683            self.expected("a privilege keyword", self.peek_token())?
12684        }
12685    }
12686
12687    fn maybe_parse_action_create_object_type(&mut self) -> Option<ActionCreateObjectType> {
12688        // Multi-word object types
12689        if self.parse_keywords(&[Keyword::APPLICATION, Keyword::PACKAGE]) {
12690            Some(ActionCreateObjectType::ApplicationPackage)
12691        } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
12692            Some(ActionCreateObjectType::ComputePool)
12693        } else if self.parse_keywords(&[Keyword::DATA, Keyword::EXCHANGE, Keyword::LISTING]) {
12694            Some(ActionCreateObjectType::DataExchangeListing)
12695        } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
12696            Some(ActionCreateObjectType::ExternalVolume)
12697        } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
12698            Some(ActionCreateObjectType::FailoverGroup)
12699        } else if self.parse_keywords(&[Keyword::NETWORK, Keyword::POLICY]) {
12700            Some(ActionCreateObjectType::NetworkPolicy)
12701        } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::LISTING]) {
12702            Some(ActionCreateObjectType::OrganiationListing)
12703        } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
12704            Some(ActionCreateObjectType::ReplicationGroup)
12705        }
12706        // Single-word object types
12707        else if self.parse_keyword(Keyword::ACCOUNT) {
12708            Some(ActionCreateObjectType::Account)
12709        } else if self.parse_keyword(Keyword::APPLICATION) {
12710            Some(ActionCreateObjectType::Application)
12711        } else if self.parse_keyword(Keyword::DATABASE) {
12712            Some(ActionCreateObjectType::Database)
12713        } else if self.parse_keyword(Keyword::INTEGRATION) {
12714            Some(ActionCreateObjectType::Integration)
12715        } else if self.parse_keyword(Keyword::ROLE) {
12716            Some(ActionCreateObjectType::Role)
12717        } else if self.parse_keyword(Keyword::SHARE) {
12718            Some(ActionCreateObjectType::Share)
12719        } else if self.parse_keyword(Keyword::USER) {
12720            Some(ActionCreateObjectType::User)
12721        } else if self.parse_keyword(Keyword::WAREHOUSE) {
12722            Some(ActionCreateObjectType::Warehouse)
12723        } else {
12724            None
12725        }
12726    }
12727
12728    fn parse_action_apply_type(&mut self) -> Result<ActionApplyType, ParserError> {
12729        if self.parse_keywords(&[Keyword::AGGREGATION, Keyword::POLICY]) {
12730            Ok(ActionApplyType::AggregationPolicy)
12731        } else if self.parse_keywords(&[Keyword::AUTHENTICATION, Keyword::POLICY]) {
12732            Ok(ActionApplyType::AuthenticationPolicy)
12733        } else if self.parse_keywords(&[Keyword::JOIN, Keyword::POLICY]) {
12734            Ok(ActionApplyType::JoinPolicy)
12735        } else if self.parse_keywords(&[Keyword::MASKING, Keyword::POLICY]) {
12736            Ok(ActionApplyType::MaskingPolicy)
12737        } else if self.parse_keywords(&[Keyword::PACKAGES, Keyword::POLICY]) {
12738            Ok(ActionApplyType::PackagesPolicy)
12739        } else if self.parse_keywords(&[Keyword::PASSWORD, Keyword::POLICY]) {
12740            Ok(ActionApplyType::PasswordPolicy)
12741        } else if self.parse_keywords(&[Keyword::PROJECTION, Keyword::POLICY]) {
12742            Ok(ActionApplyType::ProjectionPolicy)
12743        } else if self.parse_keywords(&[Keyword::ROW, Keyword::ACCESS, Keyword::POLICY]) {
12744            Ok(ActionApplyType::RowAccessPolicy)
12745        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::POLICY]) {
12746            Ok(ActionApplyType::SessionPolicy)
12747        } else if self.parse_keyword(Keyword::TAG) {
12748            Ok(ActionApplyType::Tag)
12749        } else {
12750            self.expected("GRANT APPLY type", self.peek_token())
12751        }
12752    }
12753
12754    fn maybe_parse_action_execute_obj_type(&mut self) -> Option<ActionExecuteObjectType> {
12755        if self.parse_keywords(&[Keyword::DATA, Keyword::METRIC, Keyword::FUNCTION]) {
12756            Some(ActionExecuteObjectType::DataMetricFunction)
12757        } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::ALERT]) {
12758            Some(ActionExecuteObjectType::ManagedAlert)
12759        } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::TASK]) {
12760            Some(ActionExecuteObjectType::ManagedTask)
12761        } else if self.parse_keyword(Keyword::ALERT) {
12762            Some(ActionExecuteObjectType::Alert)
12763        } else if self.parse_keyword(Keyword::TASK) {
12764            Some(ActionExecuteObjectType::Task)
12765        } else {
12766            None
12767        }
12768    }
12769
12770    fn parse_action_manage_type(&mut self) -> Result<ActionManageType, ParserError> {
12771        if self.parse_keywords(&[Keyword::ACCOUNT, Keyword::SUPPORT, Keyword::CASES]) {
12772            Ok(ActionManageType::AccountSupportCases)
12773        } else if self.parse_keywords(&[Keyword::EVENT, Keyword::SHARING]) {
12774            Ok(ActionManageType::EventSharing)
12775        } else if self.parse_keywords(&[Keyword::LISTING, Keyword::AUTO, Keyword::FULFILLMENT]) {
12776            Ok(ActionManageType::ListingAutoFulfillment)
12777        } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::SUPPORT, Keyword::CASES]) {
12778            Ok(ActionManageType::OrganizationSupportCases)
12779        } else if self.parse_keywords(&[Keyword::USER, Keyword::SUPPORT, Keyword::CASES]) {
12780            Ok(ActionManageType::UserSupportCases)
12781        } else if self.parse_keyword(Keyword::GRANTS) {
12782            Ok(ActionManageType::Grants)
12783        } else if self.parse_keyword(Keyword::WAREHOUSES) {
12784            Ok(ActionManageType::Warehouses)
12785        } else {
12786            self.expected("GRANT MANAGE type", self.peek_token())
12787        }
12788    }
12789
12790    fn parse_action_modify_type(&mut self) -> Result<ActionModifyType, ParserError> {
12791        if self.parse_keywords(&[Keyword::LOG, Keyword::LEVEL]) {
12792            Ok(ActionModifyType::LogLevel)
12793        } else if self.parse_keywords(&[Keyword::TRACE, Keyword::LEVEL]) {
12794            Ok(ActionModifyType::TraceLevel)
12795        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::LOG, Keyword::LEVEL]) {
12796            Ok(ActionModifyType::SessionLogLevel)
12797        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::TRACE, Keyword::LEVEL]) {
12798            Ok(ActionModifyType::SessionTraceLevel)
12799        } else {
12800            self.expected("GRANT MODIFY type", self.peek_token())
12801        }
12802    }
12803
12804    fn parse_action_monitor_type(&mut self) -> Result<ActionMonitorType, ParserError> {
12805        if self.parse_keyword(Keyword::EXECUTION) {
12806            Ok(ActionMonitorType::Execution)
12807        } else if self.parse_keyword(Keyword::SECURITY) {
12808            Ok(ActionMonitorType::Security)
12809        } else if self.parse_keyword(Keyword::USAGE) {
12810            Ok(ActionMonitorType::Usage)
12811        } else {
12812            self.expected("GRANT MONITOR type", self.peek_token())
12813        }
12814    }
12815
12816    pub fn parse_grantee_name(&mut self) -> Result<GranteeName, ParserError> {
12817        let mut name = self.parse_object_name(false)?;
12818        if self.dialect.supports_user_host_grantee()
12819            && name.0.len() == 1
12820            && name.0[0].as_ident().is_some()
12821            && self.consume_token(&Token::AtSign)
12822        {
12823            let user = name.0.pop().unwrap().as_ident().unwrap().clone();
12824            let host = self.parse_identifier()?;
12825            Ok(GranteeName::UserHost { user, host })
12826        } else {
12827            Ok(GranteeName::ObjectName(name))
12828        }
12829    }
12830
12831    /// Parse a REVOKE statement
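    ///
    /// For example (illustrative):
    ///
    /// ```sql
    /// REVOKE SELECT ON mytable FROM myuser
    /// ```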
12832    pub fn parse_revoke(&mut self) -> Result<Statement, ParserError> {
12833        let (privileges, objects) = self.parse_grant_revoke_privileges_objects()?;
12834
12835        self.expect_keyword_is(Keyword::FROM)?;
12836        let grantees = self.parse_grantees()?;
12837
        let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
            Some(self.parse_identifier()?)
        } else {
            None
        };
12841
12842        let cascade = self.parse_cascade_option();
12843
12844        Ok(Statement::Revoke {
12845            privileges,
12846            objects,
12847            grantees,
12848            granted_by,
12849            cascade,
12850        })
12851    }
12852
    /// Parse a REPLACE statement (supported only by MySQL and the generic dialect)
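    ///
    /// For example (illustrative MySQL syntax):
    ///
    /// ```sql
    /// REPLACE INTO t (a, b) VALUES (1, 2)
    /// ```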
12854    pub fn parse_replace(&mut self) -> Result<Statement, ParserError> {
12855        if !dialect_of!(self is MySqlDialect | GenericDialect) {
12856            return parser_err!(
12857                "Unsupported statement REPLACE",
12858                self.peek_token().span.start
12859            );
12860        }
12861
12862        let mut insert = self.parse_insert()?;
12863        if let Statement::Insert(Insert { replace_into, .. }) = &mut insert {
12864            *replace_into = true;
12865        }
12866
12867        Ok(insert)
12868    }
12869
12870    /// Parse an INSERT statement, returning a `Box`ed SetExpr
12871    ///
12872    /// This is used to reduce the size of the stack frames in debug builds
12873    fn parse_insert_setexpr_boxed(&mut self) -> Result<Box<SetExpr>, ParserError> {
12874        Ok(Box::new(SetExpr::Insert(self.parse_insert()?)))
12875    }
12876
12877    /// Parse an INSERT statement
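    ///
    /// For example (illustrative):
    ///
    /// ```sql
    /// INSERT INTO t (a, b) VALUES (1, 2)
    /// ```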
12878    pub fn parse_insert(&mut self) -> Result<Statement, ParserError> {
12879        let or = self.parse_conflict_clause();
12880        let priority = if !dialect_of!(self is MySqlDialect | GenericDialect) {
12881            None
12882        } else if self.parse_keyword(Keyword::LOW_PRIORITY) {
12883            Some(MysqlInsertPriority::LowPriority)
12884        } else if self.parse_keyword(Keyword::DELAYED) {
12885            Some(MysqlInsertPriority::Delayed)
12886        } else if self.parse_keyword(Keyword::HIGH_PRIORITY) {
12887            Some(MysqlInsertPriority::HighPriority)
12888        } else {
12889            None
12890        };
12891
12892        let ignore = dialect_of!(self is MySqlDialect | GenericDialect)
12893            && self.parse_keyword(Keyword::IGNORE);
12894
12895        let replace_into = false;
12896
12897        let overwrite = self.parse_keyword(Keyword::OVERWRITE);
12898        let into = self.parse_keyword(Keyword::INTO);
12899
12900        let local = self.parse_keyword(Keyword::LOCAL);
12901
12902        if self.parse_keyword(Keyword::DIRECTORY) {
12903            let path = self.parse_literal_string()?;
12904            let file_format = if self.parse_keywords(&[Keyword::STORED, Keyword::AS]) {
12905                Some(self.parse_file_format()?)
12906            } else {
12907                None
12908            };
12909            let source = self.parse_query()?;
12910            Ok(Statement::Directory {
12911                local,
12912                path,
12913                overwrite,
12914                file_format,
12915                source,
12916            })
12917        } else {
            // Hive allows an optional TABLE keyword here (e.g. `INSERT INTO TABLE t`);
            // accept it regardless of dialect
12919            let table = self.parse_keyword(Keyword::TABLE);
12920            let table_object = self.parse_table_object()?;
12921
12922            let table_alias =
12923                if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::AS) {
12924                    Some(self.parse_identifier()?)
12925                } else {
12926                    None
12927                };
12928
12929            let is_mysql = dialect_of!(self is MySqlDialect);
12930
12931            let (columns, partitioned, after_columns, source, assignments) = if self
12932                .parse_keywords(&[Keyword::DEFAULT, Keyword::VALUES])
12933            {
12934                (vec![], None, vec![], None, vec![])
12935            } else {
12936                let (columns, partitioned, after_columns) = if !self.peek_subquery_start() {
12937                    let columns = self.parse_parenthesized_column_list(Optional, is_mysql)?;
12938
12939                    let partitioned = self.parse_insert_partition()?;
12940                    // Hive allows you to specify columns after partitions as well if you want.
12941                    let after_columns = if dialect_of!(self is HiveDialect) {
12942                        self.parse_parenthesized_column_list(Optional, false)?
12943                    } else {
12944                        vec![]
12945                    };
12946                    (columns, partitioned, after_columns)
12947                } else {
12948                    Default::default()
12949                };
12950
12951                let (source, assignments) = if self.peek_keyword(Keyword::FORMAT)
12952                    || self.peek_keyword(Keyword::SETTINGS)
12953                {
12954                    (None, vec![])
12955                } else if self.dialect.supports_insert_set() && self.parse_keyword(Keyword::SET) {
12956                    (None, self.parse_comma_separated(Parser::parse_assignment)?)
12957                } else {
12958                    (Some(self.parse_query()?), vec![])
12959                };
12960
12961                (columns, partitioned, after_columns, source, assignments)
12962            };
12963
12964            let (format_clause, settings) = if self.dialect.supports_insert_format() {
                // The SETTINGS clause always comes before `FORMAT` in ClickHouse:
12966                // <https://clickhouse.com/docs/en/sql-reference/statements/insert-into>
12967                let settings = self.parse_settings()?;
12968
12969                let format = if self.parse_keyword(Keyword::FORMAT) {
12970                    Some(self.parse_input_format_clause()?)
12971                } else {
12972                    None
12973                };
12974
12975                (format, settings)
12976            } else {
12977                Default::default()
12978            };
12979
12980            let insert_alias = if dialect_of!(self is MySqlDialect | GenericDialect)
12981                && self.parse_keyword(Keyword::AS)
12982            {
12983                let row_alias = self.parse_object_name(false)?;
12984                let col_aliases = Some(self.parse_parenthesized_column_list(Optional, false)?);
12985                Some(InsertAliases {
12986                    row_alias,
12987                    col_aliases,
12988                })
12989            } else {
12990                None
12991            };
12992
12993            let on = if self.parse_keyword(Keyword::ON) {
12994                if self.parse_keyword(Keyword::CONFLICT) {
12995                    let conflict_target =
12996                        if self.parse_keywords(&[Keyword::ON, Keyword::CONSTRAINT]) {
12997                            Some(ConflictTarget::OnConstraint(self.parse_object_name(false)?))
12998                        } else if self.peek_token() == Token::LParen {
12999                            Some(ConflictTarget::Columns(
13000                                self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
13001                            ))
13002                        } else {
13003                            None
13004                        };
13005
13006                    self.expect_keyword_is(Keyword::DO)?;
13007                    let action = if self.parse_keyword(Keyword::NOTHING) {
13008                        OnConflictAction::DoNothing
13009                    } else {
13010                        self.expect_keyword_is(Keyword::UPDATE)?;
13011                        self.expect_keyword_is(Keyword::SET)?;
13012                        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
13013                        let selection = if self.parse_keyword(Keyword::WHERE) {
13014                            Some(self.parse_expr()?)
13015                        } else {
13016                            None
13017                        };
13018                        OnConflictAction::DoUpdate(DoUpdate {
13019                            assignments,
13020                            selection,
13021                        })
13022                    };
13023
13024                    Some(OnInsert::OnConflict(OnConflict {
13025                        conflict_target,
13026                        action,
13027                    }))
13028                } else {
13029                    self.expect_keyword_is(Keyword::DUPLICATE)?;
13030                    self.expect_keyword_is(Keyword::KEY)?;
13031                    self.expect_keyword_is(Keyword::UPDATE)?;
13032                    let l = self.parse_comma_separated(Parser::parse_assignment)?;
13033
13034                    Some(OnInsert::DuplicateKeyUpdate(l))
13035                }
13036            } else {
13037                None
13038            };
13039
13040            let returning = if self.parse_keyword(Keyword::RETURNING) {
13041                Some(self.parse_comma_separated(Parser::parse_select_item)?)
13042            } else {
13043                None
13044            };
13045
13046            Ok(Statement::Insert(Insert {
13047                or,
13048                table: table_object,
13049                table_alias,
13050                ignore,
13051                into,
13052                overwrite,
13053                partitioned,
13054                columns,
13055                after_columns,
13056                source,
13057                assignments,
13058                has_table_keyword: table,
13059                on,
13060                returning,
13061                replace_into,
13062                priority,
13063                insert_alias,
13064                settings,
13065                format_clause,
13066            }))
13067        }
13068    }
13069
13070    // Parses an input format clause used by [ClickHouse].
13071    //
13072    // <https://clickhouse.com/docs/en/interfaces/formats>
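    //
    // For example, ClickHouse accepts syntax along these lines (illustrative only;
    // the table name and values are placeholders):
    //
    //     INSERT INTO t FORMAT Values (1, 'one'), (2, 'two')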
13073    pub fn parse_input_format_clause(&mut self) -> Result<InputFormatClause, ParserError> {
13074        let ident = self.parse_identifier()?;
13075        let values = self
13076            .maybe_parse(|p| p.parse_comma_separated(|p| p.parse_expr()))?
13077            .unwrap_or_default();
13078
13079        Ok(InputFormatClause { ident, values })
13080    }
13081
13082    /// Returns true if the immediate tokens look like the
13083    /// beginning of a subquery. `(SELECT ...`
13084    fn peek_subquery_start(&mut self) -> bool {
13085        let [maybe_lparen, maybe_select] = self.peek_tokens();
13086        Token::LParen == maybe_lparen
13087            && matches!(maybe_select, Token::Word(w) if w.keyword == Keyword::SELECT)
13088    }
13089
13090    fn parse_conflict_clause(&mut self) -> Option<SqliteOnConflict> {
13091        if self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]) {
13092            Some(SqliteOnConflict::Replace)
13093        } else if self.parse_keywords(&[Keyword::OR, Keyword::ROLLBACK]) {
13094            Some(SqliteOnConflict::Rollback)
13095        } else if self.parse_keywords(&[Keyword::OR, Keyword::ABORT]) {
13096            Some(SqliteOnConflict::Abort)
13097        } else if self.parse_keywords(&[Keyword::OR, Keyword::FAIL]) {
13098            Some(SqliteOnConflict::Fail)
13099        } else if self.parse_keywords(&[Keyword::OR, Keyword::IGNORE]) {
13100            Some(SqliteOnConflict::Ignore)
13101        } else if self.parse_keyword(Keyword::REPLACE) {
13102            Some(SqliteOnConflict::Replace)
13103        } else {
13104            None
13105        }
13106    }
13107
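    /// Parse the optional Hive-style `PARTITION (...)` clause of an `INSERT`.
    ///
    /// For example (illustrative; names and values are placeholders):
    /// ```sql
    /// INSERT INTO TABLE t PARTITION (year = 2024, month = 1) VALUES (42);
    /// ```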
13108    pub fn parse_insert_partition(&mut self) -> Result<Option<Vec<Expr>>, ParserError> {
13109        if self.parse_keyword(Keyword::PARTITION) {
13110            self.expect_token(&Token::LParen)?;
13111            let partition_cols = Some(self.parse_comma_separated(Parser::parse_expr)?);
13112            self.expect_token(&Token::RParen)?;
13113            Ok(partition_cols)
13114        } else {
13115            Ok(None)
13116        }
13117    }
13118
13119    pub fn parse_load_data_table_format(
13120        &mut self,
13121    ) -> Result<Option<HiveLoadDataFormat>, ParserError> {
13122        if self.parse_keyword(Keyword::INPUTFORMAT) {
13123            let input_format = self.parse_expr()?;
13124            self.expect_keyword_is(Keyword::SERDE)?;
13125            let serde = self.parse_expr()?;
13126            Ok(Some(HiveLoadDataFormat {
13127                input_format,
13128                serde,
13129            }))
13130        } else {
13131            Ok(None)
13132        }
13133    }
13134
13135    /// Parse an UPDATE statement, returning a `Box`ed SetExpr
13136    ///
13137    /// This is used to reduce the size of the stack frames in debug builds
13138    fn parse_update_setexpr_boxed(&mut self) -> Result<Box<SetExpr>, ParserError> {
13139        Ok(Box::new(SetExpr::Update(self.parse_update()?)))
13140    }
13141
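    /// Parse an `UPDATE` statement.
    ///
    /// For example (a sketch; the supported clauses vary by dialect):
    /// ```sql
    /// UPDATE t SET a = 1, b = b + 1 FROM u WHERE t.id = u.id RETURNING *;
    /// ```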
13142    pub fn parse_update(&mut self) -> Result<Statement, ParserError> {
13143        let or = self.parse_conflict_clause();
13144        let table = self.parse_table_and_joins()?;
13145        let from_before_set = if self.parse_keyword(Keyword::FROM) {
13146            Some(UpdateTableFromKind::BeforeSet(
13147                self.parse_table_with_joins()?,
13148            ))
13149        } else {
13150            None
13151        };
13152        self.expect_keyword(Keyword::SET)?;
13153        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
13154        let from = if from_before_set.is_none() && self.parse_keyword(Keyword::FROM) {
13155            Some(UpdateTableFromKind::AfterSet(
13156                self.parse_table_with_joins()?,
13157            ))
13158        } else {
13159            from_before_set
13160        };
13161        let selection = if self.parse_keyword(Keyword::WHERE) {
13162            Some(self.parse_expr()?)
13163        } else {
13164            None
13165        };
13166        let returning = if self.parse_keyword(Keyword::RETURNING) {
13167            Some(self.parse_comma_separated(Parser::parse_select_item)?)
13168        } else {
13169            None
13170        };
13171        Ok(Statement::Update {
13172            table,
13173            assignments,
13174            from,
13175            selection,
13176            returning,
13177            or,
13178        })
13179    }
13180
13181    /// Parse a `var = expr` assignment, used in an UPDATE statement
13182    pub fn parse_assignment(&mut self) -> Result<Assignment, ParserError> {
13183        let target = self.parse_assignment_target()?;
13184        self.expect_token(&Token::Eq)?;
13185        let value = self.parse_expr()?;
13186        Ok(Assignment { target, value })
13187    }
13188
13189    /// Parse the left-hand side of an assignment, used in an UPDATE statement
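    ///
    /// For example, both targets below are accepted (illustrative):
    /// ```sql
    /// UPDATE t SET a = 1;
    /// UPDATE t SET (a, b) = (1, 2);
    /// ```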
13190    pub fn parse_assignment_target(&mut self) -> Result<AssignmentTarget, ParserError> {
13191        if self.consume_token(&Token::LParen) {
13192            let columns = self.parse_comma_separated(|p| p.parse_object_name(false))?;
13193            self.expect_token(&Token::RParen)?;
13194            Ok(AssignmentTarget::Tuple(columns))
13195        } else {
13196            let column = self.parse_object_name(false)?;
13197            Ok(AssignmentTarget::ColumnName(column))
13198        }
13199    }
13200
13201    pub fn parse_function_args(&mut self) -> Result<FunctionArg, ParserError> {
13202        let arg = if self.dialect.supports_named_fn_args_with_expr_name() {
13203            self.maybe_parse(|p| {
13204                let name = p.parse_expr()?;
13205                let operator = p.parse_function_named_arg_operator()?;
13206                let arg = p.parse_wildcard_expr()?.into();
13207                Ok(FunctionArg::ExprNamed {
13208                    name,
13209                    arg,
13210                    operator,
13211                })
13212            })?
13213        } else {
13214            self.maybe_parse(|p| {
13215                let name = p.parse_identifier()?;
13216                let operator = p.parse_function_named_arg_operator()?;
13217                let arg = p.parse_wildcard_expr()?.into();
13218                Ok(FunctionArg::Named {
13219                    name,
13220                    arg,
13221                    operator,
13222                })
13223            })?
13224        };
13225        if let Some(arg) = arg {
13226            return Ok(arg);
13227        }
13228        Ok(FunctionArg::Unnamed(self.parse_wildcard_expr()?.into()))
13229    }
13230
13231    fn parse_function_named_arg_operator(&mut self) -> Result<FunctionArgOperator, ParserError> {
13232        if self.parse_keyword(Keyword::VALUE) {
13233            return Ok(FunctionArgOperator::Value);
13234        }
13235        let tok = self.next_token();
13236        match tok.token {
13237            Token::RArrow if self.dialect.supports_named_fn_args_with_rarrow_operator() => {
13238                Ok(FunctionArgOperator::RightArrow)
13239            }
13240            Token::Eq if self.dialect.supports_named_fn_args_with_eq_operator() => {
13241                Ok(FunctionArgOperator::Equals)
13242            }
13243            Token::Assignment
13244                if self
13245                    .dialect
13246                    .supports_named_fn_args_with_assignment_operator() =>
13247            {
13248                Ok(FunctionArgOperator::Assignment)
13249            }
13250            Token::Colon if self.dialect.supports_named_fn_args_with_colon_operator() => {
13251                Ok(FunctionArgOperator::Colon)
13252            }
13253            _ => {
13254                self.prev_token();
13255                self.expected("argument operator", tok)
13256            }
13257        }
13258    }
13259
13260    pub fn parse_optional_args(&mut self) -> Result<Vec<FunctionArg>, ParserError> {
13261        if self.consume_token(&Token::RParen) {
13262            Ok(vec![])
13263        } else {
13264            let args = self.parse_comma_separated(Parser::parse_function_args)?;
13265            self.expect_token(&Token::RParen)?;
13266            Ok(args)
13267        }
13268    }
13269
13270    fn parse_table_function_args(&mut self) -> Result<TableFunctionArgs, ParserError> {
13271        if self.consume_token(&Token::RParen) {
13272            return Ok(TableFunctionArgs {
13273                args: vec![],
13274                settings: None,
13275            });
13276        }
13277        let mut args = vec![];
13278        let settings = loop {
13279            if let Some(settings) = self.parse_settings()? {
13280                break Some(settings);
13281            }
13282            args.push(self.parse_function_args()?);
13283            if self.is_parse_comma_separated_end() {
13284                break None;
13285            }
13286        };
13287        self.expect_token(&Token::RParen)?;
13288        Ok(TableFunctionArgs { args, settings })
13289    }
13290
13291    /// Parses a potentially empty list of arguments to a window function
13292    /// (including the closing parenthesis).
13293    ///
13294    /// Examples:
13295    /// ```sql
13296    /// FIRST_VALUE(x ORDER BY 1,2,3);
13297    /// FIRST_VALUE(x IGNORE NULL);
13298    /// ```
13299    fn parse_function_argument_list(&mut self) -> Result<FunctionArgumentList, ParserError> {
13300        let mut clauses = vec![];
13301
13302        // Handle MSSQL's empty argument list with a json-null-clause, e.g. `JSON_ARRAY(NULL ON NULL)`
13303        if let Some(null_clause) = self.parse_json_null_clause() {
13304            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
13305        }
13306
13307        if self.consume_token(&Token::RParen) {
13308            return Ok(FunctionArgumentList {
13309                duplicate_treatment: None,
13310                args: vec![],
13311                clauses,
13312            });
13313        }
13314
13315        let duplicate_treatment = self.parse_duplicate_treatment()?;
13316        let args = self.parse_comma_separated(Parser::parse_function_args)?;
13317
13318        if self.dialect.supports_window_function_null_treatment_arg() {
13319            if let Some(null_treatment) = self.parse_null_treatment()? {
13320                clauses.push(FunctionArgumentClause::IgnoreOrRespectNulls(null_treatment));
13321            }
13322        }
13323
13324        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
13325            clauses.push(FunctionArgumentClause::OrderBy(
13326                self.parse_comma_separated(Parser::parse_order_by_expr)?,
13327            ));
13328        }
13329
13330        if self.parse_keyword(Keyword::LIMIT) {
13331            clauses.push(FunctionArgumentClause::Limit(self.parse_expr()?));
13332        }
13333
13334        if dialect_of!(self is GenericDialect | BigQueryDialect)
13335            && self.parse_keyword(Keyword::HAVING)
13336        {
13337            let kind = match self.expect_one_of_keywords(&[Keyword::MIN, Keyword::MAX])? {
13338                Keyword::MIN => HavingBoundKind::Min,
13339                Keyword::MAX => HavingBoundKind::Max,
13340                _ => unreachable!(),
13341            };
13342            clauses.push(FunctionArgumentClause::Having(HavingBound(
13343                kind,
13344                self.parse_expr()?,
13345            )))
13346        }
13347
13348        if dialect_of!(self is GenericDialect | MySqlDialect)
13349            && self.parse_keyword(Keyword::SEPARATOR)
13350        {
13351            clauses.push(FunctionArgumentClause::Separator(self.parse_value()?.value));
13352        }
13353
13354        if let Some(on_overflow) = self.parse_listagg_on_overflow()? {
13355            clauses.push(FunctionArgumentClause::OnOverflow(on_overflow));
13356        }
13357
13358        if let Some(null_clause) = self.parse_json_null_clause() {
13359            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
13360        }
13361
13362        self.expect_token(&Token::RParen)?;
13363        Ok(FunctionArgumentList {
13364            duplicate_treatment,
13365            args,
13366            clauses,
13367        })
13368    }
13369
13370    /// Parses MSSQL's json-null-clause
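    ///
    /// For example (illustrative):
    /// ```sql
    /// SELECT JSON_ARRAY(1, NULL, 2 ABSENT ON NULL);
    /// SELECT JSON_ARRAY(1, NULL, 2 NULL ON NULL);
    /// ```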
13371    fn parse_json_null_clause(&mut self) -> Option<JsonNullClause> {
13372        if self.parse_keywords(&[Keyword::ABSENT, Keyword::ON, Keyword::NULL]) {
13373            Some(JsonNullClause::AbsentOnNull)
13374        } else if self.parse_keywords(&[Keyword::NULL, Keyword::ON, Keyword::NULL]) {
13375            Some(JsonNullClause::NullOnNull)
13376        } else {
13377            None
13378        }
13379    }
13380
13381    fn parse_duplicate_treatment(&mut self) -> Result<Option<DuplicateTreatment>, ParserError> {
13382        let loc = self.peek_token().span.start;
13383        match (
13384            self.parse_keyword(Keyword::ALL),
13385            self.parse_keyword(Keyword::DISTINCT),
13386        ) {
13387            (true, false) => Ok(Some(DuplicateTreatment::All)),
13388            (false, true) => Ok(Some(DuplicateTreatment::Distinct)),
13389            (false, false) => Ok(None),
13390            (true, true) => parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc),
13391        }
13392    }
13393
13394    /// Parse a single projection from the comma-delimited list that follows SELECT
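    ///
    /// For example, each projection below is parsed as one select item (illustrative):
    /// ```sql
    /// SELECT a, b + 1 AS c, t.* FROM t;
    /// ```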
13395    pub fn parse_select_item(&mut self) -> Result<SelectItem, ParserError> {
13396        match self.parse_wildcard_expr()? {
13397            Expr::QualifiedWildcard(prefix, token) => Ok(SelectItem::QualifiedWildcard(
13398                SelectItemQualifiedWildcardKind::ObjectName(prefix),
13399                self.parse_wildcard_additional_options(token.0)?,
13400            )),
13401            Expr::Wildcard(token) => Ok(SelectItem::Wildcard(
13402                self.parse_wildcard_additional_options(token.0)?,
13403            )),
13404            Expr::Identifier(v) if v.value.to_lowercase() == "from" && v.quote_style.is_none() => {
13405                parser_err!(
13406                    format!("Expected an expression, found: {}", v),
13407                    self.peek_token().span.start
13408                )
13409            }
13410            Expr::BinaryOp {
13411                left,
13412                op: BinaryOperator::Eq,
13413                right,
13414            } if self.dialect.supports_eq_alias_assignment()
13415                && matches!(left.as_ref(), Expr::Identifier(_)) =>
13416            {
13417                let Expr::Identifier(alias) = *left else {
13418                    return parser_err!(
13419                        "BUG: expected identifier expression as alias",
13420                        self.peek_token().span.start
13421                    );
13422                };
13423                Ok(SelectItem::ExprWithAlias {
13424                    expr: *right,
13425                    alias,
13426                })
13427            }
13428            expr if self.dialect.supports_select_expr_star()
13429                && self.consume_tokens(&[Token::Period, Token::Mul]) =>
13430            {
13431                let wildcard_token = self.get_previous_token().clone();
13432                Ok(SelectItem::QualifiedWildcard(
13433                    SelectItemQualifiedWildcardKind::Expr(expr),
13434                    self.parse_wildcard_additional_options(wildcard_token)?,
13435                ))
13436            }
13437            expr => self
13438                .maybe_parse_select_item_alias()
13439                .map(|alias| match alias {
13440                    Some(alias) => SelectItem::ExprWithAlias { expr, alias },
13441                    None => SelectItem::UnnamedExpr(expr),
13442                }),
13443        }
13444    }
13445
13446    /// Parse the [`WildcardAdditionalOptions`] that may follow a wildcard select item.
13447    ///
13448    /// Options that are absent, or not supported by the dialect, are returned as `None`.
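    ///
    /// For example, some dialects (e.g. DuckDB, Snowflake, BigQuery) accept modifiers
    /// along these lines (illustrative, not exhaustive):
    /// ```sql
    /// SELECT * EXCLUDE (secret) FROM t;
    /// SELECT * REPLACE (amount / 100 AS amount) FROM t;
    /// ```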
13449    pub fn parse_wildcard_additional_options(
13450        &mut self,
13451        wildcard_token: TokenWithSpan,
13452    ) -> Result<WildcardAdditionalOptions, ParserError> {
13453        let opt_ilike = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
13454            self.parse_optional_select_item_ilike()?
13455        } else {
13456            None
13457        };
13458        let opt_exclude = if opt_ilike.is_none()
13459            && dialect_of!(self is GenericDialect | DuckDbDialect | SnowflakeDialect)
13460        {
13461            self.parse_optional_select_item_exclude()?
13462        } else {
13463            None
13464        };
13465        let opt_except = if self.dialect.supports_select_wildcard_except() {
13466            self.parse_optional_select_item_except()?
13467        } else {
13468            None
13469        };
13470        let opt_replace = if dialect_of!(self is GenericDialect | BigQueryDialect | ClickHouseDialect | DuckDbDialect | SnowflakeDialect)
13471        {
13472            self.parse_optional_select_item_replace()?
13473        } else {
13474            None
13475        };
13476        let opt_rename = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
13477            self.parse_optional_select_item_rename()?
13478        } else {
13479            None
13480        };
13481
13482        Ok(WildcardAdditionalOptions {
13483            wildcard_token: wildcard_token.into(),
13484            opt_ilike,
13485            opt_exclude,
13486            opt_except,
13487            opt_rename,
13488            opt_replace,
13489        })
13490    }
13491
13492    /// Parse an optional [`ILIKE`](IlikeSelectItem) clause for wildcard select items.
13493    ///
13494    /// Returns `None` if the clause is not present.
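    ///
    /// For example (Snowflake-style, illustrative):
    /// ```sql
    /// SELECT * ILIKE '%id%' FROM t;
    /// ```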
13495    pub fn parse_optional_select_item_ilike(
13496        &mut self,
13497    ) -> Result<Option<IlikeSelectItem>, ParserError> {
13498        let opt_ilike = if self.parse_keyword(Keyword::ILIKE) {
13499            let next_token = self.next_token();
13500            let pattern = match next_token.token {
13501                Token::SingleQuotedString(s) => s,
13502                _ => return self.expected("ilike pattern", next_token),
13503            };
13504            Some(IlikeSelectItem { pattern })
13505        } else {
13506            None
13507        };
13508        Ok(opt_ilike)
13509    }
13510
13511    /// Parse an optional [`EXCLUDE`](ExcludeSelectItem) clause for wildcard select items.
13512    ///
13513    /// Returns `None` if the clause is not present.
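    ///
    /// For example (illustrative):
    /// ```sql
    /// SELECT * EXCLUDE (a, b) FROM t;
    /// SELECT * EXCLUDE a FROM t;
    /// ```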
13514    pub fn parse_optional_select_item_exclude(
13515        &mut self,
13516    ) -> Result<Option<ExcludeSelectItem>, ParserError> {
13517        let opt_exclude = if self.parse_keyword(Keyword::EXCLUDE) {
13518            if self.consume_token(&Token::LParen) {
13519                let columns = self.parse_comma_separated(|parser| parser.parse_identifier())?;
13520                self.expect_token(&Token::RParen)?;
13521                Some(ExcludeSelectItem::Multiple(columns))
13522            } else {
13523                let column = self.parse_identifier()?;
13524                Some(ExcludeSelectItem::Single(column))
13525            }
13526        } else {
13527            None
13528        };
13529
13530        Ok(opt_exclude)
13531    }
13532
13533    /// Parse an optional [`EXCEPT`](ExceptSelectItem) clause for wildcard select items.
13534    ///
13535    /// Returns `None` if the clause is not present.
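    ///
    /// For example (illustrative; ClickHouse also allows omitting the parentheses):
    /// ```sql
    /// SELECT * EXCEPT (a, b) FROM t;
    /// SELECT * EXCEPT a FROM t;
    /// ```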
13536    pub fn parse_optional_select_item_except(
13537        &mut self,
13538    ) -> Result<Option<ExceptSelectItem>, ParserError> {
13539        let opt_except = if self.parse_keyword(Keyword::EXCEPT) {
13540            if self.peek_token().token == Token::LParen {
13541                let idents = self.parse_parenthesized_column_list(Mandatory, false)?;
13542                match &idents[..] {
13543                    [] => {
13544                        return self.expected(
13545                            "at least one column should be parsed by the expect clause",
13546                            self.peek_token(),
13547                        )?;
13548                    }
13549                    [first, idents @ ..] => Some(ExceptSelectItem {
13550                        first_element: first.clone(),
13551                        additional_elements: idents.to_vec(),
13552                    }),
13553                }
13554            } else {
13555                // ClickHouse allows EXCEPT column_name
13556                let ident = self.parse_identifier()?;
13557                Some(ExceptSelectItem {
13558                    first_element: ident,
13559                    additional_elements: vec![],
13560                })
13561            }
13562        } else {
13563            None
13564        };
13565
13566        Ok(opt_except)
13567    }
13568
13569    /// Parse an optional [`RENAME`](RenameSelectItem) clause for wildcard select items.
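    ///
    /// For example (Snowflake-style, illustrative):
    /// ```sql
    /// SELECT * RENAME (old_name AS new_name) FROM t;
    /// ```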
13570    pub fn parse_optional_select_item_rename(
13571        &mut self,
13572    ) -> Result<Option<RenameSelectItem>, ParserError> {
13573        let opt_rename = if self.parse_keyword(Keyword::RENAME) {
13574            if self.consume_token(&Token::LParen) {
13575                let idents =
13576                    self.parse_comma_separated(|parser| parser.parse_identifier_with_alias())?;
13577                self.expect_token(&Token::RParen)?;
13578                Some(RenameSelectItem::Multiple(idents))
13579            } else {
13580                let ident = self.parse_identifier_with_alias()?;
13581                Some(RenameSelectItem::Single(ident))
13582            }
13583        } else {
13584            None
13585        };
13586
13587        Ok(opt_rename)
13588    }
13589
13590    /// Parse an optional [`REPLACE`](ReplaceSelectItem) clause for wildcard select items.
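    ///
    /// For example (illustrative):
    /// ```sql
    /// SELECT * REPLACE (amount * 2 AS amount) FROM t;
    /// ```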
13591    pub fn parse_optional_select_item_replace(
13592        &mut self,
13593    ) -> Result<Option<ReplaceSelectItem>, ParserError> {
13594        let opt_replace = if self.parse_keyword(Keyword::REPLACE) {
13595            if self.consume_token(&Token::LParen) {
13596                let items = self.parse_comma_separated(|parser| {
13597                    Ok(Box::new(parser.parse_replace_elements()?))
13598                })?;
13599                self.expect_token(&Token::RParen)?;
13600                Some(ReplaceSelectItem { items })
13601            } else {
13602                let tok = self.next_token();
13603                return self.expected("( after REPLACE but", tok);
13604            }
13605        } else {
13606            None
13607        };
13608
13609        Ok(opt_replace)
13610    }

13611    pub fn parse_replace_elements(&mut self) -> Result<ReplaceSelectElement, ParserError> {
13612        let expr = self.parse_expr()?;
13613        let as_keyword = self.parse_keyword(Keyword::AS);
13614        let ident = self.parse_identifier()?;
13615        Ok(ReplaceSelectElement {
13616            expr,
13617            column_name: ident,
13618            as_keyword,
13619        })
13620    }
13621
13622    /// Parse ASC or DESC, returning `Some(true)` for ASC, `Some(false)` for DESC,
13623    /// or `None` if neither keyword is present.
13624    pub fn parse_asc_desc(&mut self) -> Option<bool> {
13625        if self.parse_keyword(Keyword::ASC) {
13626            Some(true)
13627        } else if self.parse_keyword(Keyword::DESC) {
13628            Some(false)
13629        } else {
13630            None
13631        }
13632    }
13633
13634    /// Parse an expression, optionally followed by ASC or DESC (used in ORDER BY)
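    ///
    /// For example, the expression part in clauses like these (illustrative;
    /// `WITH FILL` is ClickHouse-specific):
    /// ```sql
    /// ORDER BY created_at DESC NULLS LAST
    /// ORDER BY d WITH FILL FROM 1 TO 10 STEP 1
    /// ```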
13635    pub fn parse_order_by_expr(&mut self) -> Result<OrderByExpr, ParserError> {
13636        let expr = self.parse_expr()?;
13637
13638        let options = self.parse_order_by_options()?;
13639
13640        let with_fill = if dialect_of!(self is ClickHouseDialect | GenericDialect)
13641            && self.parse_keywords(&[Keyword::WITH, Keyword::FILL])
13642        {
13643            Some(self.parse_with_fill()?)
13644        } else {
13645            None
13646        };
13647
13648        Ok(OrderByExpr {
13649            expr,
13650            options,
13651            with_fill,
13652        })
13653    }
13654
13655    fn parse_order_by_options(&mut self) -> Result<OrderByOptions, ParserError> {
13656        let asc = self.parse_asc_desc();
13657
13658        let nulls_first = if self.parse_keywords(&[Keyword::NULLS, Keyword::FIRST]) {
13659            Some(true)
13660        } else if self.parse_keywords(&[Keyword::NULLS, Keyword::LAST]) {
13661            Some(false)
13662        } else {
13663            None
13664        };
13665
13666        Ok(OrderByOptions { asc, nulls_first })
13667    }
13668
13669    // Parse a WITH FILL clause (ClickHouse dialect)
13670    // that follows the WITH FILL keywords in an ORDER BY clause
13671    pub fn parse_with_fill(&mut self) -> Result<WithFill, ParserError> {
13672        let from = if self.parse_keyword(Keyword::FROM) {
13673            Some(self.parse_expr()?)
13674        } else {
13675            None
13676        };
13677
13678        let to = if self.parse_keyword(Keyword::TO) {
13679            Some(self.parse_expr()?)
13680        } else {
13681            None
13682        };
13683
13684        let step = if self.parse_keyword(Keyword::STEP) {
13685            Some(self.parse_expr()?)
13686        } else {
13687            None
13688        };
13689
13690        Ok(WithFill { from, to, step })
13691    }
13692
13693    // Parse a set of comma-separated INTERPOLATE expressions (ClickHouse dialect)
13694    // that follow the INTERPOLATE keyword in an ORDER BY clause with the WITH FILL modifier
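    //
    // For example (ClickHouse, illustrative; names are placeholders):
    //
    //     ORDER BY d WITH FILL INTERPOLATE (price AS price + 0.1)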
13695    pub fn parse_interpolations(&mut self) -> Result<Option<Interpolate>, ParserError> {
13696        if !self.parse_keyword(Keyword::INTERPOLATE) {
13697            return Ok(None);
13698        }
13699
13700        if self.consume_token(&Token::LParen) {
13701            let interpolations =
13702                self.parse_comma_separated0(|p| p.parse_interpolation(), Token::RParen)?;
13703            self.expect_token(&Token::RParen)?;
13704            // INTERPOLATE () and INTERPOLATE ( ... ) variants
13705            return Ok(Some(Interpolate {
13706                exprs: Some(interpolations),
13707            }));
13708        }
13709
13710        // INTERPOLATE
13711        Ok(Some(Interpolate { exprs: None }))
13712    }
13713
13714    // Parse an INTERPOLATE expression (ClickHouse dialect)
13715    pub fn parse_interpolation(&mut self) -> Result<InterpolateExpr, ParserError> {
13716        let column = self.parse_identifier()?;
13717        let expr = if self.parse_keyword(Keyword::AS) {
13718            Some(self.parse_expr()?)
13719        } else {
13720            None
13721        };
13722        Ok(InterpolateExpr { column, expr })
13723    }
13724
13725    /// Parse a TOP clause, the MSSQL equivalent of LIMIT,
13726    /// which follows `SELECT [DISTINCT]`.
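    ///
    /// For example (illustrative):
    /// ```sql
    /// SELECT TOP 10 * FROM t;
    /// SELECT TOP (10) PERCENT WITH TIES * FROM t ORDER BY score DESC;
    /// ```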
13727    pub fn parse_top(&mut self) -> Result<Top, ParserError> {
13728        let quantity = if self.consume_token(&Token::LParen) {
13729            let quantity = self.parse_expr()?;
13730            self.expect_token(&Token::RParen)?;
13731            Some(TopQuantity::Expr(quantity))
13732        } else {
13733            let next_token = self.next_token();
13734            let quantity = match next_token.token {
13735                Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start)?,
13736                _ => self.expected("literal int", next_token)?,
13737            };
13738            Some(TopQuantity::Constant(quantity))
13739        };
13740
13741        let percent = self.parse_keyword(Keyword::PERCENT);
13742
13743        let with_ties = self.parse_keywords(&[Keyword::WITH, Keyword::TIES]);
13744
13745        Ok(Top {
13746            with_ties,
13747            percent,
13748            quantity,
13749        })
13750    }
13751
13752    /// Parse a LIMIT clause
13753    pub fn parse_limit(&mut self) -> Result<Option<Expr>, ParserError> {
13754        if self.parse_keyword(Keyword::ALL) {
13755            Ok(None)
13756        } else {
13757            Ok(Some(self.parse_expr()?))
13758        }
13759    }
13760
13761    /// Parse an OFFSET clause
13762    pub fn parse_offset(&mut self) -> Result<Offset, ParserError> {
13763        let value = self.parse_expr()?;
13764        let rows = if self.parse_keyword(Keyword::ROW) {
13765            OffsetRows::Row
13766        } else if self.parse_keyword(Keyword::ROWS) {
13767            OffsetRows::Rows
13768        } else {
13769            OffsetRows::None
13770        };
13771        Ok(Offset { value, rows })
13772    }
13773
13774    /// Parse a FETCH clause
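    ///
    /// For example, the clause following the `FETCH` keyword in statements like
    /// (illustrative):
    /// ```sql
    /// OFFSET 5 ROWS FETCH NEXT 10 ROWS ONLY
    /// FETCH FIRST 10 ROWS WITH TIES
    /// ```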
13775    pub fn parse_fetch(&mut self) -> Result<Fetch, ParserError> {
13776        self.expect_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT])?;
13777        let (quantity, percent) = if self
13778            .parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])
13779            .is_some()
13780        {
13781            (None, false)
13782        } else {
13783            let quantity = Expr::Value(self.parse_value()?);
13784            let percent = self.parse_keyword(Keyword::PERCENT);
13785            self.expect_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])?;
13786            (Some(quantity), percent)
13787        };
13788        let with_ties = if self.parse_keyword(Keyword::ONLY) {
13789            false
13790        } else if self.parse_keywords(&[Keyword::WITH, Keyword::TIES]) {
13791            true
13792        } else {
13793            return self.expected("one of ONLY or WITH TIES", self.peek_token());
13794        };
13795        Ok(Fetch {
13796            with_ties,
13797            percent,
13798            quantity,
13799        })
13800    }
13801
13802    /// Parse a FOR UPDATE/FOR SHARE clause
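    ///
    /// For example, the locking clauses in statements like (illustrative):
    /// ```sql
    /// SELECT * FROM t FOR UPDATE OF t NOWAIT;
    /// SELECT * FROM t FOR SHARE SKIP LOCKED;
    /// ```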
13803    pub fn parse_lock(&mut self) -> Result<LockClause, ParserError> {
13804        let lock_type = match self.expect_one_of_keywords(&[Keyword::UPDATE, Keyword::SHARE])? {
13805            Keyword::UPDATE => LockType::Update,
13806            Keyword::SHARE => LockType::Share,
13807            _ => unreachable!(),
13808        };
13809        let of = if self.parse_keyword(Keyword::OF) {
13810            Some(self.parse_object_name(false)?)
13811        } else {
13812            None
13813        };
13814        let nonblock = if self.parse_keyword(Keyword::NOWAIT) {
13815            Some(NonBlock::Nowait)
13816        } else if self.parse_keywords(&[Keyword::SKIP, Keyword::LOCKED]) {
13817            Some(NonBlock::SkipLocked)
13818        } else {
13819            None
13820        };
13821        Ok(LockClause {
13822            lock_type,
13823            of,
13824            nonblock,
13825        })
13826    }
13827
13828    pub fn parse_values(&mut self, allow_empty: bool) -> Result<Values, ParserError> {
13829        let mut explicit_row = false;
13830
13831        let rows = self.parse_comma_separated(|parser| {
13832            if parser.parse_keyword(Keyword::ROW) {
13833                explicit_row = true;
13834            }
13835
13836            parser.expect_token(&Token::LParen)?;
13837            if allow_empty && parser.peek_token().token == Token::RParen {
13838                parser.next_token();
13839                Ok(vec![])
13840            } else {
13841                let exprs = parser.parse_comma_separated(Parser::parse_expr)?;
13842                parser.expect_token(&Token::RParen)?;
13843                Ok(exprs)
13844            }
13845        })?;
13846        Ok(Values { explicit_row, rows })
13847    }
13848
13849    pub fn parse_start_transaction(&mut self) -> Result<Statement, ParserError> {
13850        self.expect_keyword_is(Keyword::TRANSACTION)?;
13851        Ok(Statement::StartTransaction {
13852            modes: self.parse_transaction_modes()?,
13853            begin: false,
13854            transaction: Some(BeginTransactionKind::Transaction),
13855            modifier: None,
13856            statements: vec![],
13857            exception_statements: None,
13858            has_end_keyword: false,
13859        })
13860    }
13861
13862    pub fn parse_begin(&mut self) -> Result<Statement, ParserError> {
13863        let modifier = if !self.dialect.supports_start_transaction_modifier() {
13864            None
13865        } else if self.parse_keyword(Keyword::DEFERRED) {
13866            Some(TransactionModifier::Deferred)
13867        } else if self.parse_keyword(Keyword::IMMEDIATE) {
13868            Some(TransactionModifier::Immediate)
13869        } else if self.parse_keyword(Keyword::EXCLUSIVE) {
13870            Some(TransactionModifier::Exclusive)
13871        } else if self.parse_keyword(Keyword::TRY) {
13872            Some(TransactionModifier::Try)
13873        } else if self.parse_keyword(Keyword::CATCH) {
13874            Some(TransactionModifier::Catch)
13875        } else {
13876            None
13877        };
13878        let transaction = match self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]) {
13879            Some(Keyword::TRANSACTION) => Some(BeginTransactionKind::Transaction),
13880            Some(Keyword::WORK) => Some(BeginTransactionKind::Work),
13881            _ => None,
13882        };
13883        Ok(Statement::StartTransaction {
13884            modes: self.parse_transaction_modes()?,
13885            begin: true,
13886            transaction,
13887            modifier,
13888            statements: vec![],
13889            exception_statements: None,
13890            has_end_keyword: false,
13891        })
13892    }
13893
13894    pub fn parse_end(&mut self) -> Result<Statement, ParserError> {
13895        let modifier = if !self.dialect.supports_end_transaction_modifier() {
13896            None
13897        } else if self.parse_keyword(Keyword::TRY) {
13898            Some(TransactionModifier::Try)
13899        } else if self.parse_keyword(Keyword::CATCH) {
13900            Some(TransactionModifier::Catch)
13901        } else {
13902            None
13903        };
13904        Ok(Statement::Commit {
13905            chain: self.parse_commit_rollback_chain()?,
13906            end: true,
13907            modifier,
13908        })
13909    }
13910
13911    pub fn parse_transaction_modes(&mut self) -> Result<Vec<TransactionMode>, ParserError> {
13912        let mut modes = vec![];
13913        let mut required = false;
13914        loop {
13915            let mode = if self.parse_keywords(&[Keyword::ISOLATION, Keyword::LEVEL]) {
13916                let iso_level = if self.parse_keywords(&[Keyword::READ, Keyword::UNCOMMITTED]) {
13917                    TransactionIsolationLevel::ReadUncommitted
13918                } else if self.parse_keywords(&[Keyword::READ, Keyword::COMMITTED]) {
13919                    TransactionIsolationLevel::ReadCommitted
13920                } else if self.parse_keywords(&[Keyword::REPEATABLE, Keyword::READ]) {
13921                    TransactionIsolationLevel::RepeatableRead
13922                } else if self.parse_keyword(Keyword::SERIALIZABLE) {
13923                    TransactionIsolationLevel::Serializable
13924                } else if self.parse_keyword(Keyword::SNAPSHOT) {
13925                    TransactionIsolationLevel::Snapshot
13926                } else {
13927                    self.expected("isolation level", self.peek_token())?
13928                };
13929                TransactionMode::IsolationLevel(iso_level)
13930            } else if self.parse_keywords(&[Keyword::READ, Keyword::ONLY]) {
13931                TransactionMode::AccessMode(TransactionAccessMode::ReadOnly)
13932            } else if self.parse_keywords(&[Keyword::READ, Keyword::WRITE]) {
13933                TransactionMode::AccessMode(TransactionAccessMode::ReadWrite)
13934            } else if required {
13935                self.expected("transaction mode", self.peek_token())?
13936            } else {
13937                break;
13938            };
13939            modes.push(mode);
13940            // ANSI requires a comma after each transaction mode, but
13941            // PostgreSQL, for historical reasons, does not. We follow
13942            // PostgreSQL in making the comma optional, since that is strictly
13943            // more general.
13944            required = self.consume_token(&Token::Comma);
13945        }
13946        Ok(modes)
13947    }
13948
13949    pub fn parse_commit(&mut self) -> Result<Statement, ParserError> {
13950        Ok(Statement::Commit {
13951            chain: self.parse_commit_rollback_chain()?,
13952            end: false,
13953            modifier: None,
13954        })
13955    }
13956
13957    pub fn parse_rollback(&mut self) -> Result<Statement, ParserError> {
13958        let chain = self.parse_commit_rollback_chain()?;
13959        let savepoint = self.parse_rollback_savepoint()?;
13960
13961        Ok(Statement::Rollback { chain, savepoint })
13962    }
13963
13964    pub fn parse_commit_rollback_chain(&mut self) -> Result<bool, ParserError> {
13965        let _ = self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]);
13966        if self.parse_keyword(Keyword::AND) {
13967            let chain = !self.parse_keyword(Keyword::NO);
13968            self.expect_keyword_is(Keyword::CHAIN)?;
13969            Ok(chain)
13970        } else {
13971            Ok(false)
13972        }
13973    }
13974
13975    pub fn parse_rollback_savepoint(&mut self) -> Result<Option<Ident>, ParserError> {
13976        if self.parse_keyword(Keyword::TO) {
13977            let _ = self.parse_keyword(Keyword::SAVEPOINT);
13978            let savepoint = self.parse_identifier()?;
13979
13980            Ok(Some(savepoint))
13981        } else {
13982            Ok(None)
13983        }
13984    }
13985
13986    /// Parse a 'RAISERROR' statement
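    ///
    /// For example (MSSQL, illustrative; message, severity, and state are placeholders):
    /// ```sql
    /// RAISERROR('Something went wrong', 16, 1) WITH NOWAIT;
    /// ```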
13987    pub fn parse_raiserror(&mut self) -> Result<Statement, ParserError> {
13988        self.expect_token(&Token::LParen)?;
13989        let message = Box::new(self.parse_expr()?);
13990        self.expect_token(&Token::Comma)?;
13991        let severity = Box::new(self.parse_expr()?);
13992        self.expect_token(&Token::Comma)?;
13993        let state = Box::new(self.parse_expr()?);
13994        let arguments = if self.consume_token(&Token::Comma) {
13995            self.parse_comma_separated(Parser::parse_expr)?
13996        } else {
13997            vec![]
13998        };
13999        self.expect_token(&Token::RParen)?;
14000        let options = if self.parse_keyword(Keyword::WITH) {
14001            self.parse_comma_separated(Parser::parse_raiserror_option)?
14002        } else {
14003            vec![]
14004        };
14005        Ok(Statement::RaisError {
14006            message,
14007            severity,
14008            state,
14009            arguments,
14010            options,
14011        })
14012    }
14013
14014    pub fn parse_raiserror_option(&mut self) -> Result<RaisErrorOption, ParserError> {
14015        match self.expect_one_of_keywords(&[Keyword::LOG, Keyword::NOWAIT, Keyword::SETERROR])? {
14016            Keyword::LOG => Ok(RaisErrorOption::Log),
14017            Keyword::NOWAIT => Ok(RaisErrorOption::NoWait),
14018            Keyword::SETERROR => Ok(RaisErrorOption::SetError),
14019            _ => self.expected(
14020                "LOG, NOWAIT OR SETERROR raiserror option",
14021                self.peek_token(),
14022            ),
14023        }
14024    }
14025
14026    pub fn parse_deallocate(&mut self) -> Result<Statement, ParserError> {
14027        let prepare = self.parse_keyword(Keyword::PREPARE);
14028        let name = self.parse_identifier()?;
14029        Ok(Statement::Deallocate { name, prepare })
14030    }
14031
14032    pub fn parse_execute(&mut self) -> Result<Statement, ParserError> {
14033        let name = if self.dialect.supports_execute_immediate()
14034            && self.parse_keyword(Keyword::IMMEDIATE)
14035        {
14036            None
14037        } else {
14038            let name = self.parse_object_name(false)?;
14039            Some(name)
14040        };
14041
14042        let has_parentheses = self.consume_token(&Token::LParen);
14043
14044        let end_token = match (has_parentheses, self.peek_token().token) {
14045            (true, _) => Token::RParen,
14046            (false, Token::EOF) => Token::EOF,
14047            (false, Token::Word(w)) if w.keyword == Keyword::USING => Token::Word(w),
14048            (false, _) => Token::SemiColon,
14049        };
14050
14051        let parameters = self.parse_comma_separated0(Parser::parse_expr, end_token)?;
14052
14053        if has_parentheses {
14054            self.expect_token(&Token::RParen)?;
14055        }
14056
14057        let into = if self.parse_keyword(Keyword::INTO) {
14058            self.parse_comma_separated(Self::parse_identifier)?
14059        } else {
14060            vec![]
14061        };
14062
14063        let using = if self.parse_keyword(Keyword::USING) {
14064            self.parse_comma_separated(Self::parse_expr_with_alias)?
14065        } else {
14066            vec![]
14067        };
14068
14069        Ok(Statement::Execute {
14070            immediate: name.is_none(),
14071            name,
14072            parameters,
14073            has_parentheses,
14074            into,
14075            using,
14076        })
14077    }
14078
14079    pub fn parse_prepare(&mut self) -> Result<Statement, ParserError> {
14080        let name = self.parse_identifier()?;
14081
14082        let mut data_types = vec![];
14083        if self.consume_token(&Token::LParen) {
14084            data_types = self.parse_comma_separated(Parser::parse_data_type)?;
14085            self.expect_token(&Token::RParen)?;
14086        }
14087
14088        self.expect_keyword_is(Keyword::AS)?;
14089        let statement = Box::new(self.parse_statement()?);
14090        Ok(Statement::Prepare {
14091            name,
14092            data_types,
14093            statement,
14094        })
14095    }
14096
14097    pub fn parse_unload(&mut self) -> Result<Statement, ParserError> {
14098        self.expect_token(&Token::LParen)?;
14099        let query = self.parse_query()?;
14100        self.expect_token(&Token::RParen)?;
14101
14102        self.expect_keyword_is(Keyword::TO)?;
14103        let to = self.parse_identifier()?;
14104
14105        let with_options = self.parse_options(Keyword::WITH)?;
14106
14107        Ok(Statement::Unload {
14108            query,
14109            to,
14110            with: with_options,
14111        })
14112    }
14113
14114    pub fn parse_merge_clauses(&mut self) -> Result<Vec<MergeClause>, ParserError> {
14115        let mut clauses = vec![];
14116        loop {
14117            if self.peek_token() == Token::EOF || self.peek_token() == Token::SemiColon {
14118                break;
14119            }
14120            self.expect_keyword_is(Keyword::WHEN)?;
14121
14122            let mut clause_kind = MergeClauseKind::Matched;
14123            if self.parse_keyword(Keyword::NOT) {
14124                clause_kind = MergeClauseKind::NotMatched;
14125            }
14126            self.expect_keyword_is(Keyword::MATCHED)?;
14127
14128            if matches!(clause_kind, MergeClauseKind::NotMatched)
14129                && self.parse_keywords(&[Keyword::BY, Keyword::SOURCE])
14130            {
14131                clause_kind = MergeClauseKind::NotMatchedBySource;
14132            } else if matches!(clause_kind, MergeClauseKind::NotMatched)
14133                && self.parse_keywords(&[Keyword::BY, Keyword::TARGET])
14134            {
14135                clause_kind = MergeClauseKind::NotMatchedByTarget;
14136            }
14137
14138            let predicate = if self.parse_keyword(Keyword::AND) {
14139                Some(self.parse_expr()?)
14140            } else {
14141                None
14142            };
14143
14144            self.expect_keyword_is(Keyword::THEN)?;
14145
14146            let merge_clause = match self.parse_one_of_keywords(&[
14147                Keyword::UPDATE,
14148                Keyword::INSERT,
14149                Keyword::DELETE,
14150            ]) {
14151                Some(Keyword::UPDATE) => {
14152                    if matches!(
14153                        clause_kind,
14154                        MergeClauseKind::NotMatched | MergeClauseKind::NotMatchedByTarget
14155                    ) {
14156                        return Err(ParserError::ParserError(format!(
14157                            "UPDATE is not allowed in a {clause_kind} merge clause"
14158                        )));
14159                    }
14160                    self.expect_keyword_is(Keyword::SET)?;
14161                    MergeAction::Update {
14162                        assignments: self.parse_comma_separated(Parser::parse_assignment)?,
14163                    }
14164                }
14165                Some(Keyword::DELETE) => {
14166                    if matches!(
14167                        clause_kind,
14168                        MergeClauseKind::NotMatched | MergeClauseKind::NotMatchedByTarget
14169                    ) {
14170                        return Err(ParserError::ParserError(format!(
14171                            "DELETE is not allowed in a {clause_kind} merge clause"
14172                        )));
14173                    }
14174                    MergeAction::Delete
14175                }
14176                Some(Keyword::INSERT) => {
14177                    if !matches!(
14178                        clause_kind,
14179                        MergeClauseKind::NotMatched | MergeClauseKind::NotMatchedByTarget
14180                    ) {
14181                        return Err(ParserError::ParserError(format!(
14182                            "INSERT is not allowed in a {clause_kind} merge clause"
14183                        )));
14184                    }
14185                    let is_mysql = dialect_of!(self is MySqlDialect);
14186
14187                    let columns = self.parse_parenthesized_column_list(Optional, is_mysql)?;
14188                    let kind = if dialect_of!(self is BigQueryDialect | GenericDialect)
14189                        && self.parse_keyword(Keyword::ROW)
14190                    {
14191                        MergeInsertKind::Row
14192                    } else {
14193                        self.expect_keyword_is(Keyword::VALUES)?;
14194                        let values = self.parse_values(is_mysql)?;
14195                        MergeInsertKind::Values(values)
14196                    };
14197                    MergeAction::Insert(MergeInsertExpr { columns, kind })
14198                }
14199                _ => {
14200                    return Err(ParserError::ParserError(
14201                        "expected UPDATE, DELETE or INSERT in merge clause".to_string(),
14202                    ));
14203                }
14204            };
14205            clauses.push(MergeClause {
14206                clause_kind,
14207                predicate,
14208                action: merge_clause,
14209            });
14210        }
14211        Ok(clauses)
14212    }
14213
14214    pub fn parse_merge(&mut self) -> Result<Statement, ParserError> {
14215        let into = self.parse_keyword(Keyword::INTO);
14216
14217        let table = self.parse_table_factor()?;
14218
14219        self.expect_keyword_is(Keyword::USING)?;
14220        let source = self.parse_table_factor()?;
14221        self.expect_keyword_is(Keyword::ON)?;
14222        let on = self.parse_expr()?;
14223        let clauses = self.parse_merge_clauses()?;
14224
14225        Ok(Statement::Merge {
14226            into,
14227            table,
14228            source,
14229            on: Box::new(on),
14230            clauses,
14231        })
14232    }
14233
14234    fn parse_pragma_value(&mut self) -> Result<Value, ParserError> {
14235        match self.parse_value()?.value {
14236            v @ Value::SingleQuotedString(_) => Ok(v),
14237            v @ Value::DoubleQuotedString(_) => Ok(v),
14238            v @ Value::Number(_, _) => Ok(v),
14239            v @ Value::Placeholder(_) => Ok(v),
14240            _ => {
14241                self.prev_token();
14242                self.expected("number or string or ? placeholder", self.peek_token())
14243            }
14244        }
14245    }
14246
14247    // PRAGMA [schema-name '.'] pragma-name [('=' pragma-value) | '(' pragma-value ')']
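    //
    // For example (SQLite, illustrative):
    //
    //     PRAGMA cache_size = 2000;
    //     PRAGMA main.integrity_check(10);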
14248    pub fn parse_pragma(&mut self) -> Result<Statement, ParserError> {
14249        let name = self.parse_object_name(false)?;
14250        if self.consume_token(&Token::LParen) {
14251            let value = self.parse_pragma_value()?;
14252            self.expect_token(&Token::RParen)?;
14253            Ok(Statement::Pragma {
14254                name,
14255                value: Some(value),
14256                is_eq: false,
14257            })
14258        } else if self.consume_token(&Token::Eq) {
14259            Ok(Statement::Pragma {
14260                name,
14261                value: Some(self.parse_pragma_value()?),
14262                is_eq: true,
14263            })
14264        } else {
14265            Ok(Statement::Pragma {
14266                name,
14267                value: None,
14268                is_eq: false,
14269            })
14270        }
14271    }
14272
14273    /// `INSTALL [extension_name]`
14274    pub fn parse_install(&mut self) -> Result<Statement, ParserError> {
14275        let extension_name = self.parse_identifier()?;
14276
14277        Ok(Statement::Install { extension_name })
14278    }
14279
14280    /// Parse a SQL LOAD statement
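    ///
    /// For example (illustrative; the extension form is DuckDB-style, the DATA form is Hive-style):
    /// ```sql
    /// LOAD httpfs;
    /// LOAD DATA LOCAL INPATH '/data/x.csv' OVERWRITE INTO TABLE t PARTITION (dt = '2024-01-01');
    /// ```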
14281    pub fn parse_load(&mut self) -> Result<Statement, ParserError> {
14282        if self.dialect.supports_load_extension() {
14283            let extension_name = self.parse_identifier()?;
14284            Ok(Statement::Load { extension_name })
14285        } else if self.parse_keyword(Keyword::DATA) && self.dialect.supports_load_data() {
14286            let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
14287            self.expect_keyword_is(Keyword::INPATH)?;
14288            let inpath = self.parse_literal_string()?;
14289            let overwrite = self.parse_one_of_keywords(&[Keyword::OVERWRITE]).is_some();
14290            self.expect_keyword_is(Keyword::INTO)?;
14291            self.expect_keyword_is(Keyword::TABLE)?;
14292            let table_name = self.parse_object_name(false)?;
14293            let partitioned = self.parse_insert_partition()?;
14294            let table_format = self.parse_load_data_table_format()?;
14295            Ok(Statement::LoadData {
14296                local,
14297                inpath,
14298                overwrite,
14299                table_name,
14300                partitioned,
14301                table_format,
14302            })
14303        } else {
14304            self.expected(
14305                "`DATA` or an extension name after `LOAD`",
14306                self.peek_token(),
14307            )
14308        }
14309    }
14310
14311    /// ```sql
14312    /// OPTIMIZE TABLE [db.]name [ON CLUSTER cluster] [PARTITION partition | PARTITION ID 'partition_id'] [FINAL] [DEDUPLICATE [BY expression]]
14313    /// ```
14314    /// [ClickHouse](https://clickhouse.com/docs/en/sql-reference/statements/optimize)
14315    pub fn parse_optimize_table(&mut self) -> Result<Statement, ParserError> {
14316        self.expect_keyword_is(Keyword::TABLE)?;
14317        let name = self.parse_object_name(false)?;
14318        let on_cluster = self.parse_optional_on_cluster()?;
14319
14320        let partition = if self.parse_keyword(Keyword::PARTITION) {
14321            if self.parse_keyword(Keyword::ID) {
14322                Some(Partition::Identifier(self.parse_identifier()?))
14323            } else {
14324                Some(Partition::Expr(self.parse_expr()?))
14325            }
14326        } else {
14327            None
14328        };
14329
14330        let include_final = self.parse_keyword(Keyword::FINAL);
14331        let deduplicate = if self.parse_keyword(Keyword::DEDUPLICATE) {
14332            if self.parse_keyword(Keyword::BY) {
14333                Some(Deduplicate::ByExpression(self.parse_expr()?))
14334            } else {
14335                Some(Deduplicate::All)
14336            }
14337        } else {
14338            None
14339        };
14340
14341        Ok(Statement::OptimizeTable {
14342            name,
14343            on_cluster,
14344            partition,
14345            include_final,
14346            deduplicate,
14347        })
14348    }
14349
14350    /// ```sql
14351    /// CREATE [ { TEMPORARY | TEMP } ] SEQUENCE [ IF NOT EXISTS ] <sequence_name>
14352    /// ```
14353    ///
14354    /// See [Postgres docs](https://www.postgresql.org/docs/current/sql-createsequence.html) for more details.
14355    pub fn parse_create_sequence(&mut self, temporary: bool) -> Result<Statement, ParserError> {
14356        //[ IF NOT EXISTS ]
14357        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
14358        //name
14359        let name = self.parse_object_name(false)?;
14360        //[ AS data_type ]
14361        let mut data_type: Option<DataType> = None;
14362        if self.parse_keywords(&[Keyword::AS]) {
14363            data_type = Some(self.parse_data_type()?)
14364        }
14365        let sequence_options = self.parse_create_sequence_options()?;
14366        // [ OWNED BY { table_name.column_name | NONE } ]
14367        let owned_by = if self.parse_keywords(&[Keyword::OWNED, Keyword::BY]) {
14368            if self.parse_keywords(&[Keyword::NONE]) {
14369                Some(ObjectName::from(vec![Ident::new("NONE")]))
14370            } else {
14371                Some(self.parse_object_name(false)?)
14372            }
14373        } else {
14374            None
14375        };
14376        Ok(Statement::CreateSequence {
14377            temporary,
14378            if_not_exists,
14379            name,
14380            data_type,
14381            sequence_options,
14382            owned_by,
14383        })
14384    }
14385
14386    fn parse_create_sequence_options(&mut self) -> Result<Vec<SequenceOptions>, ParserError> {
14387        let mut sequence_options = vec![];
14388        //[ INCREMENT [ BY ] increment ]
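        // The boolean on `IncrementBy` (and on `StartWith` below) records whether the
        // optional keyword (`BY` / `WITH`) appeared in the input.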
14389        if self.parse_keywords(&[Keyword::INCREMENT]) {
14390            if self.parse_keywords(&[Keyword::BY]) {
14391                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, true));
14392            } else {
14393                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, false));
14394            }
14395        }
14396        //[ MINVALUE minvalue | NO MINVALUE ]
14397        if self.parse_keyword(Keyword::MINVALUE) {
14398            sequence_options.push(SequenceOptions::MinValue(Some(self.parse_number()?)));
14399        } else if self.parse_keywords(&[Keyword::NO, Keyword::MINVALUE]) {
14400            sequence_options.push(SequenceOptions::MinValue(None));
14401        }
14402        //[ MAXVALUE maxvalue | NO MAXVALUE ]
14403        if self.parse_keywords(&[Keyword::MAXVALUE]) {
14404            sequence_options.push(SequenceOptions::MaxValue(Some(self.parse_number()?)));
14405        } else if self.parse_keywords(&[Keyword::NO, Keyword::MAXVALUE]) {
14406            sequence_options.push(SequenceOptions::MaxValue(None));
14407        }
14408
14409        //[ START [ WITH ] start ]
14410        if self.parse_keywords(&[Keyword::START]) {
14411            if self.parse_keywords(&[Keyword::WITH]) {
14412                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, true));
14413            } else {
14414                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, false));
14415            }
14416        }
14417        //[ CACHE cache ]
14418        if self.parse_keywords(&[Keyword::CACHE]) {
14419            sequence_options.push(SequenceOptions::Cache(self.parse_number()?));
14420        }
14421        // [ [ NO ] CYCLE ]
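        // Note: `Cycle(true)` records `NO CYCLE`, while `Cycle(false)` records a plain `CYCLE`.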
14422        if self.parse_keywords(&[Keyword::NO, Keyword::CYCLE]) {
14423            sequence_options.push(SequenceOptions::Cycle(true));
14424        } else if self.parse_keywords(&[Keyword::CYCLE]) {
14425            sequence_options.push(SequenceOptions::Cycle(false));
14426        }
14427
14428        Ok(sequence_options)
14429    }
14430
14431    /// The index of the first unprocessed token.
14432    pub fn index(&self) -> usize {
14433        self.index
14434    }
14435
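    /// Parses one element of a `WINDOW` clause, e.g. `w AS (PARTITION BY x ORDER BY y)`,
    /// or `w AS other_window` when the dialect supports named window references.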
14436    pub fn parse_named_window(&mut self) -> Result<NamedWindowDefinition, ParserError> {
14437        let ident = self.parse_identifier()?;
14438        self.expect_keyword_is(Keyword::AS)?;
14439
14440        let window_expr = if self.consume_token(&Token::LParen) {
14441            NamedWindowExpr::WindowSpec(self.parse_window_spec()?)
14442        } else if self.dialect.supports_window_clause_named_window_reference() {
14443            NamedWindowExpr::NamedWindow(self.parse_identifier()?)
14444        } else {
14445            return self.expected("(", self.peek_token());
14446        };
14447
14448        Ok(NamedWindowDefinition(ident, window_expr))
14449    }
14450
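    /// Parses the remainder of a `CREATE [OR ALTER] PROCEDURE` statement: the procedure
    /// name, an optional parameter list, and an `AS BEGIN ... END` body.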
14451    pub fn parse_create_procedure(&mut self, or_alter: bool) -> Result<Statement, ParserError> {
14452        let name = self.parse_object_name(false)?;
14453        let params = self.parse_optional_procedure_parameters()?;
14454        self.expect_keyword_is(Keyword::AS)?;
14455        self.expect_keyword_is(Keyword::BEGIN)?;
14456        let statements = self.parse_statements()?;
14457        self.expect_keyword_is(Keyword::END)?;
14458        Ok(Statement::CreateProcedure {
14459            name,
14460            or_alter,
14461            params,
14462            body: statements,
14463        })
14464    }
14465
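    /// Parses a window specification body; the caller is expected to have consumed the
    /// opening `(`, and this method consumes the closing `)`.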
14466    pub fn parse_window_spec(&mut self) -> Result<WindowSpec, ParserError> {
14467        let window_name = match self.peek_token().token {
14468            Token::Word(word) if word.keyword == Keyword::NoKeyword => {
14469                self.parse_optional_indent()?
14470            }
14471            _ => None,
14472        };
14473
14474        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
14475            self.parse_comma_separated(Parser::parse_expr)?
14476        } else {
14477            vec![]
14478        };
14479        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
14480            self.parse_comma_separated(Parser::parse_order_by_expr)?
14481        } else {
14482            vec![]
14483        };
14484
14485        let window_frame = if !self.consume_token(&Token::RParen) {
14486            let window_frame = self.parse_window_frame()?;
14487            self.expect_token(&Token::RParen)?;
14488            Some(window_frame)
14489        } else {
14490            None
14491        };
14492        Ok(WindowSpec {
14493            window_name,
14494            partition_by,
14495            order_by,
14496            window_frame,
14497        })
14498    }
14499
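    /// Parses the remainder of a `CREATE TYPE <name> AS ...` statement: either
    /// `AS ENUM (...)` (delegated to [`Self::parse_create_type_enum`]) or a composite
    /// representation with an optional, possibly empty, parenthesized attribute list.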
14500    pub fn parse_create_type(&mut self) -> Result<Statement, ParserError> {
14501        let name = self.parse_object_name(false)?;
14502        self.expect_keyword_is(Keyword::AS)?;
14503
14504        if self.parse_keyword(Keyword::ENUM) {
14505            return self.parse_create_type_enum(name);
14506        }
14507
14508        let mut attributes = vec![];
14509        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
14510            return Ok(Statement::CreateType {
14511                name,
14512                representation: UserDefinedTypeRepresentation::Composite { attributes },
14513            });
14514        }
14515
14516        loop {
14517            let attr_name = self.parse_identifier()?;
14518            let attr_data_type = self.parse_data_type()?;
14519            let attr_collation = if self.parse_keyword(Keyword::COLLATE) {
14520                Some(self.parse_object_name(false)?)
14521            } else {
14522                None
14523            };
14524            attributes.push(UserDefinedTypeCompositeAttributeDef {
14525                name: attr_name,
14526                data_type: attr_data_type,
14527                collation: attr_collation,
14528            });
14529            let comma = self.consume_token(&Token::Comma);
14530            if self.consume_token(&Token::RParen) {
14531                // allow a trailing comma
14532                break;
14533            } else if !comma {
14534                return self.expected("',' or ')' after attribute definition", self.peek_token());
14535            }
14536        }
14537
14538        Ok(Statement::CreateType {
14539            name,
14540            representation: UserDefinedTypeRepresentation::Composite { attributes },
14541        })
14542    }
14543
14544    /// Parse remainder of `CREATE TYPE AS ENUM` statement (see [Statement::CreateType] and [Self::parse_create_type])
14545    ///
14546    /// See [PostgreSQL](https://www.postgresql.org/docs/current/sql-createtype.html)
14547    pub fn parse_create_type_enum(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
14548        self.expect_token(&Token::LParen)?;
14549        let labels = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
14550        self.expect_token(&Token::RParen)?;
14551
14552        Ok(Statement::CreateType {
14553            name,
14554            representation: UserDefinedTypeRepresentation::Enum { labels },
14555        })
14556    }
14557
14558    fn parse_parenthesized_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
14559        self.expect_token(&Token::LParen)?;
14560        let partitions = self.parse_comma_separated(|p| p.parse_identifier())?;
14561        self.expect_token(&Token::RParen)?;
14562        Ok(partitions)
14563    }
14564
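    /// Parses the MySQL-style column position clause, `FIRST` or `AFTER <column>`;
    /// returns `None` when the clause is absent or the dialect does not use it.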
14565    fn parse_column_position(&mut self) -> Result<Option<MySQLColumnPosition>, ParserError> {
14566        if dialect_of!(self is MySqlDialect | GenericDialect) {
14567            if self.parse_keyword(Keyword::FIRST) {
14568                Ok(Some(MySQLColumnPosition::First))
14569            } else if self.parse_keyword(Keyword::AFTER) {
14570                let ident = self.parse_identifier()?;
14571                Ok(Some(MySQLColumnPosition::After(ident)))
14572            } else {
14573                Ok(None)
14574            }
14575        } else {
14576            Ok(None)
14577        }
14578    }
14579
14580    /// Consume the parser and return its underlying token buffer
14581    pub fn into_tokens(self) -> Vec<TokenWithSpan> {
14582        self.tokens
14583    }
14584
14585    /// Returns true if the next keyword indicates a subquery (i.e. SELECT or WITH) without advancing the parser
14586    fn peek_sub_query(&mut self) -> bool {
14587        if self
14588            .parse_one_of_keywords(&[Keyword::SELECT, Keyword::WITH])
14589            .is_some()
14590        {
14591            self.prev_token();
14592            return true;
14593        }
14594        false
14595    }
14596
14597    pub(crate) fn parse_show_stmt_options(&mut self) -> Result<ShowStatementOptions, ParserError> {
14598        let show_in;
14599        let mut filter_position = None;
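        // Dialects differ on whether the filter (e.g. `LIKE`/`WHERE`) comes before or
        // after the `IN`/`FROM` clause; record which position it appeared in.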
14600        if self.dialect.supports_show_like_before_in() {
14601            if let Some(filter) = self.parse_show_statement_filter()? {
14602                filter_position = Some(ShowStatementFilterPosition::Infix(filter));
14603            }
14604            show_in = self.maybe_parse_show_stmt_in()?;
14605        } else {
14606            show_in = self.maybe_parse_show_stmt_in()?;
14607            if let Some(filter) = self.parse_show_statement_filter()? {
14608                filter_position = Some(ShowStatementFilterPosition::Suffix(filter));
14609            }
14610        }
14611        let starts_with = self.maybe_parse_show_stmt_starts_with()?;
14612        let limit = self.maybe_parse_show_stmt_limit()?;
14613        let from = self.maybe_parse_show_stmt_from()?;
14614        Ok(ShowStatementOptions {
14615            filter_position,
14616            show_in,
14617            starts_with,
14618            limit,
14619            limit_from: from,
14620        })
14621    }
14622
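    /// Parses the optional `{FROM | IN} [ACCOUNT | DATABASE | SCHEMA | TABLE | VIEW] [<name>]`
    /// portion of a `SHOW` statement, returning `Ok(None)` when neither `FROM` nor `IN` follows.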
14623    fn maybe_parse_show_stmt_in(&mut self) -> Result<Option<ShowStatementIn>, ParserError> {
14624        let clause = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) {
14625            Some(Keyword::FROM) => ShowStatementInClause::FROM,
14626            Some(Keyword::IN) => ShowStatementInClause::IN,
14627            None => return Ok(None),
14628            _ => return self.expected("FROM or IN", self.peek_token()),
14629        };
14630
14631        let (parent_type, parent_name) = match self.parse_one_of_keywords(&[
14632            Keyword::ACCOUNT,
14633            Keyword::DATABASE,
14634            Keyword::SCHEMA,
14635            Keyword::TABLE,
14636            Keyword::VIEW,
14637        ]) {
14638            // If one of these keywords comes next, there is no parent name
14639            Some(Keyword::DATABASE)
14640                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
14641                    || self.peek_keyword(Keyword::LIMIT) =>
14642            {
14643                (Some(ShowStatementInParentType::Database), None)
14644            }
14645            Some(Keyword::SCHEMA)
14646                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
14647                    || self.peek_keyword(Keyword::LIMIT) =>
14648            {
14649                (Some(ShowStatementInParentType::Schema), None)
14650            }
14651            Some(parent_kw) => {
14652                // The parent name is still optional here (for example
14653                // `SHOW TABLES IN ACCOUNT`), so parsing the object name
14654                // may fail simply because the statement ends.
14655                let parent_name = self.maybe_parse(|p| p.parse_object_name(false))?;
14656                match parent_kw {
14657                    Keyword::ACCOUNT => (Some(ShowStatementInParentType::Account), parent_name),
14658                    Keyword::DATABASE => (Some(ShowStatementInParentType::Database), parent_name),
14659                    Keyword::SCHEMA => (Some(ShowStatementInParentType::Schema), parent_name),
14660                    Keyword::TABLE => (Some(ShowStatementInParentType::Table), parent_name),
14661                    Keyword::VIEW => (Some(ShowStatementInParentType::View), parent_name),
14662                    _ => {
14663                        return self.expected(
14664                            "one of ACCOUNT, DATABASE, SCHEMA, TABLE or VIEW",
14665                            self.peek_token(),
14666                        )
14667                    }
14668                }
14669            }
14670            None => {
14671                // Parse the MySQL-style `FROM tbl_name FROM db_name` form,
14672                // which is equivalent to `FROM db_name.tbl_name`
14673                let mut parent_name = self.parse_object_name(false)?;
14674                if self
14675                    .parse_one_of_keywords(&[Keyword::FROM, Keyword::IN])
14676                    .is_some()
14677                {
14678                    parent_name
14679                        .0
14680                        .insert(0, ObjectNamePart::Identifier(self.parse_identifier()?));
14681                }
14682                (None, Some(parent_name))
14683            }
14684        };
14685
14686        Ok(Some(ShowStatementIn {
14687            clause,
14688            parent_type,
14689            parent_name,
14690        }))
14691    }
14692
14693    fn maybe_parse_show_stmt_starts_with(&mut self) -> Result<Option<Value>, ParserError> {
14694        if self.parse_keywords(&[Keyword::STARTS, Keyword::WITH]) {
14695            Ok(Some(self.parse_value()?.value))
14696        } else {
14697            Ok(None)
14698        }
14699    }
14700
14701    fn maybe_parse_show_stmt_limit(&mut self) -> Result<Option<Expr>, ParserError> {
14702        if self.parse_keyword(Keyword::LIMIT) {
14703            Ok(self.parse_limit()?)
14704        } else {
14705            Ok(None)
14706        }
14707    }
14708
14709    fn maybe_parse_show_stmt_from(&mut self) -> Result<Option<Value>, ParserError> {
14710        if self.parse_keyword(Keyword::FROM) {
14711            Ok(Some(self.parse_value()?.value))
14712        } else {
14713            Ok(None)
14714        }
14715    }
14716}
14717
14718impl Word {
14719    #[deprecated(since = "0.54.0", note = "please use `into_ident` instead")]
14720    pub fn to_ident(&self, span: Span) -> Ident {
14721        Ident {
14722            value: self.value.clone(),
14723            quote_style: self.quote_style,
14724            span,
14725        }
14726    }
14727
14728    /// Convert this word into an [`Ident`] identifier
14729    pub fn into_ident(self, span: Span) -> Ident {
14730        Ident {
14731            value: self.value,
14732            quote_style: self.quote_style,
14733            span,
14734        }
14735    }
14736}
14737
14738#[cfg(test)]
14739mod tests {
14740    use crate::test_utils::{all_dialects, TestedDialects};
14741
14742    use super::*;
14743
14744    #[test]
14745    fn test_prev_index() {
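        // Exercises prev_token()/next_token() round-trips, including backing up
        // after EOF has been reached.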
14746        let sql = "SELECT version";
14747        all_dialects().run_parser_method(sql, |parser| {
14748            assert_eq!(parser.peek_token(), Token::make_keyword("SELECT"));
14749            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
14750            parser.prev_token();
14751            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
14752            assert_eq!(parser.next_token(), Token::make_word("version", None));
14753            parser.prev_token();
14754            assert_eq!(parser.peek_token(), Token::make_word("version", None));
14755            assert_eq!(parser.next_token(), Token::make_word("version", None));
14756            assert_eq!(parser.peek_token(), Token::EOF);
14757            parser.prev_token();
14758            assert_eq!(parser.next_token(), Token::make_word("version", None));
14759            assert_eq!(parser.next_token(), Token::EOF);
14760            assert_eq!(parser.next_token(), Token::EOF);
14761            parser.prev_token();
14762        });
14763    }
14764
14765    #[test]
14766    fn test_peek_tokens() {
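        // peek_tokens() yields a fixed-size array of upcoming tokens without advancing
        // the parser, padding with EOF past the end of the input.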
14767        all_dialects().run_parser_method("SELECT foo AS bar FROM baz", |parser| {
14768            assert!(matches!(
14769                parser.peek_tokens(),
14770                [Token::Word(Word {
14771                    keyword: Keyword::SELECT,
14772                    ..
14773                })]
14774            ));
14775
14776            assert!(matches!(
14777                parser.peek_tokens(),
14778                [
14779                    Token::Word(Word {
14780                        keyword: Keyword::SELECT,
14781                        ..
14782                    }),
14783                    Token::Word(_),
14784                    Token::Word(Word {
14785                        keyword: Keyword::AS,
14786                        ..
14787                    }),
14788                ]
14789            ));
14790
14791            for _ in 0..4 {
14792                parser.next_token();
14793            }
14794
14795            assert!(matches!(
14796                parser.peek_tokens(),
14797                [
14798                    Token::Word(Word {
14799                        keyword: Keyword::FROM,
14800                        ..
14801                    }),
14802                    Token::Word(_),
14803                    Token::EOF,
14804                    Token::EOF,
14805                ]
14806            ))
14807        })
14808    }
14809
14810    #[cfg(test)]
14811    mod test_parse_data_type {
14812        use crate::ast::{
14813            CharLengthUnits, CharacterLength, DataType, ExactNumberInfo, ObjectName, TimezoneInfo,
14814        };
14815        use crate::dialect::{AnsiDialect, GenericDialect};
14816        use crate::test_utils::TestedDialects;
14817
14818        macro_rules! test_parse_data_type {
14819            ($dialect:expr, $input:expr, $expected_type:expr $(,)?) => {{
14820                $dialect.run_parser_method(&*$input, |parser| {
14821                    let data_type = parser.parse_data_type().unwrap();
14822                    assert_eq!($expected_type, data_type);
14823                    assert_eq!($input.to_string(), data_type.to_string());
14824                });
14825            }};
14826        }
14827
14828        #[test]
14829        fn test_ansi_character_string_types() {
14830            // Character string types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#character-string-type>
14831            let dialect =
14832                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
14833
14834            test_parse_data_type!(dialect, "CHARACTER", DataType::Character(None));
14835
14836            test_parse_data_type!(
14837                dialect,
14838                "CHARACTER(20)",
14839                DataType::Character(Some(CharacterLength::IntegerLength {
14840                    length: 20,
14841                    unit: None
14842                }))
14843            );
14844
14845            test_parse_data_type!(
14846                dialect,
14847                "CHARACTER(20 CHARACTERS)",
14848                DataType::Character(Some(CharacterLength::IntegerLength {
14849                    length: 20,
14850                    unit: Some(CharLengthUnits::Characters)
14851                }))
14852            );
14853
14854            test_parse_data_type!(
14855                dialect,
14856                "CHARACTER(20 OCTETS)",
14857                DataType::Character(Some(CharacterLength::IntegerLength {
14858                    length: 20,
14859                    unit: Some(CharLengthUnits::Octets)
14860                }))
14861            );
14862
14863            test_parse_data_type!(dialect, "CHAR", DataType::Char(None));
14864
14865            test_parse_data_type!(
14866                dialect,
14867                "CHAR(20)",
14868                DataType::Char(Some(CharacterLength::IntegerLength {
14869                    length: 20,
14870                    unit: None
14871                }))
14872            );
14873
14874            test_parse_data_type!(
14875                dialect,
14876                "CHAR(20 CHARACTERS)",
14877                DataType::Char(Some(CharacterLength::IntegerLength {
14878                    length: 20,
14879                    unit: Some(CharLengthUnits::Characters)
14880                }))
14881            );
14882
14883            test_parse_data_type!(
14884                dialect,
14885                "CHAR(20 OCTETS)",
14886                DataType::Char(Some(CharacterLength::IntegerLength {
14887                    length: 20,
14888                    unit: Some(CharLengthUnits::Octets)
14889                }))
14890            );
14891
14892            test_parse_data_type!(
14893                dialect,
14894                "CHARACTER VARYING(20)",
14895                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
14896                    length: 20,
14897                    unit: None
14898                }))
14899            );
14900
14901            test_parse_data_type!(
14902                dialect,
14903                "CHARACTER VARYING(20 CHARACTERS)",
14904                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
14905                    length: 20,
14906                    unit: Some(CharLengthUnits::Characters)
14907                }))
14908            );
14909
14910            test_parse_data_type!(
14911                dialect,
14912                "CHARACTER VARYING(20 OCTETS)",
14913                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
14914                    length: 20,
14915                    unit: Some(CharLengthUnits::Octets)
14916                }))
14917            );
14918
14919            test_parse_data_type!(
14920                dialect,
14921                "CHAR VARYING(20)",
14922                DataType::CharVarying(Some(CharacterLength::IntegerLength {
14923                    length: 20,
14924                    unit: None
14925                }))
14926            );
14927
14928            test_parse_data_type!(
14929                dialect,
14930                "CHAR VARYING(20 CHARACTERS)",
14931                DataType::CharVarying(Some(CharacterLength::IntegerLength {
14932                    length: 20,
14933                    unit: Some(CharLengthUnits::Characters)
14934                }))
14935            );
14936
14937            test_parse_data_type!(
14938                dialect,
14939                "CHAR VARYING(20 OCTETS)",
14940                DataType::CharVarying(Some(CharacterLength::IntegerLength {
14941                    length: 20,
14942                    unit: Some(CharLengthUnits::Octets)
14943                }))
14944            );
14945
14946            test_parse_data_type!(
14947                dialect,
14948                "VARCHAR(20)",
14949                DataType::Varchar(Some(CharacterLength::IntegerLength {
14950                    length: 20,
14951                    unit: None
14952                }))
14953            );
14954        }
14955
14956        #[test]
14957        fn test_ansi_character_large_object_types() {
14958            // Character large object types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#character-large-object-length>
14959            let dialect =
14960                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
14961
14962            test_parse_data_type!(
14963                dialect,
14964                "CHARACTER LARGE OBJECT",
14965                DataType::CharacterLargeObject(None)
14966            );
14967            test_parse_data_type!(
14968                dialect,
14969                "CHARACTER LARGE OBJECT(20)",
14970                DataType::CharacterLargeObject(Some(20))
14971            );
14972
14973            test_parse_data_type!(
14974                dialect,
14975                "CHAR LARGE OBJECT",
14976                DataType::CharLargeObject(None)
14977            );
14978            test_parse_data_type!(
14979                dialect,
14980                "CHAR LARGE OBJECT(20)",
14981                DataType::CharLargeObject(Some(20))
14982            );
14983
14984            test_parse_data_type!(dialect, "CLOB", DataType::Clob(None));
14985            test_parse_data_type!(dialect, "CLOB(20)", DataType::Clob(Some(20)));
14986        }
14987
14988        #[test]
14989        fn test_parse_custom_types() {
14990            let dialect =
14991                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
14992
14993            test_parse_data_type!(
14994                dialect,
14995                "GEOMETRY",
14996                DataType::Custom(ObjectName::from(vec!["GEOMETRY".into()]), vec![])
14997            );
14998
14999            test_parse_data_type!(
15000                dialect,
15001                "GEOMETRY(POINT)",
15002                DataType::Custom(
15003                    ObjectName::from(vec!["GEOMETRY".into()]),
15004                    vec!["POINT".to_string()]
15005                )
15006            );
15007
15008            test_parse_data_type!(
15009                dialect,
15010                "GEOMETRY(POINT, 4326)",
15011                DataType::Custom(
15012                    ObjectName::from(vec!["GEOMETRY".into()]),
15013                    vec!["POINT".to_string(), "4326".to_string()]
15014                )
15015            );
15016        }
15017
15018        #[test]
15019        fn test_ansi_exact_numeric_types() {
15020            // Exact numeric types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#exact-numeric-type>
15021            let dialect =
15022                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
15023
15024            test_parse_data_type!(dialect, "NUMERIC", DataType::Numeric(ExactNumberInfo::None));
15025
15026            test_parse_data_type!(
15027                dialect,
15028                "NUMERIC(2)",
15029                DataType::Numeric(ExactNumberInfo::Precision(2))
15030            );
15031
15032            test_parse_data_type!(
15033                dialect,
15034                "NUMERIC(2,10)",
15035                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(2, 10))
15036            );
15037
15038            test_parse_data_type!(dialect, "DECIMAL", DataType::Decimal(ExactNumberInfo::None));
15039
15040            test_parse_data_type!(
15041                dialect,
15042                "DECIMAL(2)",
15043                DataType::Decimal(ExactNumberInfo::Precision(2))
15044            );
15045
15046            test_parse_data_type!(
15047                dialect,
15048                "DECIMAL(2,10)",
15049                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(2, 10))
15050            );
15051
15052            test_parse_data_type!(dialect, "DEC", DataType::Dec(ExactNumberInfo::None));
15053
15054            test_parse_data_type!(
15055                dialect,
15056                "DEC(2)",
15057                DataType::Dec(ExactNumberInfo::Precision(2))
15058            );
15059
15060            test_parse_data_type!(
15061                dialect,
15062                "DEC(2,10)",
15063                DataType::Dec(ExactNumberInfo::PrecisionAndScale(2, 10))
15064            );
15065        }
15066
15067        #[test]
15068        fn test_ansi_date_type() {
15069            // Datetime types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#datetime-type>
15070            let dialect =
15071                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
15072
15073            test_parse_data_type!(dialect, "DATE", DataType::Date);
15074
15075            test_parse_data_type!(dialect, "TIME", DataType::Time(None, TimezoneInfo::None));
15076
15077            test_parse_data_type!(
15078                dialect,
15079                "TIME(6)",
15080                DataType::Time(Some(6), TimezoneInfo::None)
15081            );
15082
15083            test_parse_data_type!(
15084                dialect,
15085                "TIME WITH TIME ZONE",
15086                DataType::Time(None, TimezoneInfo::WithTimeZone)
15087            );
15088
15089            test_parse_data_type!(
15090                dialect,
15091                "TIME(6) WITH TIME ZONE",
15092                DataType::Time(Some(6), TimezoneInfo::WithTimeZone)
15093            );
15094
15095            test_parse_data_type!(
15096                dialect,
15097                "TIME WITHOUT TIME ZONE",
15098                DataType::Time(None, TimezoneInfo::WithoutTimeZone)
15099            );
15100
15101            test_parse_data_type!(
15102                dialect,
15103                "TIME(6) WITHOUT TIME ZONE",
15104                DataType::Time(Some(6), TimezoneInfo::WithoutTimeZone)
15105            );
15106
15107            test_parse_data_type!(
15108                dialect,
15109                "TIMESTAMP",
15110                DataType::Timestamp(None, TimezoneInfo::None)
15111            );
15112
15113            test_parse_data_type!(
15114                dialect,
15115                "TIMESTAMP(22)",
15116                DataType::Timestamp(Some(22), TimezoneInfo::None)
15117            );
15118
15119            test_parse_data_type!(
15120                dialect,
15121                "TIMESTAMP(22) WITH TIME ZONE",
15122                DataType::Timestamp(Some(22), TimezoneInfo::WithTimeZone)
15123            );
15124
15125            test_parse_data_type!(
15126                dialect,
15127                "TIMESTAMP(33) WITHOUT TIME ZONE",
15128                DataType::Timestamp(Some(33), TimezoneInfo::WithoutTimeZone)
15129            );
15130        }
15131    }
15132
15133    #[test]
15134    fn test_parse_schema_name() {
15135        // The parsed schema name should serialize back to exactly the input string,
        // so the macro checks both the parsed structure and its round-trip display
15136        macro_rules! test_parse_schema_name {
15137            ($input:expr, $expected_name:expr $(,)?) => {{
15138                all_dialects().run_parser_method(&*$input, |parser| {
15139                    let schema_name = parser.parse_schema_name().unwrap();
15140                    // Validate that the structure is the same as expected
15141                    assert_eq!(schema_name, $expected_name);
15142                    // Validate that the input and the expected structure serialization are the same
15143                    assert_eq!(schema_name.to_string(), $input.to_string());
15144                });
15145            }};
15146        }
15147
15148        let dummy_name = ObjectName::from(vec![Ident::new("dummy_name")]);
15149        let dummy_authorization = Ident::new("dummy_authorization");
15150
15151        test_parse_schema_name!(
15152            format!("{dummy_name}"),
15153            SchemaName::Simple(dummy_name.clone())
15154        );
15155
15156        test_parse_schema_name!(
15157            format!("AUTHORIZATION {dummy_authorization}"),
15158            SchemaName::UnnamedAuthorization(dummy_authorization.clone()),
15159        );
15160        test_parse_schema_name!(
15161            format!("{dummy_name} AUTHORIZATION {dummy_authorization}"),
15162            SchemaName::NamedAuthorization(dummy_name.clone(), dummy_authorization.clone()),
15163        );
15164    }
15165
15166    #[test]
15167    fn mysql_parse_index_table_constraint() {
15168        macro_rules! test_parse_table_constraint {
15169            ($dialect:expr, $input:expr, $expected:expr $(,)?) => {{
15170                $dialect.run_parser_method(&*$input, |parser| {
15171                    let constraint = parser.parse_optional_table_constraint().unwrap().unwrap();
15172                    // Validate that the structure is the same as expected
15173                    assert_eq!(constraint, $expected);
15174                    // Validate that the input and the expected structure serialization are the same
15175                    assert_eq!(constraint.to_string(), $input.to_string());
15176                });
15177            }};
15178        }
15179
15180        let dialect =
15181            TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(MySqlDialect {})]);
15182
15183        test_parse_table_constraint!(
15184            dialect,
15185            "INDEX (c1)",
15186            TableConstraint::Index {
15187                display_as_key: false,
15188                name: None,
15189                index_type: None,
15190                columns: vec![Ident::new("c1")],
15191            }
15192        );
15193
15194        test_parse_table_constraint!(
15195            dialect,
15196            "KEY (c1)",
15197            TableConstraint::Index {
15198                display_as_key: true,
15199                name: None,
15200                index_type: None,
15201                columns: vec![Ident::new("c1")],
15202            }
15203        );
15204
15205        test_parse_table_constraint!(
15206            dialect,
15207            "INDEX 'index' (c1, c2)",
15208            TableConstraint::Index {
15209                display_as_key: false,
15210                name: Some(Ident::with_quote('\'', "index")),
15211                index_type: None,
15212                columns: vec![Ident::new("c1"), Ident::new("c2")],
15213            }
15214        );
15215
15216        test_parse_table_constraint!(
15217            dialect,
15218            "INDEX USING BTREE (c1)",
15219            TableConstraint::Index {
15220                display_as_key: false,
15221                name: None,
15222                index_type: Some(IndexType::BTree),
15223                columns: vec![Ident::new("c1")],
15224            }
15225        );
15226
15227        test_parse_table_constraint!(
15228            dialect,
15229            "INDEX USING HASH (c1)",
15230            TableConstraint::Index {
15231                display_as_key: false,
15232                name: None,
15233                index_type: Some(IndexType::Hash),
15234                columns: vec![Ident::new("c1")],
15235            }
15236        );
15237
15238        test_parse_table_constraint!(
15239            dialect,
15240            "INDEX idx_name USING BTREE (c1)",
15241            TableConstraint::Index {
15242                display_as_key: false,
15243                name: Some(Ident::new("idx_name")),
15244                index_type: Some(IndexType::BTree),
15245                columns: vec![Ident::new("c1")],
15246            }
15247        );
15248
15249        test_parse_table_constraint!(
15250            dialect,
15251            "INDEX idx_name USING HASH (c1)",
15252            TableConstraint::Index {
15253                display_as_key: false,
15254                name: Some(Ident::new("idx_name")),
15255                index_type: Some(IndexType::Hash),
15256                columns: vec![Ident::new("c1")],
15257            }
15258        );
15259    }
15260
15261    #[test]
15262    fn test_tokenizer_error_loc() {
15263        let sql = "foo '";
15264        let ast = Parser::parse_sql(&GenericDialect, sql);
15265        assert_eq!(
15266            ast,
15267            Err(ParserError::TokenizerError(
15268                "Unterminated string literal at Line: 1, Column: 5".to_string()
15269            ))
15270        );
15271    }
15272
15273    #[test]
15274    fn test_parser_error_loc() {
15275        let sql = "SELECT this is a syntax error";
15276        let ast = Parser::parse_sql(&GenericDialect, sql);
15277        assert_eq!(
15278            ast,
15279            Err(ParserError::ParserError(
15280                "Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS, found: a at Line: 1, Column: 16"
15281                    .to_string()
15282            ))
15283        );
15284    }
15285
15286    #[test]
15287    fn test_nested_explain_error() {
15288        let sql = "EXPLAIN EXPLAIN SELECT 1";
15289        let ast = Parser::parse_sql(&GenericDialect, sql);
15290        assert_eq!(
15291            ast,
15292            Err(ParserError::ParserError(
15293                "Explain must be root of the plan".to_string()
15294            ))
15295        );
15296    }
15297
15298    #[test]
15299    fn test_parse_multipart_identifier_positive() {
15300        let dialect = TestedDialects::new(vec![Box::new(GenericDialect {})]);
15301
15302        // parse multipart with quotes
15303        let expected = vec![
15304            Ident {
15305                value: "CATALOG".to_string(),
15306                quote_style: None,
15307                span: Span::empty(),
15308            },
15309            Ident {
15310                value: "F(o)o. \"bar".to_string(),
15311                quote_style: Some('"'),
15312                span: Span::empty(),
15313            },
15314            Ident {
15315                value: "table".to_string(),
15316                quote_style: None,
15317                span: Span::empty(),
15318            },
15319        ];
15320        dialect.run_parser_method(r#"CATALOG."F(o)o. ""bar".table"#, |parser| {
15321            let actual = parser.parse_multipart_identifier().unwrap();
15322            assert_eq!(expected, actual);
15323        });
15324
15325        // allow whitespace between ident parts
15326        let expected = vec![
15327            Ident {
15328                value: "CATALOG".to_string(),
15329                quote_style: None,
15330                span: Span::empty(),
15331            },
15332            Ident {
15333                value: "table".to_string(),
15334                quote_style: None,
15335                span: Span::empty(),
15336            },
15337        ];
15338        dialect.run_parser_method("CATALOG . table", |parser| {
15339            let actual = parser.parse_multipart_identifier().unwrap();
15340            assert_eq!(expected, actual);
15341        });
15342    }
15343
15344    #[test]
15345    fn test_parse_multipart_identifier_negative() {
15346        macro_rules! test_parse_multipart_identifier_error {
15347            ($input:expr, $expected_err:expr $(,)?) => {{
15348                all_dialects().run_parser_method(&*$input, |parser| {
15349                    let actual_err = parser.parse_multipart_identifier().unwrap_err();
15350                    assert_eq!(actual_err.to_string(), $expected_err);
15351                });
15352            }};
15353        }
15354
15355        test_parse_multipart_identifier_error!(
15356            "",
15357            "sql parser error: Empty input when parsing identifier",
15358        );
15359
15360        test_parse_multipart_identifier_error!(
15361            "*schema.table",
15362            "sql parser error: Unexpected token in identifier: *",
15363        );
15364
15365        test_parse_multipart_identifier_error!(
15366            "schema.table*",
15367            "sql parser error: Unexpected token in identifier: *",
15368        );
15369
15370        test_parse_multipart_identifier_error!(
15371            "schema.table.",
15372            "sql parser error: Trailing period in identifier",
15373        );
15374
15375        test_parse_multipart_identifier_error!(
15376            "schema.*",
15377            "sql parser error: Unexpected token following period in identifier: *",
15378        );
15379    }
15380
15381    #[test]
15382    fn test_mysql_partition_selection() {
15383        let sql = "SELECT * FROM employees PARTITION (p0, p2)";
15384        let expected = vec!["p0", "p2"];
15385
15386        let ast: Vec<Statement> = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
15387        assert_eq!(ast.len(), 1);
15388        if let Statement::Query(v) = &ast[0] {
15389            if let SetExpr::Select(select) = &*v.body {
15390                assert_eq!(select.from.len(), 1);
15391                let from: &TableWithJoins = &select.from[0];
15392                let table_factor = &from.relation;
15393                if let TableFactor::Table { partitions, .. } = table_factor {
15394                    let actual: Vec<&str> = partitions
15395                        .iter()
15396                        .map(|ident| ident.value.as_str())
15397                        .collect();
15398                    assert_eq!(expected, actual);
15399                }
15400            }
15401        } else {
15402            panic!("failed to parse MySQL partition selection");
15403        }
15404    }
15405
15406    #[test]
15407    fn test_replace_into_placeholders() {
15408        let sql = "REPLACE INTO t (a) VALUES (&a)";
15409
15410        assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
15411    }
15412
15413    #[test]
15414    fn test_replace_into_set_placeholder() {
15415        let sql = "REPLACE INTO t SET ?";
15416
15417        assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
15418    }
15419
15420    #[test]
15421    fn test_replace_incomplete() {
15422        let sql = r#"REPLACE"#;
15423
15424        assert!(Parser::parse_sql(&MySqlDialect {}, sql).is_err());
15425    }
15426}