1#[cfg(not(feature = "std"))]
16use alloc::{
17 boxed::Box,
18 format,
19 string::{String, ToString},
20 vec,
21 vec::Vec,
22};
23use core::{
24 fmt::{self, Display},
25 str::FromStr,
26};
27use helpers::attached_token::AttachedToken;
28
29use log::debug;
30
31use recursion::RecursionCounter;
32use IsLateral::*;
33use IsOptional::*;
34
35use crate::ast::*;
36use crate::ast::{
37 comments,
38 helpers::{
39 key_value_options::{
40 KeyValueOption, KeyValueOptionKind, KeyValueOptions, KeyValueOptionsDelimiter,
41 },
42 stmt_create_table::{CreateTableBuilder, CreateTableConfiguration},
43 },
44};
45use crate::dialect::*;
46use crate::keywords::{Keyword, ALL_KEYWORDS};
47use crate::tokenizer::*;
48use sqlparser::parser::ParserState::ColumnDefinition;
49
/// Errors produced while parsing a SQL statement.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ParserError {
    /// The underlying tokenizer failed; the payload is the rendered message.
    TokenizerError(String),
    /// The token stream did not match the expected grammar.
    ParserError(String),
    /// Nesting exceeded the parser's recursion limit.
    RecursionLimitExceeded,
}
60
// Build an `Err(ParserError::ParserError(..))` whose message is `$MSG`
// followed directly by `$loc` (typically a source `Location`, so the
// rendered error points at the offending token).
macro_rules! parser_err {
    ($MSG:expr, $loc:expr) => {
        Err(ParserError::ParserError(format!("{}{}", $MSG, $loc)))
    };
}
67
68mod alter;
69mod merge;
70
#[cfg(feature = "std")]
mod recursion {
    use std::cell::Cell;
    use std::rc::Rc;

    use super::ParserError;

    /// Tracks the remaining recursion depth available to the parser.
    ///
    /// The budget is shared via `Rc<Cell<_>>` so the guard returned by
    /// [`RecursionCounter::try_decrease`] can restore one level on drop.
    pub(crate) struct RecursionCounter {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl RecursionCounter {
        /// Create a counter permitting `remaining_depth` nested calls.
        pub fn new(remaining_depth: usize) -> Self {
            Self {
                remaining_depth: Rc::new(Cell::new(remaining_depth)),
            }
        }

        /// Consume one level of depth.
        ///
        /// On success, returns a [`DepthGuard`] that gives the level back
        /// when dropped; when the budget is exhausted, returns
        /// [`ParserError::RecursionLimitExceeded`].
        pub fn try_decrease(&self) -> Result<DepthGuard, ParserError> {
            match self.remaining_depth.get().checked_sub(1) {
                None => Err(ParserError::RecursionLimitExceeded),
                Some(decremented) => {
                    self.remaining_depth.set(decremented);
                    Ok(DepthGuard::new(Rc::clone(&self.remaining_depth)))
                }
            }
        }
    }

    /// RAII guard that restores one unit of recursion depth on drop.
    pub struct DepthGuard {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl DepthGuard {
        fn new(remaining_depth: Rc<Cell<usize>>) -> Self {
            Self { remaining_depth }
        }
    }

    impl Drop for DepthGuard {
        fn drop(&mut self) {
            // Hand the consumed level back to the shared counter.
            let restored = self.remaining_depth.get() + 1;
            self.remaining_depth.set(restored);
        }
    }
}
137
#[cfg(not(feature = "std"))]
mod recursion {
    /// No-op stand-in for the `std` recursion counter: in `no_std` builds
    /// depth tracking is disabled entirely, so `try_decrease` never fails.
    pub(crate) struct RecursionCounter {}

    impl RecursionCounter {
        /// The depth argument is accepted for API parity but ignored.
        pub fn new(_remaining_depth: usize) -> Self {
            Self {}
        }
        /// Always succeeds; no depth is tracked in `no_std` builds.
        pub fn try_decrease(&self) -> Result<DepthGuard, super::ParserError> {
            Ok(DepthGuard {})
        }
    }

    /// Zero-sized stand-in for the `std` depth guard.
    pub struct DepthGuard {}
}
158
/// Marks whether a grammar element must be present (`Mandatory`) or may be
/// omitted (`Optional`) while parsing.
#[derive(PartialEq, Eq)]
pub enum IsOptional {
    Optional,
    Mandatory,
}
167
/// Marks whether a derived table was introduced with the `LATERAL` keyword.
pub enum IsLateral {
    Lateral,
    NotLateral,
}
175
/// The result of parsing something that may be a wildcard: a plain
/// expression, a qualified wildcard (`alias.*`), or a bare `*`.
pub enum WildcardExpr {
    Expr(Expr),
    QualifiedWildcard(ObjectName),
    Wildcard,
}
185
impl From<TokenizerError> for ParserError {
    /// Convert a tokenizer failure into a parser error, preserving the
    /// rendered message so `?` can be used across the tokenize boundary.
    fn from(e: TokenizerError) -> Self {
        ParserError::TokenizerError(e.to_string())
    }
}
191
192impl fmt::Display for ParserError {
193 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
194 write!(
195 f,
196 "sql parser error: {}",
197 match self {
198 ParserError::TokenizerError(s) => s,
199 ParserError::ParserError(s) => s,
200 ParserError::RecursionLimitExceeded => "recursion limit exceeded",
201 }
202 )
203 }
204}
205
// Marker impl: lets `ParserError` participate in `?`/`Box<dyn Error>`
// chains; `core::error::Error` keeps this available in `no_std` builds too.
impl core::error::Error for ParserError {}
207
/// Default number of nested levels the parser will descend before
/// returning [`ParserError::RecursionLimitExceeded`].
const DEFAULT_REMAINING_DEPTH: usize = 50;

/// Sentinel token handed out when reading past the end of the token
/// stream; its span deliberately points at line 0, column 0.
const EOF_TOKEN: TokenWithSpan = TokenWithSpan {
    token: Token::EOF,
    span: Span {
        start: Location { line: 0, column: 0 },
        end: Location { line: 0, column: 0 },
    },
};
219
// Newtype flag recording whether a trailing closing bracket was matched.
// NOTE(review): its consumers are outside this chunk — confirm intended
// semantics (e.g. nested generic/array bracket disambiguation) at call sites.
struct MatchedTrailingBracket(bool);

impl From<bool> for MatchedTrailingBracket {
    fn from(value: bool) -> Self {
        Self(value)
    }
}
239
/// Options that change how the [`Parser`] interprets its input.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ParserOptions {
    /// Accept a trailing comma at the end of comma-separated lists.
    pub trailing_commas: bool,
    /// Unescape string and identifier literals during tokenizing.
    pub unescape: bool,
    /// Require a semicolon between statements in multi-statement input.
    pub require_semicolon_stmt_delimiter: bool,
}
252
impl Default for ParserOptions {
    /// Defaults: no trailing commas, unescape literals, and require `;`
    /// between statements.
    fn default() -> Self {
        Self {
            trailing_commas: false,
            unescape: true,
            require_semicolon_stmt_delimiter: true,
        }
    }
}
262
263impl ParserOptions {
264 pub fn new() -> Self {
266 Default::default()
267 }
268
269 pub fn with_trailing_commas(mut self, trailing_commas: bool) -> Self {
281 self.trailing_commas = trailing_commas;
282 self
283 }
284
285 pub fn with_unescape(mut self, unescape: bool) -> Self {
288 self.unescape = unescape;
289 self
290 }
291}
292
/// Contextual parser state that alters how certain constructs are parsed.
#[derive(Copy, Clone)]
enum ParserState {
    /// Ordinary parsing.
    Normal,
    /// Inside a `CONNECT BY` clause.
    // NOTE(review): the exact keyword-handling differences are enforced at
    // usage sites outside this chunk — confirm there.
    ConnectBy,
    /// Inside a column definition; e.g. `COLLATE` is then left for the
    /// column parser instead of being consumed as an expression suffix
    /// (see `parse_subexpr`).
    ColumnDefinition,
}
308
/// SQL parser: consumes a token stream produced by the [`Tokenizer`] and
/// builds AST [`Statement`]s according to the configured [`Dialect`].
pub struct Parser<'a> {
    /// The tokens being parsed (including whitespace/comment tokens).
    tokens: Vec<TokenWithSpan>,
    /// Index of the first unprocessed token in `tokens`.
    index: usize,
    /// Current contextual state (see [`ParserState`]).
    state: ParserState,
    /// The dialect whose rules guide parsing decisions.
    dialect: &'a dyn Dialect,
    /// Additional parsing options.
    options: ParserOptions,
    /// Guards against stack overflow from deeply nested input.
    recursion_counter: RecursionCounter,
}
363
364impl<'a> Parser<'a> {
365 pub fn new(dialect: &'a dyn Dialect) -> Self {
381 Self {
382 tokens: vec![],
383 index: 0,
384 state: ParserState::Normal,
385 dialect,
386 recursion_counter: RecursionCounter::new(DEFAULT_REMAINING_DEPTH),
387 options: ParserOptions::new().with_trailing_commas(dialect.supports_trailing_commas()),
388 }
389 }
390
    /// Replace the default recursion budget with `recursion_limit`.
    ///
    /// Returns `self` for fluent chaining.
    pub fn with_recursion_limit(mut self, recursion_limit: usize) -> Self {
        self.recursion_counter = RecursionCounter::new(recursion_limit);
        self
    }
417
    /// Replace the parser's [`ParserOptions`] wholesale.
    ///
    /// Returns `self` for fluent chaining.
    pub fn with_options(mut self, options: ParserOptions) -> Self {
        self.options = options;
        self
    }
444
    /// Load a token stream (with source spans) and rewind to its start.
    pub fn with_tokens_with_locations(mut self, tokens: Vec<TokenWithSpan>) -> Self {
        self.tokens = tokens;
        self.index = 0;
        self
    }
451
452 pub fn with_tokens(self, tokens: Vec<Token>) -> Self {
454 let tokens_with_locations: Vec<TokenWithSpan> = tokens
456 .into_iter()
457 .map(|token| TokenWithSpan {
458 token,
459 span: Span::empty(),
460 })
461 .collect();
462 self.with_tokens_with_locations(tokens_with_locations)
463 }
464
    /// Tokenize `sql` using the parser's dialect and unescape setting,
    /// then load the resulting tokens.
    ///
    /// # Errors
    /// Returns [`ParserError::TokenizerError`] if tokenizing fails.
    pub fn try_with_sql(self, sql: &str) -> Result<Self, ParserError> {
        debug!("Parsing sql '{sql}'...");
        let tokens = Tokenizer::new(self.dialect, sql)
            .with_unescape(self.options.unescape)
            .tokenize_with_location()?;
        Ok(self.with_tokens_with_locations(tokens))
    }
478
    /// Parse zero or more semicolon-separated statements until EOF.
    ///
    /// # Errors
    /// Fails if a statement is malformed, or if two statements are not
    /// separated by a semicolon while the options require one.
    pub fn parse_statements(&mut self) -> Result<Vec<Statement>, ParserError> {
        let mut stmts = Vec::new();
        let mut expecting_statement_delimiter = false;
        loop {
            // Swallow empty statements (consecutive semicolons).
            while self.consume_token(&Token::SemiColon) {
                expecting_statement_delimiter = false;
            }

            // Some configurations allow statements to follow one another
            // with no separator at all.
            if !self.options.require_semicolon_stmt_delimiter {
                expecting_statement_delimiter = false;
            }

            match &self.peek_token_ref().token {
                Token::EOF => break,

                // `END` closes an enclosing BEGIN...END block: stop here
                // without consuming it so the caller can handle it.
                Token::Word(word) => {
                    if expecting_statement_delimiter && word.keyword == Keyword::END {
                        break;
                    }
                }
                _ => {}
            }

            if expecting_statement_delimiter {
                return self.expected_ref("end of statement", self.peek_token_ref());
            }

            let statement = self.parse_statement()?;
            stmts.push(statement);
            expecting_statement_delimiter = true;
        }
        Ok(stmts)
    }
529
    /// Convenience entry point: tokenize and parse `sql` into a list of
    /// statements in one call.
    pub fn parse_sql(dialect: &dyn Dialect, sql: &str) -> Result<Vec<Statement>, ParserError> {
        Parser::new(dialect).try_with_sql(sql)?.parse_statements()
    }
548
549 pub fn parse_sql_with_comments(
554 dialect: &'a dyn Dialect,
555 sql: &str,
556 ) -> Result<(Vec<Statement>, comments::Comments), ParserError> {
557 let mut p = Parser::new(dialect).try_with_sql(sql)?;
558 p.parse_statements().map(|stmts| (stmts, p.into_comments()))
559 }
560
561 fn into_comments(self) -> comments::Comments {
563 let mut comments = comments::Comments::default();
564 for t in self.tokens.into_iter() {
565 match t.token {
566 Token::Whitespace(Whitespace::SingleLineComment { comment, prefix }) => {
567 comments.offer(comments::CommentWithSpan {
568 comment: comments::Comment::SingleLine {
569 content: comment,
570 prefix,
571 },
572 span: t.span,
573 });
574 }
575 Token::Whitespace(Whitespace::MultiLineComment(comment)) => {
576 comments.offer(comments::CommentWithSpan {
577 comment: comments::Comment::MultiLine(comment),
578 span: t.span,
579 });
580 }
581 _ => {}
582 }
583 }
584 comments
585 }
586
    /// Parse a single top-level statement (SELECT, INSERT, CREATE, ...),
    /// stopping before the statement separator, if any.
    pub fn parse_statement(&mut self) -> Result<Statement, ParserError> {
        // Each nested statement consumes one unit of recursion budget;
        // the guard restores it when dropped.
        let _guard = self.recursion_counter.try_decrease()?;

        // Allow a dialect to completely override statement parsing.
        if let Some(statement) = self.dialect.parse_statement(self) {
            return statement;
        }

        // Dispatch on the first token. Handlers that want to re-read the
        // leading keyword themselves call `prev_token()` first; others
        // receive `next_token` directly.
        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::KILL => self.parse_kill(),
                Keyword::FLUSH => self.parse_flush(),
                Keyword::DESC => self.parse_explain(DescribeAlias::Desc),
                Keyword::DESCRIBE => self.parse_explain(DescribeAlias::Describe),
                Keyword::EXPLAIN => self.parse_explain(DescribeAlias::Explain),
                Keyword::ANALYZE => self.parse_analyze().map(Into::into),
                Keyword::CASE => {
                    self.prev_token();
                    self.parse_case_stmt().map(Into::into)
                }
                Keyword::IF => {
                    self.prev_token();
                    self.parse_if_stmt().map(Into::into)
                }
                Keyword::WHILE => {
                    self.prev_token();
                    self.parse_while().map(Into::into)
                }
                Keyword::RAISE => {
                    self.prev_token();
                    self.parse_raise_stmt().map(Into::into)
                }
                Keyword::SELECT | Keyword::WITH | Keyword::VALUES | Keyword::FROM => {
                    self.prev_token();
                    self.parse_query().map(Into::into)
                }
                Keyword::TRUNCATE => self.parse_truncate().map(Into::into),
                Keyword::ATTACH => {
                    // DuckDB has its own ATTACH grammar (options list);
                    // everything else gets the SQLite-style form.
                    if dialect_of!(self is DuckDbDialect) {
                        self.parse_attach_duckdb_database()
                    } else {
                        self.parse_attach_database()
                    }
                }
                Keyword::DETACH if self.dialect.supports_detach() => {
                    self.parse_detach_duckdb_database()
                }
                Keyword::MSCK => self.parse_msck().map(Into::into),
                Keyword::CREATE => self.parse_create(),
                Keyword::CACHE => self.parse_cache_table(),
                Keyword::DROP => self.parse_drop(),
                Keyword::DISCARD => self.parse_discard(),
                Keyword::DECLARE => self.parse_declare(),
                Keyword::FETCH => self.parse_fetch_statement(),
                Keyword::DELETE => self.parse_delete(next_token),
                Keyword::INSERT => self.parse_insert(next_token),
                Keyword::REPLACE => self.parse_replace(next_token),
                Keyword::UNCACHE => self.parse_uncache_table(),
                Keyword::UPDATE => self.parse_update(next_token),
                Keyword::ALTER => self.parse_alter(),
                Keyword::CALL => self.parse_call(),
                Keyword::COPY => self.parse_copy(),
                Keyword::OPEN => {
                    self.prev_token();
                    self.parse_open()
                }
                Keyword::CLOSE => self.parse_close(),
                Keyword::SET => self.parse_set(),
                Keyword::SHOW => self.parse_show(),
                Keyword::USE => self.parse_use(),
                Keyword::GRANT => self.parse_grant().map(Into::into),
                Keyword::DENY => {
                    self.prev_token();
                    self.parse_deny()
                }
                Keyword::REVOKE => self.parse_revoke().map(Into::into),
                Keyword::START => self.parse_start_transaction(),
                Keyword::BEGIN => self.parse_begin(),
                Keyword::END => self.parse_end(),
                Keyword::SAVEPOINT => self.parse_savepoint(),
                Keyword::RELEASE => self.parse_release(),
                Keyword::COMMIT => self.parse_commit(),
                Keyword::RAISERROR => Ok(self.parse_raiserror()?),
                Keyword::THROW => {
                    self.prev_token();
                    self.parse_throw().map(Into::into)
                }
                Keyword::ROLLBACK => self.parse_rollback(),
                Keyword::ASSERT => self.parse_assert(),
                Keyword::DEALLOCATE => self.parse_deallocate(),
                Keyword::EXECUTE | Keyword::EXEC => self.parse_execute(),
                Keyword::PREPARE => self.parse_prepare(),
                Keyword::MERGE => self.parse_merge(next_token).map(Into::into),
                // LISTEN/UNLISTEN/NOTIFY and a few others are gated on
                // dialect capability flags.
                Keyword::LISTEN if self.dialect.supports_listen_notify() => self.parse_listen(),
                Keyword::UNLISTEN if self.dialect.supports_listen_notify() => self.parse_unlisten(),
                Keyword::NOTIFY if self.dialect.supports_listen_notify() => self.parse_notify(),
                Keyword::PRAGMA => self.parse_pragma(),
                Keyword::UNLOAD => {
                    self.prev_token();
                    self.parse_unload()
                }
                Keyword::RENAME => self.parse_rename(),
                Keyword::INSTALL if self.dialect.supports_install() => self.parse_install(),
                Keyword::LOAD => self.parse_load(),
                Keyword::LOCK => {
                    self.prev_token();
                    self.parse_lock_statement().map(Into::into)
                }
                Keyword::OPTIMIZE if self.dialect.supports_optimize_table() => {
                    self.parse_optimize_table()
                }
                Keyword::COMMENT if self.dialect.supports_comment_on() => self.parse_comment(),
                Keyword::PRINT => self.parse_print(),
                Keyword::WAITFOR => self.parse_waitfor(),
                Keyword::RETURN => self.parse_return(),
                Keyword::EXPORT => {
                    self.prev_token();
                    self.parse_export_data()
                }
                Keyword::VACUUM => {
                    self.prev_token();
                    self.parse_vacuum()
                }
                Keyword::RESET => self.parse_reset().map(Into::into),
                _ => self.expected("an SQL statement", next_token),
            },
            // A statement may also start with a parenthesized query.
            Token::LParen => {
                self.prev_token();
                self.parse_query().map(Into::into)
            }
            _ => self.expected("an SQL statement", next_token),
        }
    }
731
    /// Parse a procedural `CASE` statement:
    /// `CASE [operand] WHEN ... [ELSE ...] END [CASE]`.
    pub fn parse_case_stmt(&mut self) -> Result<CaseStatement, ParserError> {
        let case_token = self.expect_keyword(Keyword::CASE)?;

        // No operand before the first WHEN means a "searched" CASE.
        let match_expr = if self.peek_keyword(Keyword::WHEN) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        self.expect_keyword_is(Keyword::WHEN)?;
        let when_blocks = self.parse_keyword_separated(Keyword::WHEN, |parser| {
            parser.parse_conditional_statement_block(&[Keyword::WHEN, Keyword::ELSE, Keyword::END])
        })?;

        let else_block = if self.parse_keyword(Keyword::ELSE) {
            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
        } else {
            None
        };

        // Accept both `END` and `END CASE`; the attached token records the
        // last terminator keyword actually consumed.
        let mut end_case_token = self.expect_keyword(Keyword::END)?;
        if self.peek_keyword(Keyword::CASE) {
            end_case_token = self.expect_keyword(Keyword::CASE)?;
        }

        Ok(CaseStatement {
            case_token: AttachedToken(case_token),
            match_expr,
            when_blocks,
            else_block,
            end_case_token: AttachedToken(end_case_token),
        })
    }
768
    /// Parse a procedural `IF` statement:
    /// `IF <cond> THEN ... [ELSEIF ...]* [ELSE ...] END IF`.
    pub fn parse_if_stmt(&mut self) -> Result<IfStatement, ParserError> {
        self.expect_keyword_is(Keyword::IF)?;
        let if_block = self.parse_conditional_statement_block(&[
            Keyword::ELSE,
            Keyword::ELSEIF,
            Keyword::END,
        ])?;

        // Zero or more ELSEIF branches.
        let elseif_blocks = if self.parse_keyword(Keyword::ELSEIF) {
            self.parse_keyword_separated(Keyword::ELSEIF, |parser| {
                parser.parse_conditional_statement_block(&[
                    Keyword::ELSEIF,
                    Keyword::ELSE,
                    Keyword::END,
                ])
            })?
        } else {
            vec![]
        };

        let else_block = if self.parse_keyword(Keyword::ELSE) {
            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
        } else {
            None
        };

        // Terminator is the two-keyword sequence `END IF`.
        self.expect_keyword_is(Keyword::END)?;
        let end_token = self.expect_keyword(Keyword::IF)?;

        Ok(IfStatement {
            if_block,
            elseif_blocks,
            else_block,
            end_token: Some(AttachedToken(end_token)),
        })
    }
808
    /// Parse a `WHILE <condition> ... END` statement.
    fn parse_while(&mut self) -> Result<WhileStatement, ParserError> {
        self.expect_keyword_is(Keyword::WHILE)?;
        let while_block = self.parse_conditional_statement_block(&[Keyword::END])?;

        Ok(WhileStatement { while_block })
    }
818
    /// Parse one branch of a conditional statement (`WHEN`/`ELSEIF`/
    /// `ELSE`/`WHILE` block): an optional condition, its `THEN` token
    /// where applicable, and the branch's statements.
    fn parse_conditional_statement_block(
        &mut self,
        terminal_keywords: &[Keyword],
    ) -> Result<ConditionalStatementBlock, ParserError> {
        // The keyword that opened this block was already consumed by the
        // caller; it is the current token.
        let start_token = self.get_current_token().clone();
        let mut then_token = None;

        let condition = match &start_token.token {
            // ELSE branches carry no condition.
            Token::Word(w) if w.keyword == Keyword::ELSE => None,
            // WHILE has a condition but no THEN keyword.
            Token::Word(w) if w.keyword == Keyword::WHILE => {
                let expr = self.parse_expr()?;
                Some(expr)
            }
            // WHEN/ELSEIF branches: `<condition> THEN <statements>`.
            _ => {
                let expr = self.parse_expr()?;
                then_token = Some(AttachedToken(self.expect_keyword(Keyword::THEN)?));
                Some(expr)
            }
        };

        let conditional_statements = self.parse_conditional_statements(terminal_keywords)?;

        Ok(ConditionalStatementBlock {
            start_token: AttachedToken(start_token),
            condition,
            then_token,
            conditional_statements,
        })
    }
855
856 pub(crate) fn parse_conditional_statements(
859 &mut self,
860 terminal_keywords: &[Keyword],
861 ) -> Result<ConditionalStatements, ParserError> {
862 let conditional_statements = if self.peek_keyword(Keyword::BEGIN) {
863 let begin_token = self.expect_keyword(Keyword::BEGIN)?;
864 let statements = self.parse_statement_list(terminal_keywords)?;
865 let end_token = self.expect_keyword(Keyword::END)?;
866
867 ConditionalStatements::BeginEnd(BeginEndStatements {
868 begin_token: AttachedToken(begin_token),
869 statements,
870 end_token: AttachedToken(end_token),
871 })
872 } else {
873 ConditionalStatements::Sequence {
874 statements: self.parse_statement_list(terminal_keywords)?,
875 }
876 };
877 Ok(conditional_statements)
878 }
879
    /// Parse a `RAISE` statement: `RAISE USING MESSAGE = <expr>`,
    /// `RAISE <expr>`, or a bare `RAISE`.
    pub fn parse_raise_stmt(&mut self) -> Result<RaiseStatement, ParserError> {
        self.expect_keyword_is(Keyword::RAISE)?;

        let value = if self.parse_keywords(&[Keyword::USING, Keyword::MESSAGE]) {
            self.expect_token(&Token::Eq)?;
            Some(RaiseStatementValue::UsingMessage(self.parse_expr()?))
        } else {
            // Optional bare expression; `maybe_parse` rewinds and yields
            // `None` when nothing parseable follows.
            self.maybe_parse(|parser| parser.parse_expr().map(RaiseStatementValue::Expr))?
        };

        Ok(RaiseStatement { value })
    }
895 pub fn parse_comment(&mut self) -> Result<Statement, ParserError> {
899 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
900
901 self.expect_keyword_is(Keyword::ON)?;
902 let token = self.next_token();
903
904 let (object_type, object_name) = match token.token {
905 Token::Word(w) if w.keyword == Keyword::COLLATION => {
906 (CommentObject::Collation, self.parse_object_name(false)?)
907 }
908 Token::Word(w) if w.keyword == Keyword::COLUMN => {
909 (CommentObject::Column, self.parse_object_name(false)?)
910 }
911 Token::Word(w) if w.keyword == Keyword::DATABASE => {
912 (CommentObject::Database, self.parse_object_name(false)?)
913 }
914 Token::Word(w) if w.keyword == Keyword::DOMAIN => {
915 (CommentObject::Domain, self.parse_object_name(false)?)
916 }
917 Token::Word(w) if w.keyword == Keyword::EXTENSION => {
918 (CommentObject::Extension, self.parse_object_name(false)?)
919 }
920 Token::Word(w) if w.keyword == Keyword::FUNCTION => {
921 (CommentObject::Function, self.parse_object_name(false)?)
922 }
923 Token::Word(w) if w.keyword == Keyword::INDEX => {
924 (CommentObject::Index, self.parse_object_name(false)?)
925 }
926 Token::Word(w) if w.keyword == Keyword::MATERIALIZED => {
927 self.expect_keyword_is(Keyword::VIEW)?;
928 (
929 CommentObject::MaterializedView,
930 self.parse_object_name(false)?,
931 )
932 }
933 Token::Word(w) if w.keyword == Keyword::PROCEDURE => {
934 (CommentObject::Procedure, self.parse_object_name(false)?)
935 }
936 Token::Word(w) if w.keyword == Keyword::ROLE => {
937 (CommentObject::Role, self.parse_object_name(false)?)
938 }
939 Token::Word(w) if w.keyword == Keyword::SCHEMA => {
940 (CommentObject::Schema, self.parse_object_name(false)?)
941 }
942 Token::Word(w) if w.keyword == Keyword::SEQUENCE => {
943 (CommentObject::Sequence, self.parse_object_name(false)?)
944 }
945 Token::Word(w) if w.keyword == Keyword::TABLE => {
946 (CommentObject::Table, self.parse_object_name(false)?)
947 }
948 Token::Word(w) if w.keyword == Keyword::TYPE => {
949 (CommentObject::Type, self.parse_object_name(false)?)
950 }
951 Token::Word(w) if w.keyword == Keyword::USER => {
952 (CommentObject::User, self.parse_object_name(false)?)
953 }
954 Token::Word(w) if w.keyword == Keyword::VIEW => {
955 (CommentObject::View, self.parse_object_name(false)?)
956 }
957 _ => self.expected("comment object_type", token)?,
958 };
959
960 self.expect_keyword_is(Keyword::IS)?;
961 let comment = if self.parse_keyword(Keyword::NULL) {
962 None
963 } else {
964 Some(self.parse_literal_string()?)
965 };
966 Ok(Statement::Comment {
967 object_type,
968 object_name,
969 comment,
970 if_exists,
971 })
972 }
973
    /// Parse a MySQL `FLUSH` statement (MySQL and Generic dialects only).
    pub fn parse_flush(&mut self) -> Result<Statement, ParserError> {
        let mut channel = None;
        let mut tables: Vec<ObjectName> = vec![];
        let mut read_lock = false;
        let mut export = false;

        if !dialect_of!(self is MySqlDialect | GenericDialect) {
            return parser_err!(
                "Unsupported statement FLUSH",
                self.peek_token_ref().span.start
            );
        }

        // Optional replication modifier before the object type.
        let location = if self.parse_keyword(Keyword::NO_WRITE_TO_BINLOG) {
            Some(FlushLocation::NoWriteToBinlog)
        } else if self.parse_keyword(Keyword::LOCAL) {
            Some(FlushLocation::Local)
        } else {
            None
        };

        // Multi-keyword variants (e.g. `BINARY LOGS`) must be tried before
        // the bare `LOGS` fallback.
        let object_type = if self.parse_keywords(&[Keyword::BINARY, Keyword::LOGS]) {
            FlushType::BinaryLogs
        } else if self.parse_keywords(&[Keyword::ENGINE, Keyword::LOGS]) {
            FlushType::EngineLogs
        } else if self.parse_keywords(&[Keyword::ERROR, Keyword::LOGS]) {
            FlushType::ErrorLogs
        } else if self.parse_keywords(&[Keyword::GENERAL, Keyword::LOGS]) {
            FlushType::GeneralLogs
        } else if self.parse_keywords(&[Keyword::HOSTS]) {
            FlushType::Hosts
        } else if self.parse_keyword(Keyword::PRIVILEGES) {
            FlushType::Privileges
        } else if self.parse_keyword(Keyword::OPTIMIZER_COSTS) {
            FlushType::OptimizerCosts
        } else if self.parse_keywords(&[Keyword::RELAY, Keyword::LOGS]) {
            // `RELAY LOGS [FOR CHANNEL <channel>]`
            if self.parse_keywords(&[Keyword::FOR, Keyword::CHANNEL]) {
                channel = Some(self.parse_object_name(false).unwrap().to_string());
            }
            FlushType::RelayLogs
        } else if self.parse_keywords(&[Keyword::SLOW, Keyword::LOGS]) {
            FlushType::SlowLogs
        } else if self.parse_keyword(Keyword::STATUS) {
            FlushType::Status
        } else if self.parse_keyword(Keyword::USER_RESOURCES) {
            FlushType::UserResources
        } else if self.parse_keywords(&[Keyword::LOGS]) {
            FlushType::Logs
        } else if self.parse_keywords(&[Keyword::TABLES]) {
            // `TABLES [t1, t2, ...] [WITH READ LOCK] [FOR EXPORT]`; loop
            // until a non-word token (separator/EOF) is reached.
            loop {
                let next_token = self.next_token();
                match &next_token.token {
                    Token::Word(w) => match w.keyword {
                        Keyword::WITH => {
                            read_lock = self.parse_keywords(&[Keyword::READ, Keyword::LOCK]);
                        }
                        Keyword::FOR => {
                            export = self.parse_keyword(Keyword::EXPORT);
                        }
                        Keyword::NoKeyword => {
                            // A plain identifier starts the table list.
                            self.prev_token();
                            tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
                        }
                        _ => {}
                    },
                    _ => {
                        break;
                    }
                }
            }

            FlushType::Tables
        } else {
            return self.expected_ref(
                "BINARY LOGS, ENGINE LOGS, ERROR LOGS, GENERAL LOGS, HOSTS, LOGS, PRIVILEGES, OPTIMIZER_COSTS,\
                 RELAY LOGS [FOR CHANNEL channel], SLOW LOGS, STATUS, USER_RESOURCES",
                self.peek_token_ref(),
            );
        };

        Ok(Statement::Flush {
            object_type,
            location,
            channel,
            read_lock,
            export,
            tables,
        })
    }
1064
    /// Parse a Hive `MSCK [REPAIR] TABLE <name> [{ADD|DROP|SYNC} PARTITIONS]`
    /// statement.
    pub fn parse_msck(&mut self) -> Result<Msck, ParserError> {
        let repair = self.parse_keyword(Keyword::REPAIR);
        self.expect_keyword_is(Keyword::TABLE)?;
        let table_name = self.parse_object_name(false)?;
        // The partition-action clause is optional: `maybe_parse` rewinds
        // when it is absent or malformed.
        let partition_action = self
            .maybe_parse(|parser| {
                let pa = match parser.parse_one_of_keywords(&[
                    Keyword::ADD,
                    Keyword::DROP,
                    Keyword::SYNC,
                ]) {
                    Some(Keyword::ADD) => Some(AddDropSync::ADD),
                    Some(Keyword::DROP) => Some(AddDropSync::DROP),
                    Some(Keyword::SYNC) => Some(AddDropSync::SYNC),
                    _ => None,
                };
                parser.expect_keyword_is(Keyword::PARTITIONS)?;
                Ok(pa)
            })?
            .unwrap_or_default();
        Ok(Msck {
            repair,
            table_name,
            partition_action,
        })
    }
1092
    /// Parse a `TRUNCATE [TABLE] [IF EXISTS] <names> ...` statement,
    /// including Postgres identity/cascade options and an optional
    /// `ON CLUSTER` clause.
    pub fn parse_truncate(&mut self) -> Result<Truncate, ParserError> {
        let table = self.parse_keyword(Keyword::TABLE);
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);

        let table_names = self.parse_comma_separated(|p| {
            // Postgres: `ONLY <name>` excludes descendant tables, while a
            // trailing `*` explicitly includes them.
            let only = p.parse_keyword(Keyword::ONLY);
            let name = p.parse_object_name(false)?;
            let has_asterisk = p.consume_token(&Token::Mul);
            Ok(TruncateTableTarget {
                name,
                only,
                has_asterisk,
            })
        })?;

        // Hive-style `PARTITION (<exprs>)` clause.
        let mut partitions = None;
        if self.parse_keyword(Keyword::PARTITION) {
            self.expect_token(&Token::LParen)?;
            partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
            self.expect_token(&Token::RParen)?;
        }

        let mut identity = None;
        let mut cascade = None;

        if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
            identity = if self.parse_keywords(&[Keyword::RESTART, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Restart)
            } else if self.parse_keywords(&[Keyword::CONTINUE, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Continue)
            } else {
                None
            };

            cascade = self.parse_cascade_option();
        };

        let on_cluster = self.parse_optional_on_cluster()?;

        Ok(Truncate {
            table_names,
            partitions,
            table,
            if_exists,
            identity,
            cascade,
            on_cluster,
        })
    }
1143
1144 fn parse_cascade_option(&mut self) -> Option<CascadeOption> {
1145 if self.parse_keyword(Keyword::CASCADE) {
1146 Some(CascadeOption::Cascade)
1147 } else if self.parse_keyword(Keyword::RESTRICT) {
1148 Some(CascadeOption::Restrict)
1149 } else {
1150 None
1151 }
1152 }
1153
    /// Parse the parenthesized option list of a DuckDB `ATTACH`, e.g.
    /// `(READ_ONLY true, TYPE sqlite)`; returns an empty list when no
    /// opening parenthesis follows.
    pub fn parse_attach_duckdb_database_options(
        &mut self,
    ) -> Result<Vec<AttachDuckDBDatabaseOption>, ParserError> {
        if !self.consume_token(&Token::LParen) {
            return Ok(vec![]);
        }

        let mut options = vec![];
        loop {
            if self.parse_keyword(Keyword::READ_ONLY) {
                // The boolean value is optional: a bare READ_ONLY is valid.
                let boolean = if self.parse_keyword(Keyword::TRUE) {
                    Some(true)
                } else if self.parse_keyword(Keyword::FALSE) {
                    Some(false)
                } else {
                    None
                };
                options.push(AttachDuckDBDatabaseOption::ReadOnly(boolean));
            } else if self.parse_keyword(Keyword::TYPE) {
                let ident = self.parse_identifier()?;
                options.push(AttachDuckDBDatabaseOption::Type(ident));
            } else {
                return self
                    .expected_ref("expected one of: ), READ_ONLY, TYPE", self.peek_token_ref());
            };

            // After each option: `)` ends the list, `,` continues it.
            if self.consume_token(&Token::RParen) {
                return Ok(options);
            } else if self.consume_token(&Token::Comma) {
                continue;
            } else {
                return self.expected_ref("expected one of: ')', ','", self.peek_token_ref());
            }
        }
    }
1190
    /// Parse a DuckDB `ATTACH [DATABASE] [IF NOT EXISTS] <path> [AS alias]
    /// [(options)]` statement.
    pub fn parse_attach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let database_path = self.parse_identifier()?;
        let database_alias = if self.parse_keyword(Keyword::AS) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        let attach_options = self.parse_attach_duckdb_database_options()?;
        Ok(Statement::AttachDuckDBDatabase {
            if_not_exists,
            database,
            database_path,
            database_alias,
            attach_options,
        })
    }
1211
    /// Parse a DuckDB `DETACH [DATABASE] [IF EXISTS] <alias>` statement.
    pub fn parse_detach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let database_alias = self.parse_identifier()?;
        Ok(Statement::DetachDuckDBDatabase {
            if_exists,
            database,
            database_alias,
        })
    }
1223
    /// Parse a SQLite-style `ATTACH [DATABASE] <file-expr> AS <schema>`
    /// statement.
    pub fn parse_attach_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let database_file_name = self.parse_expr()?;
        self.expect_keyword_is(Keyword::AS)?;
        let schema_name = self.parse_identifier()?;
        Ok(Statement::AttachDatabase {
            database,
            schema_name,
            database_file_name,
        })
    }
1236
    /// Parse an `ANALYZE [TABLE] [<name>] ...` statement with its optional
    /// PARTITION / FOR COLUMNS / CACHE METADATA / NOSCAN / COMPUTE
    /// STATISTICS clauses (clauses may appear in any order).
    pub fn parse_analyze(&mut self) -> Result<Analyze, ParserError> {
        let has_table_keyword = self.parse_keyword(Keyword::TABLE);
        let table_name = self.maybe_parse(|parser| parser.parse_object_name(false))?;
        let mut for_columns = false;
        let mut cache_metadata = false;
        let mut noscan = false;
        let mut partitions = None;
        let mut compute_statistics = false;
        let mut columns = vec![];

        // `ANALYZE <table> (<columns>)` short form.
        if table_name.is_some() && self.consume_token(&Token::LParen) {
            columns = self.parse_comma_separated(|p| p.parse_identifier())?;
            self.expect_token(&Token::RParen)?;
        }

        // Consume trailing clauses until none matches.
        loop {
            match self.parse_one_of_keywords(&[
                Keyword::PARTITION,
                Keyword::FOR,
                Keyword::CACHE,
                Keyword::NOSCAN,
                Keyword::COMPUTE,
            ]) {
                Some(Keyword::PARTITION) => {
                    self.expect_token(&Token::LParen)?;
                    partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
                    self.expect_token(&Token::RParen)?;
                }
                Some(Keyword::NOSCAN) => noscan = true,
                Some(Keyword::FOR) => {
                    self.expect_keyword_is(Keyword::COLUMNS)?;

                    // The column list after FOR COLUMNS is optional.
                    columns = self
                        .maybe_parse(|parser| {
                            parser.parse_comma_separated(|p| p.parse_identifier())
                        })?
                        .unwrap_or_default();
                    for_columns = true
                }
                Some(Keyword::CACHE) => {
                    self.expect_keyword_is(Keyword::METADATA)?;
                    cache_metadata = true
                }
                Some(Keyword::COMPUTE) => {
                    self.expect_keyword_is(Keyword::STATISTICS)?;
                    compute_statistics = true
                }
                _ => break,
            }
        }

        Ok(Analyze {
            has_table_keyword,
            table_name,
            for_columns,
            columns,
            partitions,
            cache_metadata,
            noscan,
            compute_statistics,
        })
    }
1301
    /// Parse an expression that may be a wildcard: `*`, `alias.*`
    /// (optionally multi-part qualified), or `(*)`; anything else falls
    /// back to ordinary expression parsing after rewinding.
    pub fn parse_wildcard_expr(&mut self) -> Result<Expr, ParserError> {
        // Remember where we started so we can backtrack on fallback.
        let index = self.index;

        let next_token = self.next_token();
        match next_token.token {
            t @ (Token::Word(_) | Token::SingleQuotedString(_)) => {
                if self.peek_token_ref().token == Token::Period {
                    let mut id_parts: Vec<Ident> = vec![match t {
                        Token::Word(w) => w.into_ident(next_token.span),
                        Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
                        // Unreachable: outer pattern restricts `t` above.
                        _ => {
                            return Err(ParserError::ParserError(
                                "Internal parser error: unexpected token type".to_string(),
                            ))
                        }
                    }];

                    // Collect the remaining `.part` segments; a final `.*`
                    // yields a qualified wildcard.
                    while self.consume_token(&Token::Period) {
                        let next_token = self.next_token();
                        match next_token.token {
                            Token::Word(w) => id_parts.push(w.into_ident(next_token.span)),
                            Token::SingleQuotedString(s) => {
                                id_parts.push(Ident::with_quote('\'', s))
                            }
                            Token::Placeholder(s) => {
                                id_parts.push(Ident::new(s))
                            }
                            Token::Mul => {
                                return Ok(Expr::QualifiedWildcard(
                                    ObjectName::from(id_parts),
                                    AttachedToken(next_token),
                                ));
                            }
                            _ => {
                                return self
                                    .expected("an identifier or a '*' after '.'", next_token);
                            }
                        }
                    }
                }
            }
            Token::Mul => {
                return Ok(Expr::Wildcard(AttachedToken(next_token)));
            }
            // `(*)` — a parenthesized bare wildcard.
            Token::LParen => {
                let [maybe_mul, maybe_rparen] = self.peek_tokens_ref();
                if maybe_mul.token == Token::Mul && maybe_rparen.token == Token::RParen {
                    let mul_token = self.next_token();
                    self.next_token();
                    return Ok(Expr::Wildcard(AttachedToken(mul_token)));
                }
            }
            _ => (),
        };

        // Not a wildcard after all: rewind and parse a plain expression.
        self.index = index;
        self.parse_expr()
    }
1365
    /// Parse a new expression, starting from the lowest precedence level.
    pub fn parse_expr(&mut self) -> Result<Expr, ParserError> {
        self.parse_subexpr(self.dialect.prec_unknown())
    }
1370
    /// Parse an expression with an optional alias and optional ASC/DESC
    /// ordering.
    pub fn parse_expr_with_alias_and_order_by(
        &mut self,
    ) -> Result<ExprWithAliasAndOrderBy, ParserError> {
        let expr = self.parse_expr()?;

        // Reject implicit aliases that are actually the start of the next
        // clause (ASC/DESC/GROUP); explicit `AS` aliases are always taken.
        fn validator(explicit: bool, kw: &Keyword, _parser: &mut Parser) -> bool {
            explicit || !&[Keyword::ASC, Keyword::DESC, Keyword::GROUP].contains(kw)
        }
        let alias = self.parse_optional_alias_inner(None, validator)?;
        let order_by = OrderByOptions {
            asc: self.parse_asc_desc(),
            nulls_first: None,
        };
        Ok(ExprWithAliasAndOrderBy {
            expr: ExprWithAlias { expr, alias },
            order_by,
        })
    }
1390
    /// Parse an expression whose operators bind tighter than `precedence`
    /// (precedence-climbing), consuming infix operators in a loop.
    #[cfg_attr(feature = "recursive-protection", recursive::recursive)]
    pub fn parse_subexpr(&mut self, precedence: u8) -> Result<Expr, ParserError> {
        // Each nested subexpression consumes one unit of recursion budget.
        let _guard = self.recursion_counter.try_decrease()?;
        debug!("parsing expr");
        let mut expr = self.parse_prefix()?;

        expr = self.parse_compound_expr(expr, vec![])?;

        // Inside a column definition, COLLATE belongs to the column
        // options, so it is not consumed as an expression suffix here.
        if !self.in_column_definition_state() && self.parse_keyword(Keyword::COLLATE) {
            expr = Expr::Collate {
                expr: Box::new(expr),
                collation: self.parse_object_name(false)?,
            };
        }

        debug!("prefix: {expr:?}");
        loop {
            let next_precedence = self.get_next_precedence()?;
            debug!("next precedence: {next_precedence:?}");

            if precedence >= next_precedence {
                break;
            }

            // `.` chains are handled by `parse_compound_expr` above, not
            // as infix operators.
            if Token::Period == self.peek_token_ref().token {
                break;
            }

            expr = self.parse_infix(expr, next_precedence)?;
        }
        Ok(expr)
    }
1429
1430 pub fn parse_assert(&mut self) -> Result<Statement, ParserError> {
1432 let condition = self.parse_expr()?;
1433 let message = if self.parse_keyword(Keyword::AS) {
1434 Some(self.parse_expr()?)
1435 } else {
1436 None
1437 };
1438
1439 Ok(Statement::Assert { condition, message })
1440 }
1441
1442 pub fn parse_savepoint(&mut self) -> Result<Statement, ParserError> {
1444 let name = self.parse_identifier()?;
1445 Ok(Statement::Savepoint { name })
1446 }
1447
1448 pub fn parse_release(&mut self) -> Result<Statement, ParserError> {
1450 let _ = self.parse_keyword(Keyword::SAVEPOINT);
1451 let name = self.parse_identifier()?;
1452
1453 Ok(Statement::ReleaseSavepoint { name })
1454 }
1455
1456 pub fn parse_listen(&mut self) -> Result<Statement, ParserError> {
1458 let channel = self.parse_identifier()?;
1459 Ok(Statement::LISTEN { channel })
1460 }
1461
1462 pub fn parse_unlisten(&mut self) -> Result<Statement, ParserError> {
1464 let channel = if self.consume_token(&Token::Mul) {
1465 Ident::new(Expr::Wildcard(AttachedToken::empty()).to_string())
1466 } else {
1467 match self.parse_identifier() {
1468 Ok(expr) => expr,
1469 _ => {
1470 self.prev_token();
1471 return self.expected_ref("wildcard or identifier", self.peek_token_ref());
1472 }
1473 }
1474 };
1475 Ok(Statement::UNLISTEN { channel })
1476 }
1477
1478 pub fn parse_notify(&mut self) -> Result<Statement, ParserError> {
1480 let channel = self.parse_identifier()?;
1481 let payload = if self.consume_token(&Token::Comma) {
1482 Some(self.parse_literal_string()?)
1483 } else {
1484 None
1485 };
1486 Ok(Statement::NOTIFY { channel, payload })
1487 }
1488
1489 pub fn parse_rename(&mut self) -> Result<Statement, ParserError> {
1491 if self.peek_keyword(Keyword::TABLE) {
1492 self.expect_keyword(Keyword::TABLE)?;
1493 let rename_tables = self.parse_comma_separated(|parser| {
1494 let old_name = parser.parse_object_name(false)?;
1495 parser.expect_keyword(Keyword::TO)?;
1496 let new_name = parser.parse_object_name(false)?;
1497
1498 Ok(RenameTable { old_name, new_name })
1499 })?;
1500 Ok(rename_tables.into())
1501 } else {
1502 self.expected_ref("KEYWORD `TABLE` after RENAME", self.peek_token_ref())
1503 }
1504 }
1505
    /// Attempt to parse a prefix expression introduced by the reserved
    /// keyword `w` (spanning `w_span`), e.g. `CASE`, `CAST`, `EXISTS`,
    /// `INTERVAL`.
    ///
    /// Returns `Ok(None)` when the keyword does not introduce a known
    /// prefix form, so the caller can fall back to identifier parsing.
    fn parse_expr_prefix_by_reserved_word(
        &mut self,
        w: &Word,
        w_span: Span,
    ) -> Result<Option<Expr>, ParserError> {
        match w.keyword {
            // Boolean literals, only for dialects that support them.
            Keyword::TRUE | Keyword::FALSE if self.dialect.supports_boolean_literals() => {
                // Step back so `parse_value` sees the keyword token itself.
                self.prev_token();
                Ok(Some(Expr::Value(self.parse_value()?)))
            }
            Keyword::NULL => {
                self.prev_token();
                Ok(Some(Expr::Value(self.parse_value()?)))
            }
            // Postgres-style niladic "functions" written without parentheses.
            Keyword::CURRENT_CATALOG
            | Keyword::CURRENT_USER
            | Keyword::SESSION_USER
            | Keyword::USER
                if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
            {
                Ok(Some(Expr::Function(Function {
                    name: ObjectName::from(vec![w.to_ident(w_span)]),
                    uses_odbc_syntax: false,
                    parameters: FunctionArguments::None,
                    args: FunctionArguments::None,
                    null_treatment: None,
                    filter: None,
                    over: None,
                    within_group: vec![],
                })))
            }
            // Date/time functions whose parentheses are optional.
            Keyword::CURRENT_TIMESTAMP
            | Keyword::CURRENT_TIME
            | Keyword::CURRENT_DATE
            | Keyword::LOCALTIME
            | Keyword::LOCALTIMESTAMP => {
                Ok(Some(self.parse_time_functions(ObjectName::from(vec![w.to_ident(w_span)]))?))
            }
            Keyword::CASE => Ok(Some(self.parse_case_expr()?)),
            Keyword::CONVERT => Ok(Some(self.parse_convert_expr(false)?)),
            Keyword::TRY_CONVERT if self.dialect.supports_try_convert() => Ok(Some(self.parse_convert_expr(true)?)),
            Keyword::CAST => Ok(Some(self.parse_cast_expr(CastKind::Cast)?)),
            Keyword::TRY_CAST => Ok(Some(self.parse_cast_expr(CastKind::TryCast)?)),
            Keyword::SAFE_CAST => Ok(Some(self.parse_cast_expr(CastKind::SafeCast)?)),
            // EXISTS as subquery predicate; Databricks requires a lookahead
            // check since it also has an `exists()` function.
            Keyword::EXISTS
                if !dialect_of!(self is DatabricksDialect)
                    || matches!(
                        self.peek_nth_token_ref(1).token,
                        Token::Word(Word {
                            keyword: Keyword::SELECT | Keyword::WITH,
                            ..
                        })
                    ) =>
            {
                Ok(Some(self.parse_exists_expr(false)?))
            }
            Keyword::EXTRACT => Ok(Some(self.parse_extract_expr()?)),
            Keyword::CEIL => Ok(Some(self.parse_ceil_floor_expr(true)?)),
            Keyword::FLOOR => Ok(Some(self.parse_ceil_floor_expr(false)?)),
            Keyword::POSITION if self.peek_token_ref().token == Token::LParen => {
                Ok(Some(self.parse_position_expr(w.to_ident(w_span))?))
            }
            Keyword::SUBSTR | Keyword::SUBSTRING => {
                self.prev_token();
                Ok(Some(self.parse_substring()?))
            }
            Keyword::OVERLAY => Ok(Some(self.parse_overlay_expr()?)),
            Keyword::TRIM => Ok(Some(self.parse_trim_expr()?)),
            Keyword::INTERVAL => Ok(Some(self.parse_interval()?)),
            // `ARRAY[...]` literal syntax.
            Keyword::ARRAY if *self.peek_token_ref() == Token::LBracket => {
                self.expect_token(&Token::LBracket)?;
                Ok(Some(self.parse_array_expr(true)?))
            }
            // `ARRAY(<subquery>)` — excluded for dialects where `array(...)`
            // is an ordinary function call.
            Keyword::ARRAY
                if self.peek_token_ref().token == Token::LParen
                    && !dialect_of!(self is ClickHouseDialect | DatabricksDialect) =>
            {
                self.expect_token(&Token::LParen)?;
                let query = self.parse_query()?;
                self.expect_token(&Token::RParen)?;
                Ok(Some(Expr::Function(Function {
                    name: ObjectName::from(vec![w.to_ident(w_span)]),
                    uses_odbc_syntax: false,
                    parameters: FunctionArguments::None,
                    args: FunctionArguments::Subquery(query),
                    filter: None,
                    null_treatment: None,
                    over: None,
                    within_group: vec![],
                })))
            }
            Keyword::NOT => Ok(Some(self.parse_not()?)),
            Keyword::MATCH if self.dialect.supports_match_against() => {
                Ok(Some(self.parse_match_against()?))
            }
            Keyword::STRUCT if self.dialect.supports_struct_literal() => {
                let struct_expr = self.parse_struct_literal()?;
                Ok(Some(struct_expr))
            }
            // `PRIOR expr` is only valid inside a CONNECT BY clause.
            Keyword::PRIOR if matches!(self.state, ParserState::ConnectBy) => {
                let expr = self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?;
                Ok(Some(Expr::Prior(Box::new(expr))))
            }
            Keyword::MAP if *self.peek_token_ref() == Token::LBrace && self.dialect.support_map_literal_syntax() => {
                Ok(Some(self.parse_duckdb_map_literal()?))
            }
            Keyword::LAMBDA if self.dialect.supports_lambda_functions() => {
                Ok(Some(self.parse_lambda_expr()?))
            }
            // Geometric typed-string literals, e.g. `POINT '(1,2)'`.
            _ if self.dialect.supports_geometric_types() => match w.keyword {
                Keyword::CIRCLE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Circle)?)),
                Keyword::BOX => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricBox)?)),
                Keyword::PATH => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricPath)?)),
                Keyword::LINE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Line)?)),
                Keyword::LSEG => Ok(Some(self.parse_geometric_type(GeometricTypeKind::LineSegment)?)),
                Keyword::POINT => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Point)?)),
                Keyword::POLYGON => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Polygon)?)),
                _ => Ok(None),
            },
            _ => Ok(None),
        }
    }
1632
1633 fn parse_expr_prefix_by_unreserved_word(
1635 &mut self,
1636 w: &Word,
1637 w_span: Span,
1638 ) -> Result<Expr, ParserError> {
1639 let is_outer_join = self.peek_outer_join_operator();
1640 match &self.peek_token_ref().token {
1641 Token::LParen if !is_outer_join => {
1642 let id_parts = vec![w.to_ident(w_span)];
1643 self.parse_function(ObjectName::from(id_parts))
1644 }
1645 Token::SingleQuotedString(_)
1647 | Token::DoubleQuotedString(_)
1648 | Token::HexStringLiteral(_)
1649 if w.value.starts_with('_') =>
1650 {
1651 Ok(Expr::Prefixed {
1652 prefix: w.to_ident(w_span),
1653 value: self.parse_introduced_string_expr()?.into(),
1654 })
1655 }
1656 Token::SingleQuotedString(_)
1658 | Token::DoubleQuotedString(_)
1659 | Token::HexStringLiteral(_)
1660 if w.value.starts_with('_') =>
1661 {
1662 Ok(Expr::Prefixed {
1663 prefix: w.to_ident(w_span),
1664 value: self.parse_introduced_string_expr()?.into(),
1665 })
1666 }
1667 Token::Arrow if self.dialect.supports_lambda_functions() => {
1671 self.expect_token(&Token::Arrow)?;
1672 Ok(Expr::Lambda(LambdaFunction {
1673 params: OneOrManyWithParens::One(LambdaFunctionParameter {
1674 name: w.to_ident(w_span),
1675 data_type: None,
1676 }),
1677 body: Box::new(self.parse_expr()?),
1678 syntax: LambdaSyntax::Arrow,
1679 }))
1680 }
1681 Token::Word(_)
1685 if self.dialect.supports_lambda_functions()
1686 && self.peek_nth_token_ref(1).token == Token::Arrow =>
1687 {
1688 let data_type = self.parse_data_type()?;
1689 self.expect_token(&Token::Arrow)?;
1690 Ok(Expr::Lambda(LambdaFunction {
1691 params: OneOrManyWithParens::One(LambdaFunctionParameter {
1692 name: w.to_ident(w_span),
1693 data_type: Some(data_type),
1694 }),
1695 body: Box::new(self.parse_expr()?),
1696 syntax: LambdaSyntax::Arrow,
1697 }))
1698 }
1699 _ => Ok(Expr::Identifier(w.to_ident(w_span))),
1700 }
1701 }
1702
1703 fn is_simple_unquoted_object_name(name: &ObjectName, expected: &str) -> bool {
1706 if let [ObjectNamePart::Identifier(ident)] = name.0.as_slice() {
1707 ident.quote_style.is_none() && ident.value.eq_ignore_ascii_case(expected)
1708 } else {
1709 false
1710 }
1711 }
1712
1713 pub fn parse_prefix(&mut self) -> Result<Expr, ParserError> {
1715 if let Some(prefix) = self.dialect.parse_prefix(self) {
1717 return prefix;
1718 }
1719
1720 let loc = self.peek_token_ref().span.start;
1737 let opt_expr = self.maybe_parse(|parser| {
1738 match parser.parse_data_type()? {
1739 DataType::Interval { .. } => parser.parse_interval(),
1740 DataType::Custom(ref name, ref modifiers)
1751 if modifiers.is_empty()
1752 && Self::is_simple_unquoted_object_name(name, "xml")
1753 && parser.dialect.supports_xml_expressions() =>
1754 {
1755 Ok(Expr::TypedString(TypedString {
1756 data_type: DataType::Custom(name.clone(), modifiers.clone()),
1757 value: parser.parse_value()?,
1758 uses_odbc_syntax: false,
1759 }))
1760 }
1761 DataType::Custom(..) => parser_err!("dummy", loc),
1762 DataType::Binary(..) if self.dialect.supports_binary_kw_as_cast() => {
1764 Ok(Expr::Cast {
1765 kind: CastKind::Cast,
1766 expr: Box::new(parser.parse_expr()?),
1767 data_type: DataType::Binary(None),
1768 array: false,
1769 format: None,
1770 })
1771 }
1772 data_type => Ok(Expr::TypedString(TypedString {
1773 data_type,
1774 value: parser.parse_value()?,
1775 uses_odbc_syntax: false,
1776 })),
1777 }
1778 })?;
1779
1780 if let Some(expr) = opt_expr {
1781 return Ok(expr);
1782 }
1783
1784 let dialect = self.dialect;
1788
1789 self.advance_token();
1790 let next_token_index = self.get_current_index();
1791 let next_token = self.get_current_token();
1792 let span = next_token.span;
1793 let expr = match &next_token.token {
1794 Token::Word(w) => {
1795 let w = w.clone();
1804 match self.try_parse(|parser| parser.parse_expr_prefix_by_reserved_word(&w, span)) {
1805 Ok(Some(expr)) => Ok(expr),
1807
1808 Ok(None) => Ok(self.parse_expr_prefix_by_unreserved_word(&w, span)?),
1810
1811 Err(e) => {
1818 if !self.dialect.is_reserved_for_identifier(w.keyword) {
1819 if let Ok(Some(expr)) = self.maybe_parse(|parser| {
1820 parser.parse_expr_prefix_by_unreserved_word(&w, span)
1821 }) {
1822 return Ok(expr);
1823 }
1824 }
1825 return Err(e);
1826 }
1827 }
1828 } Token::LBracket => self.parse_array_expr(false),
1831 tok @ Token::Minus | tok @ Token::Plus => {
1832 let op = if *tok == Token::Plus {
1833 UnaryOperator::Plus
1834 } else {
1835 UnaryOperator::Minus
1836 };
1837 Ok(Expr::UnaryOp {
1838 op,
1839 expr: Box::new(
1840 self.parse_subexpr(self.dialect.prec_value(Precedence::MulDivModOp))?,
1841 ),
1842 })
1843 }
1844 Token::ExclamationMark if dialect.supports_bang_not_operator() => Ok(Expr::UnaryOp {
1845 op: UnaryOperator::BangNot,
1846 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
1847 }),
1848 tok @ Token::DoubleExclamationMark
1849 | tok @ Token::PGSquareRoot
1850 | tok @ Token::PGCubeRoot
1851 | tok @ Token::AtSign
1852 if dialect_is!(dialect is PostgreSqlDialect) =>
1853 {
1854 let op = match tok {
1855 Token::DoubleExclamationMark => UnaryOperator::PGPrefixFactorial,
1856 Token::PGSquareRoot => UnaryOperator::PGSquareRoot,
1857 Token::PGCubeRoot => UnaryOperator::PGCubeRoot,
1858 Token::AtSign => UnaryOperator::PGAbs,
1859 _ => {
1860 return Err(ParserError::ParserError(
1861 "Internal parser error: unexpected unary operator token".to_string(),
1862 ))
1863 }
1864 };
1865 Ok(Expr::UnaryOp {
1866 op,
1867 expr: Box::new(
1868 self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1869 ),
1870 })
1871 }
1872 Token::Tilde => Ok(Expr::UnaryOp {
1873 op: UnaryOperator::BitwiseNot,
1874 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?),
1875 }),
1876 tok @ Token::Sharp
1877 | tok @ Token::AtDashAt
1878 | tok @ Token::AtAt
1879 | tok @ Token::QuestionMarkDash
1880 | tok @ Token::QuestionPipe
1881 if self.dialect.supports_geometric_types() =>
1882 {
1883 let op = match tok {
1884 Token::Sharp => UnaryOperator::Hash,
1885 Token::AtDashAt => UnaryOperator::AtDashAt,
1886 Token::AtAt => UnaryOperator::DoubleAt,
1887 Token::QuestionMarkDash => UnaryOperator::QuestionDash,
1888 Token::QuestionPipe => UnaryOperator::QuestionPipe,
1889 _ => {
1890 return Err(ParserError::ParserError(format!(
1891 "Unexpected token in unary operator parsing: {tok:?}"
1892 )))
1893 }
1894 };
1895 Ok(Expr::UnaryOp {
1896 op,
1897 expr: Box::new(
1898 self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1899 ),
1900 })
1901 }
1902 Token::EscapedStringLiteral(_) if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) =>
1903 {
1904 self.prev_token();
1905 Ok(Expr::Value(self.parse_value()?))
1906 }
1907 Token::UnicodeStringLiteral(_) => {
1908 self.prev_token();
1909 Ok(Expr::Value(self.parse_value()?))
1910 }
1911 Token::Number(_, _)
1912 | Token::SingleQuotedString(_)
1913 | Token::DoubleQuotedString(_)
1914 | Token::TripleSingleQuotedString(_)
1915 | Token::TripleDoubleQuotedString(_)
1916 | Token::DollarQuotedString(_)
1917 | Token::SingleQuotedByteStringLiteral(_)
1918 | Token::DoubleQuotedByteStringLiteral(_)
1919 | Token::TripleSingleQuotedByteStringLiteral(_)
1920 | Token::TripleDoubleQuotedByteStringLiteral(_)
1921 | Token::SingleQuotedRawStringLiteral(_)
1922 | Token::DoubleQuotedRawStringLiteral(_)
1923 | Token::TripleSingleQuotedRawStringLiteral(_)
1924 | Token::TripleDoubleQuotedRawStringLiteral(_)
1925 | Token::NationalStringLiteral(_)
1926 | Token::QuoteDelimitedStringLiteral(_)
1927 | Token::NationalQuoteDelimitedStringLiteral(_)
1928 | Token::HexStringLiteral(_) => {
1929 self.prev_token();
1930 Ok(Expr::Value(self.parse_value()?))
1931 }
1932 Token::LParen => {
1933 let expr =
1934 if let Some(expr) = self.try_parse_expr_sub_query()? {
1935 expr
1936 } else if let Some(lambda) = self.try_parse_lambda()? {
1937 return Ok(lambda);
1938 } else {
1939 let exprs = self.with_state(ParserState::Normal, |p| {
1950 p.parse_comma_separated(Parser::parse_expr)
1951 })?;
1952 match exprs.len() {
1953 0 => return Err(ParserError::ParserError(
1954 "Internal parser error: parse_comma_separated returned empty list"
1955 .to_string(),
1956 )),
1957 1 => Expr::Nested(Box::new(exprs.into_iter().next().unwrap())),
1958 _ => Expr::Tuple(exprs),
1959 }
1960 };
1961 self.expect_token(&Token::RParen)?;
1962 Ok(expr)
1963 }
1964 Token::Placeholder(_) | Token::Colon | Token::AtSign => {
1965 self.prev_token();
1966 Ok(Expr::Value(self.parse_value()?))
1967 }
1968 Token::LBrace => {
1969 self.prev_token();
1970 self.parse_lbrace_expr()
1971 }
1972 _ => self.expected_at("an expression", next_token_index),
1973 }?;
1974
1975 Ok(expr)
1976 }
1977
1978 fn parse_geometric_type(&mut self, kind: GeometricTypeKind) -> Result<Expr, ParserError> {
1979 Ok(Expr::TypedString(TypedString {
1980 data_type: DataType::GeometricType(kind),
1981 value: self.parse_value()?,
1982 uses_odbc_syntax: false,
1983 }))
1984 }
1985
    /// Parse the continuation of a compound expression rooted at `root`:
    /// `.`-separated member accesses and `[...]` subscripts, accumulated
    /// into `chain`. Also recognizes the trailing qualified wildcard `.*`
    /// and the Oracle outer-join marker `(+)`.
    pub fn parse_compound_expr(
        &mut self,
        root: Expr,
        mut chain: Vec<AccessExpr>,
    ) -> Result<Expr, ParserError> {
        // Set when a trailing `.*` is consumed (qualified wildcard).
        let mut ending_wildcard: Option<TokenWithSpan> = None;
        loop {
            if self.consume_token(&Token::Period) {
                let next_token = self.peek_token_ref();
                match &next_token.token {
                    Token::Mul => {
                        // `expr.*`: Postgres consumes it as a wildcard here;
                        // other dialects push the period back for the caller.
                        if dialect_of!(self is PostgreSqlDialect) {
                            ending_wildcard = Some(self.next_token());
                        } else {
                            self.prev_token(); }

                        break;
                    }
                    Token::SingleQuotedString(s) => {
                        // `.'quoted'` member: treated as a quoted identifier.
                        let expr =
                            Expr::Identifier(Ident::with_quote_and_span('\'', next_token.span, s));
                        chain.push(AccessExpr::Dot(expr));
                        self.advance_token(); }
                    Token::Placeholder(s) => {
                        let expr = Expr::Identifier(Ident::with_span(next_token.span, s));
                        chain.push(AccessExpr::Dot(expr));
                        self.advance_token(); }
                    _ => {
                        // Try a full sub-expression after the period (e.g. a
                        // function call); only identifier-like results are
                        // accepted as chain members.
                        let expr = self.maybe_parse(|parser| {
                            let expr = parser
                                .parse_subexpr(parser.dialect.prec_value(Precedence::Period))?;
                            match &expr {
                                Expr::CompoundFieldAccess { .. }
                                | Expr::CompoundIdentifier(_)
                                | Expr::Identifier(_)
                                | Expr::Value(_)
                                | Expr::Function(_) => Ok(expr),
                                _ => parser.expected_ref(
                                    "an identifier or value",
                                    parser.peek_token_ref(),
                                ),
                            }
                        })?;

                        match expr {
                            // Flatten nested access chains into this one.
                            Some(Expr::CompoundFieldAccess { root, access_chain }) => {
                                chain.push(AccessExpr::Dot(*root));
                                chain.extend(access_chain);
                            }
                            Some(Expr::CompoundIdentifier(parts)) => chain.extend(
                                parts.into_iter().map(Expr::Identifier).map(AccessExpr::Dot),
                            ),
                            Some(expr) => {
                                chain.push(AccessExpr::Dot(expr));
                            }
                            // Speculative parse failed: fall back to a plain
                            // identifier (errors if there is none).
                            None => {
                                chain.push(AccessExpr::Dot(Expr::Identifier(
                                    self.parse_identifier()?,
                                )));
                            }
                        }
                    }
                }
            } else if !self.dialect.supports_partiql()
                && self.peek_token_ref().token == Token::LBracket
            {
                // `expr[i][j]...` subscripts (PartiQL handles these elsewhere).
                self.parse_multi_dim_subscript(&mut chain)?;
            } else {
                break;
            }
        }

        let tok_index = self.get_current_index();
        if let Some(wildcard_token) = ending_wildcard {
            // `a.b.*` requires every part before `*` to be a plain identifier.
            if !Self::is_all_ident(&root, &chain) {
                return self
                    .expected_ref("an identifier or a '*' after '.'", self.peek_token_ref());
            };
            Ok(Expr::QualifiedWildcard(
                ObjectName::from(Self::exprs_to_idents(root, chain)?),
                AttachedToken(wildcard_token),
            ))
        } else if self.maybe_parse_outer_join_operator() {
            // Oracle-style `col(+)` marker: only plain column identifiers
            // may precede it.
            if !Self::is_all_ident(&root, &chain) {
                return self.expected_at("column identifier before (+)", tok_index);
            };
            let expr = if chain.is_empty() {
                root
            } else {
                Expr::CompoundIdentifier(Self::exprs_to_idents(root, chain)?)
            };
            Ok(Expr::OuterJoin(expr.into()))
        } else {
            Self::build_compound_expr(root, chain)
        }
    }
2115
    /// Combine `root` and its accumulated `access_chain` into a single
    /// expression, normalizing special shapes: plain compound identifiers,
    /// qualified function calls (`db.schema.func(...)`), and the Oracle
    /// outer-join marker.
    fn build_compound_expr(
        root: Expr,
        mut access_chain: Vec<AccessExpr>,
    ) -> Result<Expr, ParserError> {
        // No accesses: the root stands alone.
        if access_chain.is_empty() {
            return Ok(root);
        }

        // All parts are identifiers: `a.b.c` becomes a CompoundIdentifier.
        if Self::is_all_ident(&root, &access_chain) {
            return Ok(Expr::CompoundIdentifier(Self::exprs_to_idents(
                root,
                access_chain,
            )?));
        }

        // `ident.ident...func(...)`: fold the leading identifiers into the
        // function's qualified name.
        if matches!(root, Expr::Identifier(_))
            && matches!(
                access_chain.last(),
                Some(AccessExpr::Dot(Expr::Function(_)))
            )
            && access_chain
                .iter()
                .rev()
                .skip(1) .all(|access| matches!(access, AccessExpr::Dot(Expr::Identifier(_))))
        {
            let Some(AccessExpr::Dot(Expr::Function(mut func))) = access_chain.pop() else {
                return parser_err!("expected function expression", root.span().start);
            };

            // Prepend root + intermediate identifiers to the function name.
            let compound_func_name = [root]
                .into_iter()
                .chain(access_chain.into_iter().flat_map(|access| match access {
                    AccessExpr::Dot(expr) => Some(expr),
                    _ => None,
                }))
                .flat_map(|expr| match expr {
                    Expr::Identifier(ident) => Some(ident),
                    _ => None,
                })
                .map(ObjectNamePart::Identifier)
                .chain(func.name.0)
                .collect::<Vec<_>>();
            func.name = ObjectName(compound_func_name);

            return Ok(Expr::Function(func));
        }

        // Single `.OuterJoin(...)` access: hoist the `(+)` marker so it wraps
        // the whole compound identifier.
        if access_chain.len() == 1
            && matches!(
                access_chain.last(),
                Some(AccessExpr::Dot(Expr::OuterJoin(_)))
            )
        {
            let Some(AccessExpr::Dot(Expr::OuterJoin(inner_expr))) = access_chain.pop() else {
                return parser_err!("expected (+) expression", root.span().start);
            };

            if !Self::is_all_ident(&root, &[]) {
                return parser_err!("column identifier before (+)", root.span().start);
            };

            let token_start = root.span().start;
            let mut idents = Self::exprs_to_idents(root, vec![])?;
            match *inner_expr {
                Expr::CompoundIdentifier(suffix) => idents.extend(suffix),
                Expr::Identifier(suffix) => idents.push(suffix),
                _ => {
                    return parser_err!("column identifier before (+)", token_start);
                }
            }

            return Ok(Expr::OuterJoin(Expr::CompoundIdentifier(idents).into()));
        }

        // General case: keep the raw access chain.
        Ok(Expr::CompoundFieldAccess {
            root: Box::new(root),
            access_chain,
        })
    }
2208
2209 fn keyword_to_modifier(k: Keyword) -> Option<ContextModifier> {
2210 match k {
2211 Keyword::LOCAL => Some(ContextModifier::Local),
2212 Keyword::GLOBAL => Some(ContextModifier::Global),
2213 Keyword::SESSION => Some(ContextModifier::Session),
2214 _ => None,
2215 }
2216 }
2217
2218 fn is_all_ident(root: &Expr, fields: &[AccessExpr]) -> bool {
2220 if !matches!(root, Expr::Identifier(_)) {
2221 return false;
2222 }
2223 fields
2224 .iter()
2225 .all(|x| matches!(x, AccessExpr::Dot(Expr::Identifier(_))))
2226 }
2227
2228 fn exprs_to_idents(root: Expr, fields: Vec<AccessExpr>) -> Result<Vec<Ident>, ParserError> {
2230 let mut idents = vec![];
2231 if let Expr::Identifier(root) = root {
2232 idents.push(root);
2233 for x in fields {
2234 if let AccessExpr::Dot(Expr::Identifier(ident)) = x {
2235 idents.push(ident);
2236 } else {
2237 return parser_err!(
2238 format!("Expected identifier, found: {}", x),
2239 x.span().start
2240 );
2241 }
2242 }
2243 Ok(idents)
2244 } else {
2245 parser_err!(
2246 format!("Expected identifier, found: {}", root),
2247 root.span().start
2248 )
2249 }
2250 }
2251
2252 fn peek_outer_join_operator(&mut self) -> bool {
2254 if !self.dialect.supports_outer_join_operator() {
2255 return false;
2256 }
2257
2258 let [maybe_lparen, maybe_plus, maybe_rparen] = self.peek_tokens_ref();
2259 Token::LParen == maybe_lparen.token
2260 && Token::Plus == maybe_plus.token
2261 && Token::RParen == maybe_rparen.token
2262 }
2263
2264 fn maybe_parse_outer_join_operator(&mut self) -> bool {
2267 self.dialect.supports_outer_join_operator()
2268 && self.consume_tokens(&[Token::LParen, Token::Plus, Token::RParen])
2269 }
2270
2271 pub fn parse_utility_options(&mut self) -> Result<Vec<UtilityOption>, ParserError> {
2273 self.expect_token(&Token::LParen)?;
2274 let options = self.parse_comma_separated(Self::parse_utility_option)?;
2275 self.expect_token(&Token::RParen)?;
2276
2277 Ok(options)
2278 }
2279
2280 fn parse_utility_option(&mut self) -> Result<UtilityOption, ParserError> {
2281 let name = self.parse_identifier()?;
2282
2283 let next_token = self.peek_token_ref();
2284 if next_token == &Token::Comma || next_token == &Token::RParen {
2285 return Ok(UtilityOption { name, arg: None });
2286 }
2287 let arg = self.parse_expr()?;
2288
2289 Ok(UtilityOption {
2290 name,
2291 arg: Some(arg),
2292 })
2293 }
2294
2295 fn try_parse_expr_sub_query(&mut self) -> Result<Option<Expr>, ParserError> {
2296 if !self.peek_sub_query() {
2297 return Ok(None);
2298 }
2299
2300 Ok(Some(Expr::Subquery(self.parse_query()?)))
2301 }
2302
2303 fn try_parse_lambda(&mut self) -> Result<Option<Expr>, ParserError> {
2304 if !self.dialect.supports_lambda_functions() {
2305 return Ok(None);
2306 }
2307 self.maybe_parse(|p| {
2308 let params = p.parse_comma_separated(|p| p.parse_lambda_function_parameter())?;
2309 p.expect_token(&Token::RParen)?;
2310 p.expect_token(&Token::Arrow)?;
2311 let expr = p.parse_expr()?;
2312 Ok(Expr::Lambda(LambdaFunction {
2313 params: OneOrManyWithParens::Many(params),
2314 body: Box::new(expr),
2315 syntax: LambdaSyntax::Arrow,
2316 }))
2317 })
2318 }
2319
2320 fn parse_lambda_expr(&mut self) -> Result<Expr, ParserError> {
2330 let params = self.parse_lambda_function_parameters()?;
2332 self.expect_token(&Token::Colon)?;
2334 let body = self.parse_expr()?;
2336 Ok(Expr::Lambda(LambdaFunction {
2337 params,
2338 body: Box::new(body),
2339 syntax: LambdaSyntax::LambdaKeyword,
2340 }))
2341 }
2342
2343 fn parse_lambda_function_parameters(
2345 &mut self,
2346 ) -> Result<OneOrManyWithParens<LambdaFunctionParameter>, ParserError> {
2347 let params = if self.consume_token(&Token::LParen) {
2349 let params = self.parse_comma_separated(|p| p.parse_lambda_function_parameter())?;
2351 self.expect_token(&Token::RParen)?;
2352 OneOrManyWithParens::Many(params)
2353 } else {
2354 let params = self.parse_comma_separated(|p| p.parse_lambda_function_parameter())?;
2356 if params.len() == 1 {
2357 OneOrManyWithParens::One(params.into_iter().next().unwrap())
2358 } else {
2359 OneOrManyWithParens::Many(params)
2360 }
2361 };
2362 Ok(params)
2363 }
2364
2365 fn parse_lambda_function_parameter(&mut self) -> Result<LambdaFunctionParameter, ParserError> {
2367 let name = self.parse_identifier()?;
2368 let data_type = match &self.peek_token_ref().token {
2369 Token::Word(_) => self.maybe_parse(|p| p.parse_data_type())?,
2370 _ => None,
2371 };
2372 Ok(LambdaFunctionParameter { name, data_type })
2373 }
2374
2375 fn maybe_parse_odbc_body(&mut self) -> Result<Option<Expr>, ParserError> {
2382 if let Some(expr) = self.maybe_parse_odbc_fn_body()? {
2384 return Ok(Some(expr));
2385 }
2386 self.maybe_parse_odbc_body_datetime()
2388 }
2389
2390 fn maybe_parse_odbc_body_datetime(&mut self) -> Result<Option<Expr>, ParserError> {
2401 self.maybe_parse(|p| {
2402 let token = p.next_token().clone();
2403 let word_string = token.token.to_string();
2404 let data_type = match word_string.as_str() {
2405 "t" => DataType::Time(None, TimezoneInfo::None),
2406 "d" => DataType::Date,
2407 "ts" => DataType::Timestamp(None, TimezoneInfo::None),
2408 _ => return p.expected("ODBC datetime keyword (t, d, or ts)", token),
2409 };
2410 let value = p.parse_value()?;
2411 Ok(Expr::TypedString(TypedString {
2412 data_type,
2413 value,
2414 uses_odbc_syntax: true,
2415 }))
2416 })
2417 }
2418
2419 fn maybe_parse_odbc_fn_body(&mut self) -> Result<Option<Expr>, ParserError> {
2428 self.maybe_parse(|p| {
2429 p.expect_keyword(Keyword::FN)?;
2430 let fn_name = p.parse_object_name(false)?;
2431 let mut fn_call = p.parse_function_call(fn_name)?;
2432 fn_call.uses_odbc_syntax = true;
2433 Ok(Expr::Function(fn_call))
2434 })
2435 }
2436
2437 pub fn parse_function(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2439 self.parse_function_call(name).map(Expr::Function)
2440 }
2441
    /// Parse a function call after its name: the argument list plus optional
    /// trailing clauses (`WITHIN GROUP`, `FILTER`, null treatment, `OVER`),
    /// in that order.
    fn parse_function_call(&mut self, name: ObjectName) -> Result<Function, ParserError> {
        self.expect_token(&Token::LParen)?;

        // `func(SELECT ...)`: a bare subquery as the sole argument.
        if self.dialect.supports_subquery_as_function_arg() && self.peek_sub_query() {
            let subquery = self.parse_query()?;
            self.expect_token(&Token::RParen)?;
            return Ok(Function {
                name,
                uses_odbc_syntax: false,
                parameters: FunctionArguments::None,
                args: FunctionArguments::Subquery(subquery),
                filter: None,
                null_treatment: None,
                over: None,
                within_group: vec![],
            });
        }

        let mut args = self.parse_function_argument_list()?;
        let mut parameters = FunctionArguments::None;
        // ClickHouse-style parameterized functions: `func(params)(args)` —
        // the first list becomes the parameters, the second the arguments.
        if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.consume_token(&Token::LParen)
        {
            parameters = FunctionArguments::List(args);
            args = self.parse_function_argument_list()?;
        }

        // `WITHIN GROUP (ORDER BY ...)` for ordered-set aggregates.
        let within_group = if self.parse_keywords(&[Keyword::WITHIN, Keyword::GROUP]) {
            self.expect_token(&Token::LParen)?;
            self.expect_keywords(&[Keyword::ORDER, Keyword::BY])?;
            let order_by = self.parse_comma_separated(Parser::parse_order_by_expr)?;
            self.expect_token(&Token::RParen)?;
            order_by
        } else {
            vec![]
        };

        // `FILTER (WHERE ...)` for aggregate filtering, where supported.
        let filter = if self.dialect.supports_filter_during_aggregation()
            && self.parse_keyword(Keyword::FILTER)
            && self.consume_token(&Token::LParen)
            && self.parse_keyword(Keyword::WHERE)
        {
            let filter = Some(Box::new(self.parse_expr()?));
            self.expect_token(&Token::RParen)?;
            filter
        } else {
            None
        };

        // Only look for trailing IGNORE/RESPECT NULLS when the argument list
        // did not already contain such a clause.
        let null_treatment = if args
            .clauses
            .iter()
            .all(|clause| !matches!(clause, FunctionArgumentClause::IgnoreOrRespectNulls(_)))
        {
            self.parse_null_treatment()?
        } else {
            None
        };

        // `OVER (<window spec>)` or `OVER <named window>`.
        let over = if self.parse_keyword(Keyword::OVER) {
            if self.consume_token(&Token::LParen) {
                let window_spec = self.parse_window_spec()?;
                Some(WindowType::WindowSpec(window_spec))
            } else {
                Some(WindowType::NamedWindow(self.parse_identifier()?))
            }
        } else {
            None
        };

        Ok(Function {
            name,
            uses_odbc_syntax: false,
            parameters,
            args: FunctionArguments::List(args),
            null_treatment,
            filter,
            over,
            within_group,
        })
    }
2529
2530 fn parse_null_treatment(&mut self) -> Result<Option<NullTreatment>, ParserError> {
2532 match self.parse_one_of_keywords(&[Keyword::RESPECT, Keyword::IGNORE]) {
2533 Some(keyword) => {
2534 self.expect_keyword_is(Keyword::NULLS)?;
2535
2536 Ok(match keyword {
2537 Keyword::RESPECT => Some(NullTreatment::RespectNulls),
2538 Keyword::IGNORE => Some(NullTreatment::IgnoreNulls),
2539 _ => None,
2540 })
2541 }
2542 None => Ok(None),
2543 }
2544 }
2545
2546 pub fn parse_time_functions(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2548 let args = if self.consume_token(&Token::LParen) {
2549 FunctionArguments::List(self.parse_function_argument_list()?)
2550 } else {
2551 FunctionArguments::None
2552 };
2553 Ok(Expr::Function(Function {
2554 name,
2555 uses_odbc_syntax: false,
2556 parameters: FunctionArguments::None,
2557 args,
2558 filter: None,
2559 over: None,
2560 null_treatment: None,
2561 within_group: vec![],
2562 }))
2563 }
2564
2565 pub fn parse_window_frame_units(&mut self) -> Result<WindowFrameUnits, ParserError> {
2567 let next_token = self.next_token();
2568 match &next_token.token {
2569 Token::Word(w) => match w.keyword {
2570 Keyword::ROWS => Ok(WindowFrameUnits::Rows),
2571 Keyword::RANGE => Ok(WindowFrameUnits::Range),
2572 Keyword::GROUPS => Ok(WindowFrameUnits::Groups),
2573 _ => self.expected("ROWS, RANGE, GROUPS", next_token)?,
2574 },
2575 _ => self.expected("ROWS, RANGE, GROUPS", next_token),
2576 }
2577 }
2578
2579 pub fn parse_window_frame(&mut self) -> Result<WindowFrame, ParserError> {
2581 let units = self.parse_window_frame_units()?;
2582 let (start_bound, end_bound) = if self.parse_keyword(Keyword::BETWEEN) {
2583 let start_bound = self.parse_window_frame_bound()?;
2584 self.expect_keyword_is(Keyword::AND)?;
2585 let end_bound = Some(self.parse_window_frame_bound()?);
2586 (start_bound, end_bound)
2587 } else {
2588 (self.parse_window_frame_bound()?, None)
2589 };
2590 Ok(WindowFrame {
2591 units,
2592 start_bound,
2593 end_bound,
2594 })
2595 }
2596
2597 pub fn parse_window_frame_bound(&mut self) -> Result<WindowFrameBound, ParserError> {
2599 if self.parse_keywords(&[Keyword::CURRENT, Keyword::ROW]) {
2600 Ok(WindowFrameBound::CurrentRow)
2601 } else {
2602 let rows = if self.parse_keyword(Keyword::UNBOUNDED) {
2603 None
2604 } else {
2605 Some(Box::new(match &self.peek_token_ref().token {
2606 Token::SingleQuotedString(_) => self.parse_interval()?,
2607 _ => self.parse_expr()?,
2608 }))
2609 };
2610 if self.parse_keyword(Keyword::PRECEDING) {
2611 Ok(WindowFrameBound::Preceding(rows))
2612 } else if self.parse_keyword(Keyword::FOLLOWING) {
2613 Ok(WindowFrameBound::Following(rows))
2614 } else {
2615 self.expected_ref("PRECEDING or FOLLOWING", self.peek_token_ref())
2616 }
2617 }
2618 }
2619
2620 fn parse_group_by_expr(&mut self) -> Result<Expr, ParserError> {
2622 if self.dialect.supports_group_by_expr() {
2623 if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
2624 self.expect_token(&Token::LParen)?;
2625 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2626 self.expect_token(&Token::RParen)?;
2627 Ok(Expr::GroupingSets(result))
2628 } else if self.parse_keyword(Keyword::CUBE) {
2629 self.expect_token(&Token::LParen)?;
2630 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2631 self.expect_token(&Token::RParen)?;
2632 Ok(Expr::Cube(result))
2633 } else if self.parse_keyword(Keyword::ROLLUP) {
2634 self.expect_token(&Token::LParen)?;
2635 let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2636 self.expect_token(&Token::RParen)?;
2637 Ok(Expr::Rollup(result))
2638 } else if self.consume_tokens(&[Token::LParen, Token::RParen]) {
2639 Ok(Expr::Tuple(vec![]))
2643 } else {
2644 self.parse_expr()
2645 }
2646 } else {
2647 self.parse_expr()
2649 }
2650 }
2651
2652 fn parse_tuple(
2656 &mut self,
2657 lift_singleton: bool,
2658 allow_empty: bool,
2659 ) -> Result<Vec<Expr>, ParserError> {
2660 if lift_singleton {
2661 if self.consume_token(&Token::LParen) {
2662 let result = if allow_empty && self.consume_token(&Token::RParen) {
2663 vec![]
2664 } else {
2665 let result = self.parse_comma_separated(Parser::parse_expr)?;
2666 self.expect_token(&Token::RParen)?;
2667 result
2668 };
2669 Ok(result)
2670 } else {
2671 Ok(vec![self.parse_expr()?])
2672 }
2673 } else {
2674 self.expect_token(&Token::LParen)?;
2675 let result = if allow_empty && self.consume_token(&Token::RParen) {
2676 vec![]
2677 } else {
2678 let result = self.parse_comma_separated(Parser::parse_expr)?;
2679 self.expect_token(&Token::RParen)?;
2680 result
2681 };
2682 Ok(result)
2683 }
2684 }
2685
2686 pub fn parse_case_expr(&mut self) -> Result<Expr, ParserError> {
2688 let case_token = AttachedToken(self.get_current_token().clone());
2689 let mut operand = None;
2690 if !self.parse_keyword(Keyword::WHEN) {
2691 operand = Some(Box::new(self.parse_expr()?));
2692 self.expect_keyword_is(Keyword::WHEN)?;
2693 }
2694 let mut conditions = vec![];
2695 loop {
2696 let condition = self.parse_expr()?;
2697 self.expect_keyword_is(Keyword::THEN)?;
2698 let result = self.parse_expr()?;
2699 conditions.push(CaseWhen { condition, result });
2700 if !self.parse_keyword(Keyword::WHEN) {
2701 break;
2702 }
2703 }
2704 let else_result = if self.parse_keyword(Keyword::ELSE) {
2705 Some(Box::new(self.parse_expr()?))
2706 } else {
2707 None
2708 };
2709 let end_token = AttachedToken(self.expect_keyword(Keyword::END)?);
2710 Ok(Expr::Case {
2711 case_token,
2712 end_token,
2713 operand,
2714 conditions,
2715 else_result,
2716 })
2717 }
2718
2719 pub fn parse_optional_cast_format(&mut self) -> Result<Option<CastFormat>, ParserError> {
2721 if self.parse_keyword(Keyword::FORMAT) {
2722 let value = self.parse_value()?;
2723 match self.parse_optional_time_zone()? {
2724 Some(tz) => Ok(Some(CastFormat::ValueAtTimeZone(value, tz))),
2725 None => Ok(Some(CastFormat::Value(value))),
2726 }
2727 } else {
2728 Ok(None)
2729 }
2730 }
2731
2732 pub fn parse_optional_time_zone(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
2734 if self.parse_keywords(&[Keyword::AT, Keyword::TIME, Keyword::ZONE]) {
2735 self.parse_value().map(Some)
2736 } else {
2737 Ok(None)
2738 }
2739 }
2740
2741 fn parse_mssql_convert(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2743 self.expect_token(&Token::LParen)?;
2744 let data_type = self.parse_data_type()?;
2745 self.expect_token(&Token::Comma)?;
2746 let expr = self.parse_expr()?;
2747 let styles = if self.consume_token(&Token::Comma) {
2748 self.parse_comma_separated(Parser::parse_expr)?
2749 } else {
2750 Default::default()
2751 };
2752 self.expect_token(&Token::RParen)?;
2753 Ok(Expr::Convert {
2754 is_try,
2755 expr: Box::new(expr),
2756 data_type: Some(data_type),
2757 charset: None,
2758 target_before_value: true,
2759 styles,
2760 })
2761 }
2762
2763 pub fn parse_convert_expr(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2768 if self.dialect.convert_type_before_value() {
2769 return self.parse_mssql_convert(is_try);
2770 }
2771 self.expect_token(&Token::LParen)?;
2772 let expr = self.parse_expr()?;
2773 if self.parse_keyword(Keyword::USING) {
2774 let charset = self.parse_object_name(false)?;
2775 self.expect_token(&Token::RParen)?;
2776 return Ok(Expr::Convert {
2777 is_try,
2778 expr: Box::new(expr),
2779 data_type: None,
2780 charset: Some(charset),
2781 target_before_value: false,
2782 styles: vec![],
2783 });
2784 }
2785 self.expect_token(&Token::Comma)?;
2786 let data_type = self.parse_data_type()?;
2787 let charset = if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
2788 Some(self.parse_object_name(false)?)
2789 } else {
2790 None
2791 };
2792 self.expect_token(&Token::RParen)?;
2793 Ok(Expr::Convert {
2794 is_try,
2795 expr: Box::new(expr),
2796 data_type: Some(data_type),
2797 charset,
2798 target_before_value: false,
2799 styles: vec![],
2800 })
2801 }
2802
2803 pub fn parse_cast_expr(&mut self, kind: CastKind) -> Result<Expr, ParserError> {
2805 self.expect_token(&Token::LParen)?;
2806 let expr = self.parse_expr()?;
2807 self.expect_keyword_is(Keyword::AS)?;
2808 let data_type = self.parse_data_type()?;
2809 let array = self.parse_keyword(Keyword::ARRAY);
2810 let format = self.parse_optional_cast_format()?;
2811 self.expect_token(&Token::RParen)?;
2812 Ok(Expr::Cast {
2813 kind,
2814 expr: Box::new(expr),
2815 data_type,
2816 array,
2817 format,
2818 })
2819 }
2820
2821 pub fn parse_exists_expr(&mut self, negated: bool) -> Result<Expr, ParserError> {
2823 self.expect_token(&Token::LParen)?;
2824 let exists_node = Expr::Exists {
2825 negated,
2826 subquery: self.parse_query()?,
2827 };
2828 self.expect_token(&Token::RParen)?;
2829 Ok(exists_node)
2830 }
2831
2832 pub fn parse_extract_expr(&mut self) -> Result<Expr, ParserError> {
2834 self.expect_token(&Token::LParen)?;
2835 let field = self.parse_date_time_field()?;
2836
2837 let syntax = if self.parse_keyword(Keyword::FROM) {
2838 ExtractSyntax::From
2839 } else if self.dialect.supports_extract_comma_syntax() && self.consume_token(&Token::Comma)
2840 {
2841 ExtractSyntax::Comma
2842 } else {
2843 return Err(ParserError::ParserError(
2844 "Expected 'FROM' or ','".to_string(),
2845 ));
2846 };
2847
2848 let expr = self.parse_expr()?;
2849 self.expect_token(&Token::RParen)?;
2850 Ok(Expr::Extract {
2851 field,
2852 expr: Box::new(expr),
2853 syntax,
2854 })
2855 }
2856
2857 pub fn parse_ceil_floor_expr(&mut self, is_ceil: bool) -> Result<Expr, ParserError> {
2859 self.expect_token(&Token::LParen)?;
2860 let expr = self.parse_expr()?;
2861 let field = if self.parse_keyword(Keyword::TO) {
2863 CeilFloorKind::DateTimeField(self.parse_date_time_field()?)
2865 } else if self.consume_token(&Token::Comma) {
2866 let v = self.parse_value()?;
2868 if matches!(v.value, Value::Number(_, _)) {
2869 CeilFloorKind::Scale(v)
2870 } else {
2871 return Err(ParserError::ParserError(
2872 "Scale field can only be of number type".to_string(),
2873 ));
2874 }
2875 } else {
2876 CeilFloorKind::DateTimeField(DateTimeField::NoDateTime)
2877 };
2878 self.expect_token(&Token::RParen)?;
2879 if is_ceil {
2880 Ok(Expr::Ceil {
2881 expr: Box::new(expr),
2882 field,
2883 })
2884 } else {
2885 Ok(Expr::Floor {
2886 expr: Box::new(expr),
2887 field,
2888 })
2889 }
2890 }
2891
2892 pub fn parse_position_expr(&mut self, ident: Ident) -> Result<Expr, ParserError> {
2894 let between_prec = self.dialect.prec_value(Precedence::Between);
2895 let position_expr = self.maybe_parse(|p| {
2896 p.expect_token(&Token::LParen)?;
2898
2899 let expr = p.parse_subexpr(between_prec)?;
2901 p.expect_keyword_is(Keyword::IN)?;
2902 let from = p.parse_expr()?;
2903 p.expect_token(&Token::RParen)?;
2904 Ok(Expr::Position {
2905 expr: Box::new(expr),
2906 r#in: Box::new(from),
2907 })
2908 })?;
2909 match position_expr {
2910 Some(expr) => Ok(expr),
2911 None => self.parse_function(ObjectName::from(vec![ident])),
2914 }
2915 }
2916
2917 pub fn parse_substring(&mut self) -> Result<Expr, ParserError> {
2919 let shorthand = match self.expect_one_of_keywords(&[Keyword::SUBSTR, Keyword::SUBSTRING])? {
2920 Keyword::SUBSTR => true,
2921 Keyword::SUBSTRING => false,
2922 _ => {
2923 self.prev_token();
2924 return self.expected_ref("SUBSTR or SUBSTRING", self.peek_token_ref());
2925 }
2926 };
2927 self.expect_token(&Token::LParen)?;
2928 let expr = self.parse_expr()?;
2929 let mut from_expr = None;
2930 let special = self.consume_token(&Token::Comma);
2931 if special || self.parse_keyword(Keyword::FROM) {
2932 from_expr = Some(self.parse_expr()?);
2933 }
2934
2935 let mut to_expr = None;
2936 if self.parse_keyword(Keyword::FOR) || self.consume_token(&Token::Comma) {
2937 to_expr = Some(self.parse_expr()?);
2938 }
2939 self.expect_token(&Token::RParen)?;
2940
2941 Ok(Expr::Substring {
2942 expr: Box::new(expr),
2943 substring_from: from_expr.map(Box::new),
2944 substring_for: to_expr.map(Box::new),
2945 special,
2946 shorthand,
2947 })
2948 }
2949
2950 pub fn parse_overlay_expr(&mut self) -> Result<Expr, ParserError> {
2954 self.expect_token(&Token::LParen)?;
2956 let expr = self.parse_expr()?;
2957 self.expect_keyword_is(Keyword::PLACING)?;
2958 let what_expr = self.parse_expr()?;
2959 self.expect_keyword_is(Keyword::FROM)?;
2960 let from_expr = self.parse_expr()?;
2961 let mut for_expr = None;
2962 if self.parse_keyword(Keyword::FOR) {
2963 for_expr = Some(self.parse_expr()?);
2964 }
2965 self.expect_token(&Token::RParen)?;
2966
2967 Ok(Expr::Overlay {
2968 expr: Box::new(expr),
2969 overlay_what: Box::new(what_expr),
2970 overlay_from: Box::new(from_expr),
2971 overlay_for: for_expr.map(Box::new),
2972 })
2973 }
2974
2975 pub fn parse_trim_expr(&mut self) -> Result<Expr, ParserError> {
2981 self.expect_token(&Token::LParen)?;
2982 let mut trim_where = None;
2983 if let Token::Word(word) = &self.peek_token_ref().token {
2984 if [Keyword::BOTH, Keyword::LEADING, Keyword::TRAILING].contains(&word.keyword) {
2985 trim_where = Some(self.parse_trim_where()?);
2986 }
2987 }
2988 let expr = self.parse_expr()?;
2989 if self.parse_keyword(Keyword::FROM) {
2990 let trim_what = Box::new(expr);
2991 let expr = self.parse_expr()?;
2992 self.expect_token(&Token::RParen)?;
2993 Ok(Expr::Trim {
2994 expr: Box::new(expr),
2995 trim_where,
2996 trim_what: Some(trim_what),
2997 trim_characters: None,
2998 })
2999 } else if self.dialect.supports_comma_separated_trim() && self.consume_token(&Token::Comma)
3000 {
3001 let characters = self.parse_comma_separated(Parser::parse_expr)?;
3002 self.expect_token(&Token::RParen)?;
3003 Ok(Expr::Trim {
3004 expr: Box::new(expr),
3005 trim_where: None,
3006 trim_what: None,
3007 trim_characters: Some(characters),
3008 })
3009 } else {
3010 self.expect_token(&Token::RParen)?;
3011 Ok(Expr::Trim {
3012 expr: Box::new(expr),
3013 trim_where,
3014 trim_what: None,
3015 trim_characters: None,
3016 })
3017 }
3018 }
3019
3020 pub fn parse_trim_where(&mut self) -> Result<TrimWhereField, ParserError> {
3024 let next_token = self.next_token();
3025 match &next_token.token {
3026 Token::Word(w) => match w.keyword {
3027 Keyword::BOTH => Ok(TrimWhereField::Both),
3028 Keyword::LEADING => Ok(TrimWhereField::Leading),
3029 Keyword::TRAILING => Ok(TrimWhereField::Trailing),
3030 _ => self.expected("trim_where field", next_token)?,
3031 },
3032 _ => self.expected("trim_where field", next_token),
3033 }
3034 }
3035
3036 pub fn parse_array_expr(&mut self, named: bool) -> Result<Expr, ParserError> {
3039 let exprs = self.parse_comma_separated0(Parser::parse_expr, Token::RBracket)?;
3040 self.expect_token(&Token::RBracket)?;
3041 Ok(Expr::Array(Array { elem: exprs, named }))
3042 }
3043
    /// Parses an optional LISTAGG `ON OVERFLOW` clause:
    /// `ON OVERFLOW ERROR` or
    /// `ON OVERFLOW TRUNCATE [<filler string>] WITH|WITHOUT COUNT`.
    ///
    /// Returns `Ok(None)` when `ON OVERFLOW` is not present.
    pub fn parse_listagg_on_overflow(&mut self) -> Result<Option<ListAggOnOverflow>, ParserError> {
        if self.parse_keywords(&[Keyword::ON, Keyword::OVERFLOW]) {
            if self.parse_keyword(Keyword::ERROR) {
                Ok(Some(ListAggOnOverflow::Error))
            } else {
                // Only TRUNCATE remains as a valid alternative to ERROR.
                self.expect_keyword_is(Keyword::TRUNCATE)?;
                // The filler string is optional: if the next token is
                // already WITH/WITHOUT there is no filler; a string-literal
                // token of any supported flavor is parsed as the filler;
                // anything else is a syntax error.
                let filler = match &self.peek_token_ref().token {
                    Token::Word(w)
                        if w.keyword == Keyword::WITH || w.keyword == Keyword::WITHOUT =>
                    {
                        None
                    }
                    Token::SingleQuotedString(_)
                    | Token::EscapedStringLiteral(_)
                    | Token::UnicodeStringLiteral(_)
                    | Token::NationalStringLiteral(_)
                    | Token::QuoteDelimitedStringLiteral(_)
                    | Token::NationalQuoteDelimitedStringLiteral(_)
                    | Token::HexStringLiteral(_) => Some(Box::new(self.parse_expr()?)),
                    _ => self.expected_ref(
                        "either filler, WITH, or WITHOUT in LISTAGG",
                        self.peek_token_ref(),
                    )?,
                };
                // Exactly one of WITH COUNT / WITHOUT COUNT must follow.
                let with_count = self.parse_keyword(Keyword::WITH);
                if !with_count && !self.parse_keyword(Keyword::WITHOUT) {
                    self.expected_ref("either WITH or WITHOUT in LISTAGG", self.peek_token_ref())?;
                }
                self.expect_keyword_is(Keyword::COUNT)?;
                Ok(Some(ListAggOnOverflow::Truncate { filler, with_count }))
            }
        } else {
            Ok(None)
        }
    }
3082
    /// Parses a date/time field keyword (`YEAR`, `MONTH`, ...,
    /// `TIMEZONE_REGION`) as used by EXTRACT, CEIL/FLOOR `TO`, and
    /// INTERVAL qualifiers.
    ///
    /// Dialect hooks widen what is accepted:
    /// - `allow_extract_custom()` turns any other word into
    ///   `DateTimeField::Custom`;
    /// - `allow_extract_single_quotes()` allows a single-quoted string as a
    ///   custom field.
    ///
    /// Any other token yields an "expected date/time field" error.
    pub fn parse_date_time_field(&mut self) -> Result<DateTimeField, ParserError> {
        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::YEAR => Ok(DateTimeField::Year),
                Keyword::YEARS => Ok(DateTimeField::Years),
                Keyword::MONTH => Ok(DateTimeField::Month),
                Keyword::MONTHS => Ok(DateTimeField::Months),
                Keyword::WEEK => {
                    // BigQuery/Generic dialects allow a parenthesized
                    // weekday argument: WEEK(MONDAY).
                    let week_day = if dialect_of!(self is BigQueryDialect | GenericDialect)
                        && self.consume_token(&Token::LParen)
                    {
                        let week_day = self.parse_identifier()?;
                        self.expect_token(&Token::RParen)?;
                        Some(week_day)
                    } else {
                        None
                    };
                    Ok(DateTimeField::Week(week_day))
                }
                Keyword::WEEKS => Ok(DateTimeField::Weeks),
                Keyword::DAY => Ok(DateTimeField::Day),
                Keyword::DAYOFWEEK => Ok(DateTimeField::DayOfWeek),
                Keyword::DAYOFYEAR => Ok(DateTimeField::DayOfYear),
                Keyword::DAYS => Ok(DateTimeField::Days),
                Keyword::DATE => Ok(DateTimeField::Date),
                Keyword::DATETIME => Ok(DateTimeField::Datetime),
                Keyword::HOUR => Ok(DateTimeField::Hour),
                Keyword::HOURS => Ok(DateTimeField::Hours),
                Keyword::MINUTE => Ok(DateTimeField::Minute),
                Keyword::MINUTES => Ok(DateTimeField::Minutes),
                Keyword::SECOND => Ok(DateTimeField::Second),
                Keyword::SECONDS => Ok(DateTimeField::Seconds),
                Keyword::CENTURY => Ok(DateTimeField::Century),
                Keyword::DECADE => Ok(DateTimeField::Decade),
                Keyword::DOY => Ok(DateTimeField::Doy),
                Keyword::DOW => Ok(DateTimeField::Dow),
                Keyword::EPOCH => Ok(DateTimeField::Epoch),
                Keyword::ISODOW => Ok(DateTimeField::Isodow),
                Keyword::ISOYEAR => Ok(DateTimeField::Isoyear),
                Keyword::ISOWEEK => Ok(DateTimeField::IsoWeek),
                Keyword::JULIAN => Ok(DateTimeField::Julian),
                Keyword::MICROSECOND => Ok(DateTimeField::Microsecond),
                Keyword::MICROSECONDS => Ok(DateTimeField::Microseconds),
                // Both the misspelled and correct spellings are distinct
                // keywords and map to distinct AST variants.
                Keyword::MILLENIUM => Ok(DateTimeField::Millenium),
                Keyword::MILLENNIUM => Ok(DateTimeField::Millennium),
                Keyword::MILLISECOND => Ok(DateTimeField::Millisecond),
                Keyword::MILLISECONDS => Ok(DateTimeField::Milliseconds),
                Keyword::NANOSECOND => Ok(DateTimeField::Nanosecond),
                Keyword::NANOSECONDS => Ok(DateTimeField::Nanoseconds),
                Keyword::QUARTER => Ok(DateTimeField::Quarter),
                Keyword::TIME => Ok(DateTimeField::Time),
                Keyword::TIMEZONE => Ok(DateTimeField::Timezone),
                Keyword::TIMEZONE_ABBR => Ok(DateTimeField::TimezoneAbbr),
                Keyword::TIMEZONE_HOUR => Ok(DateTimeField::TimezoneHour),
                Keyword::TIMEZONE_MINUTE => Ok(DateTimeField::TimezoneMinute),
                Keyword::TIMEZONE_REGION => Ok(DateTimeField::TimezoneRegion),
                _ if self.dialect.allow_extract_custom() => {
                    // Rewind so the word can be re-read as an identifier.
                    self.prev_token();
                    let custom = self.parse_identifier()?;
                    Ok(DateTimeField::Custom(custom))
                }
                _ => self.expected("date/time field", next_token),
            },
            Token::SingleQuotedString(_) if self.dialect.allow_extract_single_quotes() => {
                // Rewind so the quoted string can be re-read as an identifier.
                self.prev_token();
                let custom = self.parse_identifier()?;
                Ok(DateTimeField::Custom(custom))
            }
            _ => self.expected("date/time field", next_token),
        }
    }
3161
3162 pub fn parse_not(&mut self) -> Result<Expr, ParserError> {
3166 match &self.peek_token_ref().token {
3167 Token::Word(w) => match w.keyword {
3168 Keyword::EXISTS => {
3169 let negated = true;
3170 let _ = self.parse_keyword(Keyword::EXISTS);
3171 self.parse_exists_expr(negated)
3172 }
3173 _ => Ok(Expr::UnaryOp {
3174 op: UnaryOperator::Not,
3175 expr: Box::new(
3176 self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?,
3177 ),
3178 }),
3179 },
3180 _ => Ok(Expr::UnaryOp {
3181 op: UnaryOperator::Not,
3182 expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
3183 }),
3184 }
3185 }
3186
3187 fn parse_lbrace_expr(&mut self) -> Result<Expr, ParserError> {
3197 let token = self.expect_token(&Token::LBrace)?;
3198
3199 if let Some(fn_expr) = self.maybe_parse_odbc_body()? {
3200 self.expect_token(&Token::RBrace)?;
3201 return Ok(fn_expr);
3202 }
3203
3204 if self.dialect.supports_dictionary_syntax() {
3205 self.prev_token(); return self.parse_dictionary();
3207 }
3208
3209 self.expected("an expression", token)
3210 }
3211
3212 pub fn parse_match_against(&mut self) -> Result<Expr, ParserError> {
3218 let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
3219
3220 self.expect_keyword_is(Keyword::AGAINST)?;
3221
3222 self.expect_token(&Token::LParen)?;
3223
3224 let match_value = self.parse_value()?;
3226
3227 let in_natural_language_mode_keywords = &[
3228 Keyword::IN,
3229 Keyword::NATURAL,
3230 Keyword::LANGUAGE,
3231 Keyword::MODE,
3232 ];
3233
3234 let with_query_expansion_keywords = &[Keyword::WITH, Keyword::QUERY, Keyword::EXPANSION];
3235
3236 let in_boolean_mode_keywords = &[Keyword::IN, Keyword::BOOLEAN, Keyword::MODE];
3237
3238 let opt_search_modifier = if self.parse_keywords(in_natural_language_mode_keywords) {
3239 if self.parse_keywords(with_query_expansion_keywords) {
3240 Some(SearchModifier::InNaturalLanguageModeWithQueryExpansion)
3241 } else {
3242 Some(SearchModifier::InNaturalLanguageMode)
3243 }
3244 } else if self.parse_keywords(in_boolean_mode_keywords) {
3245 Some(SearchModifier::InBooleanMode)
3246 } else if self.parse_keywords(with_query_expansion_keywords) {
3247 Some(SearchModifier::WithQueryExpansion)
3248 } else {
3249 None
3250 };
3251
3252 self.expect_token(&Token::RParen)?;
3253
3254 Ok(Expr::MatchAgainst {
3255 columns,
3256 match_value,
3257 opt_search_modifier,
3258 })
3259 }
3260
    /// Parses an INTERVAL literal after the `INTERVAL` keyword:
    /// the value, an optional leading unit (e.g. `DAY`), optional precision,
    /// and an optional `TO <field>` range with its own precision.
    pub fn parse_interval(&mut self) -> Result<Expr, ParserError> {
        // Dialects that require a unit qualifier parse a full expression as
        // the value; otherwise only a prefix expression is parsed so that a
        // following unit keyword is not swallowed by the expression parser.
        let value = if self.dialect.require_interval_qualifier() {
            self.parse_expr()?
        } else {
            self.parse_prefix()?
        };

        // Optional leading unit (YEAR, DAY, ...). Mandatory for dialects
        // that require an interval qualifier.
        let leading_field = if self.next_token_is_temporal_unit() {
            Some(self.parse_date_time_field()?)
        } else if self.dialect.require_interval_qualifier() {
            return parser_err!(
                "INTERVAL requires a unit after the literal value",
                self.peek_token_ref().span.start
            );
        } else {
            None
        };

        // SECOND as the leading field takes a combined
        // (leading_precision, fractional_seconds_precision) pair; other
        // fields take a single precision plus an optional `TO <field>`
        // range, where a trailing SECOND may carry its own precision.
        let (leading_precision, last_field, fsec_precision) =
            if leading_field == Some(DateTimeField::Second) {
                let last_field = None;
                let (leading_precision, fsec_precision) = self.parse_optional_precision_scale()?;
                (leading_precision, last_field, fsec_precision)
            } else {
                let leading_precision = self.parse_optional_precision()?;
                if self.parse_keyword(Keyword::TO) {
                    let last_field = Some(self.parse_date_time_field()?);
                    let fsec_precision = if last_field == Some(DateTimeField::Second) {
                        self.parse_optional_precision()?
                    } else {
                        None
                    };
                    (leading_precision, last_field, fsec_precision)
                } else {
                    (leading_precision, None, None)
                }
            };

        Ok(Expr::Interval(Interval {
            value: Box::new(value),
            leading_field,
            leading_precision,
            last_field,
            fractional_seconds_precision: fsec_precision,
        }))
    }
3343
3344 pub fn next_token_is_temporal_unit(&mut self) -> bool {
3347 if let Token::Word(word) = &self.peek_token_ref().token {
3348 matches!(
3349 word.keyword,
3350 Keyword::YEAR
3351 | Keyword::YEARS
3352 | Keyword::MONTH
3353 | Keyword::MONTHS
3354 | Keyword::WEEK
3355 | Keyword::WEEKS
3356 | Keyword::DAY
3357 | Keyword::DAYS
3358 | Keyword::HOUR
3359 | Keyword::HOURS
3360 | Keyword::MINUTE
3361 | Keyword::MINUTES
3362 | Keyword::SECOND
3363 | Keyword::SECONDS
3364 | Keyword::CENTURY
3365 | Keyword::DECADE
3366 | Keyword::DOW
3367 | Keyword::DOY
3368 | Keyword::EPOCH
3369 | Keyword::ISODOW
3370 | Keyword::ISOYEAR
3371 | Keyword::JULIAN
3372 | Keyword::MICROSECOND
3373 | Keyword::MICROSECONDS
3374 | Keyword::MILLENIUM
3375 | Keyword::MILLENNIUM
3376 | Keyword::MILLISECOND
3377 | Keyword::MILLISECONDS
3378 | Keyword::NANOSECOND
3379 | Keyword::NANOSECONDS
3380 | Keyword::QUARTER
3381 | Keyword::TIMEZONE
3382 | Keyword::TIMEZONE_HOUR
3383 | Keyword::TIMEZONE_MINUTE
3384 )
3385 } else {
3386 false
3387 }
3388 }
3389
3390 fn parse_struct_literal(&mut self) -> Result<Expr, ParserError> {
3398 self.prev_token();
3400 let (fields, trailing_bracket) =
3401 self.parse_struct_type_def(Self::parse_struct_field_def)?;
3402 if trailing_bracket.0 {
3403 return parser_err!(
3404 "unmatched > in STRUCT literal",
3405 self.peek_token_ref().span.start
3406 );
3407 }
3408
3409 self.expect_token(&Token::LParen)?;
3411 let values = self
3412 .parse_comma_separated(|parser| parser.parse_struct_field_expr(!fields.is_empty()))?;
3413 self.expect_token(&Token::RParen)?;
3414
3415 Ok(Expr::Struct { values, fields })
3416 }
3417
3418 fn parse_struct_field_expr(&mut self, typed_syntax: bool) -> Result<Expr, ParserError> {
3432 let expr = self.parse_expr()?;
3433 if self.parse_keyword(Keyword::AS) {
3434 if typed_syntax {
3435 return parser_err!("Typed syntax does not allow AS", {
3436 self.prev_token();
3437 self.peek_token_ref().span.start
3438 });
3439 }
3440 let field_name = self.parse_identifier()?;
3441 Ok(Expr::Named {
3442 expr: expr.into(),
3443 name: field_name,
3444 })
3445 } else {
3446 Ok(expr)
3447 }
3448 }
3449
    /// Parses a `STRUCT<field, ...>` type definition, using `elem_parser`
    /// for each field.
    ///
    /// Returns the parsed fields together with a flag reporting whether a
    /// `>>` token left one extra `>` for an enclosing definition to
    /// consume. A bare `STRUCT` with no `<` yields an empty field list.
    fn parse_struct_type_def<F>(
        &mut self,
        mut elem_parser: F,
    ) -> Result<(Vec<StructField>, MatchedTrailingBracket), ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<(StructField, MatchedTrailingBracket), ParserError>,
    {
        self.expect_keyword_is(Keyword::STRUCT)?;

        // No `<` means an untyped STRUCT: no declared fields.
        if self.peek_token_ref().token != Token::Lt {
            return Ok((Default::default(), false.into()));
        }
        self.next_token();

        // Parse fields until either a field's type already consumed the
        // closing bracket (via `>>`) or no comma follows.
        let mut field_defs = vec![];
        let trailing_bracket = loop {
            let (def, trailing_bracket) = elem_parser(self)?;
            field_defs.push(def);
            if trailing_bracket.0 || !self.consume_token(&Token::Comma) {
                break trailing_bracket;
            }
        };

        Ok((
            field_defs,
            self.expect_closing_angle_bracket(trailing_bracket)?,
        ))
    }
3492
3493 fn parse_duckdb_struct_type_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3495 self.expect_keyword_is(Keyword::STRUCT)?;
3496 self.expect_token(&Token::LParen)?;
3497 let struct_body = self.parse_comma_separated(|parser| {
3498 let field_name = parser.parse_identifier()?;
3499 let field_type = parser.parse_data_type()?;
3500
3501 Ok(StructField {
3502 field_name: Some(field_name),
3503 field_type,
3504 options: None,
3505 })
3506 });
3507 self.expect_token(&Token::RParen)?;
3508 struct_body
3509 }
3510
3511 fn parse_struct_field_def(
3523 &mut self,
3524 ) -> Result<(StructField, MatchedTrailingBracket), ParserError> {
3525 let is_named_field = matches!(
3528 (self.peek_nth_token(0).token, self.peek_nth_token(1).token),
3529 (Token::Word(_), Token::Word(_)) | (Token::Word(_), Token::Colon)
3530 );
3531
3532 let field_name = if is_named_field {
3533 let name = self.parse_identifier()?;
3534 let _ = self.consume_token(&Token::Colon);
3535 Some(name)
3536 } else {
3537 None
3538 };
3539
3540 let (field_type, trailing_bracket) = self.parse_data_type_helper()?;
3541
3542 let options = self.maybe_parse_options(Keyword::OPTIONS)?;
3543 Ok((
3544 StructField {
3545 field_name,
3546 field_type,
3547 options,
3548 },
3549 trailing_bracket,
3550 ))
3551 }
3552
3553 fn parse_union_type_def(&mut self) -> Result<Vec<UnionField>, ParserError> {
3563 self.expect_keyword_is(Keyword::UNION)?;
3564
3565 self.expect_token(&Token::LParen)?;
3566
3567 let fields = self.parse_comma_separated(|p| {
3568 Ok(UnionField {
3569 field_name: p.parse_identifier()?,
3570 field_type: p.parse_data_type()?,
3571 })
3572 })?;
3573
3574 self.expect_token(&Token::RParen)?;
3575
3576 Ok(fields)
3577 }
3578
3579 fn parse_dictionary(&mut self) -> Result<Expr, ParserError> {
3590 self.expect_token(&Token::LBrace)?;
3591
3592 let fields = self.parse_comma_separated0(Self::parse_dictionary_field, Token::RBrace)?;
3593
3594 self.expect_token(&Token::RBrace)?;
3595
3596 Ok(Expr::Dictionary(fields))
3597 }
3598
3599 fn parse_dictionary_field(&mut self) -> Result<DictionaryField, ParserError> {
3610 let key = self.parse_identifier()?;
3611
3612 self.expect_token(&Token::Colon)?;
3613
3614 let expr = self.parse_expr()?;
3615
3616 Ok(DictionaryField {
3617 key,
3618 value: Box::new(expr),
3619 })
3620 }
3621
3622 fn parse_duckdb_map_literal(&mut self) -> Result<Expr, ParserError> {
3632 self.expect_token(&Token::LBrace)?;
3633 let fields = self.parse_comma_separated0(Self::parse_duckdb_map_field, Token::RBrace)?;
3634 self.expect_token(&Token::RBrace)?;
3635 Ok(Expr::Map(Map { entries: fields }))
3636 }
3637
3638 fn parse_duckdb_map_field(&mut self) -> Result<MapEntry, ParserError> {
3648 let key = self.parse_subexpr(self.dialect.prec_value(Precedence::Colon))?;
3650
3651 self.expect_token(&Token::Colon)?;
3652
3653 let value = self.parse_expr()?;
3654
3655 Ok(MapEntry {
3656 key: Box::new(key),
3657 value: Box::new(value),
3658 })
3659 }
3660
3661 fn parse_click_house_map_def(&mut self) -> Result<(DataType, DataType), ParserError> {
3671 self.expect_keyword_is(Keyword::MAP)?;
3672 self.expect_token(&Token::LParen)?;
3673 let key_data_type = self.parse_data_type()?;
3674 self.expect_token(&Token::Comma)?;
3675 let value_data_type = self.parse_data_type()?;
3676 self.expect_token(&Token::RParen)?;
3677
3678 Ok((key_data_type, value_data_type))
3679 }
3680
3681 fn parse_click_house_tuple_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3691 self.expect_keyword_is(Keyword::TUPLE)?;
3692 self.expect_token(&Token::LParen)?;
3693 let mut field_defs = vec![];
3694 loop {
3695 let (def, _) = self.parse_struct_field_def()?;
3696 field_defs.push(def);
3697 if !self.consume_token(&Token::Comma) {
3698 break;
3699 }
3700 }
3701 self.expect_token(&Token::RParen)?;
3702
3703 Ok(field_defs)
3704 }
3705
3706 fn expect_closing_angle_bracket(
3711 &mut self,
3712 trailing_bracket: MatchedTrailingBracket,
3713 ) -> Result<MatchedTrailingBracket, ParserError> {
3714 let trailing_bracket = if !trailing_bracket.0 {
3715 match &self.peek_token_ref().token {
3716 Token::Gt => {
3717 self.next_token();
3718 false.into()
3719 }
3720 Token::ShiftRight => {
3721 self.next_token();
3722 true.into()
3723 }
3724 _ => return self.expected_ref(">", self.peek_token_ref()),
3725 }
3726 } else {
3727 false.into()
3728 };
3729
3730 Ok(trailing_bracket)
3731 }
3732
3733 pub fn parse_infix(&mut self, expr: Expr, precedence: u8) -> Result<Expr, ParserError> {
3735 if let Some(infix) = self.dialect.parse_infix(self, &expr, precedence) {
3737 return infix;
3738 }
3739
3740 let dialect = self.dialect;
3741
3742 self.advance_token();
3743 let tok = self.get_current_token();
3744 debug!("infix: {tok:?}");
3745 let tok_index = self.get_current_index();
3746 let span = tok.span;
3747 let regular_binary_operator = match &tok.token {
3748 Token::Spaceship => Some(BinaryOperator::Spaceship),
3749 Token::DoubleEq => Some(BinaryOperator::Eq),
3750 Token::Assignment => Some(BinaryOperator::Assignment),
3751 Token::Eq => Some(BinaryOperator::Eq),
3752 Token::Neq => Some(BinaryOperator::NotEq),
3753 Token::Gt => Some(BinaryOperator::Gt),
3754 Token::GtEq => Some(BinaryOperator::GtEq),
3755 Token::Lt => Some(BinaryOperator::Lt),
3756 Token::LtEq => Some(BinaryOperator::LtEq),
3757 Token::Plus => Some(BinaryOperator::Plus),
3758 Token::Minus => Some(BinaryOperator::Minus),
3759 Token::Mul => Some(BinaryOperator::Multiply),
3760 Token::Mod => Some(BinaryOperator::Modulo),
3761 Token::StringConcat => Some(BinaryOperator::StringConcat),
3762 Token::Pipe => Some(BinaryOperator::BitwiseOr),
3763 Token::Caret => {
3764 if dialect_is!(dialect is PostgreSqlDialect) {
3767 Some(BinaryOperator::PGExp)
3768 } else {
3769 Some(BinaryOperator::BitwiseXor)
3770 }
3771 }
3772 Token::Ampersand => Some(BinaryOperator::BitwiseAnd),
3773 Token::Div => Some(BinaryOperator::Divide),
3774 Token::DuckIntDiv if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
3775 Some(BinaryOperator::DuckIntegerDivide)
3776 }
3777 Token::ShiftLeft if dialect.supports_bitwise_shift_operators() => {
3778 Some(BinaryOperator::PGBitwiseShiftLeft)
3779 }
3780 Token::ShiftRight if dialect.supports_bitwise_shift_operators() => {
3781 Some(BinaryOperator::PGBitwiseShiftRight)
3782 }
3783 Token::Sharp if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3784 Some(BinaryOperator::PGBitwiseXor)
3785 }
3786 Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3787 Some(BinaryOperator::PGOverlap)
3788 }
3789 Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3790 Some(BinaryOperator::PGOverlap)
3791 }
3792 Token::Overlap if dialect.supports_double_ampersand_operator() => {
3793 Some(BinaryOperator::And)
3794 }
3795 Token::CaretAt if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3796 Some(BinaryOperator::PGStartsWith)
3797 }
3798 Token::Tilde => Some(BinaryOperator::PGRegexMatch),
3799 Token::TildeAsterisk => Some(BinaryOperator::PGRegexIMatch),
3800 Token::ExclamationMarkTilde => Some(BinaryOperator::PGRegexNotMatch),
3801 Token::ExclamationMarkTildeAsterisk => Some(BinaryOperator::PGRegexNotIMatch),
3802 Token::DoubleTilde => Some(BinaryOperator::PGLikeMatch),
3803 Token::DoubleTildeAsterisk => Some(BinaryOperator::PGILikeMatch),
3804 Token::ExclamationMarkDoubleTilde => Some(BinaryOperator::PGNotLikeMatch),
3805 Token::ExclamationMarkDoubleTildeAsterisk => Some(BinaryOperator::PGNotILikeMatch),
3806 Token::Arrow => Some(BinaryOperator::Arrow),
3807 Token::LongArrow => Some(BinaryOperator::LongArrow),
3808 Token::HashArrow => Some(BinaryOperator::HashArrow),
3809 Token::HashLongArrow => Some(BinaryOperator::HashLongArrow),
3810 Token::AtArrow => Some(BinaryOperator::AtArrow),
3811 Token::ArrowAt => Some(BinaryOperator::ArrowAt),
3812 Token::HashMinus => Some(BinaryOperator::HashMinus),
3813 Token::AtQuestion => Some(BinaryOperator::AtQuestion),
3814 Token::AtAt => Some(BinaryOperator::AtAt),
3815 Token::Question => Some(BinaryOperator::Question),
3816 Token::QuestionAnd => Some(BinaryOperator::QuestionAnd),
3817 Token::QuestionPipe => Some(BinaryOperator::QuestionPipe),
3818 Token::CustomBinaryOperator(s) => Some(BinaryOperator::Custom(s.clone())),
3819 Token::DoubleSharp if self.dialect.supports_geometric_types() => {
3820 Some(BinaryOperator::DoubleHash)
3821 }
3822
3823 Token::AmpersandLeftAngleBracket if self.dialect.supports_geometric_types() => {
3824 Some(BinaryOperator::AndLt)
3825 }
3826 Token::AmpersandRightAngleBracket if self.dialect.supports_geometric_types() => {
3827 Some(BinaryOperator::AndGt)
3828 }
3829 Token::QuestionMarkDash if self.dialect.supports_geometric_types() => {
3830 Some(BinaryOperator::QuestionDash)
3831 }
3832 Token::AmpersandLeftAngleBracketVerticalBar
3833 if self.dialect.supports_geometric_types() =>
3834 {
3835 Some(BinaryOperator::AndLtPipe)
3836 }
3837 Token::VerticalBarAmpersandRightAngleBracket
3838 if self.dialect.supports_geometric_types() =>
3839 {
3840 Some(BinaryOperator::PipeAndGt)
3841 }
3842 Token::TwoWayArrow if self.dialect.supports_geometric_types() => {
3843 Some(BinaryOperator::LtDashGt)
3844 }
3845 Token::LeftAngleBracketCaret if self.dialect.supports_geometric_types() => {
3846 Some(BinaryOperator::LtCaret)
3847 }
3848 Token::RightAngleBracketCaret if self.dialect.supports_geometric_types() => {
3849 Some(BinaryOperator::GtCaret)
3850 }
3851 Token::QuestionMarkSharp if self.dialect.supports_geometric_types() => {
3852 Some(BinaryOperator::QuestionHash)
3853 }
3854 Token::QuestionMarkDoubleVerticalBar if self.dialect.supports_geometric_types() => {
3855 Some(BinaryOperator::QuestionDoublePipe)
3856 }
3857 Token::QuestionMarkDashVerticalBar if self.dialect.supports_geometric_types() => {
3858 Some(BinaryOperator::QuestionDashPipe)
3859 }
3860 Token::TildeEqual if self.dialect.supports_geometric_types() => {
3861 Some(BinaryOperator::TildeEq)
3862 }
3863 Token::ShiftLeftVerticalBar if self.dialect.supports_geometric_types() => {
3864 Some(BinaryOperator::LtLtPipe)
3865 }
3866 Token::VerticalBarShiftRight if self.dialect.supports_geometric_types() => {
3867 Some(BinaryOperator::PipeGtGt)
3868 }
3869 Token::AtSign if self.dialect.supports_geometric_types() => Some(BinaryOperator::At),
3870
3871 Token::Word(w) => match w.keyword {
3872 Keyword::AND => Some(BinaryOperator::And),
3873 Keyword::OR => Some(BinaryOperator::Or),
3874 Keyword::XOR => Some(BinaryOperator::Xor),
3875 Keyword::OVERLAPS => Some(BinaryOperator::Overlaps),
3876 Keyword::OPERATOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3877 self.expect_token(&Token::LParen)?;
3878 let mut idents = vec![];
3883 loop {
3884 self.advance_token();
3885 idents.push(self.get_current_token().to_string());
3886 if !self.consume_token(&Token::Period) {
3887 break;
3888 }
3889 }
3890 self.expect_token(&Token::RParen)?;
3891 Some(BinaryOperator::PGCustomBinaryOperator(idents))
3892 }
3893 _ => None,
3894 },
3895 _ => None,
3896 };
3897
3898 let tok = self.token_at(tok_index);
3899 if let Some(op) = regular_binary_operator {
3900 if let Some(keyword) =
3901 self.parse_one_of_keywords(&[Keyword::ANY, Keyword::ALL, Keyword::SOME])
3902 {
3903 self.expect_token(&Token::LParen)?;
3904 let right = if self.peek_sub_query() {
3905 self.prev_token(); self.parse_subexpr(precedence)?
3909 } else {
3910 let right = self.parse_subexpr(precedence)?;
3912 self.expect_token(&Token::RParen)?;
3913 right
3914 };
3915
3916 if !matches!(
3917 op,
3918 BinaryOperator::Gt
3919 | BinaryOperator::Lt
3920 | BinaryOperator::GtEq
3921 | BinaryOperator::LtEq
3922 | BinaryOperator::Eq
3923 | BinaryOperator::NotEq
3924 | BinaryOperator::PGRegexMatch
3925 | BinaryOperator::PGRegexIMatch
3926 | BinaryOperator::PGRegexNotMatch
3927 | BinaryOperator::PGRegexNotIMatch
3928 | BinaryOperator::PGLikeMatch
3929 | BinaryOperator::PGILikeMatch
3930 | BinaryOperator::PGNotLikeMatch
3931 | BinaryOperator::PGNotILikeMatch
3932 ) {
3933 return parser_err!(
3934 format!(
3935 "Expected one of [=, >, <, =>, =<, !=, ~, ~*, !~, !~*, ~~, ~~*, !~~, !~~*] as comparison operator, found: {op}"
3936 ),
3937 span.start
3938 );
3939 };
3940
3941 Ok(match keyword {
3942 Keyword::ALL => Expr::AllOp {
3943 left: Box::new(expr),
3944 compare_op: op,
3945 right: Box::new(right),
3946 },
3947 Keyword::ANY | Keyword::SOME => Expr::AnyOp {
3948 left: Box::new(expr),
3949 compare_op: op,
3950 right: Box::new(right),
3951 is_some: keyword == Keyword::SOME,
3952 },
3953 unexpected_keyword => return Err(ParserError::ParserError(
3954 format!("Internal parser error: expected any of {{ALL, ANY, SOME}}, got {unexpected_keyword:?}"),
3955 )),
3956 })
3957 } else {
3958 Ok(Expr::BinaryOp {
3959 left: Box::new(expr),
3960 op,
3961 right: Box::new(self.parse_subexpr(precedence)?),
3962 })
3963 }
3964 } else if let Token::Word(w) = &tok.token {
3965 match w.keyword {
3966 Keyword::IS => {
3967 if self.parse_keyword(Keyword::NULL) {
3968 Ok(Expr::IsNull(Box::new(expr)))
3969 } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
3970 Ok(Expr::IsNotNull(Box::new(expr)))
3971 } else if self.parse_keywords(&[Keyword::TRUE]) {
3972 Ok(Expr::IsTrue(Box::new(expr)))
3973 } else if self.parse_keywords(&[Keyword::NOT, Keyword::TRUE]) {
3974 Ok(Expr::IsNotTrue(Box::new(expr)))
3975 } else if self.parse_keywords(&[Keyword::FALSE]) {
3976 Ok(Expr::IsFalse(Box::new(expr)))
3977 } else if self.parse_keywords(&[Keyword::NOT, Keyword::FALSE]) {
3978 Ok(Expr::IsNotFalse(Box::new(expr)))
3979 } else if self.parse_keywords(&[Keyword::UNKNOWN]) {
3980 Ok(Expr::IsUnknown(Box::new(expr)))
3981 } else if self.parse_keywords(&[Keyword::NOT, Keyword::UNKNOWN]) {
3982 Ok(Expr::IsNotUnknown(Box::new(expr)))
3983 } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::FROM]) {
3984 let expr2 = self.parse_expr()?;
3985 Ok(Expr::IsDistinctFrom(Box::new(expr), Box::new(expr2)))
3986 } else if self.parse_keywords(&[Keyword::NOT, Keyword::DISTINCT, Keyword::FROM])
3987 {
3988 let expr2 = self.parse_expr()?;
3989 Ok(Expr::IsNotDistinctFrom(Box::new(expr), Box::new(expr2)))
3990 } else if let Ok(is_normalized) = self.parse_unicode_is_normalized(expr) {
3991 Ok(is_normalized)
3992 } else {
3993 self.expected_ref(
3994 "[NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS",
3995 self.peek_token_ref(),
3996 )
3997 }
3998 }
3999 Keyword::AT => {
4000 self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
4001 Ok(Expr::AtTimeZone {
4002 timestamp: Box::new(expr),
4003 time_zone: Box::new(self.parse_subexpr(precedence)?),
4004 })
4005 }
4006 Keyword::NOT
4007 | Keyword::IN
4008 | Keyword::BETWEEN
4009 | Keyword::LIKE
4010 | Keyword::ILIKE
4011 | Keyword::SIMILAR
4012 | Keyword::REGEXP
4013 | Keyword::RLIKE => {
4014 self.prev_token();
4015 let negated = self.parse_keyword(Keyword::NOT);
4016 let regexp = self.parse_keyword(Keyword::REGEXP);
4017 let rlike = self.parse_keyword(Keyword::RLIKE);
4018 let null = if !self.in_column_definition_state() {
4019 self.parse_keyword(Keyword::NULL)
4020 } else {
4021 false
4022 };
4023 if regexp || rlike {
4024 Ok(Expr::RLike {
4025 negated,
4026 expr: Box::new(expr),
4027 pattern: Box::new(
4028 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4029 ),
4030 regexp,
4031 })
4032 } else if negated && null {
4033 Ok(Expr::IsNotNull(Box::new(expr)))
4034 } else if self.parse_keyword(Keyword::IN) {
4035 self.parse_in(expr, negated)
4036 } else if self.parse_keyword(Keyword::BETWEEN) {
4037 self.parse_between(expr, negated)
4038 } else if self.parse_keyword(Keyword::LIKE) {
4039 Ok(Expr::Like {
4040 negated,
4041 any: self.parse_keyword(Keyword::ANY),
4042 expr: Box::new(expr),
4043 pattern: Box::new(
4044 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4045 ),
4046 escape_char: self.parse_escape_char()?,
4047 })
4048 } else if self.parse_keyword(Keyword::ILIKE) {
4049 Ok(Expr::ILike {
4050 negated,
4051 any: self.parse_keyword(Keyword::ANY),
4052 expr: Box::new(expr),
4053 pattern: Box::new(
4054 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4055 ),
4056 escape_char: self.parse_escape_char()?,
4057 })
4058 } else if self.parse_keywords(&[Keyword::SIMILAR, Keyword::TO]) {
4059 Ok(Expr::SimilarTo {
4060 negated,
4061 expr: Box::new(expr),
4062 pattern: Box::new(
4063 self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
4064 ),
4065 escape_char: self.parse_escape_char()?,
4066 })
4067 } else {
4068 self.expected_ref("IN or BETWEEN after NOT", self.peek_token_ref())
4069 }
4070 }
4071 Keyword::NOTNULL if dialect.supports_notnull_operator() => {
4072 Ok(Expr::IsNotNull(Box::new(expr)))
4073 }
4074 Keyword::MEMBER => {
4075 if self.parse_keyword(Keyword::OF) {
4076 self.expect_token(&Token::LParen)?;
4077 let array = self.parse_expr()?;
4078 self.expect_token(&Token::RParen)?;
4079 Ok(Expr::MemberOf(MemberOf {
4080 value: Box::new(expr),
4081 array: Box::new(array),
4082 }))
4083 } else {
4084 self.expected_ref("OF after MEMBER", self.peek_token_ref())
4085 }
4086 }
4087 _ => parser_err!(
4089 format!("No infix parser for token {:?}", tok.token),
4090 tok.span.start
4091 ),
4092 }
4093 } else if Token::DoubleColon == *tok {
4094 Ok(Expr::Cast {
4095 kind: CastKind::DoubleColon,
4096 expr: Box::new(expr),
4097 data_type: self.parse_data_type()?,
4098 array: false,
4099 format: None,
4100 })
4101 } else if Token::ExclamationMark == *tok && self.dialect.supports_factorial_operator() {
4102 Ok(Expr::UnaryOp {
4103 op: UnaryOperator::PGPostfixFactorial,
4104 expr: Box::new(expr),
4105 })
4106 } else if Token::LBracket == *tok && self.dialect.supports_partiql()
4107 || (Token::Colon == *tok)
4108 {
4109 self.prev_token();
4110 self.parse_json_access(expr)
4111 } else {
4112 parser_err!(
4114 format!("No infix parser for token {:?}", tok.token),
4115 tok.span.start
4116 )
4117 }
4118 }
4119
4120 pub fn parse_escape_char(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
4122 if self.parse_keyword(Keyword::ESCAPE) {
4123 Ok(Some(self.parse_value()?))
4124 } else {
4125 Ok(None)
4126 }
4127 }
4128
    /// Parses the interior of a subscript — everything between `[` and `]` —
    /// which is either a plain index (`expr[1]`) or a slice with optional
    /// lower bound, upper bound and stride (`expr[1:4:2]`, `expr[:4]`,
    /// `expr[1:]`, `expr[:]`, ...). Assumes the opening `[` has already been
    /// consumed.
    fn parse_subscript_inner(&mut self) -> Result<Subscript, ParserError> {
        // A leading `:` means the lower bound was omitted.
        let lower_bound = if self.consume_token(&Token::Colon) {
            None
        } else {
            Some(self.parse_subexpr(self.dialect.prec_value(Precedence::Colon))?)
        };

        // `]` directly after the first component: either a plain index
        // (`[expr]`) or a fully open slice (`[:]`).
        if self.consume_token(&Token::RBracket) {
            if let Some(lower_bound) = lower_bound {
                return Ok(Subscript::Index { index: lower_bound });
            };
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound: None,
                stride: None,
            });
        }

        // When a lower bound was parsed, the separating `:` has not been
        // consumed yet (if the bound was omitted, the `:` already was).
        if lower_bound.is_some() {
            self.expect_token(&Token::Colon)?;
        }

        // `]` right after the `:` means the upper bound was omitted.
        let upper_bound = if self.consume_token(&Token::RBracket) {
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound: None,
                stride: None,
            });
        } else {
            Some(self.parse_subexpr(self.dialect.prec_value(Precedence::Colon))?)
        };

        if self.consume_token(&Token::RBracket) {
            return Ok(Subscript::Slice {
                lower_bound,
                upper_bound,
                stride: None,
            });
        }

        // A second `:` introduces the optional stride.
        self.expect_token(&Token::Colon)?;
        let stride = if self.consume_token(&Token::RBracket) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        // When a stride expression was parsed the closing `]` is still
        // pending; when it was omitted the `]` was consumed just above.
        if stride.is_some() {
            self.expect_token(&Token::RBracket)?;
        }

        Ok(Subscript::Slice {
            lower_bound,
            upper_bound,
            stride,
        })
    }
4203
    /// Consumes a chain of consecutive subscripts (`[a][b]...`), appending
    /// each parsed subscript to `chain`. Stops at the first token that is
    /// not `[`.
    pub fn parse_multi_dim_subscript(
        &mut self,
        chain: &mut Vec<AccessExpr>,
    ) -> Result<(), ParserError> {
        while self.consume_token(&Token::LBracket) {
            self.parse_subscript(chain)?;
        }
        Ok(())
    }
4214
    /// Parses a single subscript (the opening `[` must already be consumed)
    /// and appends it to the access-expression chain.
    fn parse_subscript(&mut self, chain: &mut Vec<AccessExpr>) -> Result<(), ParserError> {
        let subscript = self.parse_subscript_inner()?;
        chain.push(AccessExpr::Subscript(subscript));
        Ok(())
    }
4223
4224 fn parse_json_path_object_key(&mut self) -> Result<JsonPathElem, ParserError> {
4225 let token = self.next_token();
4226 match token.token {
4227 Token::Word(Word {
4228 value,
4229 quote_style: quote_style @ (Some('"') | Some('`') | None),
4232 keyword: _,
4235 }) => Ok(JsonPathElem::Dot {
4236 key: value,
4237 quoted: quote_style.is_some(),
4238 }),
4239
4240 Token::DoubleQuotedString(key) => Ok(JsonPathElem::Dot { key, quoted: true }),
4244
4245 _ => self.expected("variant object key name", token),
4246 }
4247 }
4248
4249 fn parse_json_access(&mut self, expr: Expr) -> Result<Expr, ParserError> {
4250 let path = self.parse_json_path()?;
4251 Ok(Expr::JsonAccess {
4252 value: Box::new(expr),
4253 path,
4254 })
4255 }
4256
    /// Parses a JSON path made of dotted keys, bracketed expressions and —
    /// for the first element only — `:`-prefixed segments (`:key` or
    /// `:[expr]`). Stops at the first token that cannot continue the path;
    /// callers must only invoke this when at least one element will match.
    fn parse_json_path(&mut self) -> Result<JsonPath, ParserError> {
        let mut path = Vec::new();
        loop {
            match self.next_token().token {
                // Leading `:[expr]` — only valid as the first path element.
                Token::Colon if path.is_empty() && self.peek_token_ref() == &Token::LBracket => {
                    self.next_token();
                    let key = self.parse_wildcard_expr()?;
                    self.expect_token(&Token::RBracket)?;
                    path.push(JsonPathElem::ColonBracket { key });
                }
                // Leading `:key` — only valid as the first path element.
                Token::Colon if path.is_empty() => {
                    path.push(self.parse_json_path_object_key()?);
                }
                // `.key` — only valid after a preceding element.
                Token::Period if !path.is_empty() => {
                    path.push(self.parse_json_path_object_key()?);
                }
                // `[expr]` — valid anywhere in the path.
                Token::LBracket => {
                    let key = self.parse_wildcard_expr()?;
                    self.expect_token(&Token::RBracket)?;

                    path.push(JsonPathElem::Bracket { key });
                }
                // Any other token ends the path; put it back for the caller.
                _ => {
                    self.prev_token();
                    break;
                }
            };
        }

        // Callers enter this only when a path element is about to match, so
        // an empty path here indicates a caller bug.
        debug_assert!(!path.is_empty());
        Ok(JsonPath { path })
    }
4289
    /// Parses the right-hand side of an `IN` predicate after `IN` itself has
    /// been consumed: `IN UNNEST(expr)`, `IN (subquery)` or `IN (item, ...)`.
    /// `expr` is the already-parsed left operand and `negated` records a
    /// preceding `NOT`.
    pub fn parse_in(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
        // `IN UNNEST(array_expression)` form.
        if self.parse_keyword(Keyword::UNNEST) {
            self.expect_token(&Token::LParen)?;
            let array_expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            return Ok(Expr::InUnnest {
                expr: Box::new(expr),
                array_expr: Box::new(array_expr),
                negated,
            });
        }
        self.expect_token(&Token::LParen)?;
        // Try a subquery first; fall back to a plain expression list when the
        // parenthesized body does not parse as a query.
        let in_op = match self.maybe_parse(|p| p.parse_query())? {
            Some(subquery) => Expr::InSubquery {
                expr: Box::new(expr),
                subquery,
                negated,
            },
            None => Expr::InList {
                expr: Box::new(expr),
                // Some dialects allow an empty `IN ()` list.
                list: if self.dialect.supports_in_empty_list() {
                    self.parse_comma_separated0(Parser::parse_expr, Token::RParen)?
                } else {
                    self.parse_comma_separated(Parser::parse_expr)?
                },
                negated,
            },
        };
        self.expect_token(&Token::RParen)?;
        Ok(in_op)
    }
4324
4325 pub fn parse_between(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
4327 let low = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
4330 self.expect_keyword_is(Keyword::AND)?;
4331 let high = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
4332 Ok(Expr::Between {
4333 expr: Box::new(expr),
4334 negated,
4335 low: Box::new(low),
4336 high: Box::new(high),
4337 })
4338 }
4339
4340 pub fn parse_pg_cast(&mut self, expr: Expr) -> Result<Expr, ParserError> {
4342 Ok(Expr::Cast {
4343 kind: CastKind::DoubleColon,
4344 expr: Box::new(expr),
4345 data_type: self.parse_data_type()?,
4346 array: false,
4347 format: None,
4348 })
4349 }
4350
    /// Returns the precedence of the next operator token by delegating to
    /// the dialect's default precedence rules.
    pub fn get_next_precedence(&self) -> Result<u8, ParserError> {
        self.dialect.get_next_precedence_default(self)
    }
4355
4356 pub fn token_at(&self, index: usize) -> &TokenWithSpan {
4359 self.tokens.get(index).unwrap_or(&EOF_TOKEN)
4360 }
4361
4362 pub fn peek_token(&self) -> TokenWithSpan {
4367 self.peek_nth_token(0)
4368 }
4369
    /// Returns a reference to the next non-whitespace token without
    /// advancing the parser.
    pub fn peek_token_ref(&self) -> &TokenWithSpan {
        self.peek_nth_token_ref(0)
    }
4375
4376 pub fn peek_tokens<const N: usize>(&self) -> [Token; N] {
4399 self.peek_tokens_with_location()
4400 .map(|with_loc| with_loc.token)
4401 }
4402
4403 pub fn peek_tokens_with_location<const N: usize>(&self) -> [TokenWithSpan; N] {
4408 let mut index = self.index;
4409 core::array::from_fn(|_| loop {
4410 let token = self.tokens.get(index);
4411 index += 1;
4412 if let Some(TokenWithSpan {
4413 token: Token::Whitespace(_),
4414 span: _,
4415 }) = token
4416 {
4417 continue;
4418 }
4419 break token.cloned().unwrap_or(TokenWithSpan {
4420 token: Token::EOF,
4421 span: Span::empty(),
4422 });
4423 })
4424 }
4425
4426 pub fn peek_tokens_ref<const N: usize>(&self) -> [&TokenWithSpan; N] {
4431 let mut index = self.index;
4432 core::array::from_fn(|_| loop {
4433 let token = self.tokens.get(index);
4434 index += 1;
4435 if let Some(TokenWithSpan {
4436 token: Token::Whitespace(_),
4437 span: _,
4438 }) = token
4439 {
4440 continue;
4441 }
4442 break token.unwrap_or(&EOF_TOKEN);
4443 })
4444 }
4445
    /// Returns a clone of the `n`th upcoming non-whitespace token (`n = 0`
    /// is the next token) without advancing the parser.
    pub fn peek_nth_token(&self, n: usize) -> TokenWithSpan {
        self.peek_nth_token_ref(n).clone()
    }
4450
    /// Returns a reference to the `n`th upcoming non-whitespace token
    /// (`n = 0` is the next token) without advancing the parser. Positions
    /// past the end of input resolve to the EOF sentinel.
    pub fn peek_nth_token_ref(&self, mut n: usize) -> &TokenWithSpan {
        let mut index = self.index;
        loop {
            index += 1;
            match self.tokens.get(index - 1) {
                // Whitespace never counts toward `n`.
                Some(TokenWithSpan {
                    token: Token::Whitespace(_),
                    span: _,
                }) => continue,
                non_whitespace => {
                    if n == 0 {
                        // `None` (past end) maps to EOF; note the loop exits
                        // here for every `n` once the stream is exhausted.
                        return non_whitespace.unwrap_or(&EOF_TOKEN);
                    }
                    n -= 1;
                }
            }
        }
    }
4470
    /// Returns the immediately next token — whitespace included — without
    /// advancing the parser.
    pub fn peek_token_no_skip(&self) -> TokenWithSpan {
        self.peek_nth_token_no_skip(0)
    }
4476
4477 pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithSpan {
4479 self.tokens
4480 .get(self.index + n)
4481 .cloned()
4482 .unwrap_or(TokenWithSpan {
4483 token: Token::EOF,
4484 span: Span::empty(),
4485 })
4486 }
4487
4488 fn peek_nth_token_no_skip_ref(&self, n: usize) -> &TokenWithSpan {
4490 self.tokens.get(self.index + n).unwrap_or(&EOF_TOKEN)
4491 }
4492
4493 fn peek_keywords(&mut self, expected: &[Keyword]) -> bool {
4497 let index = self.index;
4498 let matched = self.parse_keywords(expected);
4499 self.index = index;
4500 matched
4501 }
4502
    /// Advances past any whitespace to the next token, consumes it and
    /// returns a clone of it (EOF once the input is exhausted).
    pub fn next_token(&mut self) -> TokenWithSpan {
        self.advance_token();
        self.get_current_token().clone()
    }
4511
    /// Returns the index of the most recently consumed token (0 when
    /// nothing has been consumed yet).
    pub fn get_current_index(&self) -> usize {
        self.index.saturating_sub(1)
    }
4519
4520 pub fn next_token_no_skip(&mut self) -> Option<&TokenWithSpan> {
4522 self.index += 1;
4523 self.tokens.get(self.index - 1)
4524 }
4525
4526 pub fn advance_token(&mut self) {
4530 loop {
4531 self.index += 1;
4532 match self.tokens.get(self.index - 1) {
4533 Some(TokenWithSpan {
4534 token: Token::Whitespace(_),
4535 span: _,
4536 }) => continue,
4537 _ => break,
4538 }
4539 }
4540 }
4541
    /// Returns the most recently consumed token (EOF before anything has
    /// been consumed).
    pub fn get_current_token(&self) -> &TokenWithSpan {
        self.token_at(self.index.saturating_sub(1))
    }
4548
    /// Returns the token consumed immediately before the current one.
    pub fn get_previous_token(&self) -> &TokenWithSpan {
        self.token_at(self.index.saturating_sub(2))
    }
4555
    /// Returns the token at the current position (the next one to be
    /// consumed), without skipping whitespace and without advancing.
    pub fn get_next_token(&self) -> &TokenWithSpan {
        self.token_at(self.index)
    }
4562
4563 pub fn prev_token(&mut self) {
4570 loop {
4571 assert!(self.index > 0);
4572 self.index -= 1;
4573 if let Some(TokenWithSpan {
4574 token: Token::Whitespace(_),
4575 span: _,
4576 }) = self.tokens.get(self.index)
4577 {
4578 continue;
4579 }
4580 return;
4581 }
4582 }
4583
    /// Builds an "Expected: X, found: Y" parse error located at the span of
    /// the `found` token.
    pub fn expected<T>(&self, expected: &str, found: TokenWithSpan) -> Result<T, ParserError> {
        parser_err!(
            format!("Expected: {expected}, found: {found}"),
            found.span.start
        )
    }
4591
    /// Borrowing variant of [`Self::expected`]: builds the same
    /// "Expected: X, found: Y" error from a token reference.
    pub fn expected_ref<T>(&self, expected: &str, found: &TokenWithSpan) -> Result<T, ParserError> {
        parser_err!(
            format!("Expected: {expected}, found: {found}"),
            found.span.start
        )
    }
4599
    /// Like [`Self::expected_ref`], but reports the token at an absolute
    /// token-stream `index` (EOF when the index is out of range).
    pub fn expected_at<T>(&self, expected: &str, index: usize) -> Result<T, ParserError> {
        let found = self.tokens.get(index).unwrap_or(&EOF_TOKEN);
        parser_err!(
            format!("Expected: {expected}, found: {found}"),
            found.span.start
        )
    }
4608
4609 #[must_use]
4612 pub fn parse_keyword(&mut self, expected: Keyword) -> bool {
4613 if self.peek_keyword(expected) {
4614 self.advance_token();
4615 true
4616 } else {
4617 false
4618 }
4619 }
4620
4621 #[must_use]
4622 pub fn peek_keyword(&self, expected: Keyword) -> bool {
4626 matches!(&self.peek_token_ref().token, Token::Word(w) if expected == w.keyword)
4627 }
4628
    /// Consumes the keyword `expected` plus the exact `tokens` that follow
    /// it, returning `true` on a full match; consumes nothing otherwise.
    pub fn parse_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
        self.keyword_with_tokens(expected, tokens, true)
    }
4639
    /// Non-consuming variant of [`Self::parse_keyword_with_tokens`]: checks
    /// whether the keyword and following tokens match, without advancing.
    pub(crate) fn peek_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
        self.keyword_with_tokens(expected, tokens, false)
    }
4647
4648 fn keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token], consume: bool) -> bool {
4649 match &self.peek_token_ref().token {
4650 Token::Word(w) if expected == w.keyword => {
4651 for (idx, token) in tokens.iter().enumerate() {
4652 if self.peek_nth_token_ref(idx + 1).token != *token {
4653 return false;
4654 }
4655 }
4656
4657 if consume {
4658 for _ in 0..(tokens.len() + 1) {
4659 self.advance_token();
4660 }
4661 }
4662
4663 true
4664 }
4665 _ => false,
4666 }
4667 }
4668
    /// Consumes the exact keyword sequence `keywords`, returning `true` on a
    /// full match; on a partial match nothing is consumed.
    #[must_use]
    pub fn parse_keywords(&mut self, keywords: &[Keyword]) -> bool {
        self.parse_keywords_indexed(keywords).is_some()
    }
4676
    /// Consumes the exact keyword sequence `keywords` and returns the token
    /// index of the first keyword, or `None` — restoring the original
    /// position — when the sequence does not match in full.
    #[must_use]
    fn parse_keywords_indexed(&mut self, keywords: &[Keyword]) -> Option<usize> {
        let start_index = self.index;
        let mut first_keyword_index = None;
        for &keyword in keywords {
            if !self.parse_keyword(keyword) {
                // Partial match: rewind so nothing stays consumed.
                self.index = start_index;
                return None;
            }
            if first_keyword_index.is_none() {
                // `index` now points just past the consumed keyword.
                first_keyword_index = Some(self.index.saturating_sub(1));
            }
        }
        first_keyword_index
    }
4694
4695 #[must_use]
4698 pub fn peek_one_of_keywords(&self, keywords: &[Keyword]) -> Option<Keyword> {
4699 for keyword in keywords {
4700 if self.peek_keyword(*keyword) {
4701 return Some(*keyword);
4702 }
4703 }
4704 None
4705 }
4706
4707 #[must_use]
4711 pub fn parse_one_of_keywords(&mut self, keywords: &[Keyword]) -> Option<Keyword> {
4712 match &self.peek_token_ref().token {
4713 Token::Word(w) => {
4714 keywords
4715 .iter()
4716 .find(|keyword| **keyword == w.keyword)
4717 .map(|keyword| {
4718 self.advance_token();
4719 *keyword
4720 })
4721 }
4722 _ => None,
4723 }
4724 }
4725
4726 pub fn expect_one_of_keywords(&mut self, keywords: &[Keyword]) -> Result<Keyword, ParserError> {
4729 if let Some(keyword) = self.parse_one_of_keywords(keywords) {
4730 Ok(keyword)
4731 } else {
4732 let keywords: Vec<String> = keywords.iter().map(|x| format!("{x:?}")).collect();
4733 self.expected_ref(
4734 &format!("one of {}", keywords.join(" or ")),
4735 self.peek_token_ref(),
4736 )
4737 }
4738 }
4739
4740 pub fn expect_keyword(&mut self, expected: Keyword) -> Result<TokenWithSpan, ParserError> {
4745 if self.parse_keyword(expected) {
4746 Ok(self.get_current_token().clone())
4747 } else {
4748 self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4749 }
4750 }
4751
4752 pub fn expect_keyword_is(&mut self, expected: Keyword) -> Result<(), ParserError> {
4758 if self.parse_keyword(expected) {
4759 Ok(())
4760 } else {
4761 self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4762 }
4763 }
4764
4765 pub fn expect_keywords(&mut self, expected: &[Keyword]) -> Result<(), ParserError> {
4768 for &kw in expected {
4769 self.expect_keyword_is(kw)?;
4770 }
4771 Ok(())
4772 }
4773
4774 #[must_use]
4778 pub fn consume_token(&mut self, expected: &Token) -> bool {
4779 if self.peek_token_ref() == expected {
4780 self.advance_token();
4781 true
4782 } else {
4783 false
4784 }
4785 }
4786
4787 #[must_use]
4791 pub fn consume_tokens(&mut self, tokens: &[Token]) -> bool {
4792 let index = self.index;
4793 for token in tokens {
4794 if !self.consume_token(token) {
4795 self.index = index;
4796 return false;
4797 }
4798 }
4799 true
4800 }
4801
4802 pub fn expect_token(&mut self, expected: &Token) -> Result<TokenWithSpan, ParserError> {
4804 if self.peek_token_ref() == expected {
4805 Ok(self.next_token())
4806 } else {
4807 self.expected_ref(&expected.to_string(), self.peek_token_ref())
4808 }
4809 }
4810
4811 fn parse<T: FromStr>(s: String, loc: Location) -> Result<T, ParserError>
4812 where
4813 <T as FromStr>::Err: Display,
4814 {
4815 s.parse::<T>().map_err(|e| {
4816 ParserError::ParserError(format!(
4817 "Could not parse '{s}' as {}: {e}{loc}",
4818 core::any::type_name::<T>()
4819 ))
4820 })
4821 }
4822
4823 pub fn parse_projection(&mut self) -> Result<Vec<SelectItem>, ParserError> {
4825 let trailing_commas =
4831 self.options.trailing_commas | self.dialect.supports_projection_trailing_commas();
4832
4833 self.parse_comma_separated_with_trailing_commas(
4834 |p| p.parse_select_item(),
4835 trailing_commas,
4836 Self::is_reserved_for_column_alias,
4837 )
4838 }
4839
    /// Parses the comma-separated list of privileges in a `GRANT`/`REVOKE`
    /// statement. When trailing commas are enabled, a comma followed by `ON`
    /// or a list-closing token ends the list instead of starting a new item.
    pub fn parse_actions_list(&mut self) -> Result<Vec<Action>, ParserError> {
        let mut values = vec![];
        loop {
            values.push(self.parse_grant_permission()?);
            if !self.consume_token(&Token::Comma) {
                break;
            } else if self.options.trailing_commas {
                // The comma may be trailing: inspect what follows it.
                match &self.peek_token_ref().token {
                    // `ON` begins the object clause of the statement.
                    Token::Word(kw) if kw.keyword == Keyword::ON => {
                        break;
                    }
                    Token::RParen
                    | Token::SemiColon
                    | Token::EOF
                    | Token::RBracket
                    | Token::RBrace => break,
                    _ => continue,
                }
            }
        }
        Ok(values)
    }
4863
4864 fn parse_table_with_joins(&mut self) -> Result<Vec<TableWithJoins>, ParserError> {
4866 let trailing_commas = self.dialect.supports_from_trailing_commas();
4867
4868 self.parse_comma_separated_with_trailing_commas(
4869 Parser::parse_table_and_joins,
4870 trailing_commas,
4871 |kw, parser| !self.dialect.is_table_factor(kw, parser),
4872 )
4873 }
4874
    /// Returns `true` when the parser sits at the end of a comma-separated
    /// list: either no comma follows the last item, or — with
    /// `trailing_commas` enabled — the comma is trailing, i.e. followed by a
    /// reserved keyword (per `is_reserved_keyword`) or a list-closing token.
    /// A consumed comma stays consumed; the token after it is put back.
    fn is_parse_comma_separated_end_with_trailing_commas<R>(
        &mut self,
        trailing_commas: bool,
        is_reserved_keyword: &R,
    ) -> bool
    where
        R: Fn(&Keyword, &mut Parser) -> bool,
    {
        if !self.consume_token(&Token::Comma) {
            true
        } else if trailing_commas {
            // Look one token past the comma to decide whether it was
            // trailing; rewind afterwards so the caller sees that token.
            let token = self.next_token().token;
            let is_end = match token {
                Token::Word(ref kw) if is_reserved_keyword(&kw.keyword, self) => true,
                Token::RParen | Token::SemiColon | Token::EOF | Token::RBracket | Token::RBrace => {
                    true
                }
                _ => false,
            };
            self.prev_token();

            is_end
        } else {
            false
        }
    }
4907
    /// Convenience wrapper around
    /// [`Self::is_parse_comma_separated_end_with_trailing_commas`] that uses
    /// the parser's configured trailing-comma option and the default
    /// column-alias keyword check.
    fn is_parse_comma_separated_end(&mut self) -> bool {
        self.is_parse_comma_separated_end_with_trailing_commas(
            self.options.trailing_commas,
            &Self::is_reserved_for_column_alias,
        )
    }
4916
    /// Parses one or more comma-separated items produced by `f`, using the
    /// parser's configured trailing-comma behavior and the default
    /// column-alias keyword check for list termination.
    pub fn parse_comma_separated<T, F>(&mut self, f: F) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        self.parse_comma_separated_with_trailing_commas(
            f,
            self.options.trailing_commas,
            Self::is_reserved_for_column_alias,
        )
    }
4928
    /// Parses one or more items produced by `f`, separated by commas and
    /// optionally tolerating a trailing comma. `is_reserved_keyword` decides
    /// which keywords after a comma end the list rather than starting a new
    /// item.
    fn parse_comma_separated_with_trailing_commas<T, F, R>(
        &mut self,
        mut f: F,
        trailing_commas: bool,
        is_reserved_keyword: R,
    ) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
        R: Fn(&Keyword, &mut Parser) -> bool,
    {
        let mut values = vec![];
        loop {
            values.push(f(self)?);
            // The helper also consumes the separating comma when the list
            // continues.
            if self.is_parse_comma_separated_end_with_trailing_commas(
                trailing_commas,
                &is_reserved_keyword,
            ) {
                break;
            }
        }
        Ok(values)
    }
4955
4956 fn parse_period_separated<T, F>(&mut self, mut f: F) -> Result<Vec<T>, ParserError>
4958 where
4959 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4960 {
4961 let mut values = vec![];
4962 loop {
4963 values.push(f(self)?);
4964 if !self.consume_token(&Token::Period) {
4965 break;
4966 }
4967 }
4968 Ok(values)
4969 }
4970
4971 pub fn parse_keyword_separated<T, F>(
4973 &mut self,
4974 keyword: Keyword,
4975 mut f: F,
4976 ) -> Result<Vec<T>, ParserError>
4977 where
4978 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4979 {
4980 let mut values = vec![];
4981 loop {
4982 values.push(f(self)?);
4983 if !self.parse_keyword(keyword) {
4984 break;
4985 }
4986 }
4987 Ok(values)
4988 }
4989
4990 pub fn parse_parenthesized<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
4992 where
4993 F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4994 {
4995 self.expect_token(&Token::LParen)?;
4996 let res = f(self)?;
4997 self.expect_token(&Token::RParen)?;
4998 Ok(res)
4999 }
5000
    /// Parses a possibly-empty comma-separated list terminated by
    /// `end_token`, which is NOT consumed. Recognizes an immediately-closed
    /// list and, when trailing commas are enabled, a lone trailing comma.
    pub fn parse_comma_separated0<T, F>(
        &mut self,
        f: F,
        end_token: Token,
    ) -> Result<Vec<T>, ParserError>
    where
        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
    {
        // Empty list: the end token appears immediately.
        if self.peek_token_ref().token == end_token {
            return Ok(vec![]);
        }

        // A lone comma before the end token also denotes an empty list when
        // trailing commas are allowed; consume just the comma.
        if self.options.trailing_commas && self.peek_tokens() == [Token::Comma, end_token] {
            let _ = self.consume_token(&Token::Comma);
            return Ok(vec![]);
        }

        self.parse_comma_separated(f)
    }
5022
    /// Parses a sequence of semicolon-terminated statements, stopping —
    /// without consuming the terminator — at EOF or at any unquoted keyword
    /// listed in `terminal_keywords`.
    pub(crate) fn parse_statement_list(
        &mut self,
        terminal_keywords: &[Keyword],
    ) -> Result<Vec<Statement>, ParserError> {
        let mut values = vec![];
        loop {
            match &self.peek_nth_token_ref(0).token {
                Token::EOF => break,
                Token::Word(w) => {
                    // Quoted identifiers never terminate the list, even if
                    // they spell a terminal keyword.
                    if w.quote_style.is_none() && terminal_keywords.contains(&w.keyword) {
                        break;
                    }
                }
                _ => {}
            }

            // Each statement must be followed by a semicolon.
            values.push(self.parse_statement()?);
            self.expect_token(&Token::SemiColon)?;
        }
        Ok(values)
    }
5047
    /// Returns `true` when `kw` cannot act as a column alias, i.e. the
    /// dialect does not accept it as an implicit alias after an expression.
    fn is_reserved_for_column_alias(kw: &Keyword, parser: &mut Parser) -> bool {
        !parser.dialect.is_column_alias(kw, parser)
    }
5054
5055 pub fn maybe_parse<T, F>(&mut self, f: F) -> Result<Option<T>, ParserError>
5059 where
5060 F: FnMut(&mut Parser) -> Result<T, ParserError>,
5061 {
5062 match self.try_parse(f) {
5063 Ok(t) => Ok(Some(t)),
5064 Err(ParserError::RecursionLimitExceeded) => Err(ParserError::RecursionLimitExceeded),
5065 _ => Ok(None),
5066 }
5067 }
5068
5069 pub fn try_parse<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
5071 where
5072 F: FnMut(&mut Parser) -> Result<T, ParserError>,
5073 {
5074 let index = self.index;
5075 match f(self) {
5076 Ok(t) => Ok(t),
5077 Err(e) => {
5078 self.index = index;
5080 Err(e)
5081 }
5082 }
5083 }
5084
    /// Parses an optional `ALL`, `DISTINCT` or `DISTINCT ON (...)` set
    /// quantifier. Returns `Ok(None)` when neither keyword is present and
    /// rejects the contradictory combinations `ALL DISTINCT` and
    /// `DISTINCT ALL`.
    pub fn parse_all_or_distinct(&mut self) -> Result<Option<Distinct>, ParserError> {
        // Capture the location before consuming tokens, for error reporting.
        let loc = self.peek_token_ref().span.start;
        let distinct = match self.parse_one_of_keywords(&[Keyword::ALL, Keyword::DISTINCT]) {
            Some(Keyword::ALL) => {
                if self.peek_keyword(Keyword::DISTINCT) {
                    return parser_err!("Cannot specify ALL then DISTINCT".to_string(), loc);
                }
                Some(Distinct::All)
            }
            Some(Keyword::DISTINCT) => {
                if self.peek_keyword(Keyword::ALL) {
                    return parser_err!("Cannot specify DISTINCT then ALL".to_string(), loc);
                }
                Some(Distinct::Distinct)
            }
            None => return Ok(None),
            // Unreachable in practice: parse_one_of_keywords only yields
            // keywords from the list passed above.
            _ => return parser_err!("ALL or DISTINCT", loc),
        };

        let Some(Distinct::Distinct) = distinct else {
            return Ok(distinct);
        };
        // `DISTINCT ON (col, ...)` form.
        if !self.parse_keyword(Keyword::ON) {
            return Ok(Some(Distinct::Distinct));
        }

        self.expect_token(&Token::LParen)?;
        let col_names = if self.consume_token(&Token::RParen) {
            // Empty column list `()`: step back so the expect_token below
            // consumes the `)` on both paths uniformly.
            self.prev_token();
            Vec::new()
        } else {
            self.parse_comma_separated(Parser::parse_expr)?
        };
        self.expect_token(&Token::RParen)?;
        Ok(Some(Distinct::On(col_names)))
    }
5123
    /// Parse a SQL `CREATE` statement, dispatching to a specialized parser
    /// based on the object-type keyword that follows the optional modifiers.
    /// The branch order below is load-bearing: `parse_keyword` consumes its
    /// token on a match, so multi-word forms are probed with `peek_keywords`
    /// or placed where a single-keyword probe cannot shadow them.
    pub fn parse_create(&mut self) -> Result<Statement, ParserError> {
        // Modifiers that may appear between CREATE and the object type.
        let or_replace = self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]);
        let or_alter = self.parse_keywords(&[Keyword::OR, Keyword::ALTER]);
        let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
        let global = self.parse_one_of_keywords(&[Keyword::GLOBAL]).is_some();
        let transient = self.parse_one_of_keywords(&[Keyword::TRANSIENT]).is_some();
        // Collapse LOCAL/GLOBAL into a tri-state flag:
        // GLOBAL => Some(true), LOCAL => Some(false), neither => None.
        let global: Option<bool> = if global {
            Some(true)
        } else if local {
            Some(false)
        } else {
            None
        };
        let temporary = self
            .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
            .is_some();
        // PERSISTENT is only recognized for DuckDB (used by CREATE SECRET).
        let persistent = dialect_of!(self is DuckDbDialect)
            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
        // View-specific parameters are parsed up front; only the VIEW branch
        // below consumes them.
        let create_view_params = self.parse_create_view_params()?;
        if self.peek_keywords(&[Keyword::SNAPSHOT, Keyword::TABLE]) {
            self.parse_create_snapshot_table().map(Into::into)
        } else if self.parse_keyword(Keyword::TABLE) {
            self.parse_create_table(or_replace, temporary, global, transient)
                .map(Into::into)
        } else if self.peek_keyword(Keyword::MATERIALIZED)
            || self.peek_keyword(Keyword::VIEW)
            || self.peek_keywords(&[Keyword::SECURE, Keyword::MATERIALIZED, Keyword::VIEW])
            || self.peek_keywords(&[Keyword::SECURE, Keyword::VIEW])
        {
            self.parse_create_view(or_alter, or_replace, temporary, create_view_params)
                .map(Into::into)
        } else if self.parse_keyword(Keyword::POLICY) {
            self.parse_create_policy().map(Into::into)
        } else if self.parse_keyword(Keyword::EXTERNAL) {
            self.parse_create_external_table(or_replace).map(Into::into)
        } else if self.parse_keyword(Keyword::FUNCTION) {
            self.parse_create_function(or_alter, or_replace, temporary)
        } else if self.parse_keyword(Keyword::DOMAIN) {
            self.parse_create_domain().map(Into::into)
        } else if self.parse_keyword(Keyword::TRIGGER) {
            self.parse_create_trigger(temporary, or_alter, or_replace, false)
                .map(Into::into)
        } else if self.parse_keywords(&[Keyword::CONSTRAINT, Keyword::TRIGGER]) {
            // CONSTRAINT TRIGGER: the bare TRIGGER probe above cannot match
            // here because the next token is CONSTRAINT, not TRIGGER.
            self.parse_create_trigger(temporary, or_alter, or_replace, true)
                .map(Into::into)
        } else if self.parse_keyword(Keyword::MACRO) {
            self.parse_create_macro(or_replace, temporary)
        } else if self.parse_keyword(Keyword::SECRET) {
            self.parse_create_secret(or_replace, temporary, persistent)
        } else if self.parse_keyword(Keyword::USER) {
            self.parse_create_user(or_replace).map(Into::into)
        } else if self.parse_keyword(Keyword::AGGREGATE) {
            self.parse_create_aggregate(or_replace).map(Into::into)
        } else if or_replace {
            // OR REPLACE is only accepted for the object types handled above.
            self.expected_ref(
                "[EXTERNAL] TABLE or [MATERIALIZED] VIEW or FUNCTION after CREATE OR REPLACE",
                self.peek_token_ref(),
            )
        } else if self.parse_keyword(Keyword::EXTENSION) {
            self.parse_create_extension().map(Into::into)
        } else if self.parse_keyword(Keyword::INDEX) {
            self.parse_create_index(false).map(Into::into)
        } else if self.parse_keywords(&[Keyword::UNIQUE, Keyword::INDEX]) {
            self.parse_create_index(true).map(Into::into)
        } else if self.parse_keyword(Keyword::VIRTUAL) {
            self.parse_create_virtual_table()
        } else if self.parse_keyword(Keyword::SCHEMA) {
            self.parse_create_schema()
        } else if self.parse_keyword(Keyword::DATABASE) {
            self.parse_create_database()
        } else if self.parse_keyword(Keyword::ROLE) {
            self.parse_create_role().map(Into::into)
        } else if self.parse_keyword(Keyword::SEQUENCE) {
            self.parse_create_sequence(temporary)
        } else if self.parse_keyword(Keyword::COLLATION) {
            self.parse_create_collation().map(Into::into)
        } else if self.parse_keyword(Keyword::TYPE) {
            self.parse_create_type()
        } else if self.parse_keyword(Keyword::PROCEDURE) {
            self.parse_create_procedure(or_alter)
        } else if self.parse_keyword(Keyword::CONNECTOR) {
            self.parse_create_connector().map(Into::into)
        } else if self.parse_keyword(Keyword::OPERATOR) {
            // OPERATOR FAMILY / OPERATOR CLASS / plain OPERATOR.
            if self.parse_keyword(Keyword::FAMILY) {
                self.parse_create_operator_family().map(Into::into)
            } else if self.parse_keyword(Keyword::CLASS) {
                self.parse_create_operator_class().map(Into::into)
            } else {
                self.parse_create_operator().map(Into::into)
            }
        } else if self.parse_keyword(Keyword::SERVER) {
            self.parse_pg_create_server()
        } else if self.parse_keyword(Keyword::FOREIGN) {
            if self.parse_keywords(&[Keyword::DATA, Keyword::WRAPPER]) {
                self.parse_create_foreign_data_wrapper().map(Into::into)
            } else if self.parse_keyword(Keyword::TABLE) {
                self.parse_create_foreign_table().map(Into::into)
            } else {
                self.expected_ref(
                    "DATA WRAPPER or TABLE after CREATE FOREIGN",
                    self.peek_token_ref(),
                )
            }
        } else if self.parse_keywords(&[Keyword::TEXT, Keyword::SEARCH]) {
            self.parse_create_text_search()
        } else if self.parse_keyword(Keyword::PUBLICATION) {
            self.parse_create_publication().map(Into::into)
        } else if self.parse_keyword(Keyword::SUBSCRIPTION) {
            self.parse_create_subscription().map(Into::into)
        } else {
            self.expected_ref("an object type after CREATE", self.peek_token_ref())
        }
    }
5239
5240 fn parse_create_user(&mut self, or_replace: bool) -> Result<CreateUser, ParserError> {
5241 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5242 let name = self.parse_identifier()?;
5243 let options = self
5244 .parse_key_value_options(false, &[Keyword::WITH, Keyword::TAG])?
5245 .options;
5246 let with_tags = self.parse_keyword(Keyword::WITH);
5247 let tags = if self.parse_keyword(Keyword::TAG) {
5248 self.parse_key_value_options(true, &[])?.options
5249 } else {
5250 vec![]
5251 };
5252 Ok(CreateUser {
5253 or_replace,
5254 if_not_exists,
5255 name,
5256 options: KeyValueOptions {
5257 options,
5258 delimiter: KeyValueOptionsDelimiter::Space,
5259 },
5260 with_tags,
5261 tags: KeyValueOptions {
5262 options: tags,
5263 delimiter: KeyValueOptionsDelimiter::Comma,
5264 },
5265 })
5266 }
5267
    /// Parse a DuckDB `CREATE [OR REPLACE] [TEMPORARY|PERSISTENT] SECRET`
    /// statement; `or_replace`/`temporary`/`persistent` were consumed by the
    /// caller (`parse_create`).
    pub fn parse_create_secret(
        &mut self,
        or_replace: bool,
        temporary: bool,
        persistent: bool,
    ) -> Result<Statement, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);

        // Both the secret name and an `IN <storage>` specifier are optional
        // and precede the parenthesized option list; `IN` may come first or
        // follow the name.
        let mut storage_specifier = None;
        let mut name = None;
        if self.peek_token_ref().token != Token::LParen {
            if self.parse_keyword(Keyword::IN) {
                storage_specifier = self.parse_identifier().ok()
            } else {
                name = self.parse_identifier().ok();
            }

            // Second chance: `IN <storage>` appearing after the name.
            if storage_specifier.is_none()
                && self.peek_token_ref().token != Token::LParen
                && self.parse_keyword(Keyword::IN)
            {
                storage_specifier = self.parse_identifier().ok();
            }
        }

        // `( TYPE <type> [, <key> <value>]... )`
        self.expect_token(&Token::LParen)?;
        self.expect_keyword_is(Keyword::TYPE)?;
        let secret_type = self.parse_identifier()?;

        let mut options = Vec::new();
        if self.consume_token(&Token::Comma) {
            options.append(&mut self.parse_comma_separated(|p| {
                let key = p.parse_identifier()?;
                let value = p.parse_identifier()?;
                Ok(SecretOption { key, value })
            })?);
        }
        self.expect_token(&Token::RParen)?;

        // TEMPORARY and PERSISTENT are mutually exclusive; both set → error.
        let temp = match (temporary, persistent) {
            (true, false) => Some(true),
            (false, true) => Some(false),
            (false, false) => None,
            _ => self.expected_ref("TEMPORARY or PERSISTENT", self.peek_token_ref())?,
        };

        Ok(Statement::CreateSecret {
            or_replace,
            temporary: temp,
            if_not_exists,
            name,
            storage_specifier,
            secret_type,
            options,
        })
    }
5326
5327 pub fn parse_cache_table(&mut self) -> Result<Statement, ParserError> {
5329 let (mut table_flag, mut options, mut has_as, mut query) = (None, vec![], false, None);
5330 if self.parse_keyword(Keyword::TABLE) {
5331 let table_name = self.parse_object_name(false)?;
5332 if self.peek_token_ref().token != Token::EOF {
5333 if let Token::Word(word) = &self.peek_token_ref().token {
5334 if word.keyword == Keyword::OPTIONS {
5335 options = self.parse_options(Keyword::OPTIONS)?
5336 }
5337 };
5338
5339 if self.peek_token_ref().token != Token::EOF {
5340 let (a, q) = self.parse_as_query()?;
5341 has_as = a;
5342 query = Some(q);
5343 }
5344
5345 Ok(Statement::Cache {
5346 table_flag,
5347 table_name,
5348 has_as,
5349 options,
5350 query,
5351 })
5352 } else {
5353 Ok(Statement::Cache {
5354 table_flag,
5355 table_name,
5356 has_as,
5357 options,
5358 query,
5359 })
5360 }
5361 } else {
5362 table_flag = Some(self.parse_object_name(false)?);
5363 if self.parse_keyword(Keyword::TABLE) {
5364 let table_name = self.parse_object_name(false)?;
5365 if self.peek_token_ref().token != Token::EOF {
5366 if let Token::Word(word) = &self.peek_token_ref().token {
5367 if word.keyword == Keyword::OPTIONS {
5368 options = self.parse_options(Keyword::OPTIONS)?
5369 }
5370 };
5371
5372 if self.peek_token_ref().token != Token::EOF {
5373 let (a, q) = self.parse_as_query()?;
5374 has_as = a;
5375 query = Some(q);
5376 }
5377
5378 Ok(Statement::Cache {
5379 table_flag,
5380 table_name,
5381 has_as,
5382 options,
5383 query,
5384 })
5385 } else {
5386 Ok(Statement::Cache {
5387 table_flag,
5388 table_name,
5389 has_as,
5390 options,
5391 query,
5392 })
5393 }
5394 } else {
5395 if self.peek_token_ref().token == Token::EOF {
5396 self.prev_token();
5397 }
5398 self.expected_ref("a `TABLE` keyword", self.peek_token_ref())
5399 }
5400 }
5401 }
5402
5403 pub fn parse_as_query(&mut self) -> Result<(bool, Box<Query>), ParserError> {
5405 match &self.peek_token_ref().token {
5406 Token::Word(word) => match word.keyword {
5407 Keyword::AS => {
5408 self.next_token();
5409 Ok((true, self.parse_query()?))
5410 }
5411 _ => Ok((false, self.parse_query()?)),
5412 },
5413 _ => self.expected_ref("a QUERY statement", self.peek_token_ref()),
5414 }
5415 }
5416
5417 pub fn parse_uncache_table(&mut self) -> Result<Statement, ParserError> {
5419 self.expect_keyword_is(Keyword::TABLE)?;
5420 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5421 let table_name = self.parse_object_name(false)?;
5422 Ok(Statement::UNCache {
5423 table_name,
5424 if_exists,
5425 })
5426 }
5427
5428 pub fn parse_create_virtual_table(&mut self) -> Result<Statement, ParserError> {
5430 self.expect_keyword_is(Keyword::TABLE)?;
5431 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5432 let table_name = self.parse_object_name(false)?;
5433 self.expect_keyword_is(Keyword::USING)?;
5434 let module_name = self.parse_identifier()?;
5435 let module_args = self.parse_parenthesized_column_list(Optional, false)?;
5440 Ok(Statement::CreateVirtualTable {
5441 name: table_name,
5442 if_not_exists,
5443 module_name,
5444 module_args,
5445 })
5446 }
5447
5448 pub fn parse_create_schema(&mut self) -> Result<Statement, ParserError> {
5450 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5451
5452 let schema_name = self.parse_schema_name()?;
5453
5454 let default_collate_spec = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
5455 Some(self.parse_expr()?)
5456 } else {
5457 None
5458 };
5459
5460 let with = if self.peek_keyword(Keyword::WITH) {
5461 Some(self.parse_options(Keyword::WITH)?)
5462 } else {
5463 None
5464 };
5465
5466 let options = if self.peek_keyword(Keyword::OPTIONS) {
5467 Some(self.parse_options(Keyword::OPTIONS)?)
5468 } else {
5469 None
5470 };
5471
5472 let clone = if self.parse_keyword(Keyword::CLONE) {
5473 Some(self.parse_object_name(false)?)
5474 } else {
5475 None
5476 };
5477
5478 Ok(Statement::CreateSchema {
5479 schema_name,
5480 if_not_exists,
5481 with,
5482 options,
5483 default_collate_spec,
5484 clone,
5485 })
5486 }
5487
5488 fn parse_schema_name(&mut self) -> Result<SchemaName, ParserError> {
5489 if self.parse_keyword(Keyword::AUTHORIZATION) {
5490 Ok(SchemaName::UnnamedAuthorization(self.parse_identifier()?))
5491 } else {
5492 let name = self.parse_object_name(false)?;
5493
5494 if self.parse_keyword(Keyword::AUTHORIZATION) {
5495 Ok(SchemaName::NamedAuthorization(
5496 name,
5497 self.parse_identifier()?,
5498 ))
5499 } else {
5500 Ok(SchemaName::Simple(name))
5501 }
5502 }
5503 }
5504
    /// Parse a `CREATE DATABASE` statement (the `DATABASE` keyword has
    /// already been consumed). Handles Hive `LOCATION`/`MANAGEDLOCATION`, a
    /// `CLONE` clause, and MySQL-style `[DEFAULT] CHARACTER SET|CHARSET` and
    /// `[DEFAULT] COLLATE` suffixes; the Snowflake-only fields of
    /// `Statement::CreateDatabase` are left unset here.
    pub fn parse_create_database(&mut self) -> Result<Statement, ParserError> {
        let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let db_name = self.parse_object_name(false)?;
        let mut location = None;
        let mut managed_location = None;
        // LOCATION / MANAGEDLOCATION may appear in any order; a repeated
        // clause silently overwrites the earlier value.
        loop {
            match self.parse_one_of_keywords(&[Keyword::LOCATION, Keyword::MANAGEDLOCATION]) {
                Some(Keyword::LOCATION) => location = Some(self.parse_literal_string()?),
                Some(Keyword::MANAGEDLOCATION) => {
                    managed_location = Some(self.parse_literal_string()?)
                }
                _ => break,
            }
        }
        let clone = if self.parse_keyword(Keyword::CLONE) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        let mut default_charset = None;
        let mut default_collation = None;
        loop {
            let has_default = self.parse_keyword(Keyword::DEFAULT);
            // NOTE(review): `&&` binds tighter than `||`, so the
            // `default_charset.is_none()` guard applies only to the
            // `CHARACTER SET` spelling, not to `CHARSET` — a repeated
            // `CHARSET` clause overwrites the earlier value while a repeated
            // `CHARACTER SET` is left unconsumed. Confirm whether this
            // asymmetry is intentional.
            if default_charset.is_none() && self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET])
                || self.parse_keyword(Keyword::CHARSET)
            {
                // MySQL accepts an optional `=` before the value.
                let _ = self.consume_token(&Token::Eq);
                default_charset = Some(self.parse_identifier()?.value);
            } else if self.parse_keyword(Keyword::COLLATE) {
                let _ = self.consume_token(&Token::Eq);
                default_collation = Some(self.parse_identifier()?.value);
            } else if has_default {
                // Bare DEFAULT not followed by a charset/collation clause:
                // push the DEFAULT token back and stop scanning.
                self.prev_token();
                break;
            } else {
                break;
            }
        }

        Ok(Statement::CreateDatabase {
            db_name,
            if_not_exists: ine,
            location,
            managed_location,
            or_replace: false,
            transient: false,
            clone,
            data_retention_time_in_days: None,
            max_data_extension_time_in_days: None,
            external_volume: None,
            catalog: None,
            replace_invalid_characters: None,
            default_ddl_collation: None,
            storage_serialization_policy: None,
            comment: None,
            default_charset,
            default_collation,
            catalog_sync: None,
            catalog_sync_namespace_mode: None,
            catalog_sync_namespace_flatten_delimiter: None,
            with_tags: None,
            with_contacts: None,
        })
    }
5579
5580 pub fn parse_optional_create_function_using(
5582 &mut self,
5583 ) -> Result<Option<CreateFunctionUsing>, ParserError> {
5584 if !self.parse_keyword(Keyword::USING) {
5585 return Ok(None);
5586 };
5587 let keyword =
5588 self.expect_one_of_keywords(&[Keyword::JAR, Keyword::FILE, Keyword::ARCHIVE])?;
5589
5590 let uri = self.parse_literal_string()?;
5591
5592 match keyword {
5593 Keyword::JAR => Ok(Some(CreateFunctionUsing::Jar(uri))),
5594 Keyword::FILE => Ok(Some(CreateFunctionUsing::File(uri))),
5595 Keyword::ARCHIVE => Ok(Some(CreateFunctionUsing::Archive(uri))),
5596 _ => self.expected(
5597 "JAR, FILE or ARCHIVE, got {:?}",
5598 TokenWithSpan::wrap(Token::make_keyword(format!("{keyword:?}").as_str())),
5599 ),
5600 }
5601 }
5602
    /// Parse the remainder of `CREATE FUNCTION`, delegating to the grammar of
    /// the active dialect. The modifier flags were consumed by the caller
    /// (`parse_create`).
    pub fn parse_create_function(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
    ) -> Result<Statement, ParserError> {
        if dialect_of!(self is HiveDialect) {
            self.parse_hive_create_function(or_replace, temporary)
                .map(Into::into)
        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
            self.parse_postgres_create_function(or_replace, temporary)
                .map(Into::into)
        } else if dialect_of!(self is DuckDbDialect) {
            // DuckDB's CREATE FUNCTION is parsed as a macro definition.
            self.parse_create_macro(or_replace, temporary)
        } else if dialect_of!(self is BigQueryDialect) {
            self.parse_bigquery_create_function(or_replace, temporary)
                .map(Into::into)
        } else if dialect_of!(self is MsSqlDialect) {
            self.parse_mssql_create_function(or_alter, or_replace, temporary)
                .map(Into::into)
        } else {
            // Unsupported dialect: rewind past FUNCTION so the error points
            // at the object-type keyword.
            self.prev_token();
            self.expected_ref("an object type after CREATE", self.peek_token_ref())
        }
    }
5629
    /// Parse a PostgreSQL-style `CREATE FUNCTION` body: the parenthesized
    /// argument list, an optional `RETURNS` clause, and then an unordered
    /// sequence of function attributes (AS/RETURN body, LANGUAGE, behavior,
    /// null-call handling, PARALLEL, SECURITY, SET parameters), each of which
    /// may appear at most once (except SET, which may repeat).
    fn parse_postgres_create_function(
        &mut self,
        or_replace: bool,
        temporary: bool,
    ) -> Result<CreateFunction, ParserError> {
        let name = self.parse_object_name(false)?;

        self.expect_token(&Token::LParen)?;
        let args = if Token::RParen != self.peek_token_ref().token {
            self.parse_comma_separated(Parser::parse_function_arg)?
        } else {
            vec![]
        };
        self.expect_token(&Token::RParen)?;

        let return_type = if self.parse_keyword(Keyword::RETURNS) {
            Some(self.parse_function_return_type()?)
        } else {
            None
        };

        // Accumulator for the order-independent attribute clauses.
        #[derive(Default)]
        struct Body {
            language: Option<Ident>,
            behavior: Option<FunctionBehavior>,
            function_body: Option<CreateFunctionBody>,
            called_on_null: Option<FunctionCalledOnNull>,
            parallel: Option<FunctionParallel>,
            security: Option<FunctionSecurity>,
        }
        let mut body = Body::default();
        let mut set_params: Vec<FunctionDefinitionSetParam> = Vec::new();
        loop {
            // Rejects a duplicate occurrence of an attribute group.
            fn ensure_not_set<T>(field: &Option<T>, name: &str) -> Result<(), ParserError> {
                if field.is_some() {
                    return Err(ParserError::ParserError(format!(
                        "{name} specified more than once",
                    )));
                }
                Ok(())
            }
            if self.parse_keyword(Keyword::AS) {
                ensure_not_set(&body.function_body, "AS")?;
                body.function_body = Some(self.parse_create_function_body_string()?);
            } else if self.parse_keyword(Keyword::LANGUAGE) {
                ensure_not_set(&body.language, "LANGUAGE")?;
                body.language = Some(self.parse_identifier()?);
            } else if self.parse_keyword(Keyword::IMMUTABLE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Immutable);
            } else if self.parse_keyword(Keyword::STABLE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Stable);
            } else if self.parse_keyword(Keyword::VOLATILE) {
                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
                body.behavior = Some(FunctionBehavior::Volatile);
            } else if self.parse_keywords(&[
                Keyword::CALLED,
                Keyword::ON,
                Keyword::NULL,
                Keyword::INPUT,
            ]) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::CalledOnNullInput);
            } else if self.parse_keywords(&[
                Keyword::RETURNS,
                Keyword::NULL,
                Keyword::ON,
                Keyword::NULL,
                Keyword::INPUT,
            ]) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::ReturnsNullOnNullInput);
            } else if self.parse_keyword(Keyword::STRICT) {
                ensure_not_set(
                    &body.called_on_null,
                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
                )?;
                body.called_on_null = Some(FunctionCalledOnNull::Strict);
            } else if self.parse_keyword(Keyword::PARALLEL) {
                ensure_not_set(&body.parallel, "PARALLEL { UNSAFE | RESTRICTED | SAFE }")?;
                if self.parse_keyword(Keyword::UNSAFE) {
                    body.parallel = Some(FunctionParallel::Unsafe);
                } else if self.parse_keyword(Keyword::RESTRICTED) {
                    body.parallel = Some(FunctionParallel::Restricted);
                } else if self.parse_keyword(Keyword::SAFE) {
                    body.parallel = Some(FunctionParallel::Safe);
                } else {
                    return self
                        .expected_ref("one of UNSAFE | RESTRICTED | SAFE", self.peek_token_ref());
                }
            } else if self.parse_keyword(Keyword::SECURITY) {
                ensure_not_set(&body.security, "SECURITY { DEFINER | INVOKER }")?;
                if self.parse_keyword(Keyword::DEFINER) {
                    body.security = Some(FunctionSecurity::Definer);
                } else if self.parse_keyword(Keyword::INVOKER) {
                    body.security = Some(FunctionSecurity::Invoker);
                } else {
                    return self.expected_ref("DEFINER or INVOKER", self.peek_token_ref());
                }
            } else if self.parse_keyword(Keyword::SET) {
                // SET <name> { FROM CURRENT | { = | TO } { DEFAULT | <values> } }
                let name = self.parse_object_name(false)?;
                let value = if self.parse_keywords(&[Keyword::FROM, Keyword::CURRENT]) {
                    FunctionSetValue::FromCurrent
                } else {
                    if !self.consume_token(&Token::Eq) && !self.parse_keyword(Keyword::TO) {
                        return self.expected_ref("= or TO", self.peek_token_ref());
                    }
                    if self.parse_keyword(Keyword::DEFAULT) {
                        FunctionSetValue::Default
                    } else {
                        let values = self.parse_comma_separated(Parser::parse_expr)?;
                        FunctionSetValue::Values(values)
                    }
                };
                set_params.push(FunctionDefinitionSetParam { name, value });
            } else if self.parse_keyword(Keyword::RETURN) {
                // RETURN <expr> is an alternative to an AS body; the two are
                // mutually exclusive.
                ensure_not_set(&body.function_body, "RETURN")?;
                body.function_body = Some(CreateFunctionBody::Return(self.parse_expr()?));
            } else {
                // No recognized attribute keyword: the attribute list ends.
                break;
            }
        }

        Ok(CreateFunction {
            or_alter: false,
            or_replace,
            temporary,
            name,
            args: Some(args),
            return_type,
            behavior: body.behavior,
            called_on_null: body.called_on_null,
            parallel: body.parallel,
            security: body.security,
            set_params,
            language: body.language,
            function_body: body.function_body,
            if_not_exists: false,
            using: None,
            determinism_specifier: None,
            options: None,
            remote_connection: None,
        })
    }
5784
5785 fn parse_hive_create_function(
5789 &mut self,
5790 or_replace: bool,
5791 temporary: bool,
5792 ) -> Result<CreateFunction, ParserError> {
5793 let name = self.parse_object_name(false)?;
5794 self.expect_keyword_is(Keyword::AS)?;
5795
5796 let body = self.parse_create_function_body_string()?;
5797 let using = self.parse_optional_create_function_using()?;
5798
5799 Ok(CreateFunction {
5800 or_alter: false,
5801 or_replace,
5802 temporary,
5803 name,
5804 function_body: Some(body),
5805 using,
5806 if_not_exists: false,
5807 args: None,
5808 return_type: None,
5809 behavior: None,
5810 called_on_null: None,
5811 parallel: None,
5812 security: None,
5813 set_params: vec![],
5814 language: None,
5815 determinism_specifier: None,
5816 options: None,
5817 remote_connection: None,
5818 })
5819 }
5820
    /// Parse a BigQuery `CREATE FUNCTION` statement body: name and params,
    /// then the optional RETURNS, determinism, LANGUAGE, REMOTE WITH
    /// CONNECTION, OPTIONS and AS clauses, in that order.
    fn parse_bigquery_create_function(
        &mut self,
        or_replace: bool,
        temporary: bool,
    ) -> Result<CreateFunction, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let (name, args) = self.parse_create_function_name_and_params()?;

        let return_type = if self.parse_keyword(Keyword::RETURNS) {
            Some(self.parse_function_return_type()?)
        } else {
            None
        };

        let determinism_specifier = if self.parse_keyword(Keyword::DETERMINISTIC) {
            Some(FunctionDeterminismSpecifier::Deterministic)
        } else if self.parse_keywords(&[Keyword::NOT, Keyword::DETERMINISTIC]) {
            Some(FunctionDeterminismSpecifier::NotDeterministic)
        } else {
            None
        };

        let language = if self.parse_keyword(Keyword::LANGUAGE) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        let remote_connection =
            if self.parse_keywords(&[Keyword::REMOTE, Keyword::WITH, Keyword::CONNECTION]) {
                Some(self.parse_object_name(false)?)
            } else {
                None
            };

        // OPTIONS(...) may appear either before or after the AS body; which
        // side it was found on is encoded in the CreateFunctionBody variant.
        let mut options = self.maybe_parse_options(Keyword::OPTIONS)?;

        // Remote functions carry no local body; all others require AS <expr>.
        let function_body = if remote_connection.is_none() {
            self.expect_keyword_is(Keyword::AS)?;
            let expr = self.parse_expr()?;
            if options.is_none() {
                // No OPTIONS seen yet: try again after the body.
                options = self.maybe_parse_options(Keyword::OPTIONS)?;
                Some(CreateFunctionBody::AsBeforeOptions {
                    body: expr,
                    link_symbol: None,
                })
            } else {
                Some(CreateFunctionBody::AsAfterOptions(expr))
            }
        } else {
            None
        };

        Ok(CreateFunction {
            or_alter: false,
            or_replace,
            temporary,
            if_not_exists,
            name,
            args: Some(args),
            return_type,
            function_body,
            language,
            determinism_specifier,
            options,
            remote_connection,
            using: None,
            behavior: None,
            called_on_null: None,
            parallel: None,
            security: None,
            set_params: vec![],
        })
    }
5900
    /// Parse a T-SQL `CREATE [OR ALTER] FUNCTION` body: name and params, a
    /// mandatory RETURNS clause (scalar type or named TABLE type), and a body
    /// that is either a BEGIN...END block or a RETURN expression/SELECT.
    fn parse_mssql_create_function(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
    ) -> Result<CreateFunction, ParserError> {
        let (name, args) = self.parse_create_function_name_and_params()?;

        self.expect_keyword(Keyword::RETURNS)?;

        // Try the table-valued form first: `RETURNS @name TABLE (...)`.
        // maybe_parse rewinds the token stream if this attempt fails.
        let return_table = self.maybe_parse(|p| {
            let return_table_name = p.parse_identifier()?;

            // Require TABLE, then step back so parse_data_type sees it.
            p.expect_keyword_is(Keyword::TABLE)?;
            p.prev_token();

            let table_column_defs = match p.parse_data_type()? {
                DataType::Table(Some(table_column_defs)) if !table_column_defs.is_empty() => {
                    table_column_defs
                }
                _ => parser_err!(
                    "Expected table column definitions after TABLE keyword",
                    p.peek_token_ref().span.start
                )?,
            };

            Ok(DataType::NamedTable {
                name: ObjectName(vec![ObjectNamePart::Identifier(return_table_name)]),
                columns: table_column_defs,
            })
        })?;

        // Fall back to a plain scalar/table data type.
        let data_type = match return_table {
            Some(table_type) => table_type,
            None => self.parse_data_type()?,
        };
        let return_type = Some(FunctionReturnType::DataType(data_type));

        // AS is optional before the body.
        let _ = self.parse_keyword(Keyword::AS);

        let function_body = if self.peek_keyword(Keyword::BEGIN) {
            let begin_token = self.expect_keyword(Keyword::BEGIN)?;
            let statements = self.parse_statement_list(&[Keyword::END])?;
            let end_token = self.expect_keyword(Keyword::END)?;

            Some(CreateFunctionBody::AsBeginEnd(BeginEndStatements {
                begin_token: AttachedToken(begin_token),
                statements,
                end_token: AttachedToken(end_token),
            }))
        } else if self.parse_keyword(Keyword::RETURN) {
            if self.peek_token_ref().token == Token::LParen {
                // RETURN (<subquery or parenthesized expr>)
                Some(CreateFunctionBody::AsReturnExpr(self.parse_expr()?))
            } else if self.peek_keyword(Keyword::SELECT) {
                // RETURN SELECT ... (bare, unparenthesized)
                let select = self.parse_select()?;
                Some(CreateFunctionBody::AsReturnSelect(select))
            } else {
                parser_err!(
                    "Expected a subquery (or bare SELECT statement) after RETURN",
                    self.peek_token_ref().span.start
                )?
            }
        } else {
            parser_err!("Unparsable function body", self.peek_token_ref().span.start)?
        };

        Ok(CreateFunction {
            or_alter,
            or_replace,
            temporary,
            if_not_exists: false,
            name,
            args: Some(args),
            return_type,
            function_body,
            language: None,
            determinism_specifier: None,
            options: None,
            remote_connection: None,
            using: None,
            behavior: None,
            called_on_null: None,
            parallel: None,
            security: None,
            set_params: vec![],
        })
    }
5991
5992 fn parse_function_return_type(&mut self) -> Result<FunctionReturnType, ParserError> {
5993 if self.parse_keyword(Keyword::SETOF) {
5994 Ok(FunctionReturnType::SetOf(self.parse_data_type()?))
5995 } else {
5996 Ok(FunctionReturnType::DataType(self.parse_data_type()?))
5997 }
5998 }
5999
6000 fn parse_create_function_name_and_params(
6001 &mut self,
6002 ) -> Result<(ObjectName, Vec<OperateFunctionArg>), ParserError> {
6003 let name = self.parse_object_name(false)?;
6004 let parse_function_param =
6005 |parser: &mut Parser| -> Result<OperateFunctionArg, ParserError> {
6006 let name = parser.parse_identifier()?;
6007 let data_type = parser.parse_data_type()?;
6008 let default_expr = if parser.consume_token(&Token::Eq) {
6009 Some(parser.parse_expr()?)
6010 } else {
6011 None
6012 };
6013
6014 Ok(OperateFunctionArg {
6015 mode: None,
6016 name: Some(name),
6017 data_type,
6018 default_expr,
6019 })
6020 };
6021 self.expect_token(&Token::LParen)?;
6022 let args = self.parse_comma_separated0(parse_function_param, Token::RParen)?;
6023 self.expect_token(&Token::RParen)?;
6024 Ok((name, args))
6025 }
6026
    /// Parse a single function argument of the form
    /// `[IN|OUT|INOUT|VARIADIC] [<name>] <type> [{DEFAULT | =} <expr>]`.
    ///
    /// The argument name is optional and ambiguous with the type: the first
    /// item is parsed as a data type, and if a second data type follows, the
    /// first is reinterpreted as the argument name.
    fn parse_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
        let mode = if self.parse_keyword(Keyword::IN) {
            Some(ArgMode::In)
        } else if self.parse_keyword(Keyword::OUT) {
            Some(ArgMode::Out)
        } else if self.parse_keyword(Keyword::INOUT) {
            Some(ArgMode::InOut)
        } else if self.parse_keyword(Keyword::VARIADIC) {
            Some(ArgMode::Variadic)
        } else {
            None
        };

        let mut name = None;
        let mut data_type = self.parse_data_type()?;

        // Index of the just-parsed item, so it can be recovered as the name
        // if it turns out to be followed by the real type.
        // NOTE(review): assumes get_current_index refers to the most recently
        // consumed token — confirm against its definition.
        let data_type_idx = self.get_current_index();

        // Helper that refuses to treat DEFAULT as the start of a type, so
        // `x int DEFAULT 1` is not misread as name `int` + type `DEFAULT`.
        fn parse_data_type_no_default(parser: &mut Parser) -> Result<DataType, ParserError> {
            if parser.peek_keyword(Keyword::DEFAULT) {
                parser_err!(
                    "The DEFAULT keyword is not a type",
                    parser.peek_token_ref().span.start
                )
            } else {
                parser.parse_data_type()
            }
        }

        // If a second data type parses, the first item was the argument name.
        if let Some(next_data_type) = self.maybe_parse(parse_data_type_no_default)? {
            let token = self.token_at(data_type_idx);

            // A name must have been a single word token.
            if !matches!(token.token, Token::Word(_)) {
                return self.expected("a name or type", token.clone());
            }

            name = Some(Ident::new(token.to_string()));
            data_type = next_data_type;
        }

        // Optional default value, introduced by DEFAULT or `=`.
        let default_expr = if self.parse_keyword(Keyword::DEFAULT) || self.consume_token(&Token::Eq)
        {
            Some(self.parse_expr()?)
        } else {
            None
        };
        Ok(OperateFunctionArg {
            mode,
            name,
            data_type,
            default_expr,
        })
    }
6087
    /// Parse a single argument of a `CREATE AGGREGATE` signature:
    /// `[IN] [<name>] <type>`. Unlike regular function arguments, OUT/INOUT/
    /// VARIADIC modes and DEFAULT values are rejected here.
    fn parse_aggregate_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
        let mode = if self.parse_keyword(Keyword::IN) {
            Some(ArgMode::In)
        } else {
            // Only IN is legal in an aggregate signature; reject the other
            // modes explicitly with a targeted error.
            if self
                .peek_one_of_keywords(&[Keyword::OUT, Keyword::INOUT, Keyword::VARIADIC])
                .is_some()
            {
                return self.expected_ref(
                    "IN or argument type in aggregate signature",
                    self.peek_token_ref(),
                );
            }
            None
        };

        // As in parse_function_arg: parse a type first, and reinterpret it as
        // the argument name if a second type follows.
        let mut name = None;
        let mut data_type = self.parse_data_type()?;
        // NOTE(review): assumes get_current_index refers to the most recently
        // consumed token — confirm against its definition.
        let data_type_idx = self.get_current_index();

        // Helper that refuses tokens that cannot begin an aggregate argument
        // type (DEFAULT, ORDER, `,`, `)`), so maybe_parse backtracks cleanly.
        fn parse_data_type_for_aggregate_arg(parser: &mut Parser) -> Result<DataType, ParserError> {
            if parser.peek_keyword(Keyword::DEFAULT)
                || parser.peek_keyword(Keyword::ORDER)
                || parser.peek_token_ref().token == Token::Comma
                || parser.peek_token_ref().token == Token::RParen
            {
                parser_err!(
                    "The current token cannot start an aggregate argument type",
                    parser.peek_token_ref().span.start
                )
            } else {
                parser.parse_data_type()
            }
        }

        if let Some(next_data_type) = self.maybe_parse(parse_data_type_for_aggregate_arg)? {
            let token = self.token_at(data_type_idx);
            // A name must have been a single word token.
            if !matches!(token.token, Token::Word(_)) {
                return self.expected("a name or type", token.clone());
            }

            name = Some(Ident::new(token.to_string()));
            data_type = next_data_type;
        }

        // Default values are not allowed in aggregate signatures.
        if self.peek_keyword(Keyword::DEFAULT) || self.peek_token_ref().token == Token::Eq {
            return self.expected_ref(
                "',' or ')' or ORDER BY after aggregate argument type",
                self.peek_token_ref(),
            );
        }

        Ok(OperateFunctionArg {
            mode,
            name,
            data_type,
            default_expr: None,
        })
    }
6150
6151 pub fn parse_drop_trigger(&mut self) -> Result<DropTrigger, ParserError> {
6157 if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
6158 {
6159 self.prev_token();
6160 return self.expected_ref("an object type after DROP", self.peek_token_ref());
6161 }
6162 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6163 let trigger_name = self.parse_object_name(false)?;
6164 let table_name = if self.parse_keyword(Keyword::ON) {
6165 Some(self.parse_object_name(false)?)
6166 } else {
6167 None
6168 };
6169 let option = match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
6170 Some(Keyword::CASCADE) => Some(ReferentialAction::Cascade),
6171 Some(Keyword::RESTRICT) => Some(ReferentialAction::Restrict),
6172 Some(unexpected_keyword) => return Err(ParserError::ParserError(
6173 format!("Internal parser error: expected any of {{CASCADE, RESTRICT}}, got {unexpected_keyword:?}"),
6174 )),
6175 None => None,
6176 };
6177 Ok(DropTrigger {
6178 if_exists,
6179 trigger_name,
6180 table_name,
6181 option,
6182 })
6183 }
6184
6185 pub fn parse_create_trigger(
6187 &mut self,
6188 temporary: bool,
6189 or_alter: bool,
6190 or_replace: bool,
6191 is_constraint: bool,
6192 ) -> Result<CreateTrigger, ParserError> {
6193 if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect)
6194 {
6195 self.prev_token();
6196 return self.expected_ref("an object type after CREATE", self.peek_token_ref());
6197 }
6198
6199 let name = self.parse_object_name(false)?;
6200 let period = self.maybe_parse(|parser| parser.parse_trigger_period())?;
6201
6202 let events = self.parse_keyword_separated(Keyword::OR, Parser::parse_trigger_event)?;
6203 self.expect_keyword_is(Keyword::ON)?;
6204 let table_name = self.parse_object_name(false)?;
6205
6206 let referenced_table_name = if self.parse_keyword(Keyword::FROM) {
6207 self.parse_object_name(true).ok()
6208 } else {
6209 None
6210 };
6211
6212 let characteristics = self.parse_constraint_characteristics()?;
6213
6214 let mut referencing = vec![];
6215 if self.parse_keyword(Keyword::REFERENCING) {
6216 while let Some(refer) = self.parse_trigger_referencing()? {
6217 referencing.push(refer);
6218 }
6219 }
6220
6221 let trigger_object = if self.parse_keyword(Keyword::FOR) {
6222 let include_each = self.parse_keyword(Keyword::EACH);
6223 let trigger_object =
6224 match self.expect_one_of_keywords(&[Keyword::ROW, Keyword::STATEMENT])? {
6225 Keyword::ROW => TriggerObject::Row,
6226 Keyword::STATEMENT => TriggerObject::Statement,
6227 unexpected_keyword => return Err(ParserError::ParserError(
6228 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in ROW/STATEMENT"),
6229 )),
6230 };
6231
6232 Some(if include_each {
6233 TriggerObjectKind::ForEach(trigger_object)
6234 } else {
6235 TriggerObjectKind::For(trigger_object)
6236 })
6237 } else {
6238 let _ = self.parse_keyword(Keyword::FOR);
6239
6240 None
6241 };
6242
6243 let condition = self
6244 .parse_keyword(Keyword::WHEN)
6245 .then(|| self.parse_expr())
6246 .transpose()?;
6247
6248 let mut exec_body = None;
6249 let mut statements = None;
6250 if self.parse_keyword(Keyword::EXECUTE) {
6251 exec_body = Some(self.parse_trigger_exec_body()?);
6252 } else {
6253 statements = Some(self.parse_conditional_statements(&[Keyword::END])?);
6254 }
6255
6256 Ok(CreateTrigger {
6257 or_alter,
6258 temporary,
6259 or_replace,
6260 is_constraint,
6261 name,
6262 period,
6263 period_before_table: true,
6264 events,
6265 table_name,
6266 referenced_table_name,
6267 referencing,
6268 trigger_object,
6269 condition,
6270 exec_body,
6271 statements_as: false,
6272 statements,
6273 characteristics,
6274 })
6275 }
6276
6277 pub fn parse_trigger_period(&mut self) -> Result<TriggerPeriod, ParserError> {
6279 Ok(
6280 match self.expect_one_of_keywords(&[
6281 Keyword::FOR,
6282 Keyword::BEFORE,
6283 Keyword::AFTER,
6284 Keyword::INSTEAD,
6285 ])? {
6286 Keyword::FOR => TriggerPeriod::For,
6287 Keyword::BEFORE => TriggerPeriod::Before,
6288 Keyword::AFTER => TriggerPeriod::After,
6289 Keyword::INSTEAD => self
6290 .expect_keyword_is(Keyword::OF)
6291 .map(|_| TriggerPeriod::InsteadOf)?,
6292 unexpected_keyword => return Err(ParserError::ParserError(
6293 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger period"),
6294 )),
6295 },
6296 )
6297 }
6298
6299 pub fn parse_trigger_event(&mut self) -> Result<TriggerEvent, ParserError> {
6301 Ok(
6302 match self.expect_one_of_keywords(&[
6303 Keyword::INSERT,
6304 Keyword::UPDATE,
6305 Keyword::DELETE,
6306 Keyword::TRUNCATE,
6307 ])? {
6308 Keyword::INSERT => TriggerEvent::Insert,
6309 Keyword::UPDATE => {
6310 if self.parse_keyword(Keyword::OF) {
6311 let cols = self.parse_comma_separated(Parser::parse_identifier)?;
6312 TriggerEvent::Update(cols)
6313 } else {
6314 TriggerEvent::Update(vec![])
6315 }
6316 }
6317 Keyword::DELETE => TriggerEvent::Delete,
6318 Keyword::TRUNCATE => TriggerEvent::Truncate,
6319 unexpected_keyword => return Err(ParserError::ParserError(
6320 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger event"),
6321 )),
6322 },
6323 )
6324 }
6325
6326 pub fn parse_trigger_referencing(&mut self) -> Result<Option<TriggerReferencing>, ParserError> {
6328 let refer_type = match self.parse_one_of_keywords(&[Keyword::OLD, Keyword::NEW]) {
6329 Some(Keyword::OLD) if self.parse_keyword(Keyword::TABLE) => {
6330 TriggerReferencingType::OldTable
6331 }
6332 Some(Keyword::NEW) if self.parse_keyword(Keyword::TABLE) => {
6333 TriggerReferencingType::NewTable
6334 }
6335 _ => {
6336 return Ok(None);
6337 }
6338 };
6339
6340 let is_as = self.parse_keyword(Keyword::AS);
6341 let transition_relation_name = self.parse_object_name(false)?;
6342 Ok(Some(TriggerReferencing {
6343 refer_type,
6344 is_as,
6345 transition_relation_name,
6346 }))
6347 }
6348
6349 pub fn parse_trigger_exec_body(&mut self) -> Result<TriggerExecBody, ParserError> {
6351 Ok(TriggerExecBody {
6352 exec_type: match self
6353 .expect_one_of_keywords(&[Keyword::FUNCTION, Keyword::PROCEDURE])?
6354 {
6355 Keyword::FUNCTION => TriggerExecBodyType::Function,
6356 Keyword::PROCEDURE => TriggerExecBodyType::Procedure,
6357 unexpected_keyword => return Err(ParserError::ParserError(
6358 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in trigger exec body"),
6359 )),
6360 },
6361 func_desc: self.parse_function_desc()?,
6362 })
6363 }
6364
6365 pub fn parse_create_macro(
6367 &mut self,
6368 or_replace: bool,
6369 temporary: bool,
6370 ) -> Result<Statement, ParserError> {
6371 if dialect_of!(self is DuckDbDialect | GenericDialect) {
6372 let name = self.parse_object_name(false)?;
6373 self.expect_token(&Token::LParen)?;
6374 let args = if self.consume_token(&Token::RParen) {
6375 self.prev_token();
6376 None
6377 } else {
6378 Some(self.parse_comma_separated(Parser::parse_macro_arg)?)
6379 };
6380
6381 self.expect_token(&Token::RParen)?;
6382 self.expect_keyword_is(Keyword::AS)?;
6383
6384 Ok(Statement::CreateMacro {
6385 or_replace,
6386 temporary,
6387 name,
6388 args,
6389 definition: if self.parse_keyword(Keyword::TABLE) {
6390 MacroDefinition::Table(self.parse_query()?)
6391 } else {
6392 MacroDefinition::Expr(self.parse_expr()?)
6393 },
6394 })
6395 } else {
6396 self.prev_token();
6397 self.expected_ref("an object type after CREATE", self.peek_token_ref())
6398 }
6399 }
6400
6401 fn parse_macro_arg(&mut self) -> Result<MacroArg, ParserError> {
6402 let name = self.parse_identifier()?;
6403
6404 let default_expr =
6405 if self.consume_token(&Token::Assignment) || self.consume_token(&Token::RArrow) {
6406 Some(self.parse_expr()?)
6407 } else {
6408 None
6409 };
6410 Ok(MacroArg { name, default_expr })
6411 }
6412
6413 pub fn parse_create_external_table(
6415 &mut self,
6416 or_replace: bool,
6417 ) -> Result<CreateTable, ParserError> {
6418 self.expect_keyword_is(Keyword::TABLE)?;
6419 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6420 let table_name = self.parse_object_name(false)?;
6421 let (columns, constraints) = self.parse_columns()?;
6422
6423 let hive_distribution = self.parse_hive_distribution()?;
6424 let hive_formats = self.parse_hive_formats()?;
6425
6426 let file_format = if let Some(ref hf) = hive_formats {
6427 if let Some(ref ff) = hf.storage {
6428 match ff {
6429 HiveIOFormat::FileFormat { format } => Some(*format),
6430 _ => None,
6431 }
6432 } else {
6433 None
6434 }
6435 } else {
6436 None
6437 };
6438 let location = hive_formats.as_ref().and_then(|hf| hf.location.clone());
6439 let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
6440 let table_options = if !table_properties.is_empty() {
6441 CreateTableOptions::TableProperties(table_properties)
6442 } else if let Some(options) = self.maybe_parse_options(Keyword::OPTIONS)? {
6443 CreateTableOptions::Options(options)
6444 } else {
6445 CreateTableOptions::None
6446 };
6447 Ok(CreateTableBuilder::new(table_name)
6448 .columns(columns)
6449 .constraints(constraints)
6450 .hive_distribution(hive_distribution)
6451 .hive_formats(hive_formats)
6452 .table_options(table_options)
6453 .or_replace(or_replace)
6454 .if_not_exists(if_not_exists)
6455 .external(true)
6456 .file_format(file_format)
6457 .location(location)
6458 .build())
6459 }
6460
6461 pub fn parse_create_snapshot_table(&mut self) -> Result<CreateTable, ParserError> {
6465 self.expect_keywords(&[Keyword::SNAPSHOT, Keyword::TABLE])?;
6466 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6467 let table_name = self.parse_object_name(true)?;
6468
6469 self.expect_keyword_is(Keyword::CLONE)?;
6470 let clone = Some(self.parse_object_name(true)?);
6471
6472 let version =
6473 if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
6474 {
6475 Some(TableVersion::ForSystemTimeAsOf(self.parse_expr()?))
6476 } else {
6477 None
6478 };
6479
6480 let table_options = if let Some(options) = self.maybe_parse_options(Keyword::OPTIONS)? {
6481 CreateTableOptions::Options(options)
6482 } else {
6483 CreateTableOptions::None
6484 };
6485
6486 Ok(CreateTableBuilder::new(table_name)
6487 .snapshot(true)
6488 .if_not_exists(if_not_exists)
6489 .clone_clause(clone)
6490 .version(version)
6491 .table_options(table_options)
6492 .build())
6493 }
6494
6495 pub fn parse_file_format(&mut self) -> Result<FileFormat, ParserError> {
6497 let next_token = self.next_token();
6498 match &next_token.token {
6499 Token::Word(w) => match w.keyword {
6500 Keyword::AVRO => Ok(FileFormat::AVRO),
6501 Keyword::JSONFILE => Ok(FileFormat::JSONFILE),
6502 Keyword::ORC => Ok(FileFormat::ORC),
6503 Keyword::PARQUET => Ok(FileFormat::PARQUET),
6504 Keyword::RCFILE => Ok(FileFormat::RCFILE),
6505 Keyword::SEQUENCEFILE => Ok(FileFormat::SEQUENCEFILE),
6506 Keyword::TEXTFILE => Ok(FileFormat::TEXTFILE),
6507 _ => self.expected("fileformat", next_token),
6508 },
6509 _ => self.expected("fileformat", next_token),
6510 }
6511 }
6512
6513 fn parse_analyze_format_kind(&mut self) -> Result<AnalyzeFormatKind, ParserError> {
6514 if self.consume_token(&Token::Eq) {
6515 Ok(AnalyzeFormatKind::Assignment(self.parse_analyze_format()?))
6516 } else {
6517 Ok(AnalyzeFormatKind::Keyword(self.parse_analyze_format()?))
6518 }
6519 }
6520
6521 pub fn parse_analyze_format(&mut self) -> Result<AnalyzeFormat, ParserError> {
6523 let next_token = self.next_token();
6524 match &next_token.token {
6525 Token::Word(w) => match w.keyword {
6526 Keyword::TEXT => Ok(AnalyzeFormat::TEXT),
6527 Keyword::GRAPHVIZ => Ok(AnalyzeFormat::GRAPHVIZ),
6528 Keyword::JSON => Ok(AnalyzeFormat::JSON),
6529 Keyword::TREE => Ok(AnalyzeFormat::TREE),
6530 _ => self.expected("fileformat", next_token),
6531 },
6532 _ => self.expected("fileformat", next_token),
6533 }
6534 }
6535
    /// Parses the body of a `CREATE ... VIEW` statement. The caller has already
    /// consumed `CREATE` plus any `OR ALTER` / `OR REPLACE` / `TEMPORARY`
    /// modifiers and MySQL-style view parameters, and passes them in as arguments.
    ///
    /// Token order matters throughout: each clause below is attempted in the
    /// order the grammar allows it to appear.
    pub fn parse_create_view(
        &mut self,
        or_alter: bool,
        or_replace: bool,
        temporary: bool,
        create_view_params: Option<CreateViewParams>,
    ) -> Result<CreateView, ParserError> {
        let secure = self.parse_keyword(Keyword::SECURE);
        let materialized = self.parse_keyword(Keyword::MATERIALIZED);
        self.expect_keyword_is(Keyword::VIEW)?;
        // BigQuery permits unquoted hyphenated project names in object names.
        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
        // `IF NOT EXISTS` may appear before or after the view name; both flags
        // are tracked so the statement can round-trip in its original form.
        let if_not_exists_first =
            self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_object_name(allow_unquoted_hyphen)?;
        let name_before_not_exists = !if_not_exists_first
            && self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let if_not_exists = if_not_exists_first || name_before_not_exists;
        let copy_grants = self.parse_keywords(&[Keyword::COPY, Keyword::GRANTS]);
        let columns = self.parse_view_columns()?;
        // `WITH (...)` options; may be replaced by `OPTIONS(...)` further down.
        let mut options = CreateTableOptions::None;
        let with_options = self.parse_options(Keyword::WITH)?;
        if !with_options.is_empty() {
            options = CreateTableOptions::With(with_options);
        }

        let cluster_by = if self.parse_keyword(Keyword::CLUSTER) {
            self.expect_keyword_is(Keyword::BY)?;
            self.parse_parenthesized_column_list(Optional, false)?
        } else {
            vec![]
        };

        // BigQuery-style `OPTIONS(...)` overrides any `WITH (...)` options
        // captured above (only when non-empty).
        if dialect_of!(self is BigQueryDialect | GenericDialect) {
            if let Some(opts) = self.maybe_parse_options(Keyword::OPTIONS)? {
                if !opts.is_empty() {
                    options = CreateTableOptions::Options(opts);
                }
            };
        }

        // ClickHouse-style `TO <table>` target.
        let to = if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.parse_keyword(Keyword::TO)
        {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };

        // `COMMENT = '<text>'`, only for dialects that opt in.
        let comment = if self.dialect.supports_create_view_comment_syntax()
            && self.parse_keyword(Keyword::COMMENT)
        {
            self.expect_token(&Token::Eq)?;
            Some(self.parse_comment_value()?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::AS)?;
        let query = self.parse_query()?;
        // Redshift-style trailing `WITH NO SCHEMA BINDING`.
        let with_no_schema_binding = dialect_of!(self is RedshiftSqlDialect | GenericDialect)
            && self.parse_keywords(&[
                Keyword::WITH,
                Keyword::NO,
                Keyword::SCHEMA,
                Keyword::BINDING,
            ]);

        Ok(CreateView {
            or_alter,
            name,
            columns,
            query,
            materialized,
            secure,
            or_replace,
            options,
            cluster_by,
            comment,
            with_no_schema_binding,
            if_not_exists,
            temporary,
            copy_grants,
            to,
            params: create_view_params,
            name_before_not_exists,
        })
    }
6630
6631 fn parse_create_view_params(&mut self) -> Result<Option<CreateViewParams>, ParserError> {
6635 let algorithm = if self.parse_keyword(Keyword::ALGORITHM) {
6636 self.expect_token(&Token::Eq)?;
6637 Some(
6638 match self.expect_one_of_keywords(&[
6639 Keyword::UNDEFINED,
6640 Keyword::MERGE,
6641 Keyword::TEMPTABLE,
6642 ])? {
6643 Keyword::UNDEFINED => CreateViewAlgorithm::Undefined,
6644 Keyword::MERGE => CreateViewAlgorithm::Merge,
6645 Keyword::TEMPTABLE => CreateViewAlgorithm::TempTable,
6646 _ => {
6647 self.prev_token();
6648 let found = self.next_token();
6649 return self
6650 .expected("UNDEFINED or MERGE or TEMPTABLE after ALGORITHM =", found);
6651 }
6652 },
6653 )
6654 } else {
6655 None
6656 };
6657 let definer = if self.parse_keyword(Keyword::DEFINER) {
6658 self.expect_token(&Token::Eq)?;
6659 Some(self.parse_grantee_name()?)
6660 } else {
6661 None
6662 };
6663 let security = if self.parse_keywords(&[Keyword::SQL, Keyword::SECURITY]) {
6664 Some(
6665 match self.expect_one_of_keywords(&[Keyword::DEFINER, Keyword::INVOKER])? {
6666 Keyword::DEFINER => CreateViewSecurity::Definer,
6667 Keyword::INVOKER => CreateViewSecurity::Invoker,
6668 _ => {
6669 self.prev_token();
6670 let found = self.next_token();
6671 return self.expected("DEFINER or INVOKER after SQL SECURITY", found);
6672 }
6673 },
6674 )
6675 } else {
6676 None
6677 };
6678 if algorithm.is_some() || definer.is_some() || security.is_some() {
6679 Ok(Some(CreateViewParams {
6680 algorithm,
6681 definer,
6682 security,
6683 }))
6684 } else {
6685 Ok(None)
6686 }
6687 }
6688
    /// Parses the body of a `CREATE ROLE` statement: one or more role names
    /// followed by an optional `WITH` and a dialect-dependent set of role
    /// options. Each option may appear at most once; duplicates are reported
    /// with the source location of the repeated keyword.
    pub fn parse_create_role(&mut self) -> Result<CreateRole, ParserError> {
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;

        // `WITH` is an optional noise word before the option list.
        let _ = self.parse_keyword(Keyword::WITH);
        // The set of options that may follow depends on the dialect; for
        // dialects with no known options the loop below never runs.
        let optional_keywords = if dialect_of!(self is MsSqlDialect) {
            vec![Keyword::AUTHORIZATION]
        } else if dialect_of!(self is PostgreSqlDialect) {
            vec![
                Keyword::LOGIN,
                Keyword::NOLOGIN,
                Keyword::INHERIT,
                Keyword::NOINHERIT,
                Keyword::BYPASSRLS,
                Keyword::NOBYPASSRLS,
                Keyword::PASSWORD,
                Keyword::CREATEDB,
                Keyword::NOCREATEDB,
                Keyword::CREATEROLE,
                Keyword::NOCREATEROLE,
                Keyword::SUPERUSER,
                Keyword::NOSUPERUSER,
                Keyword::REPLICATION,
                Keyword::NOREPLICATION,
                Keyword::CONNECTION,
                Keyword::VALID,
                Keyword::IN,
                Keyword::ROLE,
                Keyword::ADMIN,
                Keyword::USER,
            ]
        } else {
            vec![]
        };

        // Accumulators for every recognized option. `Option`/empty-vec state
        // doubles as the "seen before" flag used for duplicate detection.
        let mut authorization_owner = None;
        let mut login = None;
        let mut inherit = None;
        let mut bypassrls = None;
        let mut password = None;
        let mut create_db = None;
        let mut create_role = None;
        let mut superuser = None;
        let mut replication = None;
        let mut connection_limit = None;
        let mut valid_until = None;
        let mut in_role = vec![];
        let mut in_group = vec![];
        let mut role = vec![];
        let mut user = vec![];
        let mut admin = vec![];

        while let Some(keyword) = self.parse_one_of_keywords(&optional_keywords) {
            // Location of the keyword just consumed, for duplicate-option errors.
            let loc = self
                .tokens
                .get(self.index - 1)
                .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
            // Each arm yields a `Result<(), _>`; the trailing `?` after the
            // match propagates the first duplicate-option error.
            match keyword {
                Keyword::AUTHORIZATION => {
                    if authorization_owner.is_some() {
                        parser_err!("Found multiple AUTHORIZATION", loc)
                    } else {
                        authorization_owner = Some(self.parse_object_name(false)?);
                        Ok(())
                    }
                }
                Keyword::LOGIN | Keyword::NOLOGIN => {
                    if login.is_some() {
                        parser_err!("Found multiple LOGIN or NOLOGIN", loc)
                    } else {
                        login = Some(keyword == Keyword::LOGIN);
                        Ok(())
                    }
                }
                Keyword::INHERIT | Keyword::NOINHERIT => {
                    if inherit.is_some() {
                        parser_err!("Found multiple INHERIT or NOINHERIT", loc)
                    } else {
                        inherit = Some(keyword == Keyword::INHERIT);
                        Ok(())
                    }
                }
                Keyword::BYPASSRLS | Keyword::NOBYPASSRLS => {
                    if bypassrls.is_some() {
                        parser_err!("Found multiple BYPASSRLS or NOBYPASSRLS", loc)
                    } else {
                        bypassrls = Some(keyword == Keyword::BYPASSRLS);
                        Ok(())
                    }
                }
                Keyword::CREATEDB | Keyword::NOCREATEDB => {
                    if create_db.is_some() {
                        parser_err!("Found multiple CREATEDB or NOCREATEDB", loc)
                    } else {
                        create_db = Some(keyword == Keyword::CREATEDB);
                        Ok(())
                    }
                }
                Keyword::CREATEROLE | Keyword::NOCREATEROLE => {
                    if create_role.is_some() {
                        parser_err!("Found multiple CREATEROLE or NOCREATEROLE", loc)
                    } else {
                        create_role = Some(keyword == Keyword::CREATEROLE);
                        Ok(())
                    }
                }
                Keyword::SUPERUSER | Keyword::NOSUPERUSER => {
                    if superuser.is_some() {
                        parser_err!("Found multiple SUPERUSER or NOSUPERUSER", loc)
                    } else {
                        superuser = Some(keyword == Keyword::SUPERUSER);
                        Ok(())
                    }
                }
                Keyword::REPLICATION | Keyword::NOREPLICATION => {
                    if replication.is_some() {
                        parser_err!("Found multiple REPLICATION or NOREPLICATION", loc)
                    } else {
                        replication = Some(keyword == Keyword::REPLICATION);
                        Ok(())
                    }
                }
                Keyword::PASSWORD => {
                    if password.is_some() {
                        parser_err!("Found multiple PASSWORD", loc)
                    } else {
                        // `PASSWORD NULL` is distinct from a literal password value.
                        password = if self.parse_keyword(Keyword::NULL) {
                            Some(Password::NullPassword)
                        } else {
                            Some(Password::Password(Expr::Value(self.parse_value()?)))
                        };
                        Ok(())
                    }
                }
                Keyword::CONNECTION => {
                    self.expect_keyword_is(Keyword::LIMIT)?;
                    if connection_limit.is_some() {
                        parser_err!("Found multiple CONNECTION LIMIT", loc)
                    } else {
                        connection_limit = Some(Expr::Value(self.parse_number_value()?));
                        Ok(())
                    }
                }
                Keyword::VALID => {
                    self.expect_keyword_is(Keyword::UNTIL)?;
                    if valid_until.is_some() {
                        parser_err!("Found multiple VALID UNTIL", loc)
                    } else {
                        valid_until = Some(Expr::Value(self.parse_value()?));
                        Ok(())
                    }
                }
                Keyword::IN => {
                    // `IN ROLE ...` and `IN GROUP ...` are tracked separately.
                    if self.parse_keyword(Keyword::ROLE) {
                        if !in_role.is_empty() {
                            parser_err!("Found multiple IN ROLE", loc)
                        } else {
                            in_role = self.parse_comma_separated(|p| p.parse_identifier())?;
                            Ok(())
                        }
                    } else if self.parse_keyword(Keyword::GROUP) {
                        if !in_group.is_empty() {
                            parser_err!("Found multiple IN GROUP", loc)
                        } else {
                            in_group = self.parse_comma_separated(|p| p.parse_identifier())?;
                            Ok(())
                        }
                    } else {
                        self.expected_ref("ROLE or GROUP after IN", self.peek_token_ref())
                    }
                }
                Keyword::ROLE => {
                    if !role.is_empty() {
                        parser_err!("Found multiple ROLE", loc)
                    } else {
                        role = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                Keyword::USER => {
                    if !user.is_empty() {
                        parser_err!("Found multiple USER", loc)
                    } else {
                        user = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                Keyword::ADMIN => {
                    if !admin.is_empty() {
                        parser_err!("Found multiple ADMIN", loc)
                    } else {
                        admin = self.parse_comma_separated(|p| p.parse_identifier())?;
                        Ok(())
                    }
                }
                // Unreachable in practice: the loop only yields keywords from
                // `optional_keywords`.
                _ => break,
            }?
        }

        Ok(CreateRole {
            names,
            if_not_exists,
            login,
            inherit,
            bypassrls,
            password,
            create_db,
            create_role,
            replication,
            superuser,
            connection_limit,
            valid_until,
            in_role,
            in_group,
            role,
            user,
            admin,
            authorization_owner,
        })
    }
6913
6914 pub fn parse_owner(&mut self) -> Result<Owner, ParserError> {
6916 let owner = match self.parse_one_of_keywords(&[Keyword::CURRENT_USER, Keyword::CURRENT_ROLE, Keyword::SESSION_USER]) {
6917 Some(Keyword::CURRENT_USER) => Owner::CurrentUser,
6918 Some(Keyword::CURRENT_ROLE) => Owner::CurrentRole,
6919 Some(Keyword::SESSION_USER) => Owner::SessionUser,
6920 Some(unexpected_keyword) => return Err(ParserError::ParserError(
6921 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in owner"),
6922 )),
6923 None => {
6924 match self.parse_identifier() {
6925 Ok(ident) => Owner::Ident(ident),
6926 Err(e) => {
6927 return Err(ParserError::ParserError(format!("Expected: CURRENT_USER, CURRENT_ROLE, SESSION_USER or identifier after OWNER TO. {e}")))
6928 }
6929 }
6930 }
6931 };
6932 Ok(owner)
6933 }
6934
6935 fn parse_create_domain(&mut self) -> Result<CreateDomain, ParserError> {
6937 let name = self.parse_object_name(false)?;
6938 self.expect_keyword_is(Keyword::AS)?;
6939 let data_type = self.parse_data_type()?;
6940 let collation = if self.parse_keyword(Keyword::COLLATE) {
6941 Some(self.parse_identifier()?)
6942 } else {
6943 None
6944 };
6945 let default = if self.parse_keyword(Keyword::DEFAULT) {
6946 Some(self.parse_expr()?)
6947 } else {
6948 None
6949 };
6950 let mut constraints = Vec::new();
6951 while let Some(constraint) = self.parse_optional_table_constraint()? {
6952 constraints.push(constraint);
6953 }
6954
6955 Ok(CreateDomain {
6956 name,
6957 data_type,
6958 collation,
6959 default,
6960 constraints,
6961 })
6962 }
6963
6964 pub fn parse_create_policy(&mut self) -> Result<CreatePolicy, ParserError> {
6974 let name = self.parse_identifier()?;
6975 self.expect_keyword_is(Keyword::ON)?;
6976 let table_name = self.parse_object_name(false)?;
6977
6978 let policy_type = if self.parse_keyword(Keyword::AS) {
6979 let keyword =
6980 self.expect_one_of_keywords(&[Keyword::PERMISSIVE, Keyword::RESTRICTIVE])?;
6981 Some(match keyword {
6982 Keyword::PERMISSIVE => CreatePolicyType::Permissive,
6983 Keyword::RESTRICTIVE => CreatePolicyType::Restrictive,
6984 unexpected_keyword => return Err(ParserError::ParserError(
6985 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in policy type"),
6986 )),
6987 })
6988 } else {
6989 None
6990 };
6991
6992 let command = if self.parse_keyword(Keyword::FOR) {
6993 let keyword = self.expect_one_of_keywords(&[
6994 Keyword::ALL,
6995 Keyword::SELECT,
6996 Keyword::INSERT,
6997 Keyword::UPDATE,
6998 Keyword::DELETE,
6999 ])?;
7000 Some(match keyword {
7001 Keyword::ALL => CreatePolicyCommand::All,
7002 Keyword::SELECT => CreatePolicyCommand::Select,
7003 Keyword::INSERT => CreatePolicyCommand::Insert,
7004 Keyword::UPDATE => CreatePolicyCommand::Update,
7005 Keyword::DELETE => CreatePolicyCommand::Delete,
7006 unexpected_keyword => return Err(ParserError::ParserError(
7007 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in policy command"),
7008 )),
7009 })
7010 } else {
7011 None
7012 };
7013
7014 let to = if self.parse_keyword(Keyword::TO) {
7015 Some(self.parse_comma_separated(|p| p.parse_owner())?)
7016 } else {
7017 None
7018 };
7019
7020 let using = if self.parse_keyword(Keyword::USING) {
7021 self.expect_token(&Token::LParen)?;
7022 let expr = self.parse_expr()?;
7023 self.expect_token(&Token::RParen)?;
7024 Some(expr)
7025 } else {
7026 None
7027 };
7028
7029 let with_check = if self.parse_keywords(&[Keyword::WITH, Keyword::CHECK]) {
7030 self.expect_token(&Token::LParen)?;
7031 let expr = self.parse_expr()?;
7032 self.expect_token(&Token::RParen)?;
7033 Some(expr)
7034 } else {
7035 None
7036 };
7037
7038 Ok(CreatePolicy {
7039 name,
7040 table_name,
7041 policy_type,
7042 command,
7043 to,
7044 using,
7045 with_check,
7046 })
7047 }
7048
7049 pub fn parse_create_connector(&mut self) -> Result<CreateConnector, ParserError> {
7059 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7060 let name = self.parse_identifier()?;
7061
7062 let connector_type = if self.parse_keyword(Keyword::TYPE) {
7063 Some(self.parse_literal_string()?)
7064 } else {
7065 None
7066 };
7067
7068 let url = if self.parse_keyword(Keyword::URL) {
7069 Some(self.parse_literal_string()?)
7070 } else {
7071 None
7072 };
7073
7074 let comment = self.parse_optional_inline_comment()?;
7075
7076 let with_dcproperties =
7077 match self.parse_options_with_keywords(&[Keyword::WITH, Keyword::DCPROPERTIES])? {
7078 properties if !properties.is_empty() => Some(properties),
7079 _ => None,
7080 };
7081
7082 Ok(CreateConnector {
7083 name,
7084 if_not_exists,
7085 connector_type,
7086 url,
7087 comment,
7088 with_dcproperties,
7089 })
7090 }
7091
7092 fn parse_operator_name(&mut self) -> Result<ObjectName, ParserError> {
7098 let mut parts = vec![];
7099 loop {
7100 parts.push(ObjectNamePart::Identifier(Ident::new(
7101 self.next_token().to_string(),
7102 )));
7103 if !self.consume_token(&Token::Period) {
7104 break;
7105 }
7106 }
7107 Ok(ObjectName(parts))
7108 }
7109
    /// Parses the body of a `CREATE OPERATOR <name> (<param> = <value>, ...)`
    /// statement. Each parameter may appear at most once — the match-arm guards
    /// below enforce that, and a repeated (or unknown) keyword falls through to
    /// the error arm. `FUNCTION` (or `PROCEDURE`) is mandatory.
    pub fn parse_create_operator(&mut self) -> Result<CreateOperator, ParserError> {
        let name = self.parse_operator_name()?;
        self.expect_token(&Token::LParen)?;

        // Accumulated parameters; `None`/`false`/empty means "not seen yet".
        let mut function: Option<ObjectName> = None;
        let mut is_procedure = false;
        let mut left_arg: Option<DataType> = None;
        let mut right_arg: Option<DataType> = None;
        let mut options: Vec<OperatorOption> = Vec::new();

        loop {
            let keyword = self.expect_one_of_keywords(&[
                Keyword::FUNCTION,
                Keyword::PROCEDURE,
                Keyword::LEFTARG,
                Keyword::RIGHTARG,
                Keyword::COMMUTATOR,
                Keyword::NEGATOR,
                Keyword::RESTRICT,
                Keyword::JOIN,
                Keyword::HASHES,
                Keyword::MERGES,
            ])?;

            match keyword {
                // Flag-style options: no `= value` part.
                Keyword::HASHES if !options.iter().any(|o| matches!(o, OperatorOption::Hashes)) => {
                    options.push(OperatorOption::Hashes);
                }
                Keyword::MERGES if !options.iter().any(|o| matches!(o, OperatorOption::Merges)) => {
                    options.push(OperatorOption::Merges);
                }
                // FUNCTION and PROCEDURE share one slot; the keyword used is
                // remembered in `is_procedure`.
                Keyword::FUNCTION | Keyword::PROCEDURE if function.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    function = Some(self.parse_object_name(false)?);
                    is_procedure = keyword == Keyword::PROCEDURE;
                }
                Keyword::LEFTARG if left_arg.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    left_arg = Some(self.parse_data_type()?);
                }
                Keyword::RIGHTARG if right_arg.is_none() => {
                    self.expect_token(&Token::Eq)?;
                    right_arg = Some(self.parse_data_type()?);
                }
                // COMMUTATOR/NEGATOR accept either a bare operator name or the
                // wrapped `OPERATOR(<name>)` form.
                Keyword::COMMUTATOR
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Commutator(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    if self.parse_keyword(Keyword::OPERATOR) {
                        self.expect_token(&Token::LParen)?;
                        let op = self.parse_operator_name()?;
                        self.expect_token(&Token::RParen)?;
                        options.push(OperatorOption::Commutator(op));
                    } else {
                        options.push(OperatorOption::Commutator(self.parse_operator_name()?));
                    }
                }
                Keyword::NEGATOR
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Negator(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    if self.parse_keyword(Keyword::OPERATOR) {
                        self.expect_token(&Token::LParen)?;
                        let op = self.parse_operator_name()?;
                        self.expect_token(&Token::RParen)?;
                        options.push(OperatorOption::Negator(op));
                    } else {
                        options.push(OperatorOption::Negator(self.parse_operator_name()?));
                    }
                }
                Keyword::RESTRICT
                    if !options
                        .iter()
                        .any(|o| matches!(o, OperatorOption::Restrict(_))) =>
                {
                    self.expect_token(&Token::Eq)?;
                    options.push(OperatorOption::Restrict(Some(
                        self.parse_object_name(false)?,
                    )));
                }
                Keyword::JOIN if !options.iter().any(|o| matches!(o, OperatorOption::Join(_))) => {
                    self.expect_token(&Token::Eq)?;
                    options.push(OperatorOption::Join(Some(self.parse_object_name(false)?)));
                }
                // Reached when a guard above rejected a duplicate keyword.
                _ => {
                    return Err(ParserError::ParserError(format!(
                        "Duplicate or unexpected keyword {:?} in CREATE OPERATOR",
                        keyword
                    )))
                }
            }

            // Parameters are comma-separated; no comma ends the list.
            if !self.consume_token(&Token::Comma) {
                break;
            }
        }

        self.expect_token(&Token::RParen)?;

        // FUNCTION (or PROCEDURE) is the one mandatory parameter.
        let function = function.ok_or_else(|| {
            ParserError::ParserError("CREATE OPERATOR requires FUNCTION parameter".to_string())
        })?;

        Ok(CreateOperator {
            name,
            function,
            is_procedure,
            left_arg,
            right_arg,
            options,
        })
    }
7231
7232 pub fn parse_create_aggregate(
7236 &mut self,
7237 or_replace: bool,
7238 ) -> Result<CreateAggregate, ParserError> {
7239 let name = self.parse_object_name(false)?;
7240
7241 self.expect_token(&Token::LParen)?;
7243 let args = if self.consume_token(&Token::Mul) {
7244 vec![]
7246 } else if self.consume_token(&Token::RParen) {
7247 self.prev_token();
7248 vec![]
7249 } else {
7250 let parsed = self.parse_comma_separated(|p| p.parse_data_type())?;
7251 parsed
7252 };
7253 self.expect_token(&Token::RParen)?;
7254
7255 self.expect_token(&Token::LParen)?;
7257 let mut options: Vec<CreateAggregateOption> = Vec::new();
7258 loop {
7259 let token = self.next_token();
7260 match &token.token {
7261 Token::RParen => break,
7262 Token::Comma => continue,
7263 Token::Word(word) => {
7264 let option = self.parse_create_aggregate_option(&word.value.to_uppercase())?;
7265 options.push(option);
7266 }
7267 other => {
7268 return Err(ParserError::ParserError(format!(
7269 "Unexpected token in CREATE AGGREGATE options: {other:?}"
7270 )));
7271 }
7272 }
7273 }
7274
7275 Ok(CreateAggregate {
7276 or_replace,
7277 name,
7278 args,
7279 options,
7280 })
7281 }
7282
    /// Parse a single `CREATE AGGREGATE` option whose (already uppercased)
    /// key is `key`. Most options take the form `KEY = value`; a few
    /// (`FINALFUNC_EXTRA`, `MFINALFUNC_EXTRA`, `HYPOTHETICAL`) are bare flags
    /// and consume no further tokens.
    fn parse_create_aggregate_option(
        &mut self,
        key: &str,
    ) -> Result<CreateAggregateOption, ParserError> {
        match key {
            // --- plain aggregate options ---
            "SFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Sfunc(
                    self.parse_object_name(false)?,
                ))
            }
            "STYPE" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Stype(self.parse_data_type()?))
            }
            "SSPACE" => {
                self.expect_token(&Token::Eq)?;
                let size = self.parse_literal_uint()?;
                Ok(CreateAggregateOption::Sspace(size))
            }
            "FINALFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Finalfunc(
                    self.parse_object_name(false)?,
                ))
            }
            // Bare flag: no `= value` part.
            "FINALFUNC_EXTRA" => Ok(CreateAggregateOption::FinalfuncExtra),
            "FINALFUNC_MODIFY" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::FinalfuncModify(
                    self.parse_aggregate_modify_kind()?,
                ))
            }
            "COMBINEFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Combinefunc(
                    self.parse_object_name(false)?,
                ))
            }
            "SERIALFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Serialfunc(
                    self.parse_object_name(false)?,
                ))
            }
            "DESERIALFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Deserialfunc(
                    self.parse_object_name(false)?,
                ))
            }
            "INITCOND" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Initcond(self.parse_value()?.value))
            }
            // --- moving-aggregate (M*) options ---
            "MSFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Msfunc(
                    self.parse_object_name(false)?,
                ))
            }
            "MINVFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Minvfunc(
                    self.parse_object_name(false)?,
                ))
            }
            "MSTYPE" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Mstype(self.parse_data_type()?))
            }
            "MSSPACE" => {
                self.expect_token(&Token::Eq)?;
                let size = self.parse_literal_uint()?;
                Ok(CreateAggregateOption::Msspace(size))
            }
            "MFINALFUNC" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Mfinalfunc(
                    self.parse_object_name(false)?,
                ))
            }
            // Bare flag: no `= value` part.
            "MFINALFUNC_EXTRA" => Ok(CreateAggregateOption::MfinalfuncExtra),
            "MFINALFUNC_MODIFY" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::MfinalfuncModify(
                    self.parse_aggregate_modify_kind()?,
                ))
            }
            "MINITCOND" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Minitcond(self.parse_value()?.value))
            }
            // --- ordered-set / misc options ---
            "SORTOP" => {
                self.expect_token(&Token::Eq)?;
                Ok(CreateAggregateOption::Sortop(
                    self.parse_object_name(false)?,
                ))
            }
            "PARALLEL" => {
                self.expect_token(&Token::Eq)?;
                let parallel = match self.expect_one_of_keywords(&[
                    Keyword::SAFE,
                    Keyword::RESTRICTED,
                    Keyword::UNSAFE,
                ])? {
                    Keyword::SAFE => FunctionParallel::Safe,
                    Keyword::RESTRICTED => FunctionParallel::Restricted,
                    Keyword::UNSAFE => FunctionParallel::Unsafe,
                    // expect_one_of_keywords only returns members of the list.
                    _ => unreachable!(),
                };
                Ok(CreateAggregateOption::Parallel(parallel))
            }
            // Bare flag: no `= value` part.
            "HYPOTHETICAL" => Ok(CreateAggregateOption::Hypothetical),
            other => Err(ParserError::ParserError(format!(
                "Unknown CREATE AGGREGATE option: {other}"
            ))),
        }
    }
7402
7403 fn parse_aggregate_modify_kind(&mut self) -> Result<AggregateModifyKind, ParserError> {
7404 let token = self.next_token();
7405 match &token.token {
7406 Token::Word(word) => match word.value.to_uppercase().as_str() {
7407 "READ_ONLY" => Ok(AggregateModifyKind::ReadOnly),
7408 "SHAREABLE" => Ok(AggregateModifyKind::Shareable),
7409 "READ_WRITE" => Ok(AggregateModifyKind::ReadWrite),
7410 other => Err(ParserError::ParserError(format!(
7411 "Expected READ_ONLY, SHAREABLE, or READ_WRITE, got: {other}"
7412 ))),
7413 },
7414 other => Err(ParserError::ParserError(format!(
7415 "Expected READ_ONLY, SHAREABLE, or READ_WRITE, got: {other:?}"
7416 ))),
7417 }
7418 }
7419
7420 pub fn parse_create_operator_family(&mut self) -> Result<CreateOperatorFamily, ParserError> {
7424 let name = self.parse_object_name(false)?;
7425 self.expect_keyword(Keyword::USING)?;
7426 let using = self.parse_identifier()?;
7427
7428 Ok(CreateOperatorFamily { name, using })
7429 }
7430
7431 pub fn parse_create_operator_class(&mut self) -> Result<CreateOperatorClass, ParserError> {
7435 let name = self.parse_object_name(false)?;
7436 let default = self.parse_keyword(Keyword::DEFAULT);
7437 self.expect_keywords(&[Keyword::FOR, Keyword::TYPE])?;
7438 let for_type = self.parse_data_type()?;
7439 self.expect_keyword(Keyword::USING)?;
7440 let using = self.parse_identifier()?;
7441
7442 let family = if self.parse_keyword(Keyword::FAMILY) {
7443 Some(self.parse_object_name(false)?)
7444 } else {
7445 None
7446 };
7447
7448 self.expect_keyword(Keyword::AS)?;
7449
7450 let mut items = vec![];
7451 loop {
7452 if self.parse_keyword(Keyword::OPERATOR) {
7453 let strategy_number = self.parse_literal_uint()?;
7454 let operator_name = self.parse_operator_name()?;
7455
7456 let op_types = if self.consume_token(&Token::LParen) {
7458 let left = self.parse_data_type()?;
7459 self.expect_token(&Token::Comma)?;
7460 let right = self.parse_data_type()?;
7461 self.expect_token(&Token::RParen)?;
7462 Some(OperatorArgTypes { left, right })
7463 } else {
7464 None
7465 };
7466
7467 let purpose = if self.parse_keyword(Keyword::FOR) {
7469 if self.parse_keyword(Keyword::SEARCH) {
7470 Some(OperatorPurpose::ForSearch)
7471 } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
7472 let sort_family = self.parse_object_name(false)?;
7473 Some(OperatorPurpose::ForOrderBy { sort_family })
7474 } else {
7475 return self
7476 .expected_ref("SEARCH or ORDER BY after FOR", self.peek_token_ref());
7477 }
7478 } else {
7479 None
7480 };
7481
7482 items.push(OperatorClassItem::Operator {
7483 strategy_number,
7484 operator_name,
7485 op_types,
7486 purpose,
7487 });
7488 } else if self.parse_keyword(Keyword::FUNCTION) {
7489 let support_number = self.parse_literal_uint()?;
7490
7491 let op_types = if self.consume_token(&Token::LParen)
7493 && self.peek_token_ref().token != Token::RParen
7494 {
7495 let mut types = vec![];
7496 loop {
7497 types.push(self.parse_data_type()?);
7498 if !self.consume_token(&Token::Comma) {
7499 break;
7500 }
7501 }
7502 self.expect_token(&Token::RParen)?;
7503 Some(types)
7504 } else if self.consume_token(&Token::LParen) {
7505 self.expect_token(&Token::RParen)?;
7506 Some(vec![])
7507 } else {
7508 None
7509 };
7510
7511 let function_name = self.parse_object_name(false)?;
7512
7513 let argument_types = if self.consume_token(&Token::LParen) {
7515 let mut types = vec![];
7516 loop {
7517 if self.peek_token_ref().token == Token::RParen {
7518 break;
7519 }
7520 types.push(self.parse_data_type()?);
7521 if !self.consume_token(&Token::Comma) {
7522 break;
7523 }
7524 }
7525 self.expect_token(&Token::RParen)?;
7526 types
7527 } else {
7528 vec![]
7529 };
7530
7531 items.push(OperatorClassItem::Function {
7532 support_number,
7533 op_types,
7534 function_name,
7535 argument_types,
7536 });
7537 } else if self.parse_keyword(Keyword::STORAGE) {
7538 let storage_type = self.parse_data_type()?;
7539 items.push(OperatorClassItem::Storage { storage_type });
7540 } else {
7541 break;
7542 }
7543
7544 if !self.consume_token(&Token::Comma) {
7546 break;
7547 }
7548 }
7549
7550 Ok(CreateOperatorClass {
7551 name,
7552 default,
7553 for_type,
7554 using,
7555 family,
7556 items,
7557 })
7558 }
7559
    /// Parse a `DROP` statement. The object kind after `DROP` selects either
    /// a generic `Statement::Drop` (tables, views, schemas, ...) or a
    /// dedicated sub-parser (functions, policies, operators, ...), which
    /// returns early.
    pub fn parse_drop(&mut self) -> Result<Statement, ParserError> {
        // DuckDB/MySQL allow `DROP TEMPORARY ...`; DuckDB also `DROP PERSISTENT ...`.
        // Both flags are consumed up front and forwarded where relevant.
        let temporary = dialect_of!(self is MySqlDialect | GenericDialect | DuckDbDialect)
            && self.parse_keyword(Keyword::TEMPORARY);
        let persistent = dialect_of!(self is DuckDbDialect)
            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();

        // Keyword dispatch; multi-word kinds (MATERIALIZED VIEW) are safe here
        // because the single-keyword checks fail on their leading word.
        let object_type = if self.parse_keyword(Keyword::TABLE) {
            ObjectType::Table
        } else if self.parse_keyword(Keyword::COLLATION) {
            ObjectType::Collation
        } else if self.parse_keyword(Keyword::VIEW) {
            ObjectType::View
        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEW]) {
            ObjectType::MaterializedView
        } else if self.parse_keyword(Keyword::INDEX) {
            ObjectType::Index
        } else if self.parse_keyword(Keyword::ROLE) {
            ObjectType::Role
        } else if self.parse_keyword(Keyword::SCHEMA) {
            ObjectType::Schema
        } else if self.parse_keyword(Keyword::DATABASE) {
            ObjectType::Database
        } else if self.parse_keyword(Keyword::SEQUENCE) {
            ObjectType::Sequence
        } else if self.parse_keyword(Keyword::STAGE) {
            ObjectType::Stage
        } else if self.parse_keyword(Keyword::TYPE) {
            ObjectType::Type
        } else if self.parse_keyword(Keyword::USER) {
            ObjectType::User
        } else if self.parse_keyword(Keyword::STREAM) {
            ObjectType::Stream
        } else if self.parse_keyword(Keyword::FUNCTION) {
            // Kinds below have their own grammar and return early.
            return self.parse_drop_function().map(Into::into);
        } else if self.parse_keyword(Keyword::POLICY) {
            return self.parse_drop_policy().map(Into::into);
        } else if self.parse_keyword(Keyword::CONNECTOR) {
            return self.parse_drop_connector();
        } else if self.parse_keyword(Keyword::DOMAIN) {
            return self.parse_drop_domain().map(Into::into);
        } else if self.parse_keyword(Keyword::PROCEDURE) {
            return self.parse_drop_procedure();
        } else if self.parse_keyword(Keyword::SECRET) {
            return self.parse_drop_secret(temporary, persistent);
        } else if self.parse_keyword(Keyword::TRIGGER) {
            return self.parse_drop_trigger().map(Into::into);
        } else if self.parse_keyword(Keyword::EXTENSION) {
            return self.parse_drop_extension();
        } else if self.parse_keyword(Keyword::OPERATOR) {
            // DROP OPERATOR FAMILY / CLASS / plain OPERATOR.
            return if self.parse_keyword(Keyword::FAMILY) {
                self.parse_drop_operator_family()
            } else if self.parse_keyword(Keyword::CLASS) {
                self.parse_drop_operator_class()
            } else {
                self.parse_drop_operator()
            };
        } else {
            return self.expected_ref(
                "COLLATION, CONNECTOR, DATABASE, EXTENSION, FUNCTION, INDEX, OPERATOR, POLICY, PROCEDURE, ROLE, SCHEMA, SECRET, SEQUENCE, STAGE, TABLE, TRIGGER, TYPE, VIEW, MATERIALIZED VIEW or USER after DROP",
                self.peek_token_ref(),
            );
        };
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;

        // Remember the location before the trailing modifiers for error reporting.
        let loc = self.peek_token_ref().span.start;
        let cascade = self.parse_keyword(Keyword::CASCADE);
        let restrict = self.parse_keyword(Keyword::RESTRICT);
        let purge = self.parse_keyword(Keyword::PURGE);
        if cascade && restrict {
            return parser_err!("Cannot specify both CASCADE and RESTRICT in DROP", loc);
        }
        if object_type == ObjectType::Role && (cascade || restrict || purge) {
            return parser_err!(
                "Cannot specify CASCADE, RESTRICT, or PURGE in DROP ROLE",
                loc
            );
        }
        // MySQL `DROP INDEX name ON table` style target.
        let table = if self.parse_keyword(Keyword::ON) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };
        Ok(Statement::Drop {
            object_type,
            if_exists,
            names,
            cascade,
            restrict,
            purge,
            temporary,
            table,
        })
    }
7659
7660 fn parse_optional_drop_behavior(&mut self) -> Option<DropBehavior> {
7661 match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
7662 Some(Keyword::CASCADE) => Some(DropBehavior::Cascade),
7663 Some(Keyword::RESTRICT) => Some(DropBehavior::Restrict),
7664 _ => None,
7665 }
7666 }
7667
7668 fn parse_drop_function(&mut self) -> Result<DropFunction, ParserError> {
7673 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7674 let func_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
7675 let drop_behavior = self.parse_optional_drop_behavior();
7676 Ok(DropFunction {
7677 if_exists,
7678 func_desc,
7679 drop_behavior,
7680 })
7681 }
7682
7683 fn parse_drop_policy(&mut self) -> Result<DropPolicy, ParserError> {
7689 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7690 let name = self.parse_identifier()?;
7691 self.expect_keyword_is(Keyword::ON)?;
7692 let table_name = self.parse_object_name(false)?;
7693 let drop_behavior = self.parse_optional_drop_behavior();
7694 Ok(DropPolicy {
7695 if_exists,
7696 name,
7697 table_name,
7698 drop_behavior,
7699 })
7700 }
7701 fn parse_drop_connector(&mut self) -> Result<Statement, ParserError> {
7707 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7708 let name = self.parse_identifier()?;
7709 Ok(Statement::DropConnector { if_exists, name })
7710 }
7711
7712 fn parse_drop_domain(&mut self) -> Result<DropDomain, ParserError> {
7716 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7717 let name = self.parse_object_name(false)?;
7718 let drop_behavior = self.parse_optional_drop_behavior();
7719 Ok(DropDomain {
7720 if_exists,
7721 name,
7722 drop_behavior,
7723 })
7724 }
7725
7726 fn parse_drop_procedure(&mut self) -> Result<Statement, ParserError> {
7731 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7732 let proc_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
7733 let drop_behavior = self.parse_optional_drop_behavior();
7734 Ok(Statement::DropProcedure {
7735 if_exists,
7736 proc_desc,
7737 drop_behavior,
7738 })
7739 }
7740
7741 fn parse_function_desc(&mut self) -> Result<FunctionDesc, ParserError> {
7742 let name = self.parse_object_name(false)?;
7743
7744 let args = if self.consume_token(&Token::LParen) {
7745 if self.consume_token(&Token::RParen) {
7746 Some(vec![])
7747 } else {
7748 let args = self.parse_comma_separated(Parser::parse_function_arg)?;
7749 self.expect_token(&Token::RParen)?;
7750 Some(args)
7751 }
7752 } else {
7753 None
7754 };
7755
7756 Ok(FunctionDesc { name, args })
7757 }
7758
7759 fn parse_drop_secret(
7761 &mut self,
7762 temporary: bool,
7763 persistent: bool,
7764 ) -> Result<Statement, ParserError> {
7765 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
7766 let name = self.parse_identifier()?;
7767 let storage_specifier = if self.parse_keyword(Keyword::FROM) {
7768 self.parse_identifier().ok()
7769 } else {
7770 None
7771 };
7772 let temp = match (temporary, persistent) {
7773 (true, false) => Some(true),
7774 (false, true) => Some(false),
7775 (false, false) => None,
7776 _ => self.expected_ref("TEMPORARY or PERSISTENT", self.peek_token_ref())?,
7777 };
7778
7779 Ok(Statement::DropSecret {
7780 if_exists,
7781 temporary: temp,
7782 name,
7783 storage_specifier,
7784 })
7785 }
7786
    /// Parse a `DECLARE` statement. BigQuery, Snowflake, and MsSql have their
    /// own variable-declaration grammars and are dispatched to dedicated
    /// parsers; the fallback implements the SQL-standard / PostgreSQL
    /// `DECLARE name [BINARY] [(IN|A)SENSITIVE] [[NO] SCROLL] CURSOR
    /// [WITH|WITHOUT HOLD] FOR query` cursor form.
    pub fn parse_declare(&mut self) -> Result<Statement, ParserError> {
        if dialect_of!(self is BigQueryDialect) {
            return self.parse_big_query_declare();
        }
        if dialect_of!(self is SnowflakeDialect) {
            return self.parse_snowflake_declare();
        }
        if dialect_of!(self is MsSqlDialect) {
            return self.parse_mssql_declare();
        }

        let name = self.parse_identifier()?;

        // Cursor modifiers; each is tri-state (present / negated / absent)
        // except BINARY, which is recorded as Some(bool).
        let binary = Some(self.parse_keyword(Keyword::BINARY));
        let sensitive = if self.parse_keyword(Keyword::INSENSITIVE) {
            Some(true)
        } else if self.parse_keyword(Keyword::ASENSITIVE) {
            Some(false)
        } else {
            None
        };
        let scroll = if self.parse_keyword(Keyword::SCROLL) {
            Some(true)
        } else if self.parse_keywords(&[Keyword::NO, Keyword::SCROLL]) {
            Some(false)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::CURSOR)?;
        let declare_type = Some(DeclareType::Cursor);

        // Optional `WITH HOLD` / `WITHOUT HOLD`.
        let hold = match self.parse_one_of_keywords(&[Keyword::WITH, Keyword::WITHOUT]) {
            Some(keyword) => {
                self.expect_keyword_is(Keyword::HOLD)?;

                match keyword {
                    Keyword::WITH => Some(true),
                    Keyword::WITHOUT => Some(false),
                    // parse_one_of_keywords only returns members of the list.
                    unexpected_keyword => return Err(ParserError::ParserError(
                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in cursor hold"),
                    )),
                }
            }
            None => None,
        };

        self.expect_keyword_is(Keyword::FOR)?;

        let query = Some(self.parse_query()?);

        Ok(Statement::Declare {
            stmts: vec![Declare {
                names: vec![name],
                data_type: None,
                assignment: None,
                declare_type,
                binary,
                sensitive,
                scroll,
                hold,
                for_query: query,
            }],
        })
    }
7861
7862 pub fn parse_big_query_declare(&mut self) -> Result<Statement, ParserError> {
7870 let names = self.parse_comma_separated(Parser::parse_identifier)?;
7871
7872 let data_type = match &self.peek_token_ref().token {
7873 Token::Word(w) if w.keyword == Keyword::DEFAULT => None,
7874 _ => Some(self.parse_data_type()?),
7875 };
7876
7877 let expr = if data_type.is_some() {
7878 if self.parse_keyword(Keyword::DEFAULT) {
7879 Some(self.parse_expr()?)
7880 } else {
7881 None
7882 }
7883 } else {
7884 self.expect_keyword_is(Keyword::DEFAULT)?;
7887 Some(self.parse_expr()?)
7888 };
7889
7890 Ok(Statement::Declare {
7891 stmts: vec![Declare {
7892 names,
7893 data_type,
7894 assignment: expr.map(|expr| DeclareAssignment::Default(Box::new(expr))),
7895 declare_type: None,
7896 binary: None,
7897 sensitive: None,
7898 scroll: None,
7899 hold: None,
7900 for_query: None,
7901 }],
7902 })
7903 }
7904
    /// Parse a Snowflake `DECLARE` block containing one or more declarations
    /// (cursors, resultsets, exceptions, and plain variables), separated by
    /// semicolons. The loop stops when, after a semicolon, the next word is a
    /// keyword (i.e. the start of the following statement rather than a new
    /// declaration name).
    pub fn parse_snowflake_declare(&mut self) -> Result<Statement, ParserError> {
        let mut stmts = vec![];
        loop {
            let name = self.parse_identifier()?;
            let (declare_type, for_query, assigned_expr, data_type) =
                if self.parse_keyword(Keyword::CURSOR) {
                    self.expect_keyword_is(Keyword::FOR)?;
                    // `CURSOR FOR SELECT ...` binds a query; anything else
                    // after FOR is treated as an expression binding.
                    match &self.peek_token_ref().token {
                        Token::Word(w) if w.keyword == Keyword::SELECT => (
                            Some(DeclareType::Cursor),
                            Some(self.parse_query()?),
                            None,
                            None,
                        ),
                        _ => (
                            Some(DeclareType::Cursor),
                            None,
                            Some(DeclareAssignment::For(Box::new(self.parse_expr()?))),
                            None,
                        ),
                    }
                } else if self.parse_keyword(Keyword::RESULTSET) {
                    // RESULTSET may optionally be initialized unless the
                    // declaration ends right away with `;`.
                    let assigned_expr = if self.peek_token_ref().token != Token::SemiColon {
                        self.parse_snowflake_variable_declaration_expression()?
                    } else {
                        None
                    };

                    (Some(DeclareType::ResultSet), None, assigned_expr, None)
                } else if self.parse_keyword(Keyword::EXCEPTION) {
                    // EXCEPTION may carry a parenthesized (code, message) value.
                    let assigned_expr = if self.peek_token_ref().token == Token::LParen {
                        Some(DeclareAssignment::Expr(Box::new(self.parse_expr()?)))
                    } else {
                        None
                    };

                    (Some(DeclareType::Exception), None, assigned_expr, None)
                } else {
                    // Plain variable: either `name := expr` / `name DEFAULT expr`
                    // directly, or `name type [:= expr | DEFAULT expr]`.
                    let (assigned_expr, data_type) = if let Some(assigned_expr) =
                        self.parse_snowflake_variable_declaration_expression()?
                    {
                        (Some(assigned_expr), None)
                    } else if let Token::Word(_) = &self.peek_token_ref().token {
                        let data_type = self.parse_data_type()?;
                        (
                            self.parse_snowflake_variable_declaration_expression()?,
                            Some(data_type),
                        )
                    } else {
                        (None, None)
                    };
                    (None, None, assigned_expr, data_type)
                };
            let stmt = Declare {
                names: vec![name],
                data_type,
                assignment: assigned_expr,
                declare_type,
                binary: None,
                sensitive: None,
                scroll: None,
                hold: None,
                for_query,
            };

            stmts.push(stmt);
            if self.consume_token(&Token::SemiColon) {
                match &self.peek_token_ref().token {
                    Token::Word(w)
                        if ALL_KEYWORDS
                            .binary_search(&w.value.to_uppercase().as_str())
                            .is_err() =>
                    {
                        // Next word is not a keyword — another declaration follows.
                        continue;
                    }
                    _ => {
                        // The semicolon terminated the DECLARE block; push it
                        // back so the caller sees the statement boundary.
                        self.prev_token();
                    }
                }
            }

            break;
        }

        Ok(Statement::Declare { stmts })
    }
8020
8021 pub fn parse_mssql_declare(&mut self) -> Result<Statement, ParserError> {
8033 let stmts = self.parse_comma_separated(Parser::parse_mssql_declare_stmt)?;
8034
8035 Ok(Statement::Declare { stmts })
8036 }
8037
    /// Parse a single MsSql declaration:
    /// `@name [AS] type [= expr]` or `name CURSOR [FOR query]`.
    /// Variable names must start with `@` unless the declaration is a cursor.
    pub fn parse_mssql_declare_stmt(&mut self) -> Result<Declare, ParserError> {
        let name = {
            let ident = self.parse_identifier()?;
            if !ident.value.starts_with('@')
                && !matches!(
                    &self.peek_token_ref().token,
                    Token::Word(w) if w.keyword == Keyword::CURSOR
                )
            {
                // NOTE(review): this is a parse failure, yet it is reported as
                // `TokenizerError` — callers matching on the error variant see
                // a misleading kind. Changing the variant could break existing
                // matches, so it is only flagged here — confirm intent.
                Err(ParserError::TokenizerError(
                    "Invalid MsSql variable declaration.".to_string(),
                ))
            } else {
                Ok(ident)
            }
        }?;

        // Either a cursor declaration or a typed variable; the optional AS
        // before the type is consumed and ignored.
        let (declare_type, data_type) = match &self.peek_token_ref().token {
            Token::Word(w) => match w.keyword {
                Keyword::CURSOR => {
                    self.next_token();
                    (Some(DeclareType::Cursor), None)
                }
                Keyword::AS => {
                    self.next_token();
                    (None, Some(self.parse_data_type()?))
                }
                _ => (None, Some(self.parse_data_type()?)),
            },
            _ => (None, Some(self.parse_data_type()?)),
        };

        // `FOR query` binds a cursor query; otherwise an optional `= expr`.
        let (for_query, assignment) = if self.peek_keyword(Keyword::FOR) {
            self.next_token();
            let query = Some(self.parse_query()?);
            (query, None)
        } else {
            let assignment = self.parse_mssql_variable_declaration_expression()?;
            (None, assignment)
        };

        Ok(Declare {
            names: vec![name],
            data_type,
            assignment,
            declare_type,
            binary: None,
            sensitive: None,
            scroll: None,
            hold: None,
            for_query,
        })
    }
8101
8102 pub fn parse_snowflake_variable_declaration_expression(
8110 &mut self,
8111 ) -> Result<Option<DeclareAssignment>, ParserError> {
8112 Ok(match &self.peek_token_ref().token {
8113 Token::Word(w) if w.keyword == Keyword::DEFAULT => {
8114 self.next_token(); Some(DeclareAssignment::Default(Box::new(self.parse_expr()?)))
8116 }
8117 Token::Assignment => {
8118 self.next_token(); Some(DeclareAssignment::DuckAssignment(Box::new(
8120 self.parse_expr()?,
8121 )))
8122 }
8123 _ => None,
8124 })
8125 }
8126
8127 pub fn parse_mssql_variable_declaration_expression(
8134 &mut self,
8135 ) -> Result<Option<DeclareAssignment>, ParserError> {
8136 Ok(match &self.peek_token_ref().token {
8137 Token::Eq => {
8138 self.next_token(); Some(DeclareAssignment::MsSqlAssignment(Box::new(
8140 self.parse_expr()?,
8141 )))
8142 }
8143 _ => None,
8144 })
8145 }
8146
8147 pub fn parse_fetch_statement(&mut self) -> Result<Statement, ParserError> {
8149 let direction = if self.parse_keyword(Keyword::NEXT) {
8150 FetchDirection::Next
8151 } else if self.parse_keyword(Keyword::PRIOR) {
8152 FetchDirection::Prior
8153 } else if self.parse_keyword(Keyword::FIRST) {
8154 FetchDirection::First
8155 } else if self.parse_keyword(Keyword::LAST) {
8156 FetchDirection::Last
8157 } else if self.parse_keyword(Keyword::ABSOLUTE) {
8158 FetchDirection::Absolute {
8159 limit: self.parse_number_value()?,
8160 }
8161 } else if self.parse_keyword(Keyword::RELATIVE) {
8162 FetchDirection::Relative {
8163 limit: self.parse_number_value()?,
8164 }
8165 } else if self.parse_keyword(Keyword::FORWARD) {
8166 if self.parse_keyword(Keyword::ALL) {
8167 FetchDirection::ForwardAll
8168 } else {
8169 FetchDirection::Forward {
8170 limit: Some(self.parse_number_value()?),
8172 }
8173 }
8174 } else if self.parse_keyword(Keyword::BACKWARD) {
8175 if self.parse_keyword(Keyword::ALL) {
8176 FetchDirection::BackwardAll
8177 } else {
8178 FetchDirection::Backward {
8179 limit: Some(self.parse_number_value()?),
8181 }
8182 }
8183 } else if self.parse_keyword(Keyword::ALL) {
8184 FetchDirection::All
8185 } else {
8186 FetchDirection::Count {
8187 limit: self.parse_number_value()?,
8188 }
8189 };
8190
8191 let position = if self.peek_keyword(Keyword::FROM) {
8192 self.expect_keyword(Keyword::FROM)?;
8193 FetchPosition::From
8194 } else if self.peek_keyword(Keyword::IN) {
8195 self.expect_keyword(Keyword::IN)?;
8196 FetchPosition::In
8197 } else {
8198 return parser_err!("Expected FROM or IN", self.peek_token_ref().span.start);
8199 };
8200
8201 let name = self.parse_identifier()?;
8202
8203 let into = if self.parse_keyword(Keyword::INTO) {
8204 Some(self.parse_object_name(false)?)
8205 } else {
8206 None
8207 };
8208
8209 Ok(Statement::Fetch {
8210 name,
8211 direction,
8212 position,
8213 into,
8214 })
8215 }
8216
8217 pub fn parse_discard(&mut self) -> Result<Statement, ParserError> {
8219 let object_type = if self.parse_keyword(Keyword::ALL) {
8220 DiscardObject::ALL
8221 } else if self.parse_keyword(Keyword::PLANS) {
8222 DiscardObject::PLANS
8223 } else if self.parse_keyword(Keyword::SEQUENCES) {
8224 DiscardObject::SEQUENCES
8225 } else if self.parse_keyword(Keyword::TEMP) || self.parse_keyword(Keyword::TEMPORARY) {
8226 DiscardObject::TEMP
8227 } else {
8228 return self.expected_ref(
8229 "ALL, PLANS, SEQUENCES, TEMP or TEMPORARY after DISCARD",
8230 self.peek_token_ref(),
8231 );
8232 };
8233 Ok(Statement::Discard { object_type })
8234 }
8235
    /// Parse a `CREATE [UNIQUE] INDEX` statement (the `INDEX` keyword has
    /// already been consumed; `unique` records whether `UNIQUE` was present).
    pub fn parse_create_index(&mut self, unique: bool) -> Result<CreateIndex, ParserError> {
        let concurrently = self.parse_keyword(Keyword::CONCURRENTLY);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);

        let mut using = None;

        // The index name is optional (`CREATE INDEX ON t (...)` is valid in
        // PostgreSQL) — but when IF NOT EXISTS was given a name must follow.
        let index_name = if if_not_exists || !self.parse_keyword(Keyword::ON) {
            let index_name = self.parse_object_name(false)?;
            // `USING method` may appear either before or after ON <table>.
            using = self.parse_optional_using_then_index_type()?;
            self.expect_keyword_is(Keyword::ON)?;
            Some(index_name)
        } else {
            None
        };

        let table_name = self.parse_object_name(false)?;

        // A USING clause after the table name wins over one seen earlier.
        using = self.parse_optional_using_then_index_type()?.or(using);

        let columns = self.parse_parenthesized_index_column_list()?;

        // Optional `INCLUDE (col [, ...])` covering columns (PostgreSQL).
        let include = if self.parse_keyword(Keyword::INCLUDE) {
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(|p| p.parse_identifier())?;
            self.expect_token(&Token::RParen)?;
            columns
        } else {
            vec![]
        };

        // Optional `NULLS [NOT] DISTINCT` (PostgreSQL 15+).
        let nulls_distinct = if self.parse_keyword(Keyword::NULLS) {
            let not = self.parse_keyword(Keyword::NOT);
            self.expect_keyword_is(Keyword::DISTINCT)?;
            Some(!not)
        } else {
            None
        };

        // Optional `WITH (storage_parameter [, ...])`, dialect-gated.
        let with = if self.dialect.supports_create_index_with_clause()
            && self.parse_keyword(Keyword::WITH)
        {
            self.expect_token(&Token::LParen)?;
            let with_params = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;
            with_params
        } else {
            Vec::new()
        };

        // Optional partial-index predicate: `WHERE expr`.
        let predicate = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let index_options = self.parse_index_options()?;

        // Trailing MySQL-style ALGORITHM/LOCK options are reused from the
        // ALTER TABLE operation parser.
        let mut alter_options = Vec::new();
        while self
            .peek_one_of_keywords(&[Keyword::ALGORITHM, Keyword::LOCK])
            .is_some()
        {
            alter_options.push(self.parse_alter_table_operation()?)
        }

        Ok(CreateIndex {
            name: index_name,
            table_name,
            using,
            columns,
            unique,
            concurrently,
            if_not_exists,
            include,
            nulls_distinct,
            with,
            predicate,
            index_options,
            alter_options,
        })
    }
8327
8328 pub fn parse_create_extension(&mut self) -> Result<CreateExtension, ParserError> {
8330 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
8331 let name = self.parse_identifier()?;
8332
8333 let (schema, version, cascade) = if self.parse_keyword(Keyword::WITH) {
8334 let schema = if self.parse_keyword(Keyword::SCHEMA) {
8335 Some(self.parse_identifier()?)
8336 } else {
8337 None
8338 };
8339
8340 let version = if self.parse_keyword(Keyword::VERSION) {
8341 Some(self.parse_identifier()?)
8342 } else {
8343 None
8344 };
8345
8346 let cascade = self.parse_keyword(Keyword::CASCADE);
8347
8348 (schema, version, cascade)
8349 } else {
8350 (None, None, false)
8351 };
8352
8353 Ok(CreateExtension {
8354 name,
8355 if_not_exists,
8356 schema,
8357 version,
8358 cascade,
8359 })
8360 }
8361
8362 pub fn parse_create_collation(&mut self) -> Result<CreateCollation, ParserError> {
8364 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
8365 let name = self.parse_object_name(false)?;
8366
8367 let definition = if self.parse_keyword(Keyword::FROM) {
8368 CreateCollationDefinition::From(self.parse_object_name(false)?)
8369 } else if self.consume_token(&Token::LParen) {
8370 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8371 self.expect_token(&Token::RParen)?;
8372 CreateCollationDefinition::Options(options)
8373 } else {
8374 return self.expected_ref(
8375 "FROM or parenthesized option list after CREATE COLLATION name",
8376 self.peek_token_ref(),
8377 );
8378 };
8379
8380 Ok(CreateCollation {
8381 if_not_exists,
8382 name,
8383 definition,
8384 })
8385 }
8386
8387 pub fn parse_create_text_search(&mut self) -> Result<Statement, ParserError> {
8389 if self.parse_keyword(Keyword::CONFIGURATION) {
8390 let name = self.parse_object_name(false)?;
8391 self.expect_token(&Token::LParen)?;
8392 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8393 self.expect_token(&Token::RParen)?;
8394 Ok(Statement::CreateTextSearchConfiguration(
8395 CreateTextSearchConfiguration { name, options },
8396 ))
8397 } else if self.parse_keyword(Keyword::DICTIONARY) {
8398 let name = self.parse_object_name(false)?;
8399 self.expect_token(&Token::LParen)?;
8400 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8401 self.expect_token(&Token::RParen)?;
8402 Ok(Statement::CreateTextSearchDictionary(
8403 CreateTextSearchDictionary { name, options },
8404 ))
8405 } else if self.parse_keyword(Keyword::PARSER) {
8406 let name = self.parse_object_name(false)?;
8407 self.expect_token(&Token::LParen)?;
8408 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8409 self.expect_token(&Token::RParen)?;
8410 Ok(Statement::CreateTextSearchParser(CreateTextSearchParser {
8411 name,
8412 options,
8413 }))
8414 } else if self.parse_keyword(Keyword::TEMPLATE) {
8415 let name = self.parse_object_name(false)?;
8416 self.expect_token(&Token::LParen)?;
8417 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8418 self.expect_token(&Token::RParen)?;
8419 Ok(Statement::CreateTextSearchTemplate(
8420 CreateTextSearchTemplate { name, options },
8421 ))
8422 } else {
8423 self.expected_ref(
8424 "CONFIGURATION, DICTIONARY, PARSER, or TEMPLATE after CREATE TEXT SEARCH",
8425 self.peek_token_ref(),
8426 )
8427 }
8428 }
8429
8430 pub fn parse_drop_extension(&mut self) -> Result<Statement, ParserError> {
8432 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8433 let names = self.parse_comma_separated(|p| p.parse_identifier())?;
8434 let cascade_or_restrict =
8435 self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]);
8436 Ok(Statement::DropExtension(DropExtension {
8437 names,
8438 if_exists,
8439 cascade_or_restrict: cascade_or_restrict
8440 .map(|k| match k {
8441 Keyword::CASCADE => Ok(ReferentialAction::Cascade),
8442 Keyword::RESTRICT => Ok(ReferentialAction::Restrict),
8443 _ => self.expected_ref("CASCADE or RESTRICT", self.peek_token_ref()),
8444 })
8445 .transpose()?,
8446 }))
8447 }
8448
8449 pub fn parse_drop_operator(&mut self) -> Result<Statement, ParserError> {
8452 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8453 let operators = self.parse_comma_separated(|p| p.parse_drop_operator_signature())?;
8454 let drop_behavior = self.parse_optional_drop_behavior();
8455 Ok(Statement::DropOperator(DropOperator {
8456 if_exists,
8457 operators,
8458 drop_behavior,
8459 }))
8460 }
8461
8462 fn parse_drop_operator_signature(&mut self) -> Result<DropOperatorSignature, ParserError> {
8465 let name = self.parse_operator_name()?;
8466 self.expect_token(&Token::LParen)?;
8467
8468 let left_type = if self.parse_keyword(Keyword::NONE) {
8470 None
8471 } else {
8472 Some(self.parse_data_type()?)
8473 };
8474
8475 self.expect_token(&Token::Comma)?;
8476
8477 let right_type = self.parse_data_type()?;
8479
8480 self.expect_token(&Token::RParen)?;
8481
8482 Ok(DropOperatorSignature {
8483 name,
8484 left_type,
8485 right_type,
8486 })
8487 }
8488
8489 pub fn parse_drop_operator_family(&mut self) -> Result<Statement, ParserError> {
8493 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8494 let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
8495 self.expect_keyword(Keyword::USING)?;
8496 let using = self.parse_identifier()?;
8497 let drop_behavior = self.parse_optional_drop_behavior();
8498 Ok(Statement::DropOperatorFamily(DropOperatorFamily {
8499 if_exists,
8500 names,
8501 using,
8502 drop_behavior,
8503 }))
8504 }
8505
8506 pub fn parse_drop_operator_class(&mut self) -> Result<Statement, ParserError> {
8510 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8511 let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
8512 self.expect_keyword(Keyword::USING)?;
8513 let using = self.parse_identifier()?;
8514 let drop_behavior = self.parse_optional_drop_behavior();
8515 Ok(Statement::DropOperatorClass(DropOperatorClass {
8516 if_exists,
8517 names,
8518 using,
8519 drop_behavior,
8520 }))
8521 }
8522
8523 pub fn parse_hive_distribution(&mut self) -> Result<HiveDistributionStyle, ParserError> {
8527 if self.parse_keywords(&[Keyword::PARTITIONED, Keyword::BY]) {
8528 self.expect_token(&Token::LParen)?;
8529 let columns =
8530 self.parse_comma_separated(|parser| parser.parse_column_def_inner(true))?;
8531 self.expect_token(&Token::RParen)?;
8532 Ok(HiveDistributionStyle::PARTITIONED { columns })
8533 } else {
8534 Ok(HiveDistributionStyle::NONE)
8535 }
8536 }
8537
8538 fn parse_dist_style(&mut self) -> Result<DistStyle, ParserError> {
8542 let token = self.next_token();
8543 match &token.token {
8544 Token::Word(w) => match w.keyword {
8545 Keyword::AUTO => Ok(DistStyle::Auto),
8546 Keyword::EVEN => Ok(DistStyle::Even),
8547 Keyword::KEY => Ok(DistStyle::Key),
8548 Keyword::ALL => Ok(DistStyle::All),
8549 _ => self.expected("AUTO, EVEN, KEY, or ALL", token),
8550 },
8551 _ => self.expected("AUTO, EVEN, KEY, or ALL", token),
8552 }
8553 }
8554
    /// Parses optional Hive storage clauses — `ROW FORMAT`, `STORED AS`,
    /// `LOCATION`, and `WITH SERDEPROPERTIES` — which may appear in any
    /// order. Returns `None` when no clause is present at all.
    pub fn parse_hive_formats(&mut self) -> Result<Option<HiveFormat>, ParserError> {
        let mut hive_format: Option<HiveFormat> = None;
        loop {
            match self.parse_one_of_keywords(&[
                Keyword::ROW,
                Keyword::STORED,
                Keyword::LOCATION,
                Keyword::WITH,
            ]) {
                Some(Keyword::ROW) => {
                    // ROW FORMAT SERDE '...' | ROW FORMAT DELIMITED ...
                    hive_format
                        .get_or_insert_with(HiveFormat::default)
                        .row_format = Some(self.parse_row_format()?);
                }
                Some(Keyword::STORED) => {
                    self.expect_keyword_is(Keyword::AS)?;
                    if self.parse_keyword(Keyword::INPUTFORMAT) {
                        // STORED AS INPUTFORMAT '...' OUTPUTFORMAT '...'
                        let input_format = self.parse_expr()?;
                        self.expect_keyword_is(Keyword::OUTPUTFORMAT)?;
                        let output_format = self.parse_expr()?;
                        hive_format.get_or_insert_with(HiveFormat::default).storage =
                            Some(HiveIOFormat::IOF {
                                input_format,
                                output_format,
                            });
                    } else {
                        // STORED AS <file format>, e.g. PARQUET
                        let format = self.parse_file_format()?;
                        hive_format.get_or_insert_with(HiveFormat::default).storage =
                            Some(HiveIOFormat::FileFormat { format });
                    }
                }
                Some(Keyword::LOCATION) => {
                    hive_format.get_or_insert_with(HiveFormat::default).location =
                        Some(self.parse_literal_string()?);
                }
                Some(Keyword::WITH) => {
                    // Push WITH back so the options parser can consume the
                    // full `WITH SERDEPROPERTIES (...)` sequence itself.
                    self.prev_token();
                    let properties = self
                        .parse_options_with_keywords(&[Keyword::WITH, Keyword::SERDEPROPERTIES])?;
                    if !properties.is_empty() {
                        hive_format
                            .get_or_insert_with(HiveFormat::default)
                            .serde_properties = Some(properties);
                    } else {
                        // WITH was not followed by SERDEPROPERTIES: stop and
                        // let the caller handle the WITH clause.
                        break;
                    }
                }
                None => break,
                _ => break,
            }
        }

        Ok(hive_format)
    }
8610
    /// Parses a Hive `ROW FORMAT` clause, after the `ROW` keyword has
    /// already been consumed: either `FORMAT SERDE '<class>'` or
    /// `FORMAT DELIMITED [<delimiter specs>...]`.
    pub fn parse_row_format(&mut self) -> Result<HiveRowFormat, ParserError> {
        self.expect_keyword_is(Keyword::FORMAT)?;
        match self.parse_one_of_keywords(&[Keyword::SERDE, Keyword::DELIMITED]) {
            Some(Keyword::SERDE) => {
                let class = self.parse_literal_string()?;
                Ok(HiveRowFormat::SERDE { class })
            }
            // DELIMITED (or neither keyword): collect zero or more
            // delimiter specifications.
            _ => {
                let mut row_delimiters = vec![];

                loop {
                    match self.parse_one_of_keywords(&[
                        Keyword::FIELDS,
                        Keyword::COLLECTION,
                        Keyword::MAP,
                        Keyword::LINES,
                        Keyword::NULL,
                    ]) {
                        Some(Keyword::FIELDS) => {
                            // FIELDS TERMINATED BY 'c' [ESCAPED BY 'c']
                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::FieldsTerminatedBy,
                                    char: self.parse_identifier()?,
                                });

                                if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) {
                                    row_delimiters.push(HiveRowDelimiter {
                                        delimiter: HiveDelimiter::FieldsEscapedBy,
                                        char: self.parse_identifier()?,
                                    });
                                }
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::COLLECTION) => {
                            // COLLECTION ITEMS TERMINATED BY 'c'
                            if self.parse_keywords(&[
                                Keyword::ITEMS,
                                Keyword::TERMINATED,
                                Keyword::BY,
                            ]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::CollectionItemsTerminatedBy,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::MAP) => {
                            // MAP KEYS TERMINATED BY 'c'
                            if self.parse_keywords(&[
                                Keyword::KEYS,
                                Keyword::TERMINATED,
                                Keyword::BY,
                            ]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::MapKeysTerminatedBy,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::LINES) => {
                            // LINES TERMINATED BY 'c'
                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::LinesTerminatedBy,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        Some(Keyword::NULL) => {
                            // NULL DEFINED AS 'c'
                            if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) {
                                row_delimiters.push(HiveRowDelimiter {
                                    delimiter: HiveDelimiter::NullDefinedAs,
                                    char: self.parse_identifier()?,
                                });
                            } else {
                                break;
                            }
                        }
                        _ => {
                            break;
                        }
                    }
                }

                Ok(HiveRowFormat::DELIMITED {
                    delimiters: row_delimiters,
                })
            }
        }
    }
8707
8708 fn parse_optional_on_cluster(&mut self) -> Result<Option<Ident>, ParserError> {
8709 if self.parse_keywords(&[Keyword::ON, Keyword::CLUSTER]) {
8710 Ok(Some(self.parse_identifier()?))
8711 } else {
8712 Ok(None)
8713 }
8714 }
8715
    /// Parses the body of a `CREATE TABLE` statement, after `CREATE`
    /// (and any `OR REPLACE` / `TEMPORARY` / `GLOBAL` / `TRANSIENT`
    /// modifiers, passed in as flags) and the `TABLE` keyword have been
    /// consumed. Clauses are parsed in a fixed order covering several
    /// dialects (Hive, ClickHouse, BigQuery, Redshift, SQLite, ...).
    pub fn parse_create_table(
        &mut self,
        or_replace: bool,
        temporary: bool,
        global: Option<bool>,
        transient: bool,
    ) -> Result<CreateTable, ParserError> {
        // BigQuery allows unquoted hyphens in table names.
        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let table_name = self.parse_object_name(allow_unquoted_hyphen)?;

        // PostgreSQL: CREATE TABLE ... PARTITION OF <parent>
        let partition_of = if self.parse_keywords(&[Keyword::PARTITION, Keyword::OF]) {
            Some(self.parse_object_name(allow_unquoted_hyphen)?)
        } else {
            None
        };

        let on_cluster = self.parse_optional_on_cluster()?;

        let like = self.maybe_parse_create_table_like(allow_unquoted_hyphen)?;

        // Snowflake-style CLONE. NOTE(review): a parse error after CLONE is
        // silently discarded via `.ok()`, leaving `clone` as None.
        let clone = if self.parse_keyword(Keyword::CLONE) {
            self.parse_object_name(allow_unquoted_hyphen).ok()
        } else {
            None
        };

        // Parenthesized column/constraint list (may be absent or empty).
        let (columns, constraints) = self.parse_columns()?;
        // Hive allows a table COMMENT directly after the column list.
        let comment_after_column_def =
            if dialect_of!(self is HiveDialect) && self.parse_keyword(Keyword::COMMENT) {
                let next_token = self.next_token();
                match next_token.token {
                    Token::SingleQuotedString(str) => Some(CommentDef::WithoutEq(str)),
                    _ => self.expected("comment", next_token)?,
                }
            } else {
                None
            };

        // A PARTITION OF table must specify its bound: FOR VALUES ... or DEFAULT.
        let for_values = if partition_of.is_some() {
            if self.peek_keyword(Keyword::FOR) || self.peek_keyword(Keyword::DEFAULT) {
                Some(self.parse_partition_for_values()?)
            } else {
                return self.expected_ref(
                    "FOR VALUES or DEFAULT after PARTITION OF",
                    self.peek_token_ref(),
                );
            }
        } else {
            None
        };

        // SQLite: WITHOUT ROWID
        let without_rowid = self.parse_keywords(&[Keyword::WITHOUT, Keyword::ROWID]);

        // Hive-specific storage clauses.
        let hive_distribution = self.parse_hive_distribution()?;
        let clustered_by = self.parse_optional_clustered_by()?;
        let hive_formats = self.parse_hive_formats()?;

        // INHERITS / WITH / TBLPROPERTIES / PARTITION BY / CLUSTER BY / OPTIONS / plain options.
        let create_table_config = self.parse_optional_create_table_config()?;

        // ClickHouse: standalone PRIMARY KEY <expr> clause.
        let primary_key = if dialect_of!(self is ClickHouseDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY])
        {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };

        // ORDER BY <expr> or ORDER BY (<expr>, ...) — parens may be empty.
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            if self.consume_token(&Token::LParen) {
                let columns = if self.peek_token_ref().token != Token::RParen {
                    self.parse_comma_separated(|p| p.parse_expr())?
                } else {
                    vec![]
                };
                self.expect_token(&Token::RParen)?;
                Some(OneOrManyWithParens::Many(columns))
            } else {
                Some(OneOrManyWithParens::One(self.parse_expr()?))
            }
        } else {
            None
        };

        // ON COMMIT {DELETE ROWS | PRESERVE ROWS | DROP} for temporary tables.
        let on_commit = if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT]) {
            Some(self.parse_create_table_on_commit()?)
        } else {
            None
        };

        // SQLite: STRICT tables.
        let strict = self.parse_keyword(Keyword::STRICT);

        // Redshift: BACKUP {YES | NO}
        let backup = if self.parse_keyword(Keyword::BACKUP) {
            let keyword = self.expect_one_of_keywords(&[Keyword::YES, Keyword::NO])?;
            Some(keyword == Keyword::YES)
        } else {
            None
        };

        // Redshift distribution/sort clauses.
        let diststyle = if self.parse_keyword(Keyword::DISTSTYLE) {
            Some(self.parse_dist_style()?)
        } else {
            None
        };
        let distkey = if self.parse_keyword(Keyword::DISTKEY) {
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            Some(expr)
        } else {
            None
        };
        let sortkey = if self.parse_keyword(Keyword::SORTKEY) {
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(|p| p.parse_expr())?;
            self.expect_token(&Token::RParen)?;
            Some(columns)
        } else {
            None
        };

        // CTAS: `AS <query>`, or (in some dialects) a bare SELECT.
        let query = if self.parse_keyword(Keyword::AS) {
            Some(self.parse_query()?)
        } else if self.dialect.supports_create_table_select() && self.parse_keyword(Keyword::SELECT)
        {
            // Put SELECT back so parse_query sees the full query.
            self.prev_token();
            Some(self.parse_query()?)
        } else {
            None
        };

        // Assemble everything via the builder.
        Ok(CreateTableBuilder::new(table_name)
            .temporary(temporary)
            .columns(columns)
            .constraints(constraints)
            .or_replace(or_replace)
            .if_not_exists(if_not_exists)
            .transient(transient)
            .hive_distribution(hive_distribution)
            .hive_formats(hive_formats)
            .global(global)
            .query(query)
            .without_rowid(without_rowid)
            .like(like)
            .clone_clause(clone)
            .comment_after_column_def(comment_after_column_def)
            .order_by(order_by)
            .on_commit(on_commit)
            .on_cluster(on_cluster)
            .clustered_by(clustered_by)
            .partition_by(create_table_config.partition_by)
            .cluster_by(create_table_config.cluster_by)
            .inherits(create_table_config.inherits)
            .partition_of(partition_of)
            .for_values(for_values)
            .table_options(create_table_config.table_options)
            .primary_key(primary_key)
            .strict(strict)
            .backup(backup)
            .diststyle(diststyle)
            .distkey(distkey)
            .sortkey(sortkey)
            .build())
    }
8900
    /// Parses an optional `LIKE` clause in `CREATE TABLE`.
    ///
    /// Supports the parenthesized form `(LIKE <table> [INCLUDING | EXCLUDING
    /// DEFAULTS])` when the dialect allows it, and the plain `LIKE <table>` /
    /// `ILIKE <table>` form otherwise. Returns `None` (consuming nothing)
    /// when no LIKE clause is present.
    fn maybe_parse_create_table_like(
        &mut self,
        allow_unquoted_hyphen: bool,
    ) -> Result<Option<CreateTableLikeKind>, ParserError> {
        let like = if self.dialect.supports_create_table_like_parenthesized()
            && self.consume_token(&Token::LParen)
        {
            if self.parse_keyword(Keyword::LIKE) {
                let name = self.parse_object_name(allow_unquoted_hyphen)?;
                let defaults = if self.parse_keywords(&[Keyword::INCLUDING, Keyword::DEFAULTS]) {
                    Some(CreateTableLikeDefaults::Including)
                } else if self.parse_keywords(&[Keyword::EXCLUDING, Keyword::DEFAULTS]) {
                    Some(CreateTableLikeDefaults::Excluding)
                } else {
                    None
                };
                self.expect_token(&Token::RParen)?;
                Some(CreateTableLikeKind::Parenthesized(CreateTableLike {
                    name,
                    defaults,
                }))
            } else {
                // Not a LIKE clause: put the `(` back so the caller can
                // parse it as an ordinary column list.
                self.prev_token();
                None
            }
        } else if self.parse_keyword(Keyword::LIKE) || self.parse_keyword(Keyword::ILIKE) {
            let name = self.parse_object_name(allow_unquoted_hyphen)?;
            Some(CreateTableLikeKind::Plain(CreateTableLike {
                name,
                defaults: None,
            }))
        } else {
            None
        };
        Ok(like)
    }
8938
8939 pub(crate) fn parse_create_table_on_commit(&mut self) -> Result<OnCommit, ParserError> {
8940 if self.parse_keywords(&[Keyword::DELETE, Keyword::ROWS]) {
8941 Ok(OnCommit::DeleteRows)
8942 } else if self.parse_keywords(&[Keyword::PRESERVE, Keyword::ROWS]) {
8943 Ok(OnCommit::PreserveRows)
8944 } else if self.parse_keywords(&[Keyword::DROP]) {
8945 Ok(OnCommit::Drop)
8946 } else {
8947 parser_err!(
8948 "Expecting DELETE ROWS, PRESERVE ROWS or DROP",
8949 self.peek_token_ref()
8950 )
8951 }
8952 }
8953
8954 fn parse_partition_for_values(&mut self) -> Result<ForValues, ParserError> {
8960 if self.parse_keyword(Keyword::DEFAULT) {
8961 return Ok(ForValues::Default);
8962 }
8963
8964 self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;
8965
8966 if self.parse_keyword(Keyword::IN) {
8967 self.expect_token(&Token::LParen)?;
8969 if self.peek_token_ref().token == Token::RParen {
8970 return self.expected_ref("at least one value", self.peek_token_ref());
8971 }
8972 let values = self.parse_comma_separated(Parser::parse_expr)?;
8973 self.expect_token(&Token::RParen)?;
8974 Ok(ForValues::In(values))
8975 } else if self.parse_keyword(Keyword::FROM) {
8976 self.expect_token(&Token::LParen)?;
8978 if self.peek_token_ref().token == Token::RParen {
8979 return self.expected_ref("at least one value", self.peek_token_ref());
8980 }
8981 let from = self.parse_comma_separated(Parser::parse_partition_bound_value)?;
8982 self.expect_token(&Token::RParen)?;
8983 self.expect_keyword(Keyword::TO)?;
8984 self.expect_token(&Token::LParen)?;
8985 if self.peek_token_ref().token == Token::RParen {
8986 return self.expected_ref("at least one value", self.peek_token_ref());
8987 }
8988 let to = self.parse_comma_separated(Parser::parse_partition_bound_value)?;
8989 self.expect_token(&Token::RParen)?;
8990 Ok(ForValues::From { from, to })
8991 } else if self.parse_keyword(Keyword::WITH) {
8992 self.expect_token(&Token::LParen)?;
8994 self.expect_keyword(Keyword::MODULUS)?;
8995 let modulus = self.parse_literal_uint()?;
8996 self.expect_token(&Token::Comma)?;
8997 self.expect_keyword(Keyword::REMAINDER)?;
8998 let remainder = self.parse_literal_uint()?;
8999 self.expect_token(&Token::RParen)?;
9000 Ok(ForValues::With { modulus, remainder })
9001 } else {
9002 self.expected_ref("IN, FROM, or WITH after FOR VALUES", self.peek_token_ref())
9003 }
9004 }
9005
9006 fn parse_partition_bound_value(&mut self) -> Result<PartitionBoundValue, ParserError> {
9008 if self.parse_keyword(Keyword::MINVALUE) {
9009 Ok(PartitionBoundValue::MinValue)
9010 } else if self.parse_keyword(Keyword::MAXVALUE) {
9011 Ok(PartitionBoundValue::MaxValue)
9012 } else {
9013 Ok(PartitionBoundValue::Expr(self.parse_expr()?))
9014 }
9015 }
9016
    /// Parses the optional configuration clauses of `CREATE TABLE`:
    /// `INHERITS (...)`, `WITH (...)`, `TBLPROPERTIES (...)`,
    /// `PARTITION BY`, `CLUSTER BY`, `OPTIONS (...)`, and — for
    /// non-Hive dialects — plain key/value table options.
    ///
    /// Later option clauses overwrite `table_options` set by earlier ones
    /// (e.g. TBLPROPERTIES wins over WITH when both appear).
    fn parse_optional_create_table_config(
        &mut self,
    ) -> Result<CreateTableConfiguration, ParserError> {
        let mut table_options = CreateTableOptions::None;

        // PostgreSQL: INHERITS (parent1, parent2, ...)
        let inherits = if self.parse_keyword(Keyword::INHERITS) {
            Some(self.parse_parenthesized_qualified_column_list(IsOptional::Mandatory, false)?)
        } else {
            None
        };

        // WITH (...) options; empty vec means the clause was absent.
        let with_options = self.parse_options(Keyword::WITH)?;
        if !with_options.is_empty() {
            table_options = CreateTableOptions::With(with_options)
        }

        // Hive: TBLPROPERTIES (...)
        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
        if !table_properties.is_empty() {
            table_options = CreateTableOptions::TableProperties(table_properties);
        }
        // PARTITION BY <expr> for BigQuery/PostgreSQL-style dialects.
        let partition_by = if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
            && self.parse_keywords(&[Keyword::PARTITION, Keyword::BY])
        {
            Some(Box::new(self.parse_expr()?))
        } else {
            None
        };

        // BigQuery: CLUSTER BY and OPTIONS (...).
        let mut cluster_by = None;
        if dialect_of!(self is BigQueryDialect | GenericDialect) {
            if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
                cluster_by = Some(WrappedCollection::NoWrapping(
                    self.parse_comma_separated(|p| p.parse_expr())?,
                ));
            };

            // Peek rather than consume: parse_options eats the keyword itself.
            if let Token::Word(word) = &self.peek_token_ref().token {
                if word.keyword == Keyword::OPTIONS {
                    table_options =
                        CreateTableOptions::Options(self.parse_options(Keyword::OPTIONS)?)
                }
            };
        }

        // Fall back to plain `key = value` options (MySQL-style) when no
        // other option syntax matched, except for Hive.
        if !dialect_of!(self is HiveDialect) && table_options == CreateTableOptions::None {
            let plain_options = self.parse_plain_options()?;
            if !plain_options.is_empty() {
                table_options = CreateTableOptions::Plain(plain_options)
            }
        };

        Ok(CreateTableConfiguration {
            partition_by,
            cluster_by,
            inherits,
            table_options,
        })
    }
9081
    /// Parses a single plain (MySQL-style) table option, e.g.
    /// `ENGINE = InnoDB`, `COMMENT 'text'`, `AUTO_INCREMENT = 5`.
    ///
    /// Returns `Ok(None)` when the next tokens do not start a known
    /// option, consuming nothing in that case. The `=` between key and
    /// value is always optional.
    fn parse_plain_option(&mut self) -> Result<Option<SqlOption>, ParserError> {
        // MySQL: a literal `START TRANSACTION` option keyword pair.
        if self.parse_keywords(&[Keyword::START, Keyword::TRANSACTION]) {
            return Ok(Some(SqlOption::Ident(Ident::new("START TRANSACTION"))));
        }

        // COMMENT [=] '<text>' — whether `=` was present is preserved
        // in the AST so the statement round-trips exactly.
        if self.parse_keywords(&[Keyword::COMMENT]) {
            let has_eq = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let comment = match (has_eq, value.token) {
                (true, Token::SingleQuotedString(s)) => {
                    Ok(Some(SqlOption::Comment(CommentDef::WithEq(s))))
                }
                (false, Token::SingleQuotedString(s)) => {
                    Ok(Some(SqlOption::Comment(CommentDef::WithoutEq(s))))
                }
                (_, token) => {
                    self.expected("Token::SingleQuotedString", TokenWithSpan::wrap(token))
                }
            };
            return comment;
        }

        // ENGINE [=] <name> [(param, ...)]
        if self.parse_keywords(&[Keyword::ENGINE]) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let engine = match value.token {
                Token::Word(w) => {
                    // Optional parenthesized engine parameters.
                    let parameters = if self.peek_token_ref().token == Token::LParen {
                        self.parse_parenthesized_identifiers()?
                    } else {
                        vec![]
                    };

                    Ok(Some(SqlOption::NamedParenthesizedList(
                        NamedParenthesizedList {
                            key: Ident::new("ENGINE"),
                            name: Some(Ident::new(w.value)),
                            values: parameters,
                        },
                    )))
                }
                _ => {
                    return self.expected("Token::Word", value)?;
                }
            };

            return engine;
        }

        // TABLESPACE [=] <name> [STORAGE [=] {DISK | MEMORY}]
        if self.parse_keywords(&[Keyword::TABLESPACE]) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            let tablespace = match value.token {
                // The name may be a bare word or a quoted string.
                Token::Word(Word { value: name, .. }) | Token::SingleQuotedString(name) => {
                    let storage = match self.parse_keyword(Keyword::STORAGE) {
                        true => {
                            let _ = self.consume_token(&Token::Eq);
                            let storage_token = self.next_token();
                            match &storage_token.token {
                                Token::Word(w) => match w.value.to_uppercase().as_str() {
                                    "DISK" => Some(StorageType::Disk),
                                    "MEMORY" => Some(StorageType::Memory),
                                    _ => self
                                        .expected("Storage type (DISK or MEMORY)", storage_token)?,
                                },
                                _ => self.expected("Token::Word", storage_token)?,
                            }
                        }
                        false => None,
                    };

                    Ok(Some(SqlOption::TableSpace(TablespaceOption {
                        name,
                        storage,
                    })))
                }
                _ => {
                    return self.expected("Token::Word", value)?;
                }
            };

            return tablespace;
        }

        // UNION [=] (tbl, ...) — MERGE storage engine option.
        if self.parse_keyword(Keyword::UNION) {
            let _ = self.consume_token(&Token::Eq);
            let value = self.next_token();

            match value.token {
                Token::LParen => {
                    let tables: Vec<Ident> =
                        self.parse_comma_separated0(Parser::parse_identifier, Token::RParen)?;
                    self.expect_token(&Token::RParen)?;

                    return Ok(Some(SqlOption::NamedParenthesizedList(
                        NamedParenthesizedList {
                            key: Ident::new("UNION"),
                            name: None,
                            values: tables,
                        },
                    )));
                }
                _ => {
                    return self.expected("Token::LParen", value)?;
                }
            }
        }

        // Remaining options are all simple `key [=] value` pairs; map the
        // keyword(s) to a canonical key string. Multi-word keys must be
        // tried before their single-word prefixes (e.g. DEFAULT CHARSET
        // before CHARSET).
        let key = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARSET]) {
            Ident::new("DEFAULT CHARSET")
        } else if self.parse_keyword(Keyword::CHARSET) {
            Ident::new("CHARSET")
        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARACTER, Keyword::SET]) {
            Ident::new("DEFAULT CHARACTER SET")
        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            Ident::new("CHARACTER SET")
        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
            Ident::new("DEFAULT COLLATE")
        } else if self.parse_keyword(Keyword::COLLATE) {
            Ident::new("COLLATE")
        } else if self.parse_keywords(&[Keyword::DATA, Keyword::DIRECTORY]) {
            Ident::new("DATA DIRECTORY")
        } else if self.parse_keywords(&[Keyword::INDEX, Keyword::DIRECTORY]) {
            Ident::new("INDEX DIRECTORY")
        } else if self.parse_keyword(Keyword::KEY_BLOCK_SIZE) {
            Ident::new("KEY_BLOCK_SIZE")
        } else if self.parse_keyword(Keyword::ROW_FORMAT) {
            Ident::new("ROW_FORMAT")
        } else if self.parse_keyword(Keyword::PACK_KEYS) {
            Ident::new("PACK_KEYS")
        } else if self.parse_keyword(Keyword::STATS_AUTO_RECALC) {
            Ident::new("STATS_AUTO_RECALC")
        } else if self.parse_keyword(Keyword::STATS_PERSISTENT) {
            Ident::new("STATS_PERSISTENT")
        } else if self.parse_keyword(Keyword::STATS_SAMPLE_PAGES) {
            Ident::new("STATS_SAMPLE_PAGES")
        } else if self.parse_keyword(Keyword::DELAY_KEY_WRITE) {
            Ident::new("DELAY_KEY_WRITE")
        } else if self.parse_keyword(Keyword::COMPRESSION) {
            Ident::new("COMPRESSION")
        } else if self.parse_keyword(Keyword::ENCRYPTION) {
            Ident::new("ENCRYPTION")
        } else if self.parse_keyword(Keyword::MAX_ROWS) {
            Ident::new("MAX_ROWS")
        } else if self.parse_keyword(Keyword::MIN_ROWS) {
            Ident::new("MIN_ROWS")
        } else if self.parse_keyword(Keyword::AUTOEXTEND_SIZE) {
            Ident::new("AUTOEXTEND_SIZE")
        } else if self.parse_keyword(Keyword::AVG_ROW_LENGTH) {
            Ident::new("AVG_ROW_LENGTH")
        } else if self.parse_keyword(Keyword::CHECKSUM) {
            Ident::new("CHECKSUM")
        } else if self.parse_keyword(Keyword::CONNECTION) {
            Ident::new("CONNECTION")
        } else if self.parse_keyword(Keyword::ENGINE_ATTRIBUTE) {
            Ident::new("ENGINE_ATTRIBUTE")
        } else if self.parse_keyword(Keyword::PASSWORD) {
            Ident::new("PASSWORD")
        } else if self.parse_keyword(Keyword::SECONDARY_ENGINE_ATTRIBUTE) {
            Ident::new("SECONDARY_ENGINE_ATTRIBUTE")
        } else if self.parse_keyword(Keyword::INSERT_METHOD) {
            Ident::new("INSERT_METHOD")
        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
            Ident::new("AUTO_INCREMENT")
        } else {
            // Not a recognized option key.
            return Ok(None);
        };

        let _ = self.consume_token(&Token::Eq);

        // The value is either a literal or a bare identifier.
        let value = match self
            .maybe_parse(|parser| parser.parse_value())?
            .map(Expr::Value)
        {
            Some(expr) => expr,
            None => Expr::Identifier(self.parse_identifier()?),
        };

        Ok(Some(SqlOption::KeyValue { key, value }))
    }
9274
9275 pub fn parse_plain_options(&mut self) -> Result<Vec<SqlOption>, ParserError> {
9277 let mut options = Vec::new();
9278
9279 while let Some(option) = self.parse_plain_option()? {
9280 options.push(option);
9281 let _ = self.consume_token(&Token::Comma);
9284 }
9285
9286 Ok(options)
9287 }
9288
9289 pub fn parse_optional_inline_comment(&mut self) -> Result<Option<CommentDef>, ParserError> {
9291 let comment = if self.parse_keyword(Keyword::COMMENT) {
9292 let has_eq = self.consume_token(&Token::Eq);
9293 let comment = self.parse_comment_value()?;
9294 Some(if has_eq {
9295 CommentDef::WithEq(comment)
9296 } else {
9297 CommentDef::WithoutEq(comment)
9298 })
9299 } else {
9300 None
9301 };
9302 Ok(comment)
9303 }
9304
9305 pub fn parse_comment_value(&mut self) -> Result<String, ParserError> {
9307 let next_token = self.next_token();
9308 let value = match next_token.token {
9309 Token::SingleQuotedString(str) => str,
9310 Token::DollarQuotedString(str) => str.value,
9311 _ => self.expected("string literal", next_token)?,
9312 };
9313 Ok(value)
9314 }
9315
    /// Parses an optional parenthesized list of procedure parameters.
    ///
    /// Returns `Some(vec![])` both when there is no `(` and when the list
    /// is empty (`()`); the two cases are not distinguished.
    pub fn parse_optional_procedure_parameters(
        &mut self,
    ) -> Result<Option<Vec<ProcedureParam>>, ParserError> {
        let mut params = vec![];
        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
            return Ok(Some(params));
        }
        loop {
            // A leading word starts a parameter definition.
            if let Token::Word(_) = &self.peek_token_ref().token {
                params.push(self.parse_procedure_param()?)
            }
            let comma = self.consume_token(&Token::Comma);
            if self.consume_token(&Token::RParen) {
                // End of the parameter list (a trailing comma is tolerated).
                break;
            } else if !comma {
                return self.expected_ref(
                    "',' or ')' after parameter definition",
                    self.peek_token_ref(),
                );
            }
        }
        Ok(Some(params))
    }
9341
    /// Parses the parenthesized list of column definitions and table
    /// constraints of a `CREATE TABLE` statement.
    ///
    /// Returns empty vectors when there is no `(` at all (e.g. CTAS) or
    /// when the list is empty (`()`).
    pub fn parse_columns(&mut self) -> Result<(Vec<ColumnDef>, Vec<TableConstraint>), ParserError> {
        let mut columns = vec![];
        let mut constraints = vec![];
        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
            return Ok((columns, constraints));
        }

        loop {
            // Try a table constraint first (e.g. PRIMARY KEY (...));
            // otherwise a leading word must start a column definition.
            if let Some(constraint) = self.parse_optional_table_constraint()? {
                constraints.push(constraint);
            } else if let Token::Word(_) = &self.peek_token_ref().token {
                columns.push(self.parse_column_def()?);
            } else {
                return self.expected_ref(
                    "column name or constraint definition",
                    self.peek_token_ref(),
                );
            }

            let comma = self.consume_token(&Token::Comma);
            let rparen = self.peek_token_ref().token == Token::RParen;

            if !comma && !rparen {
                return self
                    .expected_ref("',' or ')' after column definition", self.peek_token_ref());
            };

            // Close the list; a trailing comma before `)` is only accepted
            // when the dialect or the parser options allow it.
            if rparen
                && (!comma
                    || self.dialect.supports_column_definition_trailing_commas()
                    || self.options.trailing_commas)
            {
                let _ = self.consume_token(&Token::RParen);
                break;
            }
        }

        Ok((columns, constraints))
    }
9382
9383 pub fn parse_procedure_param(&mut self) -> Result<ProcedureParam, ParserError> {
9385 let mode = if self.parse_keyword(Keyword::IN) {
9386 Some(ArgMode::In)
9387 } else if self.parse_keyword(Keyword::OUT) {
9388 Some(ArgMode::Out)
9389 } else if self.parse_keyword(Keyword::INOUT) {
9390 Some(ArgMode::InOut)
9391 } else {
9392 None
9393 };
9394 let name = self.parse_identifier()?;
9395 let data_type = self.parse_data_type()?;
9396 let default = if self.consume_token(&Token::Eq) {
9397 Some(self.parse_expr()?)
9398 } else {
9399 None
9400 };
9401
9402 Ok(ProcedureParam {
9403 name,
9404 data_type,
9405 mode,
9406 default,
9407 })
9408 }
9409
    /// Parses a column definition where the data type is required
    /// (the SQLite no-type special case is still handled internally).
    pub fn parse_column_def(&mut self) -> Result<ColumnDef, ParserError> {
        self.parse_column_def_inner(false)
    }
9414
    /// Parses a column definition: name, data type, and zero or more
    /// column options, each optionally named via `CONSTRAINT <name>`.
    ///
    /// When `optional_data_type` is true (e.g. Hive `PARTITIONED BY`
    /// columns), a missing type is recorded as `DataType::Unspecified`.
    fn parse_column_def_inner(
        &mut self,
        optional_data_type: bool,
    ) -> Result<ColumnDef, ParserError> {
        let col_name = self.parse_identifier()?;
        let data_type = if self.is_column_type_sqlite_unspecified() {
            // SQLite permits columns declared without any type.
            DataType::Unspecified
        } else if optional_data_type {
            self.maybe_parse(|parser| parser.parse_data_type())?
                .unwrap_or(DataType::Unspecified)
        } else {
            self.parse_data_type()?
        };
        let mut options = vec![];
        loop {
            if self.parse_keyword(Keyword::CONSTRAINT) {
                // Named column option: CONSTRAINT <name> <option>.
                let name = Some(self.parse_identifier()?);
                if let Some(option) = self.parse_optional_column_option()? {
                    options.push(ColumnOptionDef { name, option });
                } else {
                    return self.expected_ref(
                        "constraint details after CONSTRAINT <name>",
                        self.peek_token_ref(),
                    );
                }
            } else if let Some(option) = self.parse_optional_column_option()? {
                options.push(ColumnOptionDef { name: None, option });
            } else {
                // No more options.
                break;
            };
        }
        Ok(ColumnDef {
            name: col_name,
            data_type,
            options,
        })
    }
9452
9453 fn is_column_type_sqlite_unspecified(&mut self) -> bool {
9454 if dialect_of!(self is SQLiteDialect) {
9455 match &self.peek_token_ref().token {
9456 Token::Word(word) => matches!(
9457 word.keyword,
9458 Keyword::CONSTRAINT
9459 | Keyword::PRIMARY
9460 | Keyword::NOT
9461 | Keyword::UNIQUE
9462 | Keyword::CHECK
9463 | Keyword::DEFAULT
9464 | Keyword::COLLATE
9465 | Keyword::REFERENCES
9466 | Keyword::GENERATED
9467 | Keyword::AS
9468 ),
9469 _ => true, }
9471 } else {
9472 false
9473 }
9474 }
9475
9476 pub fn parse_optional_column_option(&mut self) -> Result<Option<ColumnOption>, ParserError> {
9478 if let Some(option) = self.dialect.parse_column_option(self)? {
9479 return option;
9480 }
9481
9482 self.with_state(
9483 ColumnDefinition,
9484 |parser| -> Result<Option<ColumnOption>, ParserError> {
9485 parser.parse_optional_column_option_inner()
9486 },
9487 )
9488 }
9489
9490 fn parse_optional_column_option_inner(&mut self) -> Result<Option<ColumnOption>, ParserError> {
9491 if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
9492 Ok(Some(ColumnOption::CharacterSet(
9493 self.parse_object_name(false)?,
9494 )))
9495 } else if self.parse_keywords(&[Keyword::COLLATE]) {
9496 Ok(Some(ColumnOption::Collation(
9497 self.parse_object_name(false)?,
9498 )))
9499 } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
9500 Ok(Some(ColumnOption::NotNull))
9501 } else if self.parse_keywords(&[Keyword::COMMENT]) {
9502 Ok(Some(ColumnOption::Comment(self.parse_comment_value()?)))
9503 } else if self.parse_keyword(Keyword::NULL) {
9504 Ok(Some(ColumnOption::Null))
9505 } else if self.parse_keyword(Keyword::DEFAULT) {
9506 Ok(Some(ColumnOption::Default(self.parse_expr()?)))
9507 } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
9508 && self.parse_keyword(Keyword::MATERIALIZED)
9509 {
9510 Ok(Some(ColumnOption::Materialized(self.parse_expr()?)))
9511 } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
9512 && self.parse_keyword(Keyword::ALIAS)
9513 {
9514 Ok(Some(ColumnOption::Alias(self.parse_expr()?)))
9515 } else if dialect_of!(self is ClickHouseDialect| GenericDialect)
9516 && self.parse_keyword(Keyword::EPHEMERAL)
9517 {
9518 if matches!(self.peek_token_ref().token, Token::Comma | Token::RParen) {
9521 Ok(Some(ColumnOption::Ephemeral(None)))
9522 } else {
9523 Ok(Some(ColumnOption::Ephemeral(Some(self.parse_expr()?))))
9524 }
9525 } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
9526 let characteristics = self.parse_constraint_characteristics()?;
9527 Ok(Some(
9528 PrimaryKeyConstraint {
9529 name: None,
9530 index_name: None,
9531 index_type: None,
9532 columns: vec![],
9533 index_options: vec![],
9534 characteristics,
9535 }
9536 .into(),
9537 ))
9538 } else if self.parse_keyword(Keyword::UNIQUE) {
9539 let index_type_display =
9540 if self.dialect.supports_key_column_option() && self.parse_keyword(Keyword::KEY) {
9541 KeyOrIndexDisplay::Key
9542 } else {
9543 KeyOrIndexDisplay::None
9544 };
9545 let characteristics = self.parse_constraint_characteristics()?;
9546 Ok(Some(
9547 UniqueConstraint {
9548 name: None,
9549 index_name: None,
9550 index_type_display,
9551 index_type: None,
9552 columns: vec![],
9553 index_options: vec![],
9554 characteristics,
9555 nulls_distinct: NullsDistinctOption::None,
9556 }
9557 .into(),
9558 ))
9559 } else if self.dialect.supports_key_column_option() && self.parse_keyword(Keyword::KEY) {
9560 let characteristics = self.parse_constraint_characteristics()?;
9563 Ok(Some(
9564 PrimaryKeyConstraint {
9565 name: None,
9566 index_name: None,
9567 index_type: None,
9568 columns: vec![],
9569 index_options: vec![],
9570 characteristics,
9571 }
9572 .into(),
9573 ))
9574 } else if self.parse_keyword(Keyword::REFERENCES) {
9575 let foreign_table = self.parse_object_name(false)?;
9576 let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
9579 let mut match_kind = None;
9580 let mut on_delete = None;
9581 let mut on_update = None;
9582 loop {
9583 if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
9584 match_kind = Some(self.parse_match_kind()?);
9585 } else if on_delete.is_none()
9586 && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
9587 {
9588 on_delete = Some(self.parse_referential_action()?);
9589 } else if on_update.is_none()
9590 && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
9591 {
9592 on_update = Some(self.parse_referential_action()?);
9593 } else {
9594 break;
9595 }
9596 }
9597 let characteristics = self.parse_constraint_characteristics()?;
9598
9599 Ok(Some(
9600 ForeignKeyConstraint {
9601 name: None, index_name: None, columns: vec![], foreign_table,
9605 referred_columns,
9606 on_delete,
9607 on_update,
9608 match_kind,
9609 characteristics,
9610 }
9611 .into(),
9612 ))
9613 } else if self.parse_keyword(Keyword::CHECK) {
9614 self.expect_token(&Token::LParen)?;
9615 let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
9617 self.expect_token(&Token::RParen)?;
9618
9619 let enforced = if self.parse_keyword(Keyword::ENFORCED) {
9620 Some(true)
9621 } else if self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED]) {
9622 Some(false)
9623 } else {
9624 None
9625 };
9626
9627 Ok(Some(
9628 CheckConstraint {
9629 name: None, expr: Box::new(expr),
9631 enforced,
9632 }
9633 .into(),
9634 ))
9635 } else if self.parse_keyword(Keyword::AUTO_INCREMENT)
9636 && dialect_of!(self is MySqlDialect | GenericDialect)
9637 {
9638 Ok(Some(ColumnOption::DialectSpecific(vec![
9640 Token::make_keyword("AUTO_INCREMENT"),
9641 ])))
9642 } else if self.parse_keyword(Keyword::AUTOINCREMENT)
9643 && dialect_of!(self is SQLiteDialect | GenericDialect)
9644 {
9645 Ok(Some(ColumnOption::DialectSpecific(vec![
9647 Token::make_keyword("AUTOINCREMENT"),
9648 ])))
9649 } else if self.parse_keyword(Keyword::ASC)
9650 && self.dialect.supports_asc_desc_in_column_definition()
9651 {
9652 Ok(Some(ColumnOption::DialectSpecific(vec![
9654 Token::make_keyword("ASC"),
9655 ])))
9656 } else if self.parse_keyword(Keyword::DESC)
9657 && self.dialect.supports_asc_desc_in_column_definition()
9658 {
9659 Ok(Some(ColumnOption::DialectSpecific(vec![
9661 Token::make_keyword("DESC"),
9662 ])))
9663 } else if self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
9664 && dialect_of!(self is MySqlDialect | GenericDialect)
9665 {
9666 let expr = self.parse_expr()?;
9667 Ok(Some(ColumnOption::OnUpdate(expr)))
9668 } else if self.parse_keyword(Keyword::GENERATED) {
9669 self.parse_optional_column_option_generated()
9670 } else if dialect_of!(self is BigQueryDialect | GenericDialect)
9671 && self.parse_keyword(Keyword::OPTIONS)
9672 {
9673 self.prev_token();
9674 Ok(Some(ColumnOption::Options(
9675 self.parse_options(Keyword::OPTIONS)?,
9676 )))
9677 } else if self.parse_keyword(Keyword::AS)
9678 && dialect_of!(self is MySqlDialect | SQLiteDialect | DuckDbDialect | GenericDialect)
9679 {
9680 self.parse_optional_column_option_as()
9681 } else if self.parse_keyword(Keyword::SRID)
9682 && dialect_of!(self is MySqlDialect | GenericDialect)
9683 {
9684 Ok(Some(ColumnOption::Srid(Box::new(self.parse_expr()?))))
9685 } else if self.parse_keyword(Keyword::IDENTITY)
9686 && dialect_of!(self is MsSqlDialect | GenericDialect)
9687 {
9688 let parameters = if self.consume_token(&Token::LParen) {
9689 let seed = self.parse_number()?;
9690 self.expect_token(&Token::Comma)?;
9691 let increment = self.parse_number()?;
9692 self.expect_token(&Token::RParen)?;
9693
9694 Some(IdentityPropertyFormatKind::FunctionCall(
9695 IdentityParameters { seed, increment },
9696 ))
9697 } else {
9698 None
9699 };
9700 Ok(Some(ColumnOption::Identity(
9701 IdentityPropertyKind::Identity(IdentityProperty {
9702 parameters,
9703 order: None,
9704 }),
9705 )))
9706 } else if dialect_of!(self is SQLiteDialect | GenericDialect)
9707 && self.parse_keywords(&[Keyword::ON, Keyword::CONFLICT])
9708 {
9709 Ok(Some(ColumnOption::OnConflict(
9711 self.expect_one_of_keywords(&[
9712 Keyword::ROLLBACK,
9713 Keyword::ABORT,
9714 Keyword::FAIL,
9715 Keyword::IGNORE,
9716 Keyword::REPLACE,
9717 ])?,
9718 )))
9719 } else if self.parse_keyword(Keyword::INVISIBLE) {
9720 Ok(Some(ColumnOption::Invisible))
9721 } else {
9722 Ok(None)
9723 }
9724 }
9725
9726 pub(crate) fn parse_tag(&mut self) -> Result<Tag, ParserError> {
9727 let name = self.parse_object_name(false)?;
9728 self.expect_token(&Token::Eq)?;
9729 let value = self.parse_literal_string()?;
9730
9731 Ok(Tag::new(name, value))
9732 }
9733
    /// Parses the remainder of a column option after the `GENERATED` keyword
    /// has already been consumed.
    ///
    /// Supports `GENERATED {ALWAYS | BY DEFAULT} AS IDENTITY [(<seq opts>)]`
    /// and `GENERATED ALWAYS AS (<expr>) [STORED | VIRTUAL]`.
    /// Returns `Ok(None)` when none of the expected forms follows.
    fn parse_optional_column_option_generated(
        &mut self,
    ) -> Result<Option<ColumnOption>, ParserError> {
        if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS, Keyword::IDENTITY]) {
            // Optional parenthesized sequence options, e.g. `(START WITH 1)`.
            // `expect_token(...).is_ok()` leaves the stream untouched on failure,
            // so a missing `(` simply means "no options".
            let mut sequence_options = vec![];
            if self.expect_token(&Token::LParen).is_ok() {
                sequence_options = self.parse_create_sequence_options()?;
                self.expect_token(&Token::RParen)?;
            }
            Ok(Some(ColumnOption::Generated {
                generated_as: GeneratedAs::Always,
                sequence_options: Some(sequence_options),
                generation_expr: None,
                generation_expr_mode: None,
                generated_keyword: true,
            }))
        } else if self.parse_keywords(&[
            Keyword::BY,
            Keyword::DEFAULT,
            Keyword::AS,
            Keyword::IDENTITY,
        ]) {
            // Same shape as the ALWAYS variant, only the `generated_as`
            // discriminant differs.
            let mut sequence_options = vec![];
            if self.expect_token(&Token::LParen).is_ok() {
                sequence_options = self.parse_create_sequence_options()?;
                self.expect_token(&Token::RParen)?;
            }
            Ok(Some(ColumnOption::Generated {
                generated_as: GeneratedAs::ByDefault,
                sequence_options: Some(sequence_options),
                generation_expr: None,
                generation_expr_mode: None,
                generated_keyword: true,
            }))
        } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS]) {
            if self.expect_token(&Token::LParen).is_ok() {
                // Generation expressions are ordinary expressions, so parse in
                // the Normal state rather than the column-definition state.
                let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_expr())?;
                self.expect_token(&Token::RParen)?;
                let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
                    Ok((
                        GeneratedAs::ExpStored,
                        Some(GeneratedExpressionMode::Stored),
                    ))
                } else if dialect_of!(self is PostgreSqlDialect) {
                    // Postgres mandates STORED for expression-generated columns.
                    self.expected_ref("STORED", self.peek_token_ref())
                } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
                    Ok((GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual)))
                } else {
                    Ok((GeneratedAs::Always, None))
                }?;

                Ok(Some(ColumnOption::Generated {
                    generated_as: gen_as,
                    sequence_options: None,
                    generation_expr: Some(expr),
                    generation_expr_mode: expr_mode,
                    generated_keyword: true,
                }))
            } else {
                // NOTE(review): `ALWAYS AS` has already been consumed here, yet
                // no option is produced — TODO confirm callers tolerate this.
                Ok(None)
            }
        } else {
            Ok(None)
        }
    }
9800
9801 fn parse_optional_column_option_as(&mut self) -> Result<Option<ColumnOption>, ParserError> {
9802 self.expect_token(&Token::LParen)?;
9804 let expr = self.parse_expr()?;
9805 self.expect_token(&Token::RParen)?;
9806
9807 let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
9808 (
9809 GeneratedAs::ExpStored,
9810 Some(GeneratedExpressionMode::Stored),
9811 )
9812 } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
9813 (GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual))
9814 } else {
9815 (GeneratedAs::Always, None)
9816 };
9817
9818 Ok(Some(ColumnOption::Generated {
9819 generated_as: gen_as,
9820 sequence_options: None,
9821 generation_expr: Some(expr),
9822 generation_expr_mode: expr_mode,
9823 generated_keyword: false,
9824 }))
9825 }
9826
9827 pub fn parse_optional_clustered_by(&mut self) -> Result<Option<ClusteredBy>, ParserError> {
9829 let clustered_by = if dialect_of!(self is HiveDialect|GenericDialect)
9830 && self.parse_keywords(&[Keyword::CLUSTERED, Keyword::BY])
9831 {
9832 let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
9833
9834 let sorted_by = if self.parse_keywords(&[Keyword::SORTED, Keyword::BY]) {
9835 self.expect_token(&Token::LParen)?;
9836 let sorted_by_columns = self.parse_comma_separated(|p| p.parse_order_by_expr())?;
9837 self.expect_token(&Token::RParen)?;
9838 Some(sorted_by_columns)
9839 } else {
9840 None
9841 };
9842
9843 self.expect_keyword_is(Keyword::INTO)?;
9844 let num_buckets = self.parse_number_value()?.value;
9845 self.expect_keyword_is(Keyword::BUCKETS)?;
9846 Some(ClusteredBy {
9847 columns,
9848 sorted_by,
9849 num_buckets,
9850 })
9851 } else {
9852 None
9853 };
9854 Ok(clustered_by)
9855 }
9856
9857 pub fn parse_referential_action(&mut self) -> Result<ReferentialAction, ParserError> {
9861 if self.parse_keyword(Keyword::RESTRICT) {
9862 Ok(ReferentialAction::Restrict)
9863 } else if self.parse_keyword(Keyword::CASCADE) {
9864 Ok(ReferentialAction::Cascade)
9865 } else if self.parse_keywords(&[Keyword::SET, Keyword::NULL]) {
9866 Ok(ReferentialAction::SetNull)
9867 } else if self.parse_keywords(&[Keyword::NO, Keyword::ACTION]) {
9868 Ok(ReferentialAction::NoAction)
9869 } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
9870 Ok(ReferentialAction::SetDefault)
9871 } else {
9872 self.expected_ref(
9873 "one of RESTRICT, CASCADE, SET NULL, NO ACTION or SET DEFAULT",
9874 self.peek_token_ref(),
9875 )
9876 }
9877 }
9878
9879 pub fn parse_match_kind(&mut self) -> Result<ConstraintReferenceMatchKind, ParserError> {
9881 if self.parse_keyword(Keyword::FULL) {
9882 Ok(ConstraintReferenceMatchKind::Full)
9883 } else if self.parse_keyword(Keyword::PARTIAL) {
9884 Ok(ConstraintReferenceMatchKind::Partial)
9885 } else if self.parse_keyword(Keyword::SIMPLE) {
9886 Ok(ConstraintReferenceMatchKind::Simple)
9887 } else {
9888 self.expected_ref("one of FULL, PARTIAL or SIMPLE", self.peek_token_ref())
9889 }
9890 }
9891
9892 fn parse_constraint_using_index(
9895 &mut self,
9896 name: Option<Ident>,
9897 ) -> Result<ConstraintUsingIndex, ParserError> {
9898 let index_name = self.parse_identifier()?;
9899 let characteristics = self.parse_constraint_characteristics()?;
9900 Ok(ConstraintUsingIndex {
9901 name,
9902 index_name,
9903 characteristics,
9904 })
9905 }
9906
    /// Parses any combination of constraint characteristics —
    /// `[NOT] DEFERRABLE`, `INITIALLY {DEFERRED | IMMEDIATE}`, and
    /// `[NOT] ENFORCED` — in any order, each at most once.
    ///
    /// Returns `Ok(None)` when no characteristic keyword is present.
    pub fn parse_constraint_characteristics(
        &mut self,
    ) -> Result<Option<ConstraintCharacteristics>, ParserError> {
        let mut cc = ConstraintCharacteristics::default();

        loop {
            // NOTE: `NOT DEFERRABLE` must be tried before bare `DEFERRABLE`,
            // since `parse_keyword(DEFERRABLE)` would not match at `NOT`.
            // Each `is_none()` guard ensures a characteristic is consumed
            // at most once; an unrecognized token ends the loop.
            if cc.deferrable.is_none() && self.parse_keywords(&[Keyword::NOT, Keyword::DEFERRABLE])
            {
                cc.deferrable = Some(false);
            } else if cc.deferrable.is_none() && self.parse_keyword(Keyword::DEFERRABLE) {
                cc.deferrable = Some(true);
            } else if cc.initially.is_none() && self.parse_keyword(Keyword::INITIALLY) {
                // `INITIALLY` must be followed by DEFERRED or IMMEDIATE.
                if self.parse_keyword(Keyword::DEFERRED) {
                    cc.initially = Some(DeferrableInitial::Deferred);
                } else if self.parse_keyword(Keyword::IMMEDIATE) {
                    cc.initially = Some(DeferrableInitial::Immediate);
                } else {
                    self.expected_ref("one of DEFERRED or IMMEDIATE", self.peek_token_ref())?;
                }
            } else if cc.enforced.is_none() && self.parse_keyword(Keyword::ENFORCED) {
                cc.enforced = Some(true);
            } else if cc.enforced.is_none()
                && self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED])
            {
                cc.enforced = Some(false);
            } else {
                break;
            }
        }

        // Only report characteristics if at least one field was set.
        if cc.deferrable.is_some() || cc.initially.is_some() || cc.enforced.is_some() {
            Ok(Some(cc))
        } else {
            Ok(None)
        }
    }
9944
    /// Parses an optional table-level constraint:
    /// `[CONSTRAINT [<name>]] {UNIQUE | PRIMARY KEY | FOREIGN KEY | CHECK |
    /// INDEX/KEY | FULLTEXT/SPATIAL | EXCLUDE} ...`.
    ///
    /// Returns `Ok(None)` (with no tokens consumed) when the next tokens do
    /// not start a constraint, and an error when `CONSTRAINT <name>` was seen
    /// but no constraint body follows.
    pub fn parse_optional_table_constraint(
        &mut self,
    ) -> Result<Option<TableConstraint>, ParserError> {
        // Optional `CONSTRAINT [<name>]` prefix. Some dialects allow the bare
        // keyword with no name when a constraint body keyword follows.
        let name = if self.parse_keyword(Keyword::CONSTRAINT) {
            if self.dialect.supports_constraint_keyword_without_name()
                && self
                    .peek_one_of_keywords(&[
                        Keyword::CHECK,
                        Keyword::PRIMARY,
                        Keyword::UNIQUE,
                        Keyword::FOREIGN,
                    ])
                    .is_some()
            {
                None
            } else {
                Some(self.parse_identifier()?)
            }
        } else {
            None
        };

        // Dispatch on the next token; `prev_token` in the fallthrough arm
        // undoes this consumption when nothing matched.
        let next_token = self.next_token();
        match next_token.token {
            Token::Word(w) if w.keyword == Keyword::UNIQUE => {
                // `UNIQUE USING INDEX <name>` — references an existing index.
                if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
                    return Ok(Some(TableConstraint::UniqueUsingIndex(
                        self.parse_constraint_using_index(name)?,
                    )));
                }

                // `UNIQUE [KEY | INDEX]` — the KEY/INDEX suffix is a
                // MySQL-ism; reject it elsewhere.
                let index_type_display = self.parse_index_type_display();
                if !dialect_of!(self is GenericDialect | MySqlDialect)
                    && !index_type_display.is_none()
                {
                    return self.expected_ref(
                        "`index_name` or `(column_name [, ...])`",
                        self.peek_token_ref(),
                    );
                }

                // Postgres `NULLS [NOT] DISTINCT`.
                let nulls_distinct = self.parse_optional_nulls_distinct()?;

                let index_name = self.parse_optional_ident()?;
                let index_type = self.parse_optional_using_then_index_type()?;

                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;
                let characteristics = self.parse_constraint_characteristics()?;
                Ok(Some(
                    UniqueConstraint {
                        name,
                        index_name,
                        index_type_display,
                        index_type,
                        columns,
                        index_options,
                        characteristics,
                        nulls_distinct,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::PRIMARY => {
                // `PRIMARY` must be followed by `KEY`.
                self.expect_keyword_is(Keyword::KEY)?;

                // `PRIMARY KEY USING INDEX <name>`.
                if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
                    return Ok(Some(TableConstraint::PrimaryKeyUsingIndex(
                        self.parse_constraint_using_index(name)?,
                    )));
                }

                let index_name = self.parse_optional_ident()?;
                let index_type = self.parse_optional_using_then_index_type()?;

                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;
                let characteristics = self.parse_constraint_characteristics()?;
                Ok(Some(
                    PrimaryKeyConstraint {
                        name,
                        index_name,
                        index_type,
                        columns,
                        index_options,
                        characteristics,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::FOREIGN => {
                self.expect_keyword_is(Keyword::KEY)?;
                let index_name = self.parse_optional_ident()?;
                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
                self.expect_keyword_is(Keyword::REFERENCES)?;
                let foreign_table = self.parse_object_name(false)?;
                let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
                let mut match_kind = None;
                let mut on_delete = None;
                let mut on_update = None;
                // `MATCH`, `ON DELETE`, `ON UPDATE` may appear in any order,
                // each at most once.
                loop {
                    if match_kind.is_none() && self.parse_keyword(Keyword::MATCH) {
                        match_kind = Some(self.parse_match_kind()?);
                    } else if on_delete.is_none()
                        && self.parse_keywords(&[Keyword::ON, Keyword::DELETE])
                    {
                        on_delete = Some(self.parse_referential_action()?);
                    } else if on_update.is_none()
                        && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
                    {
                        on_update = Some(self.parse_referential_action()?);
                    } else {
                        break;
                    }
                }

                let characteristics = self.parse_constraint_characteristics()?;

                Ok(Some(
                    ForeignKeyConstraint {
                        name,
                        index_name,
                        columns,
                        foreign_table,
                        referred_columns,
                        on_delete,
                        on_update,
                        match_kind,
                        characteristics,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::CHECK => {
                // `CHECK (<expr>) [[NOT] ENFORCED]`.
                self.expect_token(&Token::LParen)?;
                let expr = Box::new(self.parse_expr()?);
                self.expect_token(&Token::RParen)?;

                let enforced = if self.parse_keyword(Keyword::ENFORCED) {
                    Some(true)
                } else if self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED]) {
                    Some(false)
                } else {
                    None
                };

                Ok(Some(
                    CheckConstraint {
                        name,
                        expr,
                        enforced,
                    }
                    .into(),
                ))
            }
            // Bare `INDEX`/`KEY` table index (MySQL); only valid when no
            // `CONSTRAINT <name>` prefix was given.
            Token::Word(w)
                if (w.keyword == Keyword::INDEX || w.keyword == Keyword::KEY)
                    && dialect_of!(self is GenericDialect | MySqlDialect)
                    && name.is_none() =>
            {
                let display_as_key = w.keyword == Keyword::KEY;

                // A following `USING` is the index-type clause, not a name.
                let name = match &self.peek_token_ref().token {
                    Token::Word(word) if word.keyword == Keyword::USING => None,
                    _ => self.parse_optional_ident()?,
                };

                let index_type = self.parse_optional_using_then_index_type()?;
                let columns = self.parse_parenthesized_index_column_list()?;
                let index_options = self.parse_index_options()?;

                Ok(Some(
                    IndexConstraint {
                        display_as_key,
                        name,
                        index_type,
                        columns,
                        index_options,
                    }
                    .into(),
                ))
            }
            Token::Word(w)
                if (w.keyword == Keyword::FULLTEXT || w.keyword == Keyword::SPATIAL)
                    && dialect_of!(self is GenericDialect | MySqlDialect) =>
            {
                // MySQL forbids a constraint name on FULLTEXT/SPATIAL indexes.
                if let Some(name) = name {
                    return self.expected(
                        "FULLTEXT or SPATIAL option without constraint name",
                        TokenWithSpan {
                            token: Token::make_keyword(&name.to_string()),
                            span: next_token.span,
                        },
                    );
                }

                let fulltext = w.keyword == Keyword::FULLTEXT;

                let index_type_display = self.parse_index_type_display();

                let opt_index_name = self.parse_optional_ident()?;

                let columns = self.parse_parenthesized_index_column_list()?;

                Ok(Some(
                    FullTextOrSpatialConstraint {
                        fulltext,
                        index_type_display,
                        opt_index_name,
                        columns,
                    }
                    .into(),
                ))
            }
            Token::Word(w) if w.keyword == Keyword::EXCLUDE => {
                // Postgres `EXCLUDE [USING <method>] (<expr> WITH <op>, ...)
                // [INCLUDE (...)] [WHERE (...)] [<characteristics>]`.
                let index_method = if self.parse_keyword(Keyword::USING) {
                    Some(self.parse_identifier()?)
                } else {
                    None
                };

                self.expect_token(&Token::LParen)?;
                let elements =
                    self.parse_comma_separated(|p| p.parse_exclusion_element())?;
                self.expect_token(&Token::RParen)?;

                let include = if self.parse_keyword(Keyword::INCLUDE) {
                    self.expect_token(&Token::LParen)?;
                    let cols = self.parse_comma_separated(|p| p.parse_identifier())?;
                    self.expect_token(&Token::RParen)?;
                    cols
                } else {
                    vec![]
                };

                let where_clause = if self.parse_keyword(Keyword::WHERE) {
                    self.expect_token(&Token::LParen)?;
                    let predicate = self.parse_expr()?;
                    self.expect_token(&Token::RParen)?;
                    Some(Box::new(predicate))
                } else {
                    None
                };

                let characteristics = self.parse_constraint_characteristics()?;

                Ok(Some(
                    ExclusionConstraint {
                        name,
                        index_method,
                        elements,
                        include,
                        where_clause,
                        characteristics,
                    }
                    .into(),
                ))
            }
            _ => {
                // A named `CONSTRAINT` with no recognized body is an error;
                // otherwise push the token back and report "no constraint".
                if name.is_some() {
                    self.expected("PRIMARY, UNIQUE, FOREIGN, or CHECK", next_token)
                } else {
                    self.prev_token();
                    Ok(None)
                }
            }
        }
    }
10221
10222 fn parse_exclusion_element(&mut self) -> Result<ExclusionElement, ParserError> {
10223 let expr = self.parse_expr()?;
10224 self.expect_keyword_is(Keyword::WITH)?;
10225 let operator_token = self.next_token();
10226 let operator = operator_token.token.to_string();
10227 Ok(ExclusionElement { expr, operator })
10228 }
10229
10230 fn parse_optional_nulls_distinct(&mut self) -> Result<NullsDistinctOption, ParserError> {
10231 Ok(if self.parse_keyword(Keyword::NULLS) {
10232 let not = self.parse_keyword(Keyword::NOT);
10233 self.expect_keyword_is(Keyword::DISTINCT)?;
10234 if not {
10235 NullsDistinctOption::NotDistinct
10236 } else {
10237 NullsDistinctOption::Distinct
10238 }
10239 } else {
10240 NullsDistinctOption::None
10241 })
10242 }
10243
10244 pub fn maybe_parse_options(
10246 &mut self,
10247 keyword: Keyword,
10248 ) -> Result<Option<Vec<SqlOption>>, ParserError> {
10249 if let Token::Word(word) = &self.peek_token_ref().token {
10250 if word.keyword == keyword {
10251 return Ok(Some(self.parse_options(keyword)?));
10252 }
10253 };
10254 Ok(None)
10255 }
10256
10257 pub fn parse_options(&mut self, keyword: Keyword) -> Result<Vec<SqlOption>, ParserError> {
10259 if self.parse_keyword(keyword) {
10260 self.expect_token(&Token::LParen)?;
10261 let options = self.parse_comma_separated0(Parser::parse_sql_option, Token::RParen)?;
10262 self.expect_token(&Token::RParen)?;
10263 Ok(options)
10264 } else {
10265 Ok(vec![])
10266 }
10267 }
10268
10269 pub fn parse_options_with_keywords(
10271 &mut self,
10272 keywords: &[Keyword],
10273 ) -> Result<Vec<SqlOption>, ParserError> {
10274 if self.parse_keywords(keywords) {
10275 self.expect_token(&Token::LParen)?;
10276 let options = self.parse_comma_separated(Parser::parse_sql_option)?;
10277 self.expect_token(&Token::RParen)?;
10278 Ok(options)
10279 } else {
10280 Ok(vec![])
10281 }
10282 }
10283
10284 pub fn parse_index_type(&mut self) -> Result<IndexType, ParserError> {
10286 Ok(if self.parse_keyword(Keyword::BTREE) {
10287 IndexType::BTree
10288 } else if self.parse_keyword(Keyword::HASH) {
10289 IndexType::Hash
10290 } else if self.parse_keyword(Keyword::GIN) {
10291 IndexType::GIN
10292 } else if self.parse_keyword(Keyword::GIST) {
10293 IndexType::GiST
10294 } else if self.parse_keyword(Keyword::SPGIST) {
10295 IndexType::SPGiST
10296 } else if self.parse_keyword(Keyword::BRIN) {
10297 IndexType::BRIN
10298 } else if self.parse_keyword(Keyword::BLOOM) {
10299 IndexType::Bloom
10300 } else {
10301 IndexType::Custom(self.parse_identifier()?)
10302 })
10303 }
10304
10305 pub fn parse_optional_using_then_index_type(
10312 &mut self,
10313 ) -> Result<Option<IndexType>, ParserError> {
10314 if self.parse_keyword(Keyword::USING) {
10315 Ok(Some(self.parse_index_type()?))
10316 } else {
10317 Ok(None)
10318 }
10319 }
10320
10321 pub fn parse_optional_ident(&mut self) -> Result<Option<Ident>, ParserError> {
10325 self.maybe_parse(|parser| parser.parse_identifier())
10326 }
10327
10328 #[must_use]
10329 pub fn parse_index_type_display(&mut self) -> KeyOrIndexDisplay {
10331 if self.parse_keyword(Keyword::KEY) {
10332 KeyOrIndexDisplay::Key
10333 } else if self.parse_keyword(Keyword::INDEX) {
10334 KeyOrIndexDisplay::Index
10335 } else {
10336 KeyOrIndexDisplay::None
10337 }
10338 }
10339
10340 pub fn parse_optional_index_option(&mut self) -> Result<Option<IndexOption>, ParserError> {
10342 if let Some(index_type) = self.parse_optional_using_then_index_type()? {
10343 Ok(Some(IndexOption::Using(index_type)))
10344 } else if self.parse_keyword(Keyword::COMMENT) {
10345 let s = self.parse_literal_string()?;
10346 Ok(Some(IndexOption::Comment(s)))
10347 } else {
10348 Ok(None)
10349 }
10350 }
10351
10352 pub fn parse_index_options(&mut self) -> Result<Vec<IndexOption>, ParserError> {
10354 let mut options = Vec::new();
10355
10356 loop {
10357 match self.parse_optional_index_option()? {
10358 Some(index_option) => options.push(index_option),
10359 None => return Ok(options),
10360 }
10361 }
10362 }
10363
    /// Parses a single `SqlOption` from an options list.
    ///
    /// MsSQL (and the generic dialect) support a few special forms — bare
    /// `HEAP`, `PARTITION (...)`, and `CLUSTERED ...` — dispatched by
    /// peeking at the next keyword; everything else is parsed as a generic
    /// `key = value` pair.
    pub fn parse_sql_option(&mut self) -> Result<SqlOption, ParserError> {
        let is_mssql = dialect_of!(self is MsSqlDialect|GenericDialect);

        match &self.peek_token_ref().token {
            // Bare `HEAP` keyword used as an option on its own.
            Token::Word(w) if w.keyword == Keyword::HEAP && is_mssql => {
                Ok(SqlOption::Ident(self.parse_identifier()?))
            }
            // `PARTITION (<col> RANGE ... FOR VALUES (...))`.
            Token::Word(w) if w.keyword == Keyword::PARTITION && is_mssql => {
                self.parse_option_partition()
            }
            // `CLUSTERED [COLUMNSTORE] INDEX ...`.
            Token::Word(w) if w.keyword == Keyword::CLUSTERED && is_mssql => {
                self.parse_option_clustered()
            }
            // Default form: `<identifier> = <expr>`.
            _ => {
                let name = self.parse_identifier()?;
                self.expect_token(&Token::Eq)?;
                let value = self.parse_expr()?;

                Ok(SqlOption::KeyValue { key: name, value })
            }
        }
    }
10387
    /// Parses the MsSQL `CLUSTERED ...` table-option variants.
    ///
    /// The alternatives are tried longest-first so that
    /// `CLUSTERED COLUMNSTORE INDEX ORDER (...)` is not mistaken for the
    /// shorter `CLUSTERED COLUMNSTORE INDEX` or `CLUSTERED INDEX (...)`
    /// forms — do not reorder these branches.
    pub fn parse_option_clustered(&mut self) -> Result<SqlOption, ParserError> {
        if self.parse_keywords(&[
            Keyword::CLUSTERED,
            Keyword::COLUMNSTORE,
            Keyword::INDEX,
            Keyword::ORDER,
        ]) {
            Ok(SqlOption::Clustered(
                TableOptionsClustered::ColumnstoreIndexOrder(
                    self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
                ),
            ))
        } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::COLUMNSTORE, Keyword::INDEX]) {
            Ok(SqlOption::Clustered(
                TableOptionsClustered::ColumnstoreIndex,
            ))
        } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::INDEX]) {
            // `CLUSTERED INDEX (<col> [ASC|DESC], ...)`.
            self.expect_token(&Token::LParen)?;

            let columns = self.parse_comma_separated(|p| {
                let name = p.parse_identifier()?;
                let asc = p.parse_asc_desc();

                Ok(ClusteredIndex { name, asc })
            })?;

            self.expect_token(&Token::RParen)?;

            Ok(SqlOption::Clustered(TableOptionsClustered::Index(columns)))
        } else {
            Err(ParserError::ParserError(
                "invalid CLUSTERED sequence".to_string(),
            ))
        }
    }
10424
    /// Parses the MsSQL table option
    /// `PARTITION (<col> RANGE [LEFT | RIGHT] FOR VALUES (<expr>, ...))`.
    pub fn parse_option_partition(&mut self) -> Result<SqlOption, ParserError> {
        self.expect_keyword_is(Keyword::PARTITION)?;
        self.expect_token(&Token::LParen)?;
        let column_name = self.parse_identifier()?;

        self.expect_keyword_is(Keyword::RANGE)?;
        // The range direction is optional; `None` means the server default.
        let range_direction = if self.parse_keyword(Keyword::LEFT) {
            Some(PartitionRangeDirection::Left)
        } else if self.parse_keyword(Keyword::RIGHT) {
            Some(PartitionRangeDirection::Right)
        } else {
            None
        };

        self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;
        self.expect_token(&Token::LParen)?;

        let for_values = self.parse_comma_separated(Parser::parse_expr)?;

        self.expect_token(&Token::RParen)?;
        // Closes the outer `PARTITION ( ... )` parenthesis.
        self.expect_token(&Token::RParen)?;

        Ok(SqlOption::Partition {
            column_name,
            range_direction,
            for_values,
        })
    }
10454
10455 pub fn parse_partition(&mut self) -> Result<Partition, ParserError> {
10457 self.expect_token(&Token::LParen)?;
10458 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
10459 self.expect_token(&Token::RParen)?;
10460 Ok(Partition::Partitions(partitions))
10461 }
10462
10463 pub fn parse_projection_select(&mut self) -> Result<ProjectionSelect, ParserError> {
10465 self.expect_token(&Token::LParen)?;
10466 self.expect_keyword_is(Keyword::SELECT)?;
10467 let projection = self.parse_projection()?;
10468 let group_by = self.parse_optional_group_by()?;
10469 let order_by = self.parse_optional_order_by()?;
10470 self.expect_token(&Token::RParen)?;
10471 Ok(ProjectionSelect {
10472 projection,
10473 group_by,
10474 order_by,
10475 })
10476 }
10477 pub fn parse_alter_table_add_projection(&mut self) -> Result<AlterTableOperation, ParserError> {
10479 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
10480 let name = self.parse_identifier()?;
10481 let query = self.parse_projection_select()?;
10482 Ok(AlterTableOperation::AddProjection {
10483 if_not_exists,
10484 name,
10485 select: query,
10486 })
10487 }
10488
10489 fn parse_alter_sort_key(&mut self) -> Result<AlterTableOperation, ParserError> {
10493 self.expect_keyword_is(Keyword::ALTER)?;
10494 self.expect_keyword_is(Keyword::SORTKEY)?;
10495 self.expect_token(&Token::LParen)?;
10496 let columns = self.parse_comma_separated(|p| p.parse_expr())?;
10497 self.expect_token(&Token::RParen)?;
10498 Ok(AlterTableOperation::AlterSortKey { columns })
10499 }
10500
10501 pub fn parse_alter_table_operation(&mut self) -> Result<AlterTableOperation, ParserError> {
10503 let operation = if self.parse_keyword(Keyword::ADD) {
10504 if let Some(constraint) = self.parse_optional_table_constraint()? {
10505 let not_valid = self.parse_keywords(&[Keyword::NOT, Keyword::VALID]);
10506 AlterTableOperation::AddConstraint {
10507 constraint,
10508 not_valid,
10509 }
10510 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10511 && self.parse_keyword(Keyword::PROJECTION)
10512 {
10513 return self.parse_alter_table_add_projection();
10514 } else {
10515 let if_not_exists =
10516 self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
10517 let mut new_partitions = vec![];
10518 loop {
10519 if self.parse_keyword(Keyword::PARTITION) {
10520 new_partitions.push(self.parse_partition()?);
10521 } else {
10522 break;
10523 }
10524 }
10525 if !new_partitions.is_empty() {
10526 AlterTableOperation::AddPartitions {
10527 if_not_exists,
10528 new_partitions,
10529 }
10530 } else {
10531 let column_keyword = self.parse_keyword(Keyword::COLUMN);
10532
10533 let if_not_exists = if dialect_of!(self is PostgreSqlDialect | BigQueryDialect | DuckDbDialect | GenericDialect)
10534 {
10535 self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS])
10536 || if_not_exists
10537 } else {
10538 false
10539 };
10540
10541 let column_def = self.parse_column_def()?;
10542
10543 let column_position = self.parse_column_position()?;
10544
10545 AlterTableOperation::AddColumn {
10546 column_keyword,
10547 if_not_exists,
10548 column_def,
10549 column_position,
10550 }
10551 }
10552 }
10553 } else if self.parse_keyword(Keyword::RENAME) {
10554 if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::CONSTRAINT) {
10555 let old_name = self.parse_identifier()?;
10556 self.expect_keyword_is(Keyword::TO)?;
10557 let new_name = self.parse_identifier()?;
10558 AlterTableOperation::RenameConstraint { old_name, new_name }
10559 } else if self.parse_keyword(Keyword::TO) {
10560 let table_name = self.parse_object_name(false)?;
10561 AlterTableOperation::RenameTable {
10562 table_name: RenameTableNameKind::To(table_name),
10563 }
10564 } else if self.parse_keyword(Keyword::AS) {
10565 let table_name = self.parse_object_name(false)?;
10566 AlterTableOperation::RenameTable {
10567 table_name: RenameTableNameKind::As(table_name),
10568 }
10569 } else {
10570 let _ = self.parse_keyword(Keyword::COLUMN); let old_column_name = self.parse_identifier()?;
10572 self.expect_keyword_is(Keyword::TO)?;
10573 let new_column_name = self.parse_identifier()?;
10574 AlterTableOperation::RenameColumn {
10575 old_column_name,
10576 new_column_name,
10577 }
10578 }
10579 } else if self.parse_keyword(Keyword::DISABLE) {
10580 if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
10581 AlterTableOperation::DisableRowLevelSecurity {}
10582 } else if self.parse_keyword(Keyword::RULE) {
10583 let name = self.parse_identifier()?;
10584 AlterTableOperation::DisableRule { name }
10585 } else if self.parse_keyword(Keyword::TRIGGER) {
10586 let name = self.parse_identifier()?;
10587 AlterTableOperation::DisableTrigger { name }
10588 } else {
10589 return self.expected_ref(
10590 "ROW LEVEL SECURITY, RULE, or TRIGGER after DISABLE",
10591 self.peek_token_ref(),
10592 );
10593 }
10594 } else if self.parse_keyword(Keyword::ENABLE) {
10595 if self.parse_keywords(&[Keyword::ALWAYS, Keyword::RULE]) {
10596 let name = self.parse_identifier()?;
10597 AlterTableOperation::EnableAlwaysRule { name }
10598 } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::TRIGGER]) {
10599 let name = self.parse_identifier()?;
10600 AlterTableOperation::EnableAlwaysTrigger { name }
10601 } else if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
10602 AlterTableOperation::EnableRowLevelSecurity {}
10603 } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::RULE]) {
10604 let name = self.parse_identifier()?;
10605 AlterTableOperation::EnableReplicaRule { name }
10606 } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::TRIGGER]) {
10607 let name = self.parse_identifier()?;
10608 AlterTableOperation::EnableReplicaTrigger { name }
10609 } else if self.parse_keyword(Keyword::RULE) {
10610 let name = self.parse_identifier()?;
10611 AlterTableOperation::EnableRule { name }
10612 } else if self.parse_keyword(Keyword::TRIGGER) {
10613 let name = self.parse_identifier()?;
10614 AlterTableOperation::EnableTrigger { name }
10615 } else {
10616 return self.expected_ref(
10617 "ALWAYS, REPLICA, ROW LEVEL SECURITY, RULE, or TRIGGER after ENABLE",
10618 self.peek_token_ref(),
10619 );
10620 }
10621 } else if self.parse_keywords(&[
10622 Keyword::FORCE,
10623 Keyword::ROW,
10624 Keyword::LEVEL,
10625 Keyword::SECURITY,
10626 ]) {
10627 AlterTableOperation::ForceRowLevelSecurity
10628 } else if self.parse_keywords(&[
10629 Keyword::NO,
10630 Keyword::FORCE,
10631 Keyword::ROW,
10632 Keyword::LEVEL,
10633 Keyword::SECURITY,
10634 ]) {
10635 AlterTableOperation::NoForceRowLevelSecurity
10636 } else if self.parse_keywords(&[Keyword::CLEAR, Keyword::PROJECTION])
10637 && dialect_of!(self is ClickHouseDialect|GenericDialect)
10638 {
10639 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10640 let name = self.parse_identifier()?;
10641 let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
10642 Some(self.parse_identifier()?)
10643 } else {
10644 None
10645 };
10646 AlterTableOperation::ClearProjection {
10647 if_exists,
10648 name,
10649 partition,
10650 }
10651 } else if self.parse_keywords(&[Keyword::MATERIALIZE, Keyword::PROJECTION])
10652 && dialect_of!(self is ClickHouseDialect|GenericDialect)
10653 {
10654 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10655 let name = self.parse_identifier()?;
10656 let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
10657 Some(self.parse_identifier()?)
10658 } else {
10659 None
10660 };
10661 AlterTableOperation::MaterializeProjection {
10662 if_exists,
10663 name,
10664 partition,
10665 }
10666 } else if self.parse_keyword(Keyword::DROP) {
10667 if self.parse_keywords(&[Keyword::IF, Keyword::EXISTS, Keyword::PARTITION]) {
10668 self.expect_token(&Token::LParen)?;
10669 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
10670 self.expect_token(&Token::RParen)?;
10671 AlterTableOperation::DropPartitions {
10672 partitions,
10673 if_exists: true,
10674 }
10675 } else if self.parse_keyword(Keyword::PARTITION) {
10676 self.expect_token(&Token::LParen)?;
10677 let partitions = self.parse_comma_separated(Parser::parse_expr)?;
10678 self.expect_token(&Token::RParen)?;
10679 AlterTableOperation::DropPartitions {
10680 partitions,
10681 if_exists: false,
10682 }
10683 } else if self.parse_keyword(Keyword::CONSTRAINT) {
10684 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10685 let name = self.parse_identifier()?;
10686 let drop_behavior = self.parse_optional_drop_behavior();
10687 AlterTableOperation::DropConstraint {
10688 if_exists,
10689 name,
10690 drop_behavior,
10691 }
10692 } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
10693 let drop_behavior = self.parse_optional_drop_behavior();
10694 AlterTableOperation::DropPrimaryKey { drop_behavior }
10695 } else if self.parse_keywords(&[Keyword::FOREIGN, Keyword::KEY]) {
10696 let name = self.parse_identifier()?;
10697 let drop_behavior = self.parse_optional_drop_behavior();
10698 AlterTableOperation::DropForeignKey {
10699 name,
10700 drop_behavior,
10701 }
10702 } else if self.parse_keyword(Keyword::INDEX) {
10703 let name = self.parse_identifier()?;
10704 AlterTableOperation::DropIndex { name }
10705 } else if self.parse_keyword(Keyword::PROJECTION)
10706 && dialect_of!(self is ClickHouseDialect|GenericDialect)
10707 {
10708 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10709 let name = self.parse_identifier()?;
10710 AlterTableOperation::DropProjection { if_exists, name }
10711 } else if self.parse_keywords(&[Keyword::CLUSTERING, Keyword::KEY]) {
10712 AlterTableOperation::DropClusteringKey
10713 } else {
10714 let has_column_keyword = self.parse_keyword(Keyword::COLUMN); let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
10716 let column_names = if self.dialect.supports_comma_separated_drop_column_list() {
10717 self.parse_comma_separated(Parser::parse_identifier)?
10718 } else {
10719 vec![self.parse_identifier()?]
10720 };
10721 let drop_behavior = self.parse_optional_drop_behavior();
10722 AlterTableOperation::DropColumn {
10723 has_column_keyword,
10724 column_names,
10725 if_exists,
10726 drop_behavior,
10727 }
10728 }
10729 } else if self.parse_keyword(Keyword::PARTITION) {
10730 self.expect_token(&Token::LParen)?;
10731 let before = self.parse_comma_separated(Parser::parse_expr)?;
10732 self.expect_token(&Token::RParen)?;
10733 self.expect_keyword_is(Keyword::RENAME)?;
10734 self.expect_keywords(&[Keyword::TO, Keyword::PARTITION])?;
10735 self.expect_token(&Token::LParen)?;
10736 let renames = self.parse_comma_separated(Parser::parse_expr)?;
10737 self.expect_token(&Token::RParen)?;
10738 AlterTableOperation::RenamePartitions {
10739 old_partitions: before,
10740 new_partitions: renames,
10741 }
10742 } else if self.parse_keyword(Keyword::CHANGE) {
10743 let _ = self.parse_keyword(Keyword::COLUMN); let old_name = self.parse_identifier()?;
10745 let new_name = self.parse_identifier()?;
10746 let data_type = self.parse_data_type()?;
10747 let mut options = vec![];
10748 while let Some(option) = self.parse_optional_column_option()? {
10749 options.push(option);
10750 }
10751
10752 let column_position = self.parse_column_position()?;
10753
10754 AlterTableOperation::ChangeColumn {
10755 old_name,
10756 new_name,
10757 data_type,
10758 options,
10759 column_position,
10760 }
10761 } else if self.parse_keyword(Keyword::MODIFY) {
10762 let _ = self.parse_keyword(Keyword::COLUMN); let col_name = self.parse_identifier()?;
10764 let data_type = self.parse_data_type()?;
10765 let mut options = vec![];
10766 while let Some(option) = self.parse_optional_column_option()? {
10767 options.push(option);
10768 }
10769
10770 let column_position = self.parse_column_position()?;
10771
10772 AlterTableOperation::ModifyColumn {
10773 col_name,
10774 data_type,
10775 options,
10776 column_position,
10777 }
10778 } else if self.parse_keyword(Keyword::ALTER) {
10779 if self.peek_keyword(Keyword::SORTKEY) {
10780 self.prev_token();
10781 return self.parse_alter_sort_key();
10782 }
10783
10784 let _ = self.parse_keyword(Keyword::COLUMN); let column_name = self.parse_identifier()?;
10786 let is_postgresql = dialect_of!(self is PostgreSqlDialect);
10787
10788 let op: AlterColumnOperation = if self.parse_keywords(&[
10789 Keyword::SET,
10790 Keyword::NOT,
10791 Keyword::NULL,
10792 ]) {
10793 AlterColumnOperation::SetNotNull {}
10794 } else if self.parse_keywords(&[Keyword::DROP, Keyword::NOT, Keyword::NULL]) {
10795 AlterColumnOperation::DropNotNull {}
10796 } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
10797 AlterColumnOperation::SetDefault {
10798 value: self.parse_expr()?,
10799 }
10800 } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
10801 AlterColumnOperation::DropDefault {}
10802 } else if self.parse_keywords(&[Keyword::SET, Keyword::DATA, Keyword::TYPE]) {
10803 self.parse_set_data_type(true)?
10804 } else if self.parse_keyword(Keyword::TYPE) {
10805 self.parse_set_data_type(false)?
10806 } else if self.parse_keywords(&[Keyword::ADD, Keyword::GENERATED]) {
10807 let generated_as = if self.parse_keyword(Keyword::ALWAYS) {
10808 Some(GeneratedAs::Always)
10809 } else if self.parse_keywords(&[Keyword::BY, Keyword::DEFAULT]) {
10810 Some(GeneratedAs::ByDefault)
10811 } else {
10812 None
10813 };
10814
10815 self.expect_keywords(&[Keyword::AS, Keyword::IDENTITY])?;
10816
10817 let mut sequence_options: Option<Vec<SequenceOptions>> = None;
10818
10819 if self.peek_token_ref().token == Token::LParen {
10820 self.expect_token(&Token::LParen)?;
10821 sequence_options = Some(self.parse_create_sequence_options()?);
10822 self.expect_token(&Token::RParen)?;
10823 }
10824
10825 AlterColumnOperation::AddGenerated {
10826 generated_as,
10827 sequence_options,
10828 }
10829 } else {
10830 let message = if is_postgresql {
10831 "SET/DROP NOT NULL, SET DEFAULT, SET DATA TYPE, or ADD GENERATED after ALTER COLUMN"
10832 } else {
10833 "SET/DROP NOT NULL, SET DEFAULT, or SET DATA TYPE after ALTER COLUMN"
10834 };
10835
10836 return self.expected_ref(message, self.peek_token_ref());
10837 };
10838 AlterTableOperation::AlterColumn { column_name, op }
10839 } else if self.parse_keyword(Keyword::SWAP) {
10840 self.expect_keyword_is(Keyword::WITH)?;
10841 let table_name = self.parse_object_name(false)?;
10842 AlterTableOperation::SwapWith { table_name }
10843 } else if dialect_of!(self is PostgreSqlDialect | GenericDialect)
10844 && self.parse_keywords(&[Keyword::OWNER, Keyword::TO])
10845 {
10846 let new_owner = self.parse_owner()?;
10847 AlterTableOperation::OwnerTo { new_owner }
10848 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10849 && self.parse_keyword(Keyword::ATTACH)
10850 {
10851 AlterTableOperation::AttachPartition {
10852 partition: self.parse_part_or_partition()?,
10853 }
10854 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10855 && self.parse_keyword(Keyword::DETACH)
10856 {
10857 AlterTableOperation::DetachPartition {
10858 partition: self.parse_part_or_partition()?,
10859 }
10860 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10861 && self.parse_keyword(Keyword::FREEZE)
10862 {
10863 let partition = self.parse_part_or_partition()?;
10864 let with_name = if self.parse_keyword(Keyword::WITH) {
10865 self.expect_keyword_is(Keyword::NAME)?;
10866 Some(self.parse_identifier()?)
10867 } else {
10868 None
10869 };
10870 AlterTableOperation::FreezePartition {
10871 partition,
10872 with_name,
10873 }
10874 } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
10875 && self.parse_keyword(Keyword::UNFREEZE)
10876 {
10877 let partition = self.parse_part_or_partition()?;
10878 let with_name = if self.parse_keyword(Keyword::WITH) {
10879 self.expect_keyword_is(Keyword::NAME)?;
10880 Some(self.parse_identifier()?)
10881 } else {
10882 None
10883 };
10884 AlterTableOperation::UnfreezePartition {
10885 partition,
10886 with_name,
10887 }
10888 } else if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
10889 self.expect_token(&Token::LParen)?;
10890 let exprs = self.parse_comma_separated(|parser| parser.parse_expr())?;
10891 self.expect_token(&Token::RParen)?;
10892 AlterTableOperation::ClusterBy { exprs }
10893 } else if self.parse_keywords(&[Keyword::SUSPEND, Keyword::RECLUSTER]) {
10894 AlterTableOperation::SuspendRecluster
10895 } else if self.parse_keywords(&[Keyword::RESUME, Keyword::RECLUSTER]) {
10896 AlterTableOperation::ResumeRecluster
10897 } else if self.parse_keyword(Keyword::LOCK) {
10898 let equals = self.consume_token(&Token::Eq);
10899 let lock = match self.parse_one_of_keywords(&[
10900 Keyword::DEFAULT,
10901 Keyword::EXCLUSIVE,
10902 Keyword::NONE,
10903 Keyword::SHARED,
10904 ]) {
10905 Some(Keyword::DEFAULT) => AlterTableLock::Default,
10906 Some(Keyword::EXCLUSIVE) => AlterTableLock::Exclusive,
10907 Some(Keyword::NONE) => AlterTableLock::None,
10908 Some(Keyword::SHARED) => AlterTableLock::Shared,
10909 _ => self.expected_ref(
10910 "DEFAULT, EXCLUSIVE, NONE or SHARED after LOCK [=]",
10911 self.peek_token_ref(),
10912 )?,
10913 };
10914 AlterTableOperation::Lock { equals, lock }
10915 } else if self.parse_keyword(Keyword::ALGORITHM) {
10916 let equals = self.consume_token(&Token::Eq);
10917 let algorithm = match self.parse_one_of_keywords(&[
10918 Keyword::DEFAULT,
10919 Keyword::INSTANT,
10920 Keyword::INPLACE,
10921 Keyword::COPY,
10922 ]) {
10923 Some(Keyword::DEFAULT) => AlterTableAlgorithm::Default,
10924 Some(Keyword::INSTANT) => AlterTableAlgorithm::Instant,
10925 Some(Keyword::INPLACE) => AlterTableAlgorithm::Inplace,
10926 Some(Keyword::COPY) => AlterTableAlgorithm::Copy,
10927 _ => self.expected_ref(
10928 "DEFAULT, INSTANT, INPLACE, or COPY after ALGORITHM [=]",
10929 self.peek_token_ref(),
10930 )?,
10931 };
10932 AlterTableOperation::Algorithm { equals, algorithm }
10933 } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
10934 let equals = self.consume_token(&Token::Eq);
10935 let value = self.parse_number_value()?;
10936 AlterTableOperation::AutoIncrement { equals, value }
10937 } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::IDENTITY]) {
10938 let identity = if self.parse_keyword(Keyword::NOTHING) {
10939 ReplicaIdentity::Nothing
10940 } else if self.parse_keyword(Keyword::FULL) {
10941 ReplicaIdentity::Full
10942 } else if self.parse_keyword(Keyword::DEFAULT) {
10943 ReplicaIdentity::Default
10944 } else if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
10945 ReplicaIdentity::Index(self.parse_identifier()?)
10946 } else {
10947 return self.expected_ref(
10948 "NOTHING, FULL, DEFAULT, or USING INDEX index_name after REPLICA IDENTITY",
10949 self.peek_token_ref(),
10950 );
10951 };
10952
10953 AlterTableOperation::ReplicaIdentity { identity }
10954 } else if self.parse_keywords(&[Keyword::VALIDATE, Keyword::CONSTRAINT]) {
10955 let name = self.parse_identifier()?;
10956 AlterTableOperation::ValidateConstraint { name }
10957 } else if self.parse_keywords(&[Keyword::SET, Keyword::TABLESPACE]) {
10958 let tablespace_name = self.parse_identifier()?;
10959 AlterTableOperation::SetTablespace { tablespace_name }
10960 } else {
10961 let mut options =
10962 self.parse_options_with_keywords(&[Keyword::SET, Keyword::TBLPROPERTIES])?;
10963 if !options.is_empty() {
10964 AlterTableOperation::SetTblProperties {
10965 table_properties: options,
10966 }
10967 } else {
10968 options = self.parse_options(Keyword::SET)?;
10969 if !options.is_empty() {
10970 AlterTableOperation::SetOptionsParens { options }
10971 } else {
10972 return self.expected_ref(
10973 "ADD, RENAME, PARTITION, SWAP, DROP, REPLICA IDENTITY, SET, or SET TBLPROPERTIES after ALTER TABLE",
10974 self.peek_token_ref(),
10975 );
10976 }
10977 }
10978 };
10979 Ok(operation)
10980 }
10981
10982 fn parse_set_data_type(&mut self, had_set: bool) -> Result<AlterColumnOperation, ParserError> {
10983 let data_type = self.parse_data_type()?;
10984 let using = if self.dialect.supports_alter_column_type_using()
10985 && self.parse_keyword(Keyword::USING)
10986 {
10987 Some(self.parse_expr()?)
10988 } else {
10989 None
10990 };
10991 Ok(AlterColumnOperation::SetDataType {
10992 data_type,
10993 using,
10994 had_set,
10995 })
10996 }
10997
10998 fn parse_part_or_partition(&mut self) -> Result<Partition, ParserError> {
10999 let keyword = self.expect_one_of_keywords(&[Keyword::PART, Keyword::PARTITION])?;
11000 match keyword {
11001 Keyword::PART => Ok(Partition::Part(self.parse_expr()?)),
11002 Keyword::PARTITION => Ok(Partition::Expr(self.parse_expr()?)),
11003 unexpected_keyword => Err(ParserError::ParserError(
11005 format!("Internal parser error: expected any of {{PART, PARTITION}}, got {unexpected_keyword:?}"),
11006 )),
11007 }
11008 }
11009
    /// Dispatches an `ALTER <object-type> ...` statement to the parser for
    /// that object type. The `ALTER` keyword itself has already been
    /// consumed by the caller; this reads the object-type keyword and
    /// delegates.
    pub fn parse_alter(&mut self) -> Result<Statement, ParserError> {
        let object_type = self.expect_one_of_keywords(&[
            Keyword::VIEW,
            Keyword::TYPE,
            Keyword::COLLATION,
            Keyword::TABLE,
            Keyword::INDEX,
            Keyword::FUNCTION,
            Keyword::AGGREGATE,
            Keyword::ROLE,
            Keyword::POLICY,
            Keyword::CONNECTOR,
            Keyword::ICEBERG,
            Keyword::SCHEMA,
            Keyword::USER,
            Keyword::OPERATOR,
            Keyword::DOMAIN,
            Keyword::TRIGGER,
            Keyword::EXTENSION,
            Keyword::PROCEDURE,
        ])?;
        match object_type {
            Keyword::SCHEMA => {
                // `parse_alter_schema` parses the statement from its start,
                // so step back over both the SCHEMA keyword and the token
                // before it (presumably the ALTER consumed by our caller —
                // TODO(review): confirm against call sites).
                self.prev_token();
                self.prev_token();
                self.parse_alter_schema()
            }
            Keyword::VIEW => self.parse_alter_view(),
            Keyword::TYPE => self.parse_alter_type(),
            Keyword::COLLATION => self.parse_alter_collation().map(Into::into),
            Keyword::TABLE => self.parse_alter_table(false),
            Keyword::ICEBERG => {
                // `ALTER ICEBERG TABLE ...` — parsed as an ALTER TABLE with
                // the iceberg flag set.
                self.expect_keyword(Keyword::TABLE)?;
                self.parse_alter_table(true)
            }
            Keyword::INDEX => {
                let index_name = self.parse_object_name(false)?;
                let operation = if self.parse_keyword(Keyword::RENAME) {
                    if self.parse_keyword(Keyword::TO) {
                        let index_name = self.parse_object_name(false)?;
                        AlterIndexOperation::RenameIndex { index_name }
                    } else {
                        return self.expected_ref("TO after RENAME", self.peek_token_ref());
                    }
                } else if self.parse_keywords(&[Keyword::SET, Keyword::TABLESPACE]) {
                    let tablespace_name = self.parse_identifier()?;
                    AlterIndexOperation::SetTablespace { tablespace_name }
                } else {
                    return self.expected_ref(
                        "RENAME or SET TABLESPACE after ALTER INDEX",
                        self.peek_token_ref(),
                    );
                };

                Ok(Statement::AlterIndex {
                    name: index_name,
                    operation,
                })
            }
            Keyword::FUNCTION => self.parse_alter_function(AlterFunctionKind::Function),
            Keyword::AGGREGATE => self.parse_alter_function(AlterFunctionKind::Aggregate),
            Keyword::PROCEDURE => self.parse_alter_function(AlterFunctionKind::Procedure),
            Keyword::OPERATOR => {
                if self.parse_keyword(Keyword::FAMILY) {
                    self.parse_alter_operator_family().map(Into::into)
                } else if self.parse_keyword(Keyword::CLASS) {
                    self.parse_alter_operator_class().map(Into::into)
                } else {
                    self.parse_alter_operator().map(Into::into)
                }
            }
            Keyword::ROLE => self.parse_alter_role(),
            Keyword::POLICY => self.parse_alter_policy().map(Into::into),
            Keyword::CONNECTOR => self.parse_alter_connector(),
            Keyword::USER => self.parse_alter_user().map(Into::into),
            Keyword::DOMAIN => self.parse_alter_domain(),
            Keyword::TRIGGER => self.parse_alter_trigger(),
            Keyword::EXTENSION => self.parse_alter_extension(),
            // Unreachable unless the keyword list above and this match fall
            // out of sync: `expect_one_of_keywords` only returns listed
            // keywords.
            unexpected_keyword => Err(ParserError::ParserError(
                format!("Internal parser error: expected any of {{VIEW, TYPE, COLLATION, TABLE, INDEX, FUNCTION, AGGREGATE, ROLE, POLICY, CONNECTOR, ICEBERG, SCHEMA, USER, OPERATOR, DOMAIN, TRIGGER, EXTENSION, PROCEDURE}}, got {unexpected_keyword:?}"),
            )),
        }
    }
11095
    /// Parses the name-and-signature portion of `ALTER AGGREGATE name(...)`.
    ///
    /// Returns `(desc, is_star, order_by)`:
    /// * `desc` — the aggregate's name and argument list;
    /// * `is_star` — `true` for the `name(*)` form (then `desc.args` is an
    ///   empty list and `order_by` is `None`);
    /// * `order_by` — the arguments following `ORDER BY`, if present.
    fn parse_alter_aggregate_signature(
        &mut self,
    ) -> Result<(FunctionDesc, bool, Option<Vec<OperateFunctionArg>>), ParserError> {
        let name = self.parse_object_name(false)?;
        self.expect_token(&Token::LParen)?;

        // `name(*)`: no explicit argument list; return early.
        if self.consume_token(&Token::Mul) {
            self.expect_token(&Token::RParen)?;
            return Ok((
                FunctionDesc {
                    name,
                    args: Some(vec![]),
                },
                true,
                None,
            ));
        }

        // An empty argument list is allowed: the signature may go straight
        // to `ORDER BY` or to the closing paren.
        let args =
            if self.peek_keyword(Keyword::ORDER) || self.peek_token_ref().token == Token::RParen {
                vec![]
            } else {
                self.parse_comma_separated(Parser::parse_aggregate_function_arg)?
            };

        // Optional `ORDER BY <args>` inside the parentheses.
        let aggregate_order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            Some(self.parse_comma_separated(Parser::parse_aggregate_function_arg)?)
        } else {
            None
        };

        self.expect_token(&Token::RParen)?;
        Ok((
            FunctionDesc {
                name,
                args: Some(args),
            },
            false,
            aggregate_order_by,
        ))
    }
11137
    /// Parses at most one `ALTER FUNCTION` action (e.g. `IMMUTABLE`,
    /// `SECURITY DEFINER`, `SET name = value`).
    ///
    /// Returns `Ok(None)` when the upcoming tokens start no known action,
    /// leaving the token stream untouched so callers can loop until the
    /// action list is exhausted.
    fn parse_alter_function_action(&mut self) -> Result<Option<AlterFunctionAction>, ParserError> {
        // NOTE: branch order matters where prefixes overlap — `NOT` must be
        // tried before `LEAKPROOF`, and `EXTERNAL` before `SECURITY`.
        let action = if self.parse_keywords(&[
            Keyword::CALLED,
            Keyword::ON,
            Keyword::NULL,
            Keyword::INPUT,
        ]) {
            Some(AlterFunctionAction::CalledOnNull(
                FunctionCalledOnNull::CalledOnNullInput,
            ))
        } else if self.parse_keywords(&[
            Keyword::RETURNS,
            Keyword::NULL,
            Keyword::ON,
            Keyword::NULL,
            Keyword::INPUT,
        ]) {
            Some(AlterFunctionAction::CalledOnNull(
                FunctionCalledOnNull::ReturnsNullOnNullInput,
            ))
        } else if self.parse_keyword(Keyword::STRICT) {
            Some(AlterFunctionAction::CalledOnNull(
                FunctionCalledOnNull::Strict,
            ))
        } else if self.parse_keyword(Keyword::IMMUTABLE) {
            Some(AlterFunctionAction::Behavior(FunctionBehavior::Immutable))
        } else if self.parse_keyword(Keyword::STABLE) {
            Some(AlterFunctionAction::Behavior(FunctionBehavior::Stable))
        } else if self.parse_keyword(Keyword::VOLATILE) {
            Some(AlterFunctionAction::Behavior(FunctionBehavior::Volatile))
        } else if self.parse_keyword(Keyword::NOT) {
            // `NOT` may only be followed by `LEAKPROOF` here.
            self.expect_keyword(Keyword::LEAKPROOF)?;
            Some(AlterFunctionAction::Leakproof(false))
        } else if self.parse_keyword(Keyword::LEAKPROOF) {
            Some(AlterFunctionAction::Leakproof(true))
        } else if self.parse_keyword(Keyword::EXTERNAL) {
            // `EXTERNAL SECURITY { DEFINER | INVOKER }`
            self.expect_keyword(Keyword::SECURITY)?;
            let security = if self.parse_keyword(Keyword::DEFINER) {
                FunctionSecurity::Definer
            } else if self.parse_keyword(Keyword::INVOKER) {
                FunctionSecurity::Invoker
            } else {
                return self.expected_ref("DEFINER or INVOKER", self.peek_token_ref());
            };
            Some(AlterFunctionAction::Security {
                external: true,
                security,
            })
        } else if self.parse_keyword(Keyword::SECURITY) {
            // `SECURITY { DEFINER | INVOKER }` without EXTERNAL.
            let security = if self.parse_keyword(Keyword::DEFINER) {
                FunctionSecurity::Definer
            } else if self.parse_keyword(Keyword::INVOKER) {
                FunctionSecurity::Invoker
            } else {
                return self.expected_ref("DEFINER or INVOKER", self.peek_token_ref());
            };
            Some(AlterFunctionAction::Security {
                external: false,
                security,
            })
        } else if self.parse_keyword(Keyword::PARALLEL) {
            let parallel = if self.parse_keyword(Keyword::UNSAFE) {
                FunctionParallel::Unsafe
            } else if self.parse_keyword(Keyword::RESTRICTED) {
                FunctionParallel::Restricted
            } else if self.parse_keyword(Keyword::SAFE) {
                FunctionParallel::Safe
            } else {
                return self
                    .expected_ref("one of UNSAFE | RESTRICTED | SAFE", self.peek_token_ref());
            };
            Some(AlterFunctionAction::Parallel(parallel))
        } else if self.parse_keyword(Keyword::COST) {
            Some(AlterFunctionAction::Cost(self.parse_number()?))
        } else if self.parse_keyword(Keyword::ROWS) {
            Some(AlterFunctionAction::Rows(self.parse_number()?))
        } else if self.parse_keyword(Keyword::SUPPORT) {
            Some(AlterFunctionAction::Support(self.parse_object_name(false)?))
        } else if self.parse_keyword(Keyword::SET) {
            // `SET name { FROM CURRENT | { = | TO } { DEFAULT | value, ... } }`
            let name = self.parse_object_name(false)?;
            let value = if self.parse_keywords(&[Keyword::FROM, Keyword::CURRENT]) {
                FunctionSetValue::FromCurrent
            } else {
                if !self.consume_token(&Token::Eq) && !self.parse_keyword(Keyword::TO) {
                    return self.expected_ref("= or TO", self.peek_token_ref());
                }
                if self.parse_keyword(Keyword::DEFAULT) {
                    FunctionSetValue::Default
                } else {
                    FunctionSetValue::Values(self.parse_comma_separated(Parser::parse_expr)?)
                }
            };
            Some(AlterFunctionAction::Set(FunctionDefinitionSetParam {
                name,
                value,
            }))
        } else if self.parse_keyword(Keyword::RESET) {
            // `RESET { ALL | config_name }`
            let reset_config = if self.parse_keyword(Keyword::ALL) {
                ResetConfig::ALL
            } else {
                ResetConfig::ConfigName(self.parse_object_name(false)?)
            };
            Some(AlterFunctionAction::Reset(reset_config))
        } else {
            // No action recognized; signal the caller to stop looping.
            None
        };

        Ok(action)
    }
11247
11248 fn parse_alter_function_actions(
11249 &mut self,
11250 ) -> Result<(Vec<AlterFunctionAction>, bool), ParserError> {
11251 let mut actions = vec![];
11252 while let Some(action) = self.parse_alter_function_action()? {
11253 actions.push(action);
11254 }
11255 if actions.is_empty() {
11256 return self.expected_ref("at least one ALTER FUNCTION action", self.peek_token_ref());
11257 }
11258 let restrict = self.parse_keyword(Keyword::RESTRICT);
11259 Ok((actions, restrict))
11260 }
11261
    /// Parses `ALTER { FUNCTION | PROCEDURE | AGGREGATE } ...` after the
    /// object-type keyword has been consumed; `kind` records which one was
    /// seen.
    pub fn parse_alter_function(
        &mut self,
        kind: AlterFunctionKind,
    ) -> Result<Statement, ParserError> {
        // Aggregates use a richer signature syntax (`(*)`, ORDER BY args);
        // functions and procedures use a plain function descriptor.
        let (function, aggregate_star, aggregate_order_by) = match kind {
            AlterFunctionKind::Function | AlterFunctionKind::Procedure => {
                (self.parse_function_desc()?, false, None)
            }
            AlterFunctionKind::Aggregate => self.parse_alter_aggregate_signature()?,
        };

        let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
            let new_name = self.parse_identifier()?;
            AlterFunctionOperation::RenameTo { new_name }
        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
            AlterFunctionOperation::OwnerTo(self.parse_owner()?)
        } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
            AlterFunctionOperation::SetSchema {
                schema_name: self.parse_object_name(false)?,
            }
        } else if matches!(kind, AlterFunctionKind::Function | AlterFunctionKind::Procedure)
            && self.parse_keyword(Keyword::NO)
        {
            // `NO DEPENDS ON EXTENSION <name>` — functions/procedures only.
            if !self.parse_keyword(Keyword::DEPENDS) {
                return self.expected_ref("DEPENDS after NO", self.peek_token_ref());
            }
            self.expect_keywords(&[Keyword::ON, Keyword::EXTENSION])?;
            AlterFunctionOperation::DependsOnExtension {
                no: true,
                extension_name: self.parse_object_name(false)?,
            }
        } else if matches!(kind, AlterFunctionKind::Function | AlterFunctionKind::Procedure)
            && self.parse_keyword(Keyword::DEPENDS)
        {
            // `DEPENDS ON EXTENSION <name>` — functions/procedures only.
            self.expect_keywords(&[Keyword::ON, Keyword::EXTENSION])?;
            AlterFunctionOperation::DependsOnExtension {
                no: false,
                extension_name: self.parse_object_name(false)?,
            }
        } else if matches!(kind, AlterFunctionKind::Function | AlterFunctionKind::Procedure) {
            // Fall back to a list of generic actions (IMMUTABLE, SET ..., etc.).
            let (actions, restrict) = self.parse_alter_function_actions()?;
            AlterFunctionOperation::Actions { actions, restrict }
        } else {
            // Aggregates support only the three operations tried above.
            return self.expected_ref(
                "RENAME TO, OWNER TO, or SET SCHEMA after ALTER AGGREGATE",
                self.peek_token_ref(),
            );
        };

        Ok(Statement::AlterFunction(AlterFunction {
            kind,
            function,
            aggregate_order_by,
            aggregate_star,
            operation,
        }))
    }
11320
    /// Parses the body of `ALTER DOMAIN <name> <operation>` (the `DOMAIN`
    /// keyword has already been consumed).
    pub fn parse_alter_domain(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;

        let operation = if self.parse_keyword(Keyword::ADD) {
            // `ADD <constraint> [NOT VALID]`
            if let Some(constraint) = self.parse_optional_table_constraint()? {
                let not_valid = self.parse_keywords(&[Keyword::NOT, Keyword::VALID]);
                AlterDomainOperation::AddConstraint {
                    constraint,
                    not_valid,
                }
            } else {
                return self.expected_ref("constraint after ADD", self.peek_token_ref());
            }
        } else if self.parse_keywords(&[Keyword::DROP, Keyword::CONSTRAINT]) {
            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
            // Shadows the domain name: this is the constraint's name.
            let name = self.parse_identifier()?;
            let drop_behavior = self.parse_optional_drop_behavior();
            AlterDomainOperation::DropConstraint {
                if_exists,
                name,
                drop_behavior,
            }
        } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
            AlterDomainOperation::DropDefault
        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::CONSTRAINT]) {
            let old_name = self.parse_identifier()?;
            self.expect_keyword_is(Keyword::TO)?;
            let new_name = self.parse_identifier()?;
            AlterDomainOperation::RenameConstraint { old_name, new_name }
        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
            let new_name = self.parse_identifier()?;
            AlterDomainOperation::RenameTo { new_name }
        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
            AlterDomainOperation::OwnerTo(self.parse_owner()?)
        } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
            AlterDomainOperation::SetSchema {
                schema_name: self.parse_object_name(false)?,
            }
        } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
            AlterDomainOperation::SetDefault {
                default: self.parse_expr()?,
            }
        } else if self.parse_keywords(&[Keyword::VALIDATE, Keyword::CONSTRAINT]) {
            let name = self.parse_identifier()?;
            AlterDomainOperation::ValidateConstraint { name }
        } else {
            return self.expected_ref(
                "ADD, DROP, RENAME, OWNER TO, SET, VALIDATE after ALTER DOMAIN",
                self.peek_token_ref(),
            );
        };

        Ok(AlterDomain { name, operation }.into())
    }
11376
11377 pub fn parse_alter_trigger(&mut self) -> Result<Statement, ParserError> {
11379 let name = self.parse_identifier()?;
11380 self.expect_keyword_is(Keyword::ON)?;
11381 let table_name = self.parse_object_name(false)?;
11382
11383 let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11384 let new_name = self.parse_identifier()?;
11385 AlterTriggerOperation::RenameTo { new_name }
11386 } else {
11387 return self.expected_ref("RENAME TO after ALTER TRIGGER ... ON ...", self.peek_token_ref());
11388 };
11389
11390 Ok(AlterTrigger {
11391 name,
11392 table_name,
11393 operation,
11394 }
11395 .into())
11396 }
11397
11398 pub fn parse_alter_extension(&mut self) -> Result<Statement, ParserError> {
11400 let name = self.parse_identifier()?;
11401
11402 let operation = if self.parse_keyword(Keyword::UPDATE) {
11403 let version = if self.parse_keyword(Keyword::TO) {
11404 Some(self.parse_identifier()?)
11405 } else {
11406 None
11407 };
11408 AlterExtensionOperation::UpdateTo { version }
11409 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11410 AlterExtensionOperation::SetSchema {
11411 schema_name: self.parse_object_name(false)?,
11412 }
11413 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11414 AlterExtensionOperation::OwnerTo(self.parse_owner()?)
11415 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11416 let new_name = self.parse_identifier()?;
11417 AlterExtensionOperation::RenameTo { new_name }
11418 } else {
11419 return self.expected_ref(
11420 "UPDATE, SET SCHEMA, OWNER TO, or RENAME TO after ALTER EXTENSION",
11421 self.peek_token_ref(),
11422 );
11423 };
11424
11425 Ok(AlterExtension { name, operation }.into())
11426 }
11427
11428 pub fn parse_alter_table(&mut self, iceberg: bool) -> Result<Statement, ParserError> {
11430 let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
11431 let only = self.parse_keyword(Keyword::ONLY); let table_name = self.parse_object_name(false)?;
11433 let on_cluster = self.parse_optional_on_cluster()?;
11434 let operations = self.parse_comma_separated(Parser::parse_alter_table_operation)?;
11435
11436 let mut location = None;
11437 if self.parse_keyword(Keyword::LOCATION) {
11438 location = Some(HiveSetLocation {
11439 has_set: false,
11440 location: self.parse_identifier()?,
11441 });
11442 } else if self.parse_keywords(&[Keyword::SET, Keyword::LOCATION]) {
11443 location = Some(HiveSetLocation {
11444 has_set: true,
11445 location: self.parse_identifier()?,
11446 });
11447 }
11448
11449 let end_token = if self.peek_token_ref().token == Token::SemiColon {
11450 self.peek_token_ref().clone()
11451 } else {
11452 self.get_current_token().clone()
11453 };
11454
11455 Ok(AlterTable {
11456 name: table_name,
11457 if_exists,
11458 only,
11459 operations,
11460 location,
11461 on_cluster,
11462 table_type: if iceberg {
11463 Some(AlterTableType::Iceberg)
11464 } else {
11465 None
11466 },
11467 end_token: AttachedToken(end_token),
11468 }
11469 .into())
11470 }
11471
    /// Parses the body of
    /// `ALTER VIEW <name> [(columns)] [WITH (options)] AS <query>`
    /// (the `VIEW` keyword has already been consumed).
    pub fn parse_alter_view(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;
        // Optional parenthesized column list.
        let columns = self.parse_parenthesized_column_list(Optional, false)?;

        // Optional `WITH (...)` options before the AS clause.
        let with_options = self.parse_options(Keyword::WITH)?;

        self.expect_keyword_is(Keyword::AS)?;
        let query = self.parse_query()?;

        Ok(Statement::AlterView {
            name,
            columns,
            query,
            with_options,
        })
    }
11489
11490 pub fn parse_alter_type(&mut self) -> Result<Statement, ParserError> {
11492 let name = self.parse_object_name(false)?;
11493
11494 if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11495 let new_name = self.parse_identifier()?;
11496 Ok(Statement::AlterType(AlterType {
11497 name,
11498 operation: AlterTypeOperation::Rename(AlterTypeRename { new_name }),
11499 }))
11500 } else if self.parse_keywords(&[Keyword::ADD, Keyword::VALUE]) {
11501 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
11502 let new_enum_value = self.parse_identifier()?;
11503 let position = if self.parse_keyword(Keyword::BEFORE) {
11504 Some(AlterTypeAddValuePosition::Before(self.parse_identifier()?))
11505 } else if self.parse_keyword(Keyword::AFTER) {
11506 Some(AlterTypeAddValuePosition::After(self.parse_identifier()?))
11507 } else {
11508 None
11509 };
11510
11511 Ok(Statement::AlterType(AlterType {
11512 name,
11513 operation: AlterTypeOperation::AddValue(AlterTypeAddValue {
11514 if_not_exists,
11515 value: new_enum_value,
11516 position,
11517 }),
11518 }))
11519 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::VALUE]) {
11520 let existing_enum_value = self.parse_identifier()?;
11521 self.expect_keyword(Keyword::TO)?;
11522 let new_enum_value = self.parse_identifier()?;
11523
11524 Ok(Statement::AlterType(AlterType {
11525 name,
11526 operation: AlterTypeOperation::RenameValue(AlterTypeRenameValue {
11527 from: existing_enum_value,
11528 to: new_enum_value,
11529 }),
11530 }))
11531 } else {
11532 self.expected_ref(
11533 "{RENAME TO | { RENAME | ADD } VALUE}",
11534 self.peek_token_ref(),
11535 )
11536 }
11537 }
11538
11539 pub fn parse_alter_collation(&mut self) -> Result<AlterCollation, ParserError> {
11543 let name = self.parse_object_name(false)?;
11544 let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11545 AlterCollationOperation::RenameTo {
11546 new_name: self.parse_identifier()?,
11547 }
11548 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11549 AlterCollationOperation::OwnerTo(self.parse_owner()?)
11550 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11551 AlterCollationOperation::SetSchema {
11552 schema_name: self.parse_object_name(false)?,
11553 }
11554 } else if self.parse_keywords(&[Keyword::REFRESH, Keyword::VERSION]) {
11555 AlterCollationOperation::RefreshVersion
11556 } else {
11557 return self.expected_ref(
11558 "RENAME TO, OWNER TO, SET SCHEMA, or REFRESH VERSION after ALTER COLLATION",
11559 self.peek_token_ref(),
11560 );
11561 };
11562
11563 Ok(AlterCollation { name, operation })
11564 }
11565
11566 pub fn parse_alter_operator(&mut self) -> Result<AlterOperator, ParserError> {
11570 let name = self.parse_operator_name()?;
11571
11572 self.expect_token(&Token::LParen)?;
11574
11575 let left_type = if self.parse_keyword(Keyword::NONE) {
11576 None
11577 } else {
11578 Some(self.parse_data_type()?)
11579 };
11580
11581 self.expect_token(&Token::Comma)?;
11582 let right_type = self.parse_data_type()?;
11583 self.expect_token(&Token::RParen)?;
11584
11585 let operation = if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11587 let owner = if self.parse_keyword(Keyword::CURRENT_ROLE) {
11588 Owner::CurrentRole
11589 } else if self.parse_keyword(Keyword::CURRENT_USER) {
11590 Owner::CurrentUser
11591 } else if self.parse_keyword(Keyword::SESSION_USER) {
11592 Owner::SessionUser
11593 } else {
11594 Owner::Ident(self.parse_identifier()?)
11595 };
11596 AlterOperatorOperation::OwnerTo(owner)
11597 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11598 let schema_name = self.parse_object_name(false)?;
11599 AlterOperatorOperation::SetSchema { schema_name }
11600 } else if self.parse_keyword(Keyword::SET) {
11601 self.expect_token(&Token::LParen)?;
11602
11603 let mut options = Vec::new();
11604 loop {
11605 let keyword = self.expect_one_of_keywords(&[
11606 Keyword::RESTRICT,
11607 Keyword::JOIN,
11608 Keyword::COMMUTATOR,
11609 Keyword::NEGATOR,
11610 Keyword::HASHES,
11611 Keyword::MERGES,
11612 ])?;
11613
11614 match keyword {
11615 Keyword::RESTRICT => {
11616 self.expect_token(&Token::Eq)?;
11617 let proc_name = if self.parse_keyword(Keyword::NONE) {
11618 None
11619 } else {
11620 Some(self.parse_object_name(false)?)
11621 };
11622 options.push(OperatorOption::Restrict(proc_name));
11623 }
11624 Keyword::JOIN => {
11625 self.expect_token(&Token::Eq)?;
11626 let proc_name = if self.parse_keyword(Keyword::NONE) {
11627 None
11628 } else {
11629 Some(self.parse_object_name(false)?)
11630 };
11631 options.push(OperatorOption::Join(proc_name));
11632 }
11633 Keyword::COMMUTATOR => {
11634 self.expect_token(&Token::Eq)?;
11635 let op_name = self.parse_operator_name()?;
11636 options.push(OperatorOption::Commutator(op_name));
11637 }
11638 Keyword::NEGATOR => {
11639 self.expect_token(&Token::Eq)?;
11640 let op_name = self.parse_operator_name()?;
11641 options.push(OperatorOption::Negator(op_name));
11642 }
11643 Keyword::HASHES => {
11644 options.push(OperatorOption::Hashes);
11645 }
11646 Keyword::MERGES => {
11647 options.push(OperatorOption::Merges);
11648 }
11649 unexpected_keyword => return Err(ParserError::ParserError(
11650 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in operator option"),
11651 )),
11652 }
11653
11654 if !self.consume_token(&Token::Comma) {
11655 break;
11656 }
11657 }
11658
11659 self.expect_token(&Token::RParen)?;
11660 AlterOperatorOperation::Set { options }
11661 } else {
11662 return self.expected_ref(
11663 "OWNER TO, SET SCHEMA, or SET after ALTER OPERATOR",
11664 self.peek_token_ref(),
11665 );
11666 };
11667
11668 Ok(AlterOperator {
11669 name,
11670 left_type,
11671 right_type,
11672 operation,
11673 })
11674 }
11675
11676 fn parse_operator_family_add_operator(&mut self) -> Result<OperatorFamilyItem, ParserError> {
11678 let strategy_number = self.parse_literal_uint()?;
11679 let operator_name = self.parse_operator_name()?;
11680
11681 self.expect_token(&Token::LParen)?;
11683 let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
11684 self.expect_token(&Token::RParen)?;
11685
11686 let purpose = if self.parse_keyword(Keyword::FOR) {
11688 if self.parse_keyword(Keyword::SEARCH) {
11689 Some(OperatorPurpose::ForSearch)
11690 } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
11691 let sort_family = self.parse_object_name(false)?;
11692 Some(OperatorPurpose::ForOrderBy { sort_family })
11693 } else {
11694 return self.expected_ref("SEARCH or ORDER BY after FOR", self.peek_token_ref());
11695 }
11696 } else {
11697 None
11698 };
11699
11700 Ok(OperatorFamilyItem::Operator {
11701 strategy_number,
11702 operator_name,
11703 op_types,
11704 purpose,
11705 })
11706 }
11707
11708 fn parse_operator_family_add_function(&mut self) -> Result<OperatorFamilyItem, ParserError> {
11710 let support_number = self.parse_literal_uint()?;
11711
11712 let op_types =
11714 if self.consume_token(&Token::LParen) && self.peek_token_ref().token != Token::RParen {
11715 let types = self.parse_comma_separated(Parser::parse_data_type)?;
11716 self.expect_token(&Token::RParen)?;
11717 Some(types)
11718 } else if self.consume_token(&Token::LParen) {
11719 self.expect_token(&Token::RParen)?;
11720 Some(vec![])
11721 } else {
11722 None
11723 };
11724
11725 let function_name = self.parse_object_name(false)?;
11726
11727 let argument_types = if self.consume_token(&Token::LParen) {
11729 if self.peek_token_ref().token == Token::RParen {
11730 self.expect_token(&Token::RParen)?;
11731 vec![]
11732 } else {
11733 let types = self.parse_comma_separated(Parser::parse_data_type)?;
11734 self.expect_token(&Token::RParen)?;
11735 types
11736 }
11737 } else {
11738 vec![]
11739 };
11740
11741 Ok(OperatorFamilyItem::Function {
11742 support_number,
11743 op_types,
11744 function_name,
11745 argument_types,
11746 })
11747 }
11748
11749 fn parse_operator_family_drop_operator(
11751 &mut self,
11752 ) -> Result<OperatorFamilyDropItem, ParserError> {
11753 let strategy_number = self.parse_literal_uint()?;
11754
11755 self.expect_token(&Token::LParen)?;
11757 let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
11758 self.expect_token(&Token::RParen)?;
11759
11760 Ok(OperatorFamilyDropItem::Operator {
11761 strategy_number,
11762 op_types,
11763 })
11764 }
11765
11766 fn parse_operator_family_drop_function(
11768 &mut self,
11769 ) -> Result<OperatorFamilyDropItem, ParserError> {
11770 let support_number = self.parse_literal_uint()?;
11771
11772 self.expect_token(&Token::LParen)?;
11774 let op_types = self.parse_comma_separated(Parser::parse_data_type)?;
11775 self.expect_token(&Token::RParen)?;
11776
11777 Ok(OperatorFamilyDropItem::Function {
11778 support_number,
11779 op_types,
11780 })
11781 }
11782
11783 fn parse_operator_family_add_item(&mut self) -> Result<OperatorFamilyItem, ParserError> {
11785 if self.parse_keyword(Keyword::OPERATOR) {
11786 self.parse_operator_family_add_operator()
11787 } else if self.parse_keyword(Keyword::FUNCTION) {
11788 self.parse_operator_family_add_function()
11789 } else {
11790 self.expected_ref("OPERATOR or FUNCTION", self.peek_token_ref())
11791 }
11792 }
11793
11794 fn parse_operator_family_drop_item(&mut self) -> Result<OperatorFamilyDropItem, ParserError> {
11796 if self.parse_keyword(Keyword::OPERATOR) {
11797 self.parse_operator_family_drop_operator()
11798 } else if self.parse_keyword(Keyword::FUNCTION) {
11799 self.parse_operator_family_drop_function()
11800 } else {
11801 self.expected_ref("OPERATOR or FUNCTION", self.peek_token_ref())
11802 }
11803 }
11804
11805 pub fn parse_alter_operator_family(&mut self) -> Result<AlterOperatorFamily, ParserError> {
11808 let name = self.parse_object_name(false)?;
11809 self.expect_keyword(Keyword::USING)?;
11810 let using = self.parse_identifier()?;
11811
11812 let operation = if self.parse_keyword(Keyword::ADD) {
11813 let items = self.parse_comma_separated(Parser::parse_operator_family_add_item)?;
11814 AlterOperatorFamilyOperation::Add { items }
11815 } else if self.parse_keyword(Keyword::DROP) {
11816 let items = self.parse_comma_separated(Parser::parse_operator_family_drop_item)?;
11817 AlterOperatorFamilyOperation::Drop { items }
11818 } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11819 let new_name = self.parse_object_name(false)?;
11820 AlterOperatorFamilyOperation::RenameTo { new_name }
11821 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11822 let owner = self.parse_owner()?;
11823 AlterOperatorFamilyOperation::OwnerTo(owner)
11824 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11825 let schema_name = self.parse_object_name(false)?;
11826 AlterOperatorFamilyOperation::SetSchema { schema_name }
11827 } else {
11828 return self.expected_ref(
11829 "ADD, DROP, RENAME TO, OWNER TO, or SET SCHEMA after ALTER OPERATOR FAMILY",
11830 self.peek_token_ref(),
11831 );
11832 };
11833
11834 Ok(AlterOperatorFamily {
11835 name,
11836 using,
11837 operation,
11838 })
11839 }
11840
11841 pub fn parse_alter_operator_class(&mut self) -> Result<AlterOperatorClass, ParserError> {
11845 let name = self.parse_object_name(false)?;
11846 self.expect_keyword(Keyword::USING)?;
11847 let using = self.parse_identifier()?;
11848
11849 let operation = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
11850 let new_name = self.parse_object_name(false)?;
11851 AlterOperatorClassOperation::RenameTo { new_name }
11852 } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
11853 let owner = self.parse_owner()?;
11854 AlterOperatorClassOperation::OwnerTo(owner)
11855 } else if self.parse_keywords(&[Keyword::SET, Keyword::SCHEMA]) {
11856 let schema_name = self.parse_object_name(false)?;
11857 AlterOperatorClassOperation::SetSchema { schema_name }
11858 } else {
11859 return self.expected_ref(
11860 "RENAME TO, OWNER TO, or SET SCHEMA after ALTER OPERATOR CLASS",
11861 self.peek_token_ref(),
11862 );
11863 };
11864
11865 Ok(AlterOperatorClass {
11866 name,
11867 using,
11868 operation,
11869 })
11870 }
11871
    /// Parses a complete `ALTER SCHEMA` statement, including the leading
    /// `ALTER SCHEMA` keywords (unlike most `parse_alter_*` helpers here,
    /// which start after the object keyword).
    ///
    /// Exactly one operation is parsed per call; it is wrapped in a
    /// single-element `operations` vector.
    pub fn parse_alter_schema(&mut self) -> Result<Statement, ParserError> {
        self.expect_keywords(&[Keyword::ALTER, Keyword::SCHEMA])?;
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let name = self.parse_object_name(false)?;
        let operation = if self.parse_keywords(&[Keyword::SET, Keyword::OPTIONS]) {
            // `parse_options(Keyword::OPTIONS)` expects to consume the OPTIONS
            // keyword itself, so rewind the token just matched above.
            self.prev_token();
            let options = self.parse_options(Keyword::OPTIONS)?;
            AlterSchemaOperation::SetOptionsParens { options }
        } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT, Keyword::COLLATE]) {
            let collate = self.parse_expr()?;
            AlterSchemaOperation::SetDefaultCollate { collate }
        } else if self.parse_keywords(&[Keyword::ADD, Keyword::REPLICA]) {
            let replica = self.parse_identifier()?;
            // The OPTIONS list after the replica name is optional.
            let options = if self.peek_keyword(Keyword::OPTIONS) {
                Some(self.parse_options(Keyword::OPTIONS)?)
            } else {
                None
            };
            AlterSchemaOperation::AddReplica { replica, options }
        } else if self.parse_keywords(&[Keyword::DROP, Keyword::REPLICA]) {
            let replica = self.parse_identifier()?;
            AlterSchemaOperation::DropReplica { replica }
        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
            let new_name = self.parse_object_name(false)?;
            AlterSchemaOperation::Rename { name: new_name }
        } else if self.parse_keywords(&[Keyword::OWNER, Keyword::TO]) {
            let owner = self.parse_owner()?;
            AlterSchemaOperation::OwnerTo { owner }
        } else {
            return self.expected_ref("ALTER SCHEMA operation", self.peek_token_ref());
        };
        Ok(Statement::AlterSchema(AlterSchema {
            name,
            if_exists,
            operations: vec![operation],
        }))
    }
11912
11913 pub fn parse_call(&mut self) -> Result<Statement, ParserError> {
11916 let object_name = self.parse_object_name(false)?;
11917 if self.peek_token_ref().token == Token::LParen {
11918 match self.parse_function(object_name)? {
11919 Expr::Function(f) => Ok(Statement::Call(f)),
11920 other => parser_err!(
11921 format!("Expected a simple procedure call but found: {other}"),
11922 self.peek_token_ref().span.start
11923 ),
11924 }
11925 } else {
11926 Ok(Statement::Call(Function {
11927 name: object_name,
11928 uses_odbc_syntax: false,
11929 parameters: FunctionArguments::None,
11930 args: FunctionArguments::None,
11931 over: None,
11932 filter: None,
11933 null_treatment: None,
11934 within_group: vec![],
11935 }))
11936 }
11937 }
11938
11939 pub fn parse_copy(&mut self) -> Result<Statement, ParserError> {
11941 let source;
11942 if self.consume_token(&Token::LParen) {
11943 source = CopySource::Query(self.parse_query()?);
11944 self.expect_token(&Token::RParen)?;
11945 } else {
11946 let table_name = self.parse_object_name(false)?;
11947 let columns = self.parse_parenthesized_column_list(Optional, false)?;
11948 source = CopySource::Table {
11949 table_name,
11950 columns,
11951 };
11952 }
11953 let to = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::TO]) {
11954 Some(Keyword::FROM) => false,
11955 Some(Keyword::TO) => true,
11956 _ => self.expected_ref("FROM or TO", self.peek_token_ref())?,
11957 };
11958 if !to {
11959 if let CopySource::Query(_) = source {
11962 return Err(ParserError::ParserError(
11963 "COPY ... FROM does not support query as a source".to_string(),
11964 ));
11965 }
11966 }
11967 let target = if self.parse_keyword(Keyword::STDIN) {
11968 CopyTarget::Stdin
11969 } else if self.parse_keyword(Keyword::STDOUT) {
11970 CopyTarget::Stdout
11971 } else if self.parse_keyword(Keyword::PROGRAM) {
11972 CopyTarget::Program {
11973 command: self.parse_literal_string()?,
11974 }
11975 } else {
11976 CopyTarget::File {
11977 filename: self.parse_literal_string()?,
11978 }
11979 };
11980 let _ = self.parse_keyword(Keyword::WITH); let mut options = vec![];
11982 if self.consume_token(&Token::LParen) {
11983 options = self.parse_comma_separated(Parser::parse_copy_option)?;
11984 self.expect_token(&Token::RParen)?;
11985 }
11986 let mut legacy_options = vec![];
11987 while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
11988 legacy_options.push(opt);
11989 }
11990 let values =
11991 if matches!(target, CopyTarget::Stdin) && self.peek_token_ref().token != Token::EOF {
11992 self.expect_token(&Token::SemiColon)?;
11993 self.parse_tsv()
11994 } else {
11995 vec![]
11996 };
11997 Ok(Statement::Copy {
11998 source,
11999 to,
12000 target,
12001 options,
12002 legacy_options,
12003 values,
12004 })
12005 }
12006
12007 fn parse_open(&mut self) -> Result<Statement, ParserError> {
12009 self.expect_keyword(Keyword::OPEN)?;
12010 Ok(Statement::Open(OpenStatement {
12011 cursor_name: self.parse_identifier()?,
12012 }))
12013 }
12014
12015 pub fn parse_close(&mut self) -> Result<Statement, ParserError> {
12017 let cursor = if self.parse_keyword(Keyword::ALL) {
12018 CloseCursor::All
12019 } else {
12020 let name = self.parse_identifier()?;
12021
12022 CloseCursor::Specific { name }
12023 };
12024
12025 Ok(Statement::Close { cursor })
12026 }
12027
12028 fn parse_copy_option(&mut self) -> Result<CopyOption, ParserError> {
12029 let ret = match self.parse_one_of_keywords(&[
12030 Keyword::FORMAT,
12031 Keyword::FREEZE,
12032 Keyword::DELIMITER,
12033 Keyword::NULL,
12034 Keyword::HEADER,
12035 Keyword::QUOTE,
12036 Keyword::ESCAPE,
12037 Keyword::FORCE_QUOTE,
12038 Keyword::FORCE_NOT_NULL,
12039 Keyword::FORCE_NULL,
12040 Keyword::ENCODING,
12041 ]) {
12042 Some(Keyword::FORMAT) => CopyOption::Format(self.parse_identifier()?),
12043 Some(Keyword::FREEZE) => CopyOption::Freeze(!matches!(
12044 self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
12045 Some(Keyword::FALSE)
12046 )),
12047 Some(Keyword::DELIMITER) => CopyOption::Delimiter(self.parse_literal_char()?),
12048 Some(Keyword::NULL) => CopyOption::Null(self.parse_literal_string()?),
12049 Some(Keyword::HEADER) => CopyOption::Header(!matches!(
12050 self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
12051 Some(Keyword::FALSE)
12052 )),
12053 Some(Keyword::QUOTE) => CopyOption::Quote(self.parse_literal_char()?),
12054 Some(Keyword::ESCAPE) => CopyOption::Escape(self.parse_literal_char()?),
12055 Some(Keyword::FORCE_QUOTE) => {
12056 CopyOption::ForceQuote(self.parse_parenthesized_column_list(Mandatory, false)?)
12057 }
12058 Some(Keyword::FORCE_NOT_NULL) => {
12059 CopyOption::ForceNotNull(self.parse_parenthesized_column_list(Mandatory, false)?)
12060 }
12061 Some(Keyword::FORCE_NULL) => {
12062 CopyOption::ForceNull(self.parse_parenthesized_column_list(Mandatory, false)?)
12063 }
12064 Some(Keyword::ENCODING) => CopyOption::Encoding(self.parse_literal_string()?),
12065 _ => self.expected_ref("option", self.peek_token_ref())?,
12066 };
12067 Ok(ret)
12068 }
12069
12070 fn parse_copy_legacy_option(&mut self) -> Result<CopyLegacyOption, ParserError> {
12071 if self.parse_keyword(Keyword::FORMAT) {
12073 let _ = self.parse_keyword(Keyword::AS);
12074 }
12075
12076 let ret = match self.parse_one_of_keywords(&[
12077 Keyword::ACCEPTANYDATE,
12078 Keyword::ACCEPTINVCHARS,
12079 Keyword::ADDQUOTES,
12080 Keyword::ALLOWOVERWRITE,
12081 Keyword::BINARY,
12082 Keyword::BLANKSASNULL,
12083 Keyword::BZIP2,
12084 Keyword::CLEANPATH,
12085 Keyword::COMPUPDATE,
12086 Keyword::CREDENTIALS,
12087 Keyword::CSV,
12088 Keyword::DATEFORMAT,
12089 Keyword::DELIMITER,
12090 Keyword::EMPTYASNULL,
12091 Keyword::ENCRYPTED,
12092 Keyword::ESCAPE,
12093 Keyword::EXTENSION,
12094 Keyword::FIXEDWIDTH,
12095 Keyword::GZIP,
12096 Keyword::HEADER,
12097 Keyword::IAM_ROLE,
12098 Keyword::IGNOREHEADER,
12099 Keyword::JSON,
12100 Keyword::MANIFEST,
12101 Keyword::MAXFILESIZE,
12102 Keyword::NULL,
12103 Keyword::PARALLEL,
12104 Keyword::PARQUET,
12105 Keyword::PARTITION,
12106 Keyword::REGION,
12107 Keyword::REMOVEQUOTES,
12108 Keyword::ROWGROUPSIZE,
12109 Keyword::STATUPDATE,
12110 Keyword::TIMEFORMAT,
12111 Keyword::TRUNCATECOLUMNS,
12112 Keyword::ZSTD,
12113 ]) {
12114 Some(Keyword::ACCEPTANYDATE) => CopyLegacyOption::AcceptAnyDate,
12115 Some(Keyword::ACCEPTINVCHARS) => {
12116 let _ = self.parse_keyword(Keyword::AS); let ch = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
12118 Some(self.parse_literal_string()?)
12119 } else {
12120 None
12121 };
12122 CopyLegacyOption::AcceptInvChars(ch)
12123 }
12124 Some(Keyword::ADDQUOTES) => CopyLegacyOption::AddQuotes,
12125 Some(Keyword::ALLOWOVERWRITE) => CopyLegacyOption::AllowOverwrite,
12126 Some(Keyword::BINARY) => CopyLegacyOption::Binary,
12127 Some(Keyword::BLANKSASNULL) => CopyLegacyOption::BlankAsNull,
12128 Some(Keyword::BZIP2) => CopyLegacyOption::Bzip2,
12129 Some(Keyword::CLEANPATH) => CopyLegacyOption::CleanPath,
12130 Some(Keyword::COMPUPDATE) => {
12131 let preset = self.parse_keyword(Keyword::PRESET);
12132 let enabled = match self.parse_one_of_keywords(&[
12133 Keyword::TRUE,
12134 Keyword::FALSE,
12135 Keyword::ON,
12136 Keyword::OFF,
12137 ]) {
12138 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
12139 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
12140 _ => None,
12141 };
12142 CopyLegacyOption::CompUpdate { preset, enabled }
12143 }
12144 Some(Keyword::CREDENTIALS) => {
12145 CopyLegacyOption::Credentials(self.parse_literal_string()?)
12146 }
12147 Some(Keyword::CSV) => CopyLegacyOption::Csv({
12148 let mut opts = vec![];
12149 while let Some(opt) =
12150 self.maybe_parse(|parser| parser.parse_copy_legacy_csv_option())?
12151 {
12152 opts.push(opt);
12153 }
12154 opts
12155 }),
12156 Some(Keyword::DATEFORMAT) => {
12157 let _ = self.parse_keyword(Keyword::AS);
12158 let fmt = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
12159 Some(self.parse_literal_string()?)
12160 } else {
12161 None
12162 };
12163 CopyLegacyOption::DateFormat(fmt)
12164 }
12165 Some(Keyword::DELIMITER) => {
12166 let _ = self.parse_keyword(Keyword::AS);
12167 CopyLegacyOption::Delimiter(self.parse_literal_char()?)
12168 }
12169 Some(Keyword::EMPTYASNULL) => CopyLegacyOption::EmptyAsNull,
12170 Some(Keyword::ENCRYPTED) => {
12171 let auto = self.parse_keyword(Keyword::AUTO);
12172 CopyLegacyOption::Encrypted { auto }
12173 }
12174 Some(Keyword::ESCAPE) => CopyLegacyOption::Escape,
12175 Some(Keyword::EXTENSION) => {
12176 let ext = self.parse_literal_string()?;
12177 CopyLegacyOption::Extension(ext)
12178 }
12179 Some(Keyword::FIXEDWIDTH) => {
12180 let spec = self.parse_literal_string()?;
12181 CopyLegacyOption::FixedWidth(spec)
12182 }
12183 Some(Keyword::GZIP) => CopyLegacyOption::Gzip,
12184 Some(Keyword::HEADER) => CopyLegacyOption::Header,
12185 Some(Keyword::IAM_ROLE) => CopyLegacyOption::IamRole(self.parse_iam_role_kind()?),
12186 Some(Keyword::IGNOREHEADER) => {
12187 let _ = self.parse_keyword(Keyword::AS);
12188 let num_rows = self.parse_literal_uint()?;
12189 CopyLegacyOption::IgnoreHeader(num_rows)
12190 }
12191 Some(Keyword::JSON) => {
12192 let _ = self.parse_keyword(Keyword::AS);
12193 let fmt = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
12194 Some(self.parse_literal_string()?)
12195 } else {
12196 None
12197 };
12198 CopyLegacyOption::Json(fmt)
12199 }
12200 Some(Keyword::MANIFEST) => {
12201 let verbose = self.parse_keyword(Keyword::VERBOSE);
12202 CopyLegacyOption::Manifest { verbose }
12203 }
12204 Some(Keyword::MAXFILESIZE) => {
12205 let _ = self.parse_keyword(Keyword::AS);
12206 let size = self.parse_number_value()?;
12207 let unit = match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
12208 Some(Keyword::MB) => Some(FileSizeUnit::MB),
12209 Some(Keyword::GB) => Some(FileSizeUnit::GB),
12210 _ => None,
12211 };
12212 CopyLegacyOption::MaxFileSize(FileSize { size, unit })
12213 }
12214 Some(Keyword::NULL) => {
12215 let _ = self.parse_keyword(Keyword::AS);
12216 CopyLegacyOption::Null(self.parse_literal_string()?)
12217 }
12218 Some(Keyword::PARALLEL) => {
12219 let enabled = match self.parse_one_of_keywords(&[
12220 Keyword::TRUE,
12221 Keyword::FALSE,
12222 Keyword::ON,
12223 Keyword::OFF,
12224 ]) {
12225 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
12226 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
12227 _ => None,
12228 };
12229 CopyLegacyOption::Parallel(enabled)
12230 }
12231 Some(Keyword::PARQUET) => CopyLegacyOption::Parquet,
12232 Some(Keyword::PARTITION) => {
12233 self.expect_keyword(Keyword::BY)?;
12234 let columns = self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?;
12235 let include = self.parse_keyword(Keyword::INCLUDE);
12236 CopyLegacyOption::PartitionBy(UnloadPartitionBy { columns, include })
12237 }
12238 Some(Keyword::REGION) => {
12239 let _ = self.parse_keyword(Keyword::AS);
12240 let region = self.parse_literal_string()?;
12241 CopyLegacyOption::Region(region)
12242 }
12243 Some(Keyword::REMOVEQUOTES) => CopyLegacyOption::RemoveQuotes,
12244 Some(Keyword::ROWGROUPSIZE) => {
12245 let _ = self.parse_keyword(Keyword::AS);
12246 let file_size = self.parse_file_size()?;
12247 CopyLegacyOption::RowGroupSize(file_size)
12248 }
12249 Some(Keyword::STATUPDATE) => {
12250 let enabled = match self.parse_one_of_keywords(&[
12251 Keyword::TRUE,
12252 Keyword::FALSE,
12253 Keyword::ON,
12254 Keyword::OFF,
12255 ]) {
12256 Some(Keyword::TRUE) | Some(Keyword::ON) => Some(true),
12257 Some(Keyword::FALSE) | Some(Keyword::OFF) => Some(false),
12258 _ => None,
12259 };
12260 CopyLegacyOption::StatUpdate(enabled)
12261 }
12262 Some(Keyword::TIMEFORMAT) => {
12263 let _ = self.parse_keyword(Keyword::AS);
12264 let fmt = if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
12265 Some(self.parse_literal_string()?)
12266 } else {
12267 None
12268 };
12269 CopyLegacyOption::TimeFormat(fmt)
12270 }
12271 Some(Keyword::TRUNCATECOLUMNS) => CopyLegacyOption::TruncateColumns,
12272 Some(Keyword::ZSTD) => CopyLegacyOption::Zstd,
12273 _ => self.expected_ref("option", self.peek_token_ref())?,
12274 };
12275 Ok(ret)
12276 }
12277
12278 fn parse_file_size(&mut self) -> Result<FileSize, ParserError> {
12279 let size = self.parse_number_value()?;
12280 let unit = self.maybe_parse_file_size_unit();
12281 Ok(FileSize { size, unit })
12282 }
12283
12284 fn maybe_parse_file_size_unit(&mut self) -> Option<FileSizeUnit> {
12285 match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) {
12286 Some(Keyword::MB) => Some(FileSizeUnit::MB),
12287 Some(Keyword::GB) => Some(FileSizeUnit::GB),
12288 _ => None,
12289 }
12290 }
12291
12292 fn parse_iam_role_kind(&mut self) -> Result<IamRoleKind, ParserError> {
12293 if self.parse_keyword(Keyword::DEFAULT) {
12294 Ok(IamRoleKind::Default)
12295 } else {
12296 let arn = self.parse_literal_string()?;
12297 Ok(IamRoleKind::Arn(arn))
12298 }
12299 }
12300
12301 fn parse_copy_legacy_csv_option(&mut self) -> Result<CopyLegacyCsvOption, ParserError> {
12302 let ret = match self.parse_one_of_keywords(&[
12303 Keyword::HEADER,
12304 Keyword::QUOTE,
12305 Keyword::ESCAPE,
12306 Keyword::FORCE,
12307 ]) {
12308 Some(Keyword::HEADER) => CopyLegacyCsvOption::Header,
12309 Some(Keyword::QUOTE) => {
12310 let _ = self.parse_keyword(Keyword::AS); CopyLegacyCsvOption::Quote(self.parse_literal_char()?)
12312 }
12313 Some(Keyword::ESCAPE) => {
12314 let _ = self.parse_keyword(Keyword::AS); CopyLegacyCsvOption::Escape(self.parse_literal_char()?)
12316 }
12317 Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) => {
12318 CopyLegacyCsvOption::ForceNotNull(
12319 self.parse_comma_separated(|p| p.parse_identifier())?,
12320 )
12321 }
12322 Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::QUOTE]) => {
12323 CopyLegacyCsvOption::ForceQuote(
12324 self.parse_comma_separated(|p| p.parse_identifier())?,
12325 )
12326 }
12327 _ => self.expected_ref("csv option", self.peek_token_ref())?,
12328 };
12329 Ok(ret)
12330 }
12331
12332 fn parse_literal_char(&mut self) -> Result<char, ParserError> {
12333 let s = self.parse_literal_string()?;
12334 if s.len() != 1 {
12335 let loc = self
12336 .tokens
12337 .get(self.index - 1)
12338 .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
12339 return parser_err!(format!("Expect a char, found {s:?}"), loc);
12340 }
12341 Ok(s.chars().next().unwrap())
12342 }
12343
12344 pub fn parse_tsv(&mut self) -> Vec<Option<String>> {
12347 self.parse_tab_value()
12348 }
12349
12350 pub fn parse_tab_value(&mut self) -> Vec<Option<String>> {
12352 let mut values = vec![];
12353 let mut content = String::new();
12354 while let Some(t) = self.next_token_no_skip().map(|t| &t.token) {
12355 match t {
12356 Token::Whitespace(Whitespace::Tab) => {
12357 values.push(Some(core::mem::take(&mut content)));
12358 }
12359 Token::Whitespace(Whitespace::Newline) => {
12360 values.push(Some(core::mem::take(&mut content)));
12361 }
12362 Token::Backslash => {
12363 if self.consume_token(&Token::Period) {
12364 return values;
12365 }
12366 if let Token::Word(w) = self.next_token().token {
12367 if w.value == "N" {
12368 values.push(None);
12369 }
12370 }
12371 }
12372 _ => {
12373 content.push_str(&t.to_string());
12374 }
12375 }
12376 }
12377 values
12378 }
12379
    /// Parses a literal value: number, string (in its many quoting flavors),
    /// boolean, `NULL`, or a placeholder such as `$1`, `:name`, or `@name`.
    ///
    /// The returned [`ValueWithSpan`] carries the span of the consumed
    /// token(s).
    pub fn parse_value(&mut self) -> Result<ValueWithSpan, ParserError> {
        let next_token = self.next_token();
        let span = next_token.span;
        // Helper that attaches the consumed token's span to a parsed value.
        let ok_value = |value: Value| Ok(value.with_span(span));
        match next_token.token {
            Token::Word(w) => match w.keyword {
                // TRUE/FALSE are only literals in dialects that support them.
                Keyword::TRUE if self.dialect.supports_boolean_literals() => {
                    ok_value(Value::Boolean(true))
                }
                Keyword::FALSE if self.dialect.supports_boolean_literals() => {
                    ok_value(Value::Boolean(false))
                }
                Keyword::NULL => ok_value(Value::Null),
                // A quoted non-keyword word is treated as a string literal.
                Keyword::NoKeyword if w.quote_style.is_some() => match w.quote_style {
                    Some('"') => ok_value(Value::DoubleQuotedString(w.value)),
                    Some('\'') => ok_value(Value::SingleQuotedString(w.value)),
                    _ => self.expected(
                        "A value?",
                        TokenWithSpan {
                            token: Token::Word(w),
                            span,
                        },
                    )?,
                },
                _ => self.expected(
                    "a concrete value",
                    TokenWithSpan {
                        token: Token::Word(w),
                        span,
                    },
                ),
            },
            Token::Number(n, l) => ok_value(Value::Number(Self::parse(n, span.start)?, l)),
            // Plain single/double-quoted strings may be concatenated with
            // adjacent literals, depending on the dialect.
            Token::SingleQuotedString(ref s) => ok_value(Value::SingleQuotedString(
                self.maybe_concat_string_literal(s.to_string()),
            )),
            Token::DoubleQuotedString(ref s) => ok_value(Value::DoubleQuotedString(
                self.maybe_concat_string_literal(s.to_string()),
            )),
            Token::TripleSingleQuotedString(ref s) => {
                ok_value(Value::TripleSingleQuotedString(s.to_string()))
            }
            Token::TripleDoubleQuotedString(ref s) => {
                ok_value(Value::TripleDoubleQuotedString(s.to_string()))
            }
            Token::DollarQuotedString(ref s) => ok_value(Value::DollarQuotedString(s.clone())),
            Token::SingleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::SingleQuotedByteStringLiteral(s.clone()))
            }
            Token::DoubleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::DoubleQuotedByteStringLiteral(s.clone()))
            }
            Token::TripleSingleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::TripleSingleQuotedByteStringLiteral(s.clone()))
            }
            Token::TripleDoubleQuotedByteStringLiteral(ref s) => {
                ok_value(Value::TripleDoubleQuotedByteStringLiteral(s.clone()))
            }
            Token::SingleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::SingleQuotedRawStringLiteral(s.clone()))
            }
            Token::DoubleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::DoubleQuotedRawStringLiteral(s.clone()))
            }
            Token::TripleSingleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::TripleSingleQuotedRawStringLiteral(s.clone()))
            }
            Token::TripleDoubleQuotedRawStringLiteral(ref s) => {
                ok_value(Value::TripleDoubleQuotedRawStringLiteral(s.clone()))
            }
            Token::NationalStringLiteral(ref s) => {
                ok_value(Value::NationalStringLiteral(s.to_string()))
            }
            Token::QuoteDelimitedStringLiteral(v) => {
                ok_value(Value::QuoteDelimitedStringLiteral(v))
            }
            Token::NationalQuoteDelimitedStringLiteral(v) => {
                ok_value(Value::NationalQuoteDelimitedStringLiteral(v))
            }
            Token::EscapedStringLiteral(ref s) => {
                ok_value(Value::EscapedStringLiteral(s.to_string()))
            }
            Token::UnicodeStringLiteral(ref s) => {
                ok_value(Value::UnicodeStringLiteral(s.to_string()))
            }
            Token::HexStringLiteral(ref s) => ok_value(Value::HexStringLiteral(s.to_string())),
            Token::Placeholder(ref s) => ok_value(Value::Placeholder(s.to_string())),
            // `:name`/`:1` and `@name`/`@1` placeholders: the sigil must be
            // immediately followed by a word or an integer (no skipped
            // whitespace), which is re-joined into a single placeholder.
            tok @ Token::Colon | tok @ Token::AtSign => {
                let next_token = self.next_token_no_skip().unwrap_or(&EOF_TOKEN).clone();
                let ident = match next_token.token {
                    Token::Word(w) => Ok(w.into_ident(next_token.span)),
                    Token::Number(w, false) => Ok(Ident::with_span(next_token.span, w)),
                    _ => self.expected("placeholder", next_token),
                }?;
                // The placeholder's span covers both the sigil and the name.
                Ok(Value::Placeholder(format!("{tok}{}", ident.value))
                    .with_span(Span::new(span.start, ident.span.end)))
            }
            unexpected => self.expected(
                "a value",
                TokenWithSpan {
                    token: unexpected,
                    span,
                },
            ),
        }
    }
12497
12498 fn maybe_concat_string_literal(&mut self, mut str: String) -> String {
12499 if self.dialect.supports_string_literal_concatenation() {
12500 while let Token::SingleQuotedString(ref s) | Token::DoubleQuotedString(ref s) =
12501 self.peek_token_ref().token
12502 {
12503 str.push_str(s);
12504 self.advance_token();
12505 }
12506 } else if self
12507 .dialect
12508 .supports_string_literal_concatenation_with_newline()
12509 {
12510 let mut after_newline = false;
12513 loop {
12514 match self.peek_token_no_skip().token {
12515 Token::Whitespace(Whitespace::Newline) => {
12516 after_newline = true;
12517 self.next_token_no_skip();
12518 }
12519 Token::Whitespace(_) => {
12520 self.next_token_no_skip();
12521 }
12522 Token::SingleQuotedString(ref s) | Token::DoubleQuotedString(ref s)
12523 if after_newline =>
12524 {
12525 str.push_str(s.clone().as_str());
12526 self.next_token_no_skip();
12527 after_newline = false;
12528 }
12529 _ => break,
12530 }
12531 }
12532 }
12533
12534 str
12535 }
12536
12537 pub fn parse_number_value(&mut self) -> Result<ValueWithSpan, ParserError> {
12539 let value_wrapper = self.parse_value()?;
12540 match &value_wrapper.value {
12541 Value::Number(_, _) => Ok(value_wrapper),
12542 Value::Placeholder(_) => Ok(value_wrapper),
12543 _ => {
12544 self.prev_token();
12545 self.expected_ref("literal number", self.peek_token_ref())
12546 }
12547 }
12548 }
12549
12550 pub fn parse_number(&mut self) -> Result<Expr, ParserError> {
12553 let next_token = self.next_token();
12554 match next_token.token {
12555 Token::Plus => Ok(Expr::UnaryOp {
12556 op: UnaryOperator::Plus,
12557 expr: Box::new(Expr::Value(self.parse_number_value()?)),
12558 }),
12559 Token::Minus => Ok(Expr::UnaryOp {
12560 op: UnaryOperator::Minus,
12561 expr: Box::new(Expr::Value(self.parse_number_value()?)),
12562 }),
12563 _ => {
12564 self.prev_token();
12565 Ok(Expr::Value(self.parse_number_value()?))
12566 }
12567 }
12568 }
12569
12570 fn parse_introduced_string_expr(&mut self) -> Result<Expr, ParserError> {
12571 let next_token = self.next_token();
12572 let span = next_token.span;
12573 match next_token.token {
12574 Token::SingleQuotedString(ref s) => Ok(Expr::Value(
12575 Value::SingleQuotedString(s.to_string()).with_span(span),
12576 )),
12577 Token::DoubleQuotedString(ref s) => Ok(Expr::Value(
12578 Value::DoubleQuotedString(s.to_string()).with_span(span),
12579 )),
12580 Token::HexStringLiteral(ref s) => Ok(Expr::Value(
12581 Value::HexStringLiteral(s.to_string()).with_span(span),
12582 )),
12583 unexpected => self.expected(
12584 "a string value",
12585 TokenWithSpan {
12586 token: unexpected,
12587 span,
12588 },
12589 ),
12590 }
12591 }
12592
12593 pub fn parse_literal_uint(&mut self) -> Result<u64, ParserError> {
12595 let next_token = self.next_token();
12596 match next_token.token {
12597 Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start),
12598 _ => self.expected("literal int", next_token),
12599 }
12600 }
12601
    /// Parses the string body of `CREATE FUNCTION ... AS 'definition'[, 'link_symbol']`.
    ///
    /// The body is normally a quoted string, but on PostgreSQL (and the
    /// generic dialect) a dollar-quoted string is also accepted. An optional
    /// comma-separated second string is parsed as the link symbol.
    fn parse_create_function_body_string(&mut self) -> Result<CreateFunctionBody, ParserError> {
        let parse_string_expr = |parser: &mut Parser| -> Result<Expr, ParserError> {
            let peek_token = parser.peek_token();
            let span = peek_token.span;
            match peek_token.token {
                // Dollar-quoted bodies ($$...$$) are Postgres-specific.
                Token::DollarQuotedString(s) if dialect_of!(parser is PostgreSqlDialect | GenericDialect) =>
                {
                    parser.next_token();
                    Ok(Expr::Value(Value::DollarQuotedString(s).with_span(span)))
                }
                // Anything else must be an ordinary literal string.
                _ => Ok(Expr::Value(
                    Value::SingleQuotedString(parser.parse_literal_string()?).with_span(span),
                )),
            }
        };

        Ok(CreateFunctionBody::AsBeforeOptions {
            body: parse_string_expr(self)?,
            // A trailing `, '<symbol>'` names the object-file link symbol.
            link_symbol: if self.consume_token(&Token::Comma) {
                Some(parse_string_expr(self)?)
            } else {
                None
            },
        })
    }
12629
12630 pub fn parse_literal_string(&mut self) -> Result<String, ParserError> {
12632 let next_token = self.next_token();
12633 match next_token.token {
12634 Token::Word(Word {
12635 value,
12636 keyword: Keyword::NoKeyword,
12637 ..
12638 }) => Ok(value),
12639 Token::SingleQuotedString(s) => Ok(s),
12640 Token::DoubleQuotedString(s) => Ok(s),
12641 Token::EscapedStringLiteral(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
12642 Ok(s)
12643 }
12644 Token::UnicodeStringLiteral(s) => Ok(s),
12645 _ => self.expected("literal string", next_token),
12646 }
12647 }
12648
12649 pub(crate) fn parse_boolean_string(&mut self) -> Result<bool, ParserError> {
12651 match self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]) {
12652 Some(Keyword::TRUE) => Ok(true),
12653 Some(Keyword::FALSE) => Ok(false),
12654 _ => self.expected_ref("TRUE or FALSE", self.peek_token_ref()),
12655 }
12656 }
12657
    /// Parses the tail of `<expr> IS [NOT] [form] NORMALIZED`, the Unicode
    /// normalization predicate. The optional form is one of NFC/NFD/NFKC/NFKD.
    ///
    /// Errors if NORMALIZED does not terminate the predicate.
    pub fn parse_unicode_is_normalized(&mut self, expr: Expr) -> Result<Expr, ParserError> {
        let neg = self.parse_keyword(Keyword::NOT);
        // The form is optional; maybe_parse backtracks if none is present.
        let normalized_form = self.maybe_parse(|parser| {
            match parser.parse_one_of_keywords(&[
                Keyword::NFC,
                Keyword::NFD,
                Keyword::NFKC,
                Keyword::NFKD,
            ]) {
                Some(Keyword::NFC) => Ok(NormalizationForm::NFC),
                Some(Keyword::NFD) => Ok(NormalizationForm::NFD),
                Some(Keyword::NFKC) => Ok(NormalizationForm::NFKC),
                Some(Keyword::NFKD) => Ok(NormalizationForm::NFKD),
                _ => parser.expected_ref("unicode normalization form", parser.peek_token_ref()),
            }
        })?;
        if self.parse_keyword(Keyword::NORMALIZED) {
            return Ok(Expr::IsNormalized {
                expr: Box::new(expr),
                form: normalized_form,
                negated: neg,
            });
        }
        // NORMALIZED did not follow, so the predicate is malformed.
        self.expected_ref("unicode normalization form", self.peek_token_ref())
    }
12684
12685 pub fn parse_enum_values(&mut self) -> Result<Vec<EnumMember>, ParserError> {
12687 self.expect_token(&Token::LParen)?;
12688 let values = self.parse_comma_separated(|parser| {
12689 let name = parser.parse_literal_string()?;
12690 let e = if parser.consume_token(&Token::Eq) {
12691 let value = parser.parse_number()?;
12692 EnumMember::NamedValue(name, value)
12693 } else {
12694 EnumMember::Name(name)
12695 };
12696 Ok(e)
12697 })?;
12698 self.expect_token(&Token::RParen)?;
12699
12700 Ok(values)
12701 }
12702
12703 pub fn parse_data_type(&mut self) -> Result<DataType, ParserError> {
12705 let (ty, trailing_bracket) = self.parse_data_type_helper()?;
12706 if trailing_bracket.0 {
12707 return parser_err!(
12708 format!("unmatched > after parsing data type {ty}"),
12709 self.peek_token_ref()
12710 );
12711 }
12712
12713 Ok(ty)
12714 }
12715
    /// Parses a data type, additionally reporting whether a trailing `>` was
    /// consumed on behalf of an enclosing generic type (needed because a
    /// `>>` token closes two nested angle-bracketed types at once, e.g. in
    /// `ARRAY<STRUCT<INT>>`).
    fn parse_data_type_helper(
        &mut self,
    ) -> Result<(DataType, MatchedTrailingBracket), ParserError> {
        let dialect = self.dialect;
        self.advance_token();
        let next_token = self.get_current_token();
        let next_token_index = self.get_current_index();

        // Becomes true when a nested type consumes this type's closing `>`.
        let mut trailing_bracket: MatchedTrailingBracket = false.into();
        let mut data = match &next_token.token {
            Token::Word(w) => match w.keyword {
                // Boolean types.
                Keyword::BOOLEAN => Ok(DataType::Boolean),
                Keyword::BOOL => Ok(DataType::Bool),
                // Floating-point types: optional precision/scale, then an
                // optional UNSIGNED suffix.
                Keyword::FLOAT => {
                    let precision = self.parse_exact_number_optional_precision_scale()?;

                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::FloatUnsigned(precision))
                    } else {
                        Ok(DataType::Float(precision))
                    }
                }
                Keyword::REAL => {
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::RealUnsigned)
                    } else {
                        Ok(DataType::Real)
                    }
                }
                Keyword::FLOAT4 => Ok(DataType::Float4),
                Keyword::FLOAT32 => Ok(DataType::Float32),
                Keyword::FLOAT64 => Ok(DataType::Float64),
                Keyword::FLOAT8 => Ok(DataType::Float8),
                Keyword::DOUBLE => {
                    // DOUBLE PRECISION is distinct from DOUBLE(p, s).
                    if self.parse_keyword(Keyword::PRECISION) {
                        if self.parse_keyword(Keyword::UNSIGNED) {
                            Ok(DataType::DoublePrecisionUnsigned)
                        } else {
                            Ok(DataType::DoublePrecision)
                        }
                    } else {
                        let precision = self.parse_exact_number_optional_precision_scale()?;

                        if self.parse_keyword(Keyword::UNSIGNED) {
                            Ok(DataType::DoubleUnsigned(precision))
                        } else {
                            Ok(DataType::Double(precision))
                        }
                    }
                }
                // Integer types: optional display precision, then either an
                // UNSIGNED suffix or (on dialects that allow it) a no-op
                // SIGNED suffix that is consumed and discarded.
                Keyword::TINYINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::TinyIntUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::TinyInt(optional_precision?))
                    }
                }
                Keyword::INT2 => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::Int2Unsigned(optional_precision?))
                    } else {
                        Ok(DataType::Int2(optional_precision?))
                    }
                }
                Keyword::SMALLINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::SmallIntUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::SmallInt(optional_precision?))
                    }
                }
                Keyword::MEDIUMINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::MediumIntUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::MediumInt(optional_precision?))
                    }
                }
                Keyword::INT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::IntUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::Int(optional_precision?))
                    }
                }
                Keyword::INT4 => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::Int4Unsigned(optional_precision?))
                    } else {
                        Ok(DataType::Int4(optional_precision?))
                    }
                }
                Keyword::INT8 => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::Int8Unsigned(optional_precision?))
                    } else {
                        Ok(DataType::Int8(optional_precision?))
                    }
                }
                Keyword::INT16 => Ok(DataType::Int16),
                Keyword::INT32 => Ok(DataType::Int32),
                Keyword::INT64 => Ok(DataType::Int64),
                Keyword::INT128 => Ok(DataType::Int128),
                Keyword::INT256 => Ok(DataType::Int256),
                Keyword::INTEGER => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::IntegerUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::Integer(optional_precision?))
                    }
                }
                Keyword::BIGINT => {
                    let optional_precision = self.parse_optional_precision();
                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::BigIntUnsigned(optional_precision?))
                    } else {
                        if dialect.supports_data_type_signed_suffix() {
                            let _ = self.parse_keyword(Keyword::SIGNED);
                        }
                        Ok(DataType::BigInt(optional_precision?))
                    }
                }
                // Fixed-name integer aliases (DuckDB/ClickHouse style).
                Keyword::HUGEINT => Ok(DataType::HugeInt),
                Keyword::UBIGINT => Ok(DataType::UBigInt),
                Keyword::UHUGEINT => Ok(DataType::UHugeInt),
                Keyword::USMALLINT => Ok(DataType::USmallInt),
                Keyword::UTINYINT => Ok(DataType::UTinyInt),
                Keyword::UINT8 => Ok(DataType::UInt8),
                Keyword::UINT16 => Ok(DataType::UInt16),
                Keyword::UINT32 => Ok(DataType::UInt32),
                Keyword::UINT64 => Ok(DataType::UInt64),
                Keyword::UINT128 => Ok(DataType::UInt128),
                Keyword::UINT256 => Ok(DataType::UInt256),
                // Character types.
                Keyword::VARCHAR => Ok(DataType::Varchar(self.parse_optional_character_length()?)),
                Keyword::NVARCHAR => {
                    Ok(DataType::Nvarchar(self.parse_optional_character_length()?))
                }
                Keyword::CHARACTER => {
                    if self.parse_keyword(Keyword::VARYING) {
                        Ok(DataType::CharacterVarying(
                            self.parse_optional_character_length()?,
                        ))
                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
                        Ok(DataType::CharacterLargeObject(
                            self.parse_optional_precision()?,
                        ))
                    } else {
                        Ok(DataType::Character(self.parse_optional_character_length()?))
                    }
                }
                Keyword::CHAR => {
                    if self.parse_keyword(Keyword::VARYING) {
                        Ok(DataType::CharVarying(
                            self.parse_optional_character_length()?,
                        ))
                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
                        Ok(DataType::CharLargeObject(self.parse_optional_precision()?))
                    } else {
                        Ok(DataType::Char(self.parse_optional_character_length()?))
                    }
                }
                Keyword::CLOB => Ok(DataType::Clob(self.parse_optional_precision()?)),
                // Binary types.
                Keyword::BINARY => Ok(DataType::Binary(self.parse_optional_precision()?)),
                Keyword::VARBINARY => Ok(DataType::Varbinary(self.parse_optional_binary_length()?)),
                Keyword::BLOB => Ok(DataType::Blob(self.parse_optional_precision()?)),
                Keyword::TINYBLOB => Ok(DataType::TinyBlob),
                Keyword::MEDIUMBLOB => Ok(DataType::MediumBlob),
                Keyword::LONGBLOB => Ok(DataType::LongBlob),
                Keyword::BYTES => Ok(DataType::Bytes(self.parse_optional_precision()?)),
                Keyword::BIT => {
                    if self.parse_keyword(Keyword::VARYING) {
                        Ok(DataType::BitVarying(self.parse_optional_precision()?))
                    } else {
                        Ok(DataType::Bit(self.parse_optional_precision()?))
                    }
                }
                Keyword::VARBIT => Ok(DataType::VarBit(self.parse_optional_precision()?)),
                Keyword::UUID => Ok(DataType::Uuid),
                // Date/time types.
                Keyword::DATE => Ok(DataType::Date),
                Keyword::DATE32 => Ok(DataType::Date32),
                Keyword::DATETIME => Ok(DataType::Datetime(self.parse_optional_precision()?)),
                Keyword::DATETIME64 => {
                    // parse_datetime_64 re-reads the keyword, so rewind first.
                    self.prev_token();
                    let (precision, time_zone) = self.parse_datetime_64()?;
                    Ok(DataType::Datetime64(precision, time_zone))
                }
                Keyword::TIMESTAMP => {
                    let precision = self.parse_optional_precision()?;
                    // Optional WITH/WITHOUT TIME ZONE qualifier.
                    let tz = if self.parse_keyword(Keyword::WITH) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithTimeZone
                    } else if self.parse_keyword(Keyword::WITHOUT) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithoutTimeZone
                    } else {
                        TimezoneInfo::None
                    };
                    Ok(DataType::Timestamp(precision, tz))
                }
                Keyword::TIMESTAMPTZ => Ok(DataType::Timestamp(
                    self.parse_optional_precision()?,
                    TimezoneInfo::Tz,
                )),
                Keyword::TIMESTAMP_NTZ => {
                    Ok(DataType::TimestampNtz(self.parse_optional_precision()?))
                }
                Keyword::TIME => {
                    let precision = self.parse_optional_precision()?;
                    // Optional WITH/WITHOUT TIME ZONE qualifier.
                    let tz = if self.parse_keyword(Keyword::WITH) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithTimeZone
                    } else if self.parse_keyword(Keyword::WITHOUT) {
                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
                        TimezoneInfo::WithoutTimeZone
                    } else {
                        TimezoneInfo::None
                    };
                    Ok(DataType::Time(precision, tz))
                }
                Keyword::TIMETZ => Ok(DataType::Time(
                    self.parse_optional_precision()?,
                    TimezoneInfo::Tz,
                )),
                Keyword::INTERVAL => {
                    // Interval field/precision options are dialect-gated.
                    if self.dialect.supports_interval_options() {
                        let fields = self.maybe_parse_optional_interval_fields()?;
                        let precision = self.parse_optional_precision()?;
                        Ok(DataType::Interval { fields, precision })
                    } else {
                        Ok(DataType::Interval {
                            fields: None,
                            precision: None,
                        })
                    }
                }
                Keyword::JSON => Ok(DataType::JSON),
                Keyword::JSONB => Ok(DataType::JSONB),
                Keyword::REGCLASS => Ok(DataType::Regclass),
                // Text types.
                Keyword::STRING => Ok(DataType::String(self.parse_optional_precision()?)),
                Keyword::FIXEDSTRING => {
                    // FixedString requires an explicit (N) length.
                    self.expect_token(&Token::LParen)?;
                    let character_length = self.parse_literal_uint()?;
                    self.expect_token(&Token::RParen)?;
                    Ok(DataType::FixedString(character_length))
                }
                Keyword::TEXT => Ok(DataType::Text),
                Keyword::TINYTEXT => Ok(DataType::TinyText),
                Keyword::MEDIUMTEXT => Ok(DataType::MediumText),
                Keyword::LONGTEXT => Ok(DataType::LongText),
                Keyword::BYTEA => Ok(DataType::Bytea),
                // Exact numeric types.
                Keyword::NUMERIC => Ok(DataType::Numeric(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                Keyword::DECIMAL => {
                    let precision = self.parse_exact_number_optional_precision_scale()?;

                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::DecimalUnsigned(precision))
                    } else {
                        Ok(DataType::Decimal(precision))
                    }
                }
                Keyword::DEC => {
                    let precision = self.parse_exact_number_optional_precision_scale()?;

                    if self.parse_keyword(Keyword::UNSIGNED) {
                        Ok(DataType::DecUnsigned(precision))
                    } else {
                        Ok(DataType::Dec(precision))
                    }
                }
                Keyword::BIGNUMERIC => Ok(DataType::BigNumeric(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                Keyword::BIGDECIMAL => Ok(DataType::BigDecimal(
                    self.parse_exact_number_optional_precision_scale()?,
                )),
                // Enum and set types.
                Keyword::ENUM => Ok(DataType::Enum(self.parse_enum_values()?, None)),
                Keyword::ENUM8 => Ok(DataType::Enum(self.parse_enum_values()?, Some(8))),
                Keyword::ENUM16 => Ok(DataType::Enum(self.parse_enum_values()?, Some(16))),
                Keyword::SET => Ok(DataType::Set(self.parse_string_values()?)),
                // Composite/container types.
                Keyword::ARRAY => {
                    if self.dialect.supports_array_typedef_without_element_type() {
                        // Bare ARRAY with no element type.
                        Ok(DataType::Array(ArrayElemTypeDef::None))
                    } else if dialect_of!(self is ClickHouseDialect) {
                        // ClickHouse uses Array(T) with parentheses.
                        Ok(self.parse_sub_type(|internal_type| {
                            DataType::Array(ArrayElemTypeDef::Parenthesis(internal_type))
                        })?)
                    } else {
                        // ARRAY<T>: the nested type may have consumed our `>`
                        // as half of a `>>`; account for that here.
                        self.expect_token(&Token::Lt)?;
                        let (inside_type, _trailing_bracket) = self.parse_data_type_helper()?;
                        trailing_bracket = self.expect_closing_angle_bracket(_trailing_bracket)?;
                        Ok(DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
                            inside_type,
                        ))))
                    }
                }
                // DuckDB STRUCT(name type, ...) with parentheses.
                Keyword::STRUCT if dialect_is!(dialect is DuckDbDialect) => {
                    self.prev_token();
                    let field_defs = self.parse_duckdb_struct_type_def()?;
                    Ok(DataType::Struct(field_defs, StructBracketKind::Parentheses))
                }
                // BigQuery-style STRUCT<name type, ...> with angle brackets.
                Keyword::STRUCT if dialect_is!(dialect is BigQueryDialect | DatabricksDialect | GenericDialect) =>
                {
                    self.prev_token();
                    let (field_defs, _trailing_bracket) =
                        self.parse_struct_type_def(Self::parse_struct_field_def)?;
                    trailing_bracket = _trailing_bracket;
                    Ok(DataType::Struct(
                        field_defs,
                        StructBracketKind::AngleBrackets,
                    ))
                }
                Keyword::UNION if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
                    self.prev_token();
                    let fields = self.parse_union_type_def()?;
                    Ok(DataType::Union(fields))
                }
                Keyword::NULLABLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    Ok(self.parse_sub_type(DataType::Nullable)?)
                }
                Keyword::LOWCARDINALITY if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    Ok(self.parse_sub_type(DataType::LowCardinality)?)
                }
                Keyword::MAP if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    self.prev_token();
                    let (key_data_type, value_data_type) = self.parse_click_house_map_def()?;
                    Ok(DataType::Map(
                        Box::new(key_data_type),
                        Box::new(value_data_type),
                    ))
                }
                Keyword::NESTED if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    self.expect_token(&Token::LParen)?;
                    let field_defs = self.parse_comma_separated(Parser::parse_column_def)?;
                    self.expect_token(&Token::RParen)?;
                    Ok(DataType::Nested(field_defs))
                }
                Keyword::TUPLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
                    self.prev_token();
                    let field_defs = self.parse_click_house_tuple_def()?;
                    Ok(DataType::Tuple(field_defs))
                }
                Keyword::TRIGGER => Ok(DataType::Trigger),
                // SETOF <type> (Postgres function return type).
                Keyword::SETOF => {
                    let inner = self.parse_data_type()?;
                    Ok(DataType::SetOf(Box::new(inner)))
                }
                Keyword::ANY if self.peek_keyword(Keyword::TYPE) => {
                    let _ = self.parse_keyword(Keyword::TYPE);
                    Ok(DataType::AnyType)
                }
                // TABLE or TABLE (col type, ...) as a return type.
                Keyword::TABLE => {
                    if self.peek_token_ref().token == Token::LParen {
                        let columns = self.parse_returns_table_columns()?;
                        Ok(DataType::Table(Some(columns)))
                    } else {
                        Ok(DataType::Table(None))
                    }
                }
                Keyword::SIGNED => {
                    if self.parse_keyword(Keyword::INTEGER) {
                        Ok(DataType::SignedInteger)
                    } else {
                        Ok(DataType::Signed)
                    }
                }
                Keyword::UNSIGNED => {
                    if self.parse_keyword(Keyword::INTEGER) {
                        Ok(DataType::UnsignedInteger)
                    } else {
                        Ok(DataType::Unsigned)
                    }
                }
                Keyword::TSVECTOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
                    Ok(DataType::TsVector)
                }
                Keyword::TSQUERY if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
                    Ok(DataType::TsQuery)
                }
                // Fallback: any other word is treated as a custom (possibly
                // qualified) type name with optional modifiers.
                _ => {
                    self.prev_token();
                    let type_name = self.parse_object_name(false)?;
                    if let Some(modifiers) = self.parse_optional_type_modifiers()? {
                        Ok(DataType::Custom(type_name, modifiers))
                    } else {
                        Ok(DataType::Custom(type_name, vec![]))
                    }
                }
            },
            _ => self.expected_at("a data type name", next_token_index),
        }?;

        // Postgres-style `[]` suffixes, optionally sized and repeatable for
        // multidimensional arrays; each wraps the type parsed so far.
        if self.dialect.supports_array_typedef_with_brackets() {
            while self.consume_token(&Token::LBracket) {
                let size = self.maybe_parse(|p| p.parse_literal_uint())?;
                self.expect_token(&Token::RBracket)?;
                data = DataType::Array(ArrayElemTypeDef::SquareBracket(Box::new(data), size))
            }
        }
        Ok((data, trailing_bracket))
    }
13144
    /// Parses a single column definition inside `RETURNS TABLE (...)`;
    /// currently identical to a regular column definition.
    fn parse_returns_table_column(&mut self) -> Result<ColumnDef, ParserError> {
        self.parse_column_def()
    }
13148
13149 fn parse_returns_table_columns(&mut self) -> Result<Vec<ColumnDef>, ParserError> {
13150 self.expect_token(&Token::LParen)?;
13151 let columns = self.parse_comma_separated(Parser::parse_returns_table_column)?;
13152 self.expect_token(&Token::RParen)?;
13153 Ok(columns)
13154 }
13155
13156 pub fn parse_string_values(&mut self) -> Result<Vec<String>, ParserError> {
13158 self.expect_token(&Token::LParen)?;
13159 let mut values = Vec::new();
13160 loop {
13161 let next_token = self.next_token();
13162 match next_token.token {
13163 Token::SingleQuotedString(value) => values.push(value),
13164 _ => self.expected("a string", next_token)?,
13165 }
13166 let next_token = self.next_token();
13167 match next_token.token {
13168 Token::Comma => (),
13169 Token::RParen => break,
13170 _ => self.expected(", or }", next_token)?,
13171 }
13172 }
13173 Ok(values)
13174 }
13175
13176 pub fn parse_identifier_with_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
13178 let ident = self.parse_identifier()?;
13179 self.expect_keyword_is(Keyword::AS)?;
13180 let alias = self.parse_identifier()?;
13181 Ok(IdentWithAlias { ident, alias })
13182 }
13183
13184 fn parse_identifier_with_optional_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
13186 let ident = self.parse_identifier()?;
13187 let _after_as = self.parse_keyword(Keyword::AS);
13188 let alias = self.parse_identifier()?;
13189 Ok(IdentWithAlias { ident, alias })
13190 }
13191
13192 fn parse_pipe_operator_queries(&mut self) -> Result<Vec<Query>, ParserError> {
13194 self.parse_comma_separated(|parser| {
13195 parser.expect_token(&Token::LParen)?;
13196 let query = parser.parse_query()?;
13197 parser.expect_token(&Token::RParen)?;
13198 Ok(*query)
13199 })
13200 }
13201
    /// Parses the set quantifier for a pipe operator that requires DISTINCT,
    /// erroring (with `operator_name` in the message) on any other quantifier.
    fn parse_distinct_required_set_quantifier(
        &mut self,
        operator_name: &str,
    ) -> Result<SetQuantifier, ParserError> {
        // NOTE(review): the quantifier is parsed as if for INTERSECT
        // regardless of `operator_name` — presumably the accepted quantifier
        // keywords are identical for all pipe set operators; confirm.
        let quantifier = self.parse_set_quantifier(&Some(SetOperator::Intersect));
        match quantifier {
            SetQuantifier::Distinct | SetQuantifier::DistinctByName => Ok(quantifier),
            _ => Err(ParserError::ParserError(format!(
                "{operator_name} pipe operator requires DISTINCT modifier",
            ))),
        }
    }
13215
13216 fn parse_identifier_optional_alias(&mut self) -> Result<Option<Ident>, ParserError> {
13218 if self.parse_keyword(Keyword::AS) {
13219 Ok(Some(self.parse_identifier()?))
13220 } else {
13221 self.maybe_parse(|parser| parser.parse_identifier())
13223 }
13224 }
13225
13226 fn maybe_parse_select_item_alias(&mut self) -> Result<Option<Ident>, ParserError> {
13228 fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
13229 parser.dialect.is_select_item_alias(explicit, kw, parser)
13230 }
13231 self.parse_optional_alias_inner(None, validator)
13232 }
13233
    /// Parses an optional table alias with optional column aliases, e.g.
    /// `FROM t AS a (c1, c2)`, recording whether an explicit AS was used.
    pub fn maybe_parse_table_alias(&mut self) -> Result<Option<TableAlias>, ParserError> {
        // The dialect decides whether a keyword can act as a table alias.
        fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
            parser.dialect.is_table_factor_alias(explicit, kw, parser)
        }
        // Peek before consuming so `explicit` reflects the upcoming AS token.
        let explicit = self.peek_keyword(Keyword::AS);
        match self.parse_optional_alias_inner(None, validator)? {
            Some(name) => {
                // Optional parenthesized column aliases follow the name.
                let columns = self.parse_table_alias_column_defs()?;
                Ok(Some(TableAlias {
                    explicit,
                    name,
                    columns,
                }))
            }
            None => Ok(None),
        }
    }
13254
    /// Parses zero or more MySQL-style table index hints:
    /// `USE|IGNORE|FORCE INDEX|KEY [FOR JOIN|ORDER BY|GROUP BY] (idx, ...)`.
    fn parse_table_index_hints(&mut self) -> Result<Vec<TableIndexHints>, ParserError> {
        let mut hints = vec![];
        // Each hint starts with one of USE / IGNORE / FORCE.
        while let Some(hint_type) =
            self.parse_one_of_keywords(&[Keyword::USE, Keyword::IGNORE, Keyword::FORCE])
        {
            let hint_type = match hint_type {
                Keyword::USE => TableIndexHintType::Use,
                Keyword::IGNORE => TableIndexHintType::Ignore,
                Keyword::FORCE => TableIndexHintType::Force,
                // Unreachable: parse_one_of_keywords only returns the above.
                _ => {
                    return self.expected_ref(
                        "expected to match USE/IGNORE/FORCE keyword",
                        self.peek_token_ref(),
                    )
                }
            };
            // INDEX and KEY are interchangeable in MySQL; both are required
            // to be present here.
            let index_type = match self.parse_one_of_keywords(&[Keyword::INDEX, Keyword::KEY]) {
                Some(Keyword::INDEX) => TableIndexType::Index,
                Some(Keyword::KEY) => TableIndexType::Key,
                _ => {
                    return self
                        .expected_ref("expected to match INDEX/KEY keyword", self.peek_token_ref())
                }
            };
            // Optional `FOR JOIN | ORDER BY | GROUP BY` scope clause.
            let for_clause = if self.parse_keyword(Keyword::FOR) {
                let clause = if self.parse_keyword(Keyword::JOIN) {
                    TableIndexHintForClause::Join
                } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                    TableIndexHintForClause::OrderBy
                } else if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
                    TableIndexHintForClause::GroupBy
                } else {
                    return self.expected_ref(
                        "expected to match FOR/ORDER BY/GROUP BY table hint in for clause",
                        self.peek_token_ref(),
                    );
                };
                Some(clause)
            } else {
                None
            };

            // The index-name list is parenthesized and may be empty.
            self.expect_token(&Token::LParen)?;
            let index_names = if self.peek_token_ref().token != Token::RParen {
                self.parse_comma_separated(Parser::parse_identifier)?
            } else {
                vec![]
            };
            self.expect_token(&Token::RParen)?;
            hints.push(TableIndexHints {
                hint_type,
                index_type,
                for_clause,
                index_names,
            });
        }
        Ok(hints)
    }
13313
13314 pub fn parse_optional_alias(
13318 &mut self,
13319 reserved_kwds: &[Keyword],
13320 ) -> Result<Option<Ident>, ParserError> {
13321 fn validator(_explicit: bool, _kw: &Keyword, _parser: &mut Parser) -> bool {
13322 false
13323 }
13324 self.parse_optional_alias_inner(Some(reserved_kwds), validator)
13325 }
13326
    /// Shared implementation for optional-alias parsing.
    ///
    /// A word token is accepted as an alias via either of two paths:
    /// - `reserved_kwds` is provided and either an explicit AS was seen or
    ///   the word's keyword is not in the reserved list;
    /// - the dialect-supplied `validator` approves the keyword.
    ///
    /// Quoted strings are always accepted. On no match the token is pushed
    /// back and `None` is returned — unless AS was already consumed, in
    /// which case a missing identifier is an error.
    fn parse_optional_alias_inner<F>(
        &mut self,
        reserved_kwds: Option<&[Keyword]>,
        validator: F,
    ) -> Result<Option<Ident>, ParserError>
    where
        F: Fn(bool, &Keyword, &mut Parser) -> bool,
    {
        let after_as = self.parse_keyword(Keyword::AS);

        let next_token = self.next_token();
        match next_token.token {
            // Reserved-keyword mode: after AS any word qualifies; otherwise
            // only words whose keyword is not reserved.
            Token::Word(w)
                if reserved_kwds.is_some()
                    && (after_as || reserved_kwds.is_some_and(|x| !x.contains(&w.keyword))) =>
            {
                Ok(Some(w.into_ident(next_token.span)))
            }
            // Dialect-validator mode.
            Token::Word(w) if validator(after_as, &w.keyword, self) => {
                Ok(Some(w.into_ident(next_token.span)))
            }
            Token::SingleQuotedString(s) => Ok(Some(Ident::with_quote('\'', s))),
            Token::DoubleQuotedString(s) => Ok(Some(Ident::with_quote('\"', s))),
            _ => {
                if after_as {
                    // AS was consumed, so an alias identifier is mandatory.
                    return self.expected("an identifier after AS", next_token);
                }
                // Not an alias: rewind and report that none was found.
                self.prev_token();
                Ok(None)
            }
        }
    }
13371
    /// Parses an optional `GROUP BY` clause, including `GROUP BY ALL`,
    /// dialect-gated `WITH ROLLUP/CUBE/TOTALS` modifiers, and
    /// `GROUPING SETS (...)`. Returns `None` if GROUP BY is absent.
    pub fn parse_optional_group_by(&mut self) -> Result<Option<GroupByExpr>, ParserError> {
        if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
            // `GROUP BY ALL` carries no explicit expression list.
            let expressions = if self.parse_keyword(Keyword::ALL) {
                None
            } else {
                Some(self.parse_comma_separated(Parser::parse_group_by_expr)?)
            };

            let mut modifiers = vec![];
            if self.dialect.supports_group_by_with_modifier() {
                // Any number of `WITH ROLLUP|CUBE|TOTALS` modifiers may follow.
                loop {
                    if !self.parse_keyword(Keyword::WITH) {
                        break;
                    }
                    let keyword = self.expect_one_of_keywords(&[
                        Keyword::ROLLUP,
                        Keyword::CUBE,
                        Keyword::TOTALS,
                    ])?;
                    modifiers.push(match keyword {
                        Keyword::ROLLUP => GroupByWithModifier::Rollup,
                        Keyword::CUBE => GroupByWithModifier::Cube,
                        Keyword::TOTALS => GroupByWithModifier::Totals,
                        // Unreachable: expect_one_of_keywords only returns
                        // one of the three keywords above.
                        _ => {
                            return parser_err!(
                                "BUG: expected to match GroupBy modifier keyword",
                                self.peek_token_ref().span.start
                            )
                        }
                    });
                }
            }
            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
                self.expect_token(&Token::LParen)?;
                // Each grouping set is either a parenthesized tuple or a
                // single bare expression.
                let result = self.parse_comma_separated(|p| {
                    if p.peek_token_ref().token == Token::LParen {
                        p.parse_tuple(true, true)
                    } else {
                        Ok(vec![p.parse_expr()?])
                    }
                })?;
                self.expect_token(&Token::RParen)?;
                modifiers.push(GroupByWithModifier::GroupingSets(Expr::GroupingSets(
                    result,
                )));
            };
            let group_by = match expressions {
                None => GroupByExpr::All(modifiers),
                Some(exprs) => GroupByExpr::Expressions(exprs, modifiers),
            };
            Ok(Some(group_by))
        } else {
            Ok(None)
        }
    }
13428
    /// Parses an optional `ORDER BY` clause. Supports `ORDER BY ALL` on
    /// dialects that allow it, and ClickHouse-style INTERPOLATE options on
    /// dialects that support interpolation. Returns `None` if absent.
    pub fn parse_optional_order_by(&mut self) -> Result<Option<OrderBy>, ParserError> {
        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            let order_by =
                // `ORDER BY ALL` (with optional ASC/DESC/NULLS options).
                if self.dialect.supports_order_by_all() && self.parse_keyword(Keyword::ALL) {
                    let order_by_options = self.parse_order_by_options()?;
                    OrderBy {
                        kind: OrderByKind::All(order_by_options),
                        interpolate: None,
                    }
                } else {
                    // Ordinary expression list, optionally followed by an
                    // INTERPOLATE clause on supporting dialects.
                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
                    let interpolate = if self.dialect.supports_interpolate() {
                        self.parse_interpolations()?
                    } else {
                        None
                    };
                    OrderBy {
                        kind: OrderByKind::Expressions(exprs),
                        interpolate,
                    }
                };
            Ok(Some(order_by))
        } else {
            Ok(None)
        }
    }
13456
13457 fn parse_optional_limit_clause(&mut self) -> Result<Option<LimitClause>, ParserError> {
13458 let mut offset = if self.parse_keyword(Keyword::OFFSET) {
13459 Some(self.parse_offset()?)
13460 } else {
13461 None
13462 };
13463
13464 let (limit, limit_by) = if self.parse_keyword(Keyword::LIMIT) {
13465 let expr = self.parse_limit()?;
13466
13467 if self.dialect.supports_limit_comma()
13468 && offset.is_none()
13469 && expr.is_some() && self.consume_token(&Token::Comma)
13471 {
13472 let offset = expr.ok_or_else(|| {
13473 ParserError::ParserError(
13474 "Missing offset for LIMIT <offset>, <limit>".to_string(),
13475 )
13476 })?;
13477 return Ok(Some(LimitClause::OffsetCommaLimit {
13478 offset,
13479 limit: self.parse_expr()?,
13480 }));
13481 }
13482
13483 let limit_by = if self.dialect.supports_limit_by() && self.parse_keyword(Keyword::BY) {
13484 Some(self.parse_comma_separated(Parser::parse_expr)?)
13485 } else {
13486 None
13487 };
13488
13489 (Some(expr), limit_by)
13490 } else {
13491 (None, None)
13492 };
13493
13494 if offset.is_none() && limit.is_some() && self.parse_keyword(Keyword::OFFSET) {
13495 offset = Some(self.parse_offset()?);
13496 }
13497
13498 if offset.is_some() || (limit.is_some() && limit != Some(None)) || limit_by.is_some() {
13499 Ok(Some(LimitClause::LimitOffset {
13500 limit: limit.unwrap_or_default(),
13501 offset,
13502 limit_by: limit_by.unwrap_or_default(),
13503 }))
13504 } else {
13505 Ok(None)
13506 }
13507 }
13508
13509 pub fn parse_table_object(&mut self) -> Result<TableObject, ParserError> {
13512 if self.dialect.supports_insert_table_function() && self.parse_keyword(Keyword::FUNCTION) {
13513 let fn_name = self.parse_object_name(false)?;
13514 self.parse_function_call(fn_name)
13515 .map(TableObject::TableFunction)
13516 } else if self.dialect.supports_insert_table_query() && self.peek_subquery_or_cte_start() {
13517 self.parse_parenthesized(|p| p.parse_query())
13518 .map(TableObject::TableQuery)
13519 } else {
13520 self.parse_object_name(false).map(TableObject::TableName)
13521 }
13522 }
13523
    /// Parses a possibly-qualified, possibly-quoted object name such as
    /// `foo` or `myschema."table"`.
    ///
    /// `in_table_clause` indicates whether the name appears in a table
    /// clause, which enables BigQuery's unquoted hyphenated identifiers
    /// (see [`Self::parse_object_name_inner`]). Wildcard parts are not
    /// allowed through this entry point.
    pub fn parse_object_name(&mut self, in_table_clause: bool) -> Result<ObjectName, ParserError> {
        self.parse_object_name_inner(in_table_clause, false)
    }
13533
    /// Core implementation behind [`Self::parse_object_name`].
    ///
    /// * `in_table_clause` — enables BigQuery's unquoted hyphenated
    ///   identifiers (e.g. `my-project.dataset.table`).
    /// * `allow_wildcards` — permits `*` as a name part.
    fn parse_object_name_inner(
        &mut self,
        in_table_clause: bool,
        allow_wildcards: bool,
    ) -> Result<ObjectName, ParserError> {
        let mut parts = vec![];
        if dialect_of!(self is BigQueryDialect) && in_table_clause {
            loop {
                // A hyphenated identifier may already have consumed the
                // trailing period — hence the `end_with_period` flag.
                let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
                parts.push(ObjectNamePart::Identifier(ident));
                if !self.consume_token(&Token::Period) && !end_with_period {
                    break;
                }
            }
        } else {
            loop {
                if allow_wildcards && self.peek_token_ref().token == Token::Mul {
                    let span = self.next_token().span;
                    parts.push(ObjectNamePart::Identifier(Ident {
                        value: Token::Mul.to_string(),
                        quote_style: None,
                        span,
                    }));
                } else if dialect_of!(self is BigQueryDialect) && in_table_clause {
                    let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
                    parts.push(ObjectNamePart::Identifier(ident));
                    if !self.consume_token(&Token::Period) && !end_with_period {
                        break;
                    }
                } else if self.dialect.supports_object_name_double_dot_notation()
                    && parts.len() == 1
                    && matches!(self.peek_token_ref().token, Token::Period)
                {
                    // Double-dot notation (`db..table`): an empty identifier
                    // stands in for the elided middle part.
                    parts.push(ObjectNamePart::Identifier(Ident::new("")));
                } else {
                    let ident = self.parse_identifier()?;
                    // Some dialects allow a function call to generate a name
                    // part; the dialect decides based on the identifier.
                    let part = if self
                        .dialect
                        .is_identifier_generating_function_name(&ident, &parts)
                    {
                        self.expect_token(&Token::LParen)?;
                        let args: Vec<FunctionArg> =
                            self.parse_comma_separated0(Self::parse_function_args, Token::RParen)?;
                        self.expect_token(&Token::RParen)?;
                        ObjectNamePart::Function(ObjectNamePartFunction { name: ident, args })
                    } else {
                        ObjectNamePart::Identifier(ident)
                    };
                    parts.push(part);
                }

                if !self.consume_token(&Token::Period) {
                    break;
                }
            }
        }

        // BigQuery allows a single quoted part to contain the whole dotted
        // path (e.g. `"project.dataset.table"`); split such parts on `.` so
        // each component becomes its own identifier.
        if dialect_of!(self is BigQueryDialect)
            && parts.iter().any(|part| {
                part.as_ident()
                    .is_some_and(|ident| ident.value.contains('.'))
            })
        {
            parts = parts
                .into_iter()
                .flat_map(|part| match part.as_ident() {
                    Some(ident) => ident
                        .value
                        .split('.')
                        .map(|value| {
                            ObjectNamePart::Identifier(Ident {
                                value: value.into(),
                                quote_style: ident.quote_style,
                                span: ident.span,
                            })
                        })
                        .collect::<Vec<_>>(),
                    None => vec![part],
                })
                .collect()
        }

        Ok(ObjectName(parts))
    }
13630
13631 pub fn parse_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
13633 let mut idents = vec![];
13634 loop {
13635 let token = self.peek_token_ref();
13636 match &token.token {
13637 Token::Word(w) => {
13638 idents.push(w.to_ident(token.span));
13639 }
13640 Token::EOF | Token::Eq | Token::SemiColon | Token::VerticalBarRightAngleBracket => {
13641 break
13642 }
13643 _ => {}
13644 }
13645 self.advance_token();
13646 }
13647 Ok(idents)
13648 }
13649
13650 pub fn parse_multipart_identifier(&mut self) -> Result<Vec<Ident>, ParserError> {
13690 let mut idents = vec![];
13691
13692 let next_token = self.next_token();
13694 match next_token.token {
13695 Token::Word(w) => idents.push(w.into_ident(next_token.span)),
13696 Token::EOF => {
13697 return Err(ParserError::ParserError(
13698 "Empty input when parsing identifier".to_string(),
13699 ))?
13700 }
13701 token => {
13702 return Err(ParserError::ParserError(format!(
13703 "Unexpected token in identifier: {token}"
13704 )))?
13705 }
13706 };
13707
13708 loop {
13710 match self.next_token().token {
13711 Token::Period => {
13713 let next_token = self.next_token();
13714 match next_token.token {
13715 Token::Word(w) => idents.push(w.into_ident(next_token.span)),
13716 Token::EOF => {
13717 return Err(ParserError::ParserError(
13718 "Trailing period in identifier".to_string(),
13719 ))?
13720 }
13721 token => {
13722 return Err(ParserError::ParserError(format!(
13723 "Unexpected token following period in identifier: {token}"
13724 )))?
13725 }
13726 }
13727 }
13728 Token::EOF => break,
13729 token => {
13730 return Err(ParserError::ParserError(format!(
13731 "Unexpected token in identifier: {token}"
13732 )))?;
13733 }
13734 }
13735 }
13736
13737 Ok(idents)
13738 }
13739
13740 pub fn parse_identifier(&mut self) -> Result<Ident, ParserError> {
13742 let next_token = self.next_token();
13743 match next_token.token {
13744 Token::Word(w) => Ok(w.into_ident(next_token.span)),
13745 Token::SingleQuotedString(s) => Ok(Ident::with_quote('\'', s)),
13746 Token::DoubleQuotedString(s) => Ok(Ident::with_quote('\"', s)),
13747 _ => self.expected("identifier", next_token),
13748 }
13749 }
13750
    /// Parses a BigQuery-style unquoted identifier that may contain hyphens,
    /// e.g. `my-project`.
    ///
    /// Returns the identifier plus a flag indicating whether a trailing
    /// period was already consumed (this happens when the tokenizer merged a
    /// trailing `.` into a number token like `123.`).
    fn parse_unquoted_hyphenated_identifier(&mut self) -> Result<(Ident, bool), ParserError> {
        match self.peek_token().token {
            Token::Word(w) => {
                let quote_style_is_none = w.quote_style.is_none();
                let mut requires_whitespace = false;
                let mut ident = w.into_ident(self.next_token().span);
                // Only unquoted words may absorb hyphenated continuations.
                if quote_style_is_none {
                    while matches!(self.peek_token_no_skip().token, Token::Minus) {
                        self.next_token();
                        ident.value.push('-');

                        let token = self
                            .next_token_no_skip()
                            .cloned()
                            .unwrap_or(TokenWithSpan::wrap(Token::EOF));
                        requires_whitespace = match token.token {
                            Token::Word(next_word) if next_word.quote_style.is_none() => {
                                ident.value.push_str(&next_word.value);
                                false
                            }
                            Token::Number(s, false) => {
                                // `123.` means the tokenizer fused the period
                                // into the number: keep only the digit run and
                                // report the period as consumed.
                                if s.ends_with('.') {
                                    let Some(s) = s.split('.').next().filter(|s| {
                                        !s.is_empty() && s.chars().all(|c| c.is_ascii_digit())
                                    }) else {
                                        return self.expected(
                                            "continuation of hyphenated identifier",
                                            TokenWithSpan::new(Token::Number(s, false), token.span),
                                        );
                                    };
                                    ident.value.push_str(s);
                                    return Ok((ident, true));
                                } else {
                                    ident.value.push_str(&s);
                                }
                                // A trailing digit run must be followed by
                                // whitespace unless a period continues the name.
                                !matches!(self.peek_token_ref().token, Token::Period)
                            }
                            _ => {
                                return self
                                    .expected("continuation of hyphenated identifier", token);
                            }
                        }
                    }

                    if requires_whitespace {
                        let token = self.next_token();
                        if !matches!(token.token, Token::EOF | Token::Whitespace(_)) {
                            return self
                                .expected("whitespace following hyphenated identifier", token);
                        }
                    }
                }
                Ok((ident, false))
            }
            _ => Ok((self.parse_identifier()?, false)),
        }
    }
13828
13829 fn parse_view_columns(&mut self) -> Result<Vec<ViewColumnDef>, ParserError> {
13831 if self.consume_token(&Token::LParen) {
13832 if self.peek_token_ref().token == Token::RParen {
13833 self.next_token();
13834 Ok(vec![])
13835 } else {
13836 let cols = self.parse_comma_separated_with_trailing_commas(
13837 Parser::parse_view_column,
13838 self.dialect.supports_column_definition_trailing_commas(),
13839 Self::is_reserved_for_column_alias,
13840 )?;
13841 self.expect_token(&Token::RParen)?;
13842 Ok(cols)
13843 }
13844 } else {
13845 Ok(vec![])
13846 }
13847 }
13848
13849 fn parse_view_column(&mut self) -> Result<ViewColumnDef, ParserError> {
13851 let name = self.parse_identifier()?;
13852 let options = self.parse_view_column_options()?;
13853 let data_type = if dialect_of!(self is ClickHouseDialect) {
13854 Some(self.parse_data_type()?)
13855 } else {
13856 None
13857 };
13858 Ok(ViewColumnDef {
13859 name,
13860 data_type,
13861 options,
13862 })
13863 }
13864
13865 fn parse_view_column_options(&mut self) -> Result<Option<ColumnOptions>, ParserError> {
13866 let mut options = Vec::new();
13867 loop {
13868 let option = self.parse_optional_column_option()?;
13869 if let Some(option) = option {
13870 options.push(option);
13871 } else {
13872 break;
13873 }
13874 }
13875 if options.is_empty() {
13876 Ok(None)
13877 } else if self.dialect.supports_space_separated_column_options() {
13878 Ok(Some(ColumnOptions::SpaceSeparated(options)))
13879 } else {
13880 Ok(Some(ColumnOptions::CommaSeparated(options)))
13881 }
13882 }
13883
    /// Parses a parenthesized, comma-separated list of unqualified
    /// identifiers, e.g. `(col1, "col 2")`.
    ///
    /// With `optional == Optional` a missing opening parenthesis yields an
    /// empty list; `allow_empty` permits `()`.
    pub fn parse_parenthesized_column_list(
        &mut self,
        optional: IsOptional,
        allow_empty: bool,
    ) -> Result<Vec<Ident>, ParserError> {
        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| p.parse_identifier())
    }
13893
    /// Parses a parenthesized, comma-separated list of compound (dotted)
    /// identifiers, e.g. `(a.b, c.d.e)`, each returned as an
    /// [`Expr::CompoundIdentifier`].
    pub fn parse_parenthesized_compound_identifier_list(
        &mut self,
        optional: IsOptional,
        allow_empty: bool,
    ) -> Result<Vec<Expr>, ParserError> {
        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
            Ok(Expr::CompoundIdentifier(
                p.parse_period_separated(|p| p.parse_identifier())?,
            ))
        })
    }
13906
    /// Parses a mandatory, non-empty parenthesized list of index columns
    /// (arbitrary index expressions, not just plain identifiers).
    fn parse_parenthesized_index_column_list(&mut self) -> Result<Vec<IndexColumn>, ParserError> {
        self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
            p.parse_create_index_expr()
        })
    }
13914
    /// Parses a parenthesized, comma-separated list of qualified column
    /// names (object names), e.g. `(schema.table.col, col2)`.
    pub fn parse_parenthesized_qualified_column_list(
        &mut self,
        optional: IsOptional,
        allow_empty: bool,
    ) -> Result<Vec<ObjectName>, ParserError> {
        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
            p.parse_object_name(true)
        })
    }
13926
13927 fn parse_parenthesized_column_list_inner<F, T>(
13930 &mut self,
13931 optional: IsOptional,
13932 allow_empty: bool,
13933 mut f: F,
13934 ) -> Result<Vec<T>, ParserError>
13935 where
13936 F: FnMut(&mut Parser) -> Result<T, ParserError>,
13937 {
13938 if self.consume_token(&Token::LParen) {
13939 if allow_empty && self.peek_token_ref().token == Token::RParen {
13940 self.next_token();
13941 Ok(vec![])
13942 } else {
13943 let cols = self.parse_comma_separated(|p| f(p))?;
13944 self.expect_token(&Token::RParen)?;
13945 Ok(cols)
13946 }
13947 } else if optional == Optional {
13948 Ok(vec![])
13949 } else {
13950 self.expected_ref("a list of columns in parentheses", self.peek_token_ref())
13951 }
13952 }
13953
13954 fn parse_table_alias_column_defs(&mut self) -> Result<Vec<TableAliasColumnDef>, ParserError> {
13956 if self.consume_token(&Token::LParen) {
13957 let cols = self.parse_comma_separated(|p| {
13958 let name = p.parse_identifier()?;
13959 let data_type = p.maybe_parse(|p| p.parse_data_type())?;
13960 Ok(TableAliasColumnDef { name, data_type })
13961 })?;
13962 self.expect_token(&Token::RParen)?;
13963 Ok(cols)
13964 } else {
13965 Ok(vec![])
13966 }
13967 }
13968
13969 pub fn parse_precision(&mut self) -> Result<u64, ParserError> {
13971 self.expect_token(&Token::LParen)?;
13972 let n = self.parse_literal_uint()?;
13973 self.expect_token(&Token::RParen)?;
13974 Ok(n)
13975 }
13976
13977 pub fn parse_optional_precision(&mut self) -> Result<Option<u64>, ParserError> {
13979 if self.consume_token(&Token::LParen) {
13980 let n = self.parse_literal_uint()?;
13981 self.expect_token(&Token::RParen)?;
13982 Ok(Some(n))
13983 } else {
13984 Ok(None)
13985 }
13986 }
13987
13988 fn maybe_parse_optional_interval_fields(
13989 &mut self,
13990 ) -> Result<Option<IntervalFields>, ParserError> {
13991 match self.parse_one_of_keywords(&[
13992 Keyword::YEAR,
13994 Keyword::DAY,
13995 Keyword::HOUR,
13996 Keyword::MINUTE,
13997 Keyword::MONTH,
13999 Keyword::SECOND,
14000 ]) {
14001 Some(Keyword::YEAR) => {
14002 if self.peek_keyword(Keyword::TO) {
14003 self.expect_keyword(Keyword::TO)?;
14004 self.expect_keyword(Keyword::MONTH)?;
14005 Ok(Some(IntervalFields::YearToMonth))
14006 } else {
14007 Ok(Some(IntervalFields::Year))
14008 }
14009 }
14010 Some(Keyword::DAY) => {
14011 if self.peek_keyword(Keyword::TO) {
14012 self.expect_keyword(Keyword::TO)?;
14013 match self.expect_one_of_keywords(&[
14014 Keyword::HOUR,
14015 Keyword::MINUTE,
14016 Keyword::SECOND,
14017 ])? {
14018 Keyword::HOUR => Ok(Some(IntervalFields::DayToHour)),
14019 Keyword::MINUTE => Ok(Some(IntervalFields::DayToMinute)),
14020 Keyword::SECOND => Ok(Some(IntervalFields::DayToSecond)),
14021 _ => {
14022 self.prev_token();
14023 self.expected_ref("HOUR, MINUTE, or SECOND", self.peek_token_ref())
14024 }
14025 }
14026 } else {
14027 Ok(Some(IntervalFields::Day))
14028 }
14029 }
14030 Some(Keyword::HOUR) => {
14031 if self.peek_keyword(Keyword::TO) {
14032 self.expect_keyword(Keyword::TO)?;
14033 match self.expect_one_of_keywords(&[Keyword::MINUTE, Keyword::SECOND])? {
14034 Keyword::MINUTE => Ok(Some(IntervalFields::HourToMinute)),
14035 Keyword::SECOND => Ok(Some(IntervalFields::HourToSecond)),
14036 _ => {
14037 self.prev_token();
14038 self.expected_ref("MINUTE or SECOND", self.peek_token_ref())
14039 }
14040 }
14041 } else {
14042 Ok(Some(IntervalFields::Hour))
14043 }
14044 }
14045 Some(Keyword::MINUTE) => {
14046 if self.peek_keyword(Keyword::TO) {
14047 self.expect_keyword(Keyword::TO)?;
14048 self.expect_keyword(Keyword::SECOND)?;
14049 Ok(Some(IntervalFields::MinuteToSecond))
14050 } else {
14051 Ok(Some(IntervalFields::Minute))
14052 }
14053 }
14054 Some(Keyword::MONTH) => Ok(Some(IntervalFields::Month)),
14055 Some(Keyword::SECOND) => Ok(Some(IntervalFields::Second)),
14056 Some(_) => {
14057 self.prev_token();
14058 self.expected_ref(
14059 "YEAR, MONTH, DAY, HOUR, MINUTE, or SECOND",
14060 self.peek_token_ref(),
14061 )
14062 }
14063 None => Ok(None),
14064 }
14065 }
14066
14067 pub fn parse_datetime_64(&mut self) -> Result<(u64, Option<String>), ParserError> {
14075 self.expect_keyword_is(Keyword::DATETIME64)?;
14076 self.expect_token(&Token::LParen)?;
14077 let precision = self.parse_literal_uint()?;
14078 let time_zone = if self.consume_token(&Token::Comma) {
14079 Some(self.parse_literal_string()?)
14080 } else {
14081 None
14082 };
14083 self.expect_token(&Token::RParen)?;
14084 Ok((precision, time_zone))
14085 }
14086
14087 pub fn parse_optional_character_length(
14089 &mut self,
14090 ) -> Result<Option<CharacterLength>, ParserError> {
14091 if self.consume_token(&Token::LParen) {
14092 let character_length = self.parse_character_length()?;
14093 self.expect_token(&Token::RParen)?;
14094 Ok(Some(character_length))
14095 } else {
14096 Ok(None)
14097 }
14098 }
14099
14100 pub fn parse_optional_binary_length(&mut self) -> Result<Option<BinaryLength>, ParserError> {
14102 if self.consume_token(&Token::LParen) {
14103 let binary_length = self.parse_binary_length()?;
14104 self.expect_token(&Token::RParen)?;
14105 Ok(Some(binary_length))
14106 } else {
14107 Ok(None)
14108 }
14109 }
14110
14111 pub fn parse_character_length(&mut self) -> Result<CharacterLength, ParserError> {
14113 if self.parse_keyword(Keyword::MAX) {
14114 return Ok(CharacterLength::Max);
14115 }
14116 let length = self.parse_literal_uint()?;
14117 let unit = if self.parse_keyword(Keyword::CHARACTERS) {
14118 Some(CharLengthUnits::Characters)
14119 } else if self.parse_keyword(Keyword::OCTETS) {
14120 Some(CharLengthUnits::Octets)
14121 } else {
14122 None
14123 };
14124 Ok(CharacterLength::IntegerLength { length, unit })
14125 }
14126
14127 pub fn parse_binary_length(&mut self) -> Result<BinaryLength, ParserError> {
14129 if self.parse_keyword(Keyword::MAX) {
14130 return Ok(BinaryLength::Max);
14131 }
14132 let length = self.parse_literal_uint()?;
14133 Ok(BinaryLength::IntegerLength { length })
14134 }
14135
14136 pub fn parse_optional_precision_scale(
14138 &mut self,
14139 ) -> Result<(Option<u64>, Option<u64>), ParserError> {
14140 if self.consume_token(&Token::LParen) {
14141 let n = self.parse_literal_uint()?;
14142 let scale = if self.consume_token(&Token::Comma) {
14143 Some(self.parse_literal_uint()?)
14144 } else {
14145 None
14146 };
14147 self.expect_token(&Token::RParen)?;
14148 Ok((Some(n), scale))
14149 } else {
14150 Ok((None, None))
14151 }
14152 }
14153
14154 pub fn parse_exact_number_optional_precision_scale(
14156 &mut self,
14157 ) -> Result<ExactNumberInfo, ParserError> {
14158 if self.consume_token(&Token::LParen) {
14159 let precision = self.parse_literal_uint()?;
14160 let scale = if self.consume_token(&Token::Comma) {
14161 Some(self.parse_signed_integer()?)
14162 } else {
14163 None
14164 };
14165
14166 self.expect_token(&Token::RParen)?;
14167
14168 match scale {
14169 None => Ok(ExactNumberInfo::Precision(precision)),
14170 Some(scale) => Ok(ExactNumberInfo::PrecisionAndScale(precision, scale)),
14171 }
14172 } else {
14173 Ok(ExactNumberInfo::None)
14174 }
14175 }
14176
14177 fn parse_signed_integer(&mut self) -> Result<i64, ParserError> {
14179 let is_negative = self.consume_token(&Token::Minus);
14180
14181 if !is_negative {
14182 let _ = self.consume_token(&Token::Plus);
14183 }
14184
14185 let current_token = self.peek_token_ref();
14186 match ¤t_token.token {
14187 Token::Number(s, _) => {
14188 let s = s.clone();
14189 let span_start = current_token.span.start;
14190 self.advance_token();
14191 let value = Self::parse::<i64>(s, span_start)?;
14192 Ok(if is_negative { -value } else { value })
14193 }
14194 _ => self.expected_ref("number", current_token),
14195 }
14196 }
14197
14198 pub fn parse_optional_type_modifiers(&mut self) -> Result<Option<Vec<String>>, ParserError> {
14200 if self.consume_token(&Token::LParen) {
14201 let mut modifiers = Vec::new();
14202 loop {
14203 let next_token = self.next_token();
14204 match next_token.token {
14205 Token::Word(w) => modifiers.push(w.to_string()),
14206 Token::Number(n, _) => modifiers.push(n),
14207 Token::SingleQuotedString(s) => modifiers.push(s),
14208
14209 Token::Comma => {
14210 continue;
14211 }
14212 Token::RParen => {
14213 break;
14214 }
14215 _ => self.expected("type modifiers", next_token)?,
14216 }
14217 }
14218
14219 Ok(Some(modifiers))
14220 } else {
14221 Ok(None)
14222 }
14223 }
14224
14225 fn parse_sub_type<F>(&mut self, parent_type: F) -> Result<DataType, ParserError>
14227 where
14228 F: FnOnce(Box<DataType>) -> DataType,
14229 {
14230 self.expect_token(&Token::LParen)?;
14231 let inside_type = self.parse_data_type()?;
14232 self.expect_token(&Token::RParen)?;
14233 Ok(parent_type(inside_type.into()))
14234 }
14235
    /// Parses a `DELETE` statement and wraps it as a boxed [`SetExpr`] so it
    /// can serve as a query body (e.g. after a `WITH` clause).
    fn parse_delete_setexpr_boxed(
        &mut self,
        delete_token: TokenWithSpan,
    ) -> Result<Box<SetExpr>, ParserError> {
        Ok(Box::new(SetExpr::Delete(self.parse_delete(delete_token)?)))
    }
14245
    /// Parses a `DELETE` statement; the `DELETE` keyword itself has already
    /// been consumed and its token is passed in as `delete_token`.
    ///
    /// Handles both `DELETE FROM <table> ...` and the multi-table
    /// `DELETE <t1>, <t2> FROM ...` form, plus optional `OUTPUT`, `USING`,
    /// `WHERE`, `RETURNING`, `ORDER BY`, and `LIMIT` clauses.
    pub fn parse_delete(&mut self, delete_token: TokenWithSpan) -> Result<Statement, ParserError> {
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        // Without a leading FROM, either the dialect omits FROM entirely or
        // the target tables are listed before FROM (`DELETE t1, t2 FROM ...`).
        let (tables, with_from_keyword) = if !self.parse_keyword(Keyword::FROM) {
            if dialect_of!(self is BigQueryDialect | OracleDialect | GenericDialect) {
                (vec![], false)
            } else {
                let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
                self.expect_keyword_is(Keyword::FROM)?;
                (tables, true)
            }
        } else {
            (vec![], true)
        };

        let from = self.parse_comma_separated(Parser::parse_table_and_joins)?;

        let output = self.maybe_parse_output_clause()?;

        let using = if self.parse_keyword(Keyword::USING) {
            Some(self.parse_comma_separated(Parser::parse_table_and_joins)?)
        } else {
            None
        };
        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };
        let returning = if self.parse_keyword(Keyword::RETURNING) {
            Some(self.parse_comma_separated(Parser::parse_select_item)?)
        } else {
            None
        };
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };
        let limit = if self.parse_keyword(Keyword::LIMIT) {
            self.parse_limit()?
        } else {
            None
        };

        Ok(Statement::Delete(Delete {
            delete_token: delete_token.into(),
            optimizer_hints,
            tables,
            // The FROM keyword's presence is preserved for round-tripping.
            from: if with_from_keyword {
                FromTable::WithFromKeyword(from)
            } else {
                FromTable::WithoutKeyword(from)
            },
            using,
            selection,
            returning,
            output,
            order_by,
            limit,
        }))
    }
14310
14311 pub fn parse_kill(&mut self) -> Result<Statement, ParserError> {
14314 let modifier_keyword =
14315 self.parse_one_of_keywords(&[Keyword::CONNECTION, Keyword::QUERY, Keyword::MUTATION]);
14316
14317 let id = self.parse_literal_uint()?;
14318
14319 let modifier = match modifier_keyword {
14320 Some(Keyword::CONNECTION) => Some(KillType::Connection),
14321 Some(Keyword::QUERY) => Some(KillType::Query),
14322 Some(Keyword::MUTATION) => {
14323 if dialect_of!(self is ClickHouseDialect | GenericDialect) {
14324 Some(KillType::Mutation)
14325 } else {
14326 self.expected_ref(
14327 "Unsupported type for KILL, allowed: CONNECTION | QUERY",
14328 self.peek_token_ref(),
14329 )?
14330 }
14331 }
14332 _ => None,
14333 };
14334
14335 Ok(Statement::Kill { modifier, id })
14336 }
14337
    /// Parses an `EXPLAIN`/`DESCRIBE`-family statement; the alias keyword has
    /// already been consumed and is passed in as `describe_alias`.
    ///
    /// If the remaining input parses as a statement, a [`Statement::Explain`]
    /// is produced; otherwise the input is treated as a table description and
    /// a [`Statement::ExplainTable`] is returned.
    pub fn parse_explain(
        &mut self,
        describe_alias: DescribeAlias,
    ) -> Result<Statement, ParserError> {
        let mut analyze = false;
        let mut verbose = false;
        let mut query_plan = false;
        let mut estimate = false;
        let mut format = None;
        let mut options = None;

        // Parenthesized utility options, e.g. `EXPLAIN (ANALYZE, FORMAT ...)`,
        // take precedence over the keyword-based flags below.
        if describe_alias == DescribeAlias::Explain
            && self.dialect.supports_explain_with_utility_options()
            && self.peek_token_ref().token == Token::LParen
        {
            options = Some(self.parse_utility_options()?)
        } else if self.parse_keywords(&[Keyword::QUERY, Keyword::PLAN]) {
            query_plan = true;
        } else if self.parse_keyword(Keyword::ESTIMATE) {
            estimate = true;
        } else {
            analyze = self.parse_keyword(Keyword::ANALYZE);
            verbose = self.parse_keyword(Keyword::VERBOSE);
            if self.parse_keyword(Keyword::FORMAT) {
                format = Some(self.parse_analyze_format_kind()?);
            }
        }

        match self.maybe_parse(|parser| parser.parse_statement())? {
            // A nested EXPLAIN/DESCRIBE is rejected explicitly.
            Some(Statement::Explain { .. }) | Some(Statement::ExplainTable { .. }) => Err(
                ParserError::ParserError("Explain must be root of the plan".to_string()),
            ),
            Some(statement) => Ok(Statement::Explain {
                describe_alias,
                analyze,
                verbose,
                query_plan,
                estimate,
                statement: Box::new(statement),
                format,
                options,
            }),
            _ => {
                // Not a statement: fall back to describing a table, i.e.
                // `DESCRIBE [EXTENDED | FORMATTED] [TABLE] <name>`.
                let hive_format =
                    match self.parse_one_of_keywords(&[Keyword::EXTENDED, Keyword::FORMATTED]) {
                        Some(Keyword::EXTENDED) => Some(HiveDescribeFormat::Extended),
                        Some(Keyword::FORMATTED) => Some(HiveDescribeFormat::Formatted),
                        _ => None,
                    };

                let has_table_keyword = if self.dialect.describe_requires_table_keyword() {
                    self.parse_keyword(Keyword::TABLE)
                } else {
                    false
                };

                let table_name = self.parse_object_name(false)?;
                Ok(Statement::ExplainTable {
                    describe_alias,
                    hive_format,
                    has_table_keyword,
                    table_name,
                })
            }
        }
    }
14408
    /// Parses a full query expression: an optional `WITH` clause followed by
    /// a query body, and then optional `ORDER BY`, `LIMIT`/`OFFSET`, dialect
    /// settings, `FETCH`, locking clauses, `FORMAT`, and pipe operators.
    ///
    /// A `WITH` clause may also prefix `INSERT`/`UPDATE`/`DELETE`/`MERGE`,
    /// in which case the body is that statement and all other query-level
    /// clauses are left empty.
    #[cfg_attr(feature = "recursive-protection", recursive::recursive)]
    pub fn parse_query(&mut self) -> Result<Box<Query>, ParserError> {
        // Bound recursion depth to avoid stack overflow on deep nesting.
        let _guard = self.recursion_counter.try_decrease()?;
        let with = if self.parse_keyword(Keyword::WITH) {
            let with_token = self.get_current_token();
            Some(With {
                with_token: with_token.clone().into(),
                recursive: self.parse_keyword(Keyword::RECURSIVE),
                cte_tables: self.parse_comma_separated(Parser::parse_cte)?,
            })
        } else {
            None
        };
        if self.parse_keyword(Keyword::INSERT) {
            Ok(Query {
                with,
                body: self.parse_insert_setexpr_boxed(self.get_current_token().clone())?,
                order_by: None,
                limit_clause: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else if self.parse_keyword(Keyword::UPDATE) {
            Ok(Query {
                with,
                body: self.parse_update_setexpr_boxed(self.get_current_token().clone())?,
                order_by: None,
                limit_clause: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else if self.parse_keyword(Keyword::DELETE) {
            Ok(Query {
                with,
                body: self.parse_delete_setexpr_boxed(self.get_current_token().clone())?,
                limit_clause: None,
                order_by: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else if self.parse_keyword(Keyword::MERGE) {
            Ok(Query {
                with,
                body: self.parse_merge_setexpr_boxed(self.get_current_token().clone())?,
                limit_clause: None,
                order_by: None,
                fetch: None,
                locks: vec![],
                for_clause: None,
                settings: None,
                format_clause: None,
                pipe_operators: vec![],
            }
            .into())
        } else {
            let body = self.parse_query_body(self.dialect.prec_unknown())?;

            let order_by = self.parse_optional_order_by()?;

            let limit_clause = self.parse_optional_limit_clause()?;

            let settings = self.parse_settings()?;

            let fetch = if self.parse_keyword(Keyword::FETCH) {
                Some(self.parse_fetch()?)
            } else {
                None
            };

            // `FOR` introduces either a single for-clause or one or more
            // locking clauses (`FOR UPDATE`, `FOR SHARE`, ...).
            let mut for_clause = None;
            let mut locks = Vec::new();
            while self.parse_keyword(Keyword::FOR) {
                if let Some(parsed_for_clause) = self.parse_for_clause()? {
                    for_clause = Some(parsed_for_clause);
                    break;
                } else {
                    locks.push(self.parse_lock()?);
                }
            }
            // Trailing `FORMAT <ident>` / `FORMAT NULL` for dialects that
            // support it.
            let format_clause =
                if self.dialect.supports_select_format() && self.parse_keyword(Keyword::FORMAT) {
                    if self.parse_keyword(Keyword::NULL) {
                        Some(FormatClause::Null)
                    } else {
                        let ident = self.parse_identifier()?;
                        Some(FormatClause::Identifier(ident))
                    }
                } else {
                    None
                };

            let pipe_operators = if self.dialect.supports_pipe_operator() {
                self.parse_pipe_operators()?
            } else {
                Vec::new()
            };

            Ok(Query {
                with,
                body,
                order_by,
                limit_clause,
                fetch,
                locks,
                for_clause,
                settings,
                format_clause,
                pipe_operators,
            }
            .into())
        }
    }
14540
14541 fn parse_pipe_operators(&mut self) -> Result<Vec<PipeOperator>, ParserError> {
14542 let mut pipe_operators = Vec::new();
14543
14544 while self.consume_token(&Token::VerticalBarRightAngleBracket) {
14545 let kw = self.expect_one_of_keywords(&[
14546 Keyword::SELECT,
14547 Keyword::EXTEND,
14548 Keyword::SET,
14549 Keyword::DROP,
14550 Keyword::AS,
14551 Keyword::WHERE,
14552 Keyword::LIMIT,
14553 Keyword::AGGREGATE,
14554 Keyword::ORDER,
14555 Keyword::TABLESAMPLE,
14556 Keyword::RENAME,
14557 Keyword::UNION,
14558 Keyword::INTERSECT,
14559 Keyword::EXCEPT,
14560 Keyword::CALL,
14561 Keyword::PIVOT,
14562 Keyword::UNPIVOT,
14563 Keyword::JOIN,
14564 Keyword::INNER,
14565 Keyword::LEFT,
14566 Keyword::RIGHT,
14567 Keyword::FULL,
14568 Keyword::CROSS,
14569 ])?;
14570 match kw {
14571 Keyword::SELECT => {
14572 let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
14573 pipe_operators.push(PipeOperator::Select { exprs })
14574 }
14575 Keyword::EXTEND => {
14576 let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
14577 pipe_operators.push(PipeOperator::Extend { exprs })
14578 }
14579 Keyword::SET => {
14580 let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
14581 pipe_operators.push(PipeOperator::Set { assignments })
14582 }
14583 Keyword::DROP => {
14584 let columns = self.parse_identifiers()?;
14585 pipe_operators.push(PipeOperator::Drop { columns })
14586 }
14587 Keyword::AS => {
14588 let alias = self.parse_identifier()?;
14589 pipe_operators.push(PipeOperator::As { alias })
14590 }
14591 Keyword::WHERE => {
14592 let expr = self.parse_expr()?;
14593 pipe_operators.push(PipeOperator::Where { expr })
14594 }
14595 Keyword::LIMIT => {
14596 let expr = self.parse_expr()?;
14597 let offset = if self.parse_keyword(Keyword::OFFSET) {
14598 Some(self.parse_expr()?)
14599 } else {
14600 None
14601 };
14602 pipe_operators.push(PipeOperator::Limit { expr, offset })
14603 }
14604 Keyword::AGGREGATE => {
14605 let full_table_exprs = if self.peek_keyword(Keyword::GROUP) {
14606 vec![]
14607 } else {
14608 self.parse_comma_separated(|parser| {
14609 parser.parse_expr_with_alias_and_order_by()
14610 })?
14611 };
14612
14613 let group_by_expr = if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
14614 self.parse_comma_separated(|parser| {
14615 parser.parse_expr_with_alias_and_order_by()
14616 })?
14617 } else {
14618 vec![]
14619 };
14620
14621 pipe_operators.push(PipeOperator::Aggregate {
14622 full_table_exprs,
14623 group_by_expr,
14624 })
14625 }
14626 Keyword::ORDER => {
14627 self.expect_one_of_keywords(&[Keyword::BY])?;
14628 let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
14629 pipe_operators.push(PipeOperator::OrderBy { exprs })
14630 }
14631 Keyword::TABLESAMPLE => {
14632 let sample = self.parse_table_sample(TableSampleModifier::TableSample)?;
14633 pipe_operators.push(PipeOperator::TableSample { sample });
14634 }
14635 Keyword::RENAME => {
14636 let mappings =
14637 self.parse_comma_separated(Parser::parse_identifier_with_optional_alias)?;
14638 pipe_operators.push(PipeOperator::Rename { mappings });
14639 }
14640 Keyword::UNION => {
14641 let set_quantifier = self.parse_set_quantifier(&Some(SetOperator::Union));
14642 let queries = self.parse_pipe_operator_queries()?;
14643 pipe_operators.push(PipeOperator::Union {
14644 set_quantifier,
14645 queries,
14646 });
14647 }
14648 Keyword::INTERSECT => {
14649 let set_quantifier =
14650 self.parse_distinct_required_set_quantifier("INTERSECT")?;
14651 let queries = self.parse_pipe_operator_queries()?;
14652 pipe_operators.push(PipeOperator::Intersect {
14653 set_quantifier,
14654 queries,
14655 });
14656 }
14657 Keyword::EXCEPT => {
14658 let set_quantifier = self.parse_distinct_required_set_quantifier("EXCEPT")?;
14659 let queries = self.parse_pipe_operator_queries()?;
14660 pipe_operators.push(PipeOperator::Except {
14661 set_quantifier,
14662 queries,
14663 });
14664 }
14665 Keyword::CALL => {
14666 let function_name = self.parse_object_name(false)?;
14667 let function_expr = self.parse_function(function_name)?;
14668 if let Expr::Function(function) = function_expr {
14669 let alias = self.parse_identifier_optional_alias()?;
14670 pipe_operators.push(PipeOperator::Call { function, alias });
14671 } else {
14672 return Err(ParserError::ParserError(
14673 "Expected function call after CALL".to_string(),
14674 ));
14675 }
14676 }
14677 Keyword::PIVOT => {
14678 self.expect_token(&Token::LParen)?;
14679 let aggregate_functions =
14680 self.parse_comma_separated(Self::parse_pivot_aggregate_function)?;
14681 self.expect_keyword_is(Keyword::FOR)?;
14682 let value_column = self.parse_period_separated(|p| p.parse_identifier())?;
14683 self.expect_keyword_is(Keyword::IN)?;
14684
14685 self.expect_token(&Token::LParen)?;
14686 let value_source = if self.parse_keyword(Keyword::ANY) {
14687 let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
14688 self.parse_comma_separated(Parser::parse_order_by_expr)?
14689 } else {
14690 vec![]
14691 };
14692 PivotValueSource::Any(order_by)
14693 } else if self.peek_sub_query() {
14694 PivotValueSource::Subquery(self.parse_query()?)
14695 } else {
14696 PivotValueSource::List(
14697 self.parse_comma_separated(Self::parse_expr_with_alias)?,
14698 )
14699 };
14700 self.expect_token(&Token::RParen)?;
14701 self.expect_token(&Token::RParen)?;
14702
14703 let alias = self.parse_identifier_optional_alias()?;
14704
14705 pipe_operators.push(PipeOperator::Pivot {
14706 aggregate_functions,
14707 value_column,
14708 value_source,
14709 alias,
14710 });
14711 }
14712 Keyword::UNPIVOT => {
14713 self.expect_token(&Token::LParen)?;
14714 let value_column = self.parse_identifier()?;
14715 self.expect_keyword(Keyword::FOR)?;
14716 let name_column = self.parse_identifier()?;
14717 self.expect_keyword(Keyword::IN)?;
14718
14719 self.expect_token(&Token::LParen)?;
14720 let unpivot_columns = self.parse_comma_separated(Parser::parse_identifier)?;
14721 self.expect_token(&Token::RParen)?;
14722
14723 self.expect_token(&Token::RParen)?;
14724
14725 let alias = self.parse_identifier_optional_alias()?;
14726
14727 pipe_operators.push(PipeOperator::Unpivot {
14728 value_column,
14729 name_column,
14730 unpivot_columns,
14731 alias,
14732 });
14733 }
14734 Keyword::JOIN
14735 | Keyword::INNER
14736 | Keyword::LEFT
14737 | Keyword::RIGHT
14738 | Keyword::FULL
14739 | Keyword::CROSS => {
14740 self.prev_token();
14741 let mut joins = self.parse_joins()?;
14742 if joins.len() != 1 {
14743 return Err(ParserError::ParserError(
14744 "Join pipe operator must have a single join".to_string(),
14745 ));
14746 }
14747 let join = joins.swap_remove(0);
14748 pipe_operators.push(PipeOperator::Join(join))
14749 }
14750 unhandled => {
14751 return Err(ParserError::ParserError(format!(
14752 "`expect_one_of_keywords` further up allowed unhandled keyword: {unhandled:?}"
14753 )))
14754 }
14755 }
14756 }
14757 Ok(pipe_operators)
14758 }
14759
14760 fn parse_settings(&mut self) -> Result<Option<Vec<Setting>>, ParserError> {
14761 let settings = if self.dialect.supports_settings() && self.parse_keyword(Keyword::SETTINGS)
14762 {
14763 let key_values = self.parse_comma_separated(|p| {
14764 let key = p.parse_identifier()?;
14765 p.expect_token(&Token::Eq)?;
14766 let value = p.parse_expr()?;
14767 Ok(Setting { key, value })
14768 })?;
14769 Some(key_values)
14770 } else {
14771 None
14772 };
14773 Ok(settings)
14774 }
14775
14776 pub fn parse_for_clause(&mut self) -> Result<Option<ForClause>, ParserError> {
14778 if self.parse_keyword(Keyword::XML) {
14779 Ok(Some(self.parse_for_xml()?))
14780 } else if self.parse_keyword(Keyword::JSON) {
14781 Ok(Some(self.parse_for_json()?))
14782 } else if self.parse_keyword(Keyword::BROWSE) {
14783 Ok(Some(ForClause::Browse))
14784 } else {
14785 Ok(None)
14786 }
14787 }
14788
14789 pub fn parse_for_xml(&mut self) -> Result<ForClause, ParserError> {
14791 let for_xml = if self.parse_keyword(Keyword::RAW) {
14792 let mut element_name = None;
14793 if self.peek_token_ref().token == Token::LParen {
14794 self.expect_token(&Token::LParen)?;
14795 element_name = Some(self.parse_literal_string()?);
14796 self.expect_token(&Token::RParen)?;
14797 }
14798 ForXml::Raw(element_name)
14799 } else if self.parse_keyword(Keyword::AUTO) {
14800 ForXml::Auto
14801 } else if self.parse_keyword(Keyword::EXPLICIT) {
14802 ForXml::Explicit
14803 } else if self.parse_keyword(Keyword::PATH) {
14804 let mut element_name = None;
14805 if self.peek_token_ref().token == Token::LParen {
14806 self.expect_token(&Token::LParen)?;
14807 element_name = Some(self.parse_literal_string()?);
14808 self.expect_token(&Token::RParen)?;
14809 }
14810 ForXml::Path(element_name)
14811 } else {
14812 return Err(ParserError::ParserError(
14813 "Expected FOR XML [RAW | AUTO | EXPLICIT | PATH ]".to_string(),
14814 ));
14815 };
14816 let mut elements = false;
14817 let mut binary_base64 = false;
14818 let mut root = None;
14819 let mut r#type = false;
14820 while self.peek_token_ref().token == Token::Comma {
14821 self.next_token();
14822 if self.parse_keyword(Keyword::ELEMENTS) {
14823 elements = true;
14824 } else if self.parse_keyword(Keyword::BINARY) {
14825 self.expect_keyword_is(Keyword::BASE64)?;
14826 binary_base64 = true;
14827 } else if self.parse_keyword(Keyword::ROOT) {
14828 self.expect_token(&Token::LParen)?;
14829 root = Some(self.parse_literal_string()?);
14830 self.expect_token(&Token::RParen)?;
14831 } else if self.parse_keyword(Keyword::TYPE) {
14832 r#type = true;
14833 }
14834 }
14835 Ok(ForClause::Xml {
14836 for_xml,
14837 elements,
14838 binary_base64,
14839 root,
14840 r#type,
14841 })
14842 }
14843
14844 pub fn parse_for_json(&mut self) -> Result<ForClause, ParserError> {
14846 let for_json = if self.parse_keyword(Keyword::AUTO) {
14847 ForJson::Auto
14848 } else if self.parse_keyword(Keyword::PATH) {
14849 ForJson::Path
14850 } else {
14851 return Err(ParserError::ParserError(
14852 "Expected FOR JSON [AUTO | PATH ]".to_string(),
14853 ));
14854 };
14855 let mut root = None;
14856 let mut include_null_values = false;
14857 let mut without_array_wrapper = false;
14858 while self.peek_token_ref().token == Token::Comma {
14859 self.next_token();
14860 if self.parse_keyword(Keyword::ROOT) {
14861 self.expect_token(&Token::LParen)?;
14862 root = Some(self.parse_literal_string()?);
14863 self.expect_token(&Token::RParen)?;
14864 } else if self.parse_keyword(Keyword::INCLUDE_NULL_VALUES) {
14865 include_null_values = true;
14866 } else if self.parse_keyword(Keyword::WITHOUT_ARRAY_WRAPPER) {
14867 without_array_wrapper = true;
14868 }
14869 }
14870 Ok(ForClause::Json {
14871 for_json,
14872 root,
14873 include_null_values,
14874 without_array_wrapper,
14875 })
14876 }
14877
    /// Parses a single CTE inside a `WITH` clause:
    /// `name [(col1, col2, ...)] AS (query)`, plus dialect extensions —
    /// CTEs with an optional `AS` keyword, Postgres `[NOT] MATERIALIZED`,
    /// and a trailing `FROM <ident>`.
    pub fn parse_cte(&mut self) -> Result<Cte, ParserError> {
        let name = self.parse_identifier()?;

        let as_optional = self.dialect.supports_cte_without_as();

        // For dialects that allow omitting AS, speculatively try the short
        // form `name (query)` first; maybe_parse backtracks on failure so we
        // fall through to the regular `name [cols] AS (query)` path below.
        if as_optional && !self.peek_keyword(Keyword::AS) {
            if let Some((query, closing_paren_token)) = self.maybe_parse(|p| {
                p.expect_token(&Token::LParen)?;
                let query = p.parse_query()?;
                let closing_paren_token = p.expect_token(&Token::RParen)?;
                Ok((query, closing_paren_token))
            })? {
                let mut cte = Cte {
                    alias: TableAlias {
                        explicit: false,
                        name,
                        columns: vec![],
                    },
                    query,
                    from: None,
                    materialized: None,
                    closing_paren_token: closing_paren_token.into(),
                };
                // Optional trailing `FROM <ident>` on the short form.
                if self.parse_keyword(Keyword::FROM) {
                    cte.from = Some(self.parse_identifier()?);
                }
                return Ok(cte);
            }
        }

        // Either `AS` follows directly (no column list), or a parenthesized
        // column list comes first; in the latter case AS is required unless
        // the dialect makes it optional.
        let columns = if self.parse_keyword(Keyword::AS) {
            vec![]
        } else {
            let columns = self.parse_table_alias_column_defs()?;
            if as_optional {
                let _ = self.parse_keyword(Keyword::AS);
            } else {
                self.expect_keyword_is(Keyword::AS)?;
            }
            columns
        };

        // Postgres-only `AS [NOT] MATERIALIZED` hint between AS and the body.
        let mut is_materialized = None;
        if dialect_of!(self is PostgreSqlDialect) {
            if self.parse_keyword(Keyword::MATERIALIZED) {
                is_materialized = Some(CteAsMaterialized::Materialized);
            } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
                is_materialized = Some(CteAsMaterialized::NotMaterialized);
            }
        }

        self.expect_token(&Token::LParen)?;
        let query = self.parse_query()?;
        let closing_paren_token = self.expect_token(&Token::RParen)?;

        let mut cte = Cte {
            alias: TableAlias {
                explicit: false,
                name,
                columns,
            },
            query,
            from: None,
            materialized: is_materialized,
            closing_paren_token: closing_paren_token.into(),
        };
        // Trailing `FROM <ident>` only for dialects with FROM-first INSERT
        // support.
        if self.dialect.supports_from_first_insert() && self.parse_keyword(Keyword::FROM) {
            cte.from = Some(self.parse_identifier()?);
        }
        Ok(cte)
    }
14952
14953 pub fn parse_query_body(&mut self, precedence: u8) -> Result<Box<SetExpr>, ParserError> {
14962 let expr = if self.peek_keyword(Keyword::SELECT)
14965 || (self.peek_keyword(Keyword::FROM) && self.dialect.supports_from_first_select())
14966 {
14967 SetExpr::Select(self.parse_select().map(Box::new)?)
14968 } else if self.consume_token(&Token::LParen) {
14969 let subquery = self.parse_query()?;
14971 self.expect_token(&Token::RParen)?;
14972 SetExpr::Query(subquery)
14973 } else if self.parse_keyword(Keyword::VALUES) {
14974 let is_mysql = dialect_of!(self is MySqlDialect);
14975 SetExpr::Values(self.parse_values(is_mysql, false)?)
14976 } else if self.parse_keyword(Keyword::VALUE) {
14977 let is_mysql = dialect_of!(self is MySqlDialect);
14978 SetExpr::Values(self.parse_values(is_mysql, true)?)
14979 } else if self.parse_keyword(Keyword::TABLE) {
14980 SetExpr::Table(Box::new(self.parse_as_table()?))
14981 } else {
14982 return self.expected_ref(
14983 "SELECT, VALUES, or a subquery in the query body",
14984 self.peek_token_ref(),
14985 );
14986 };
14987
14988 self.parse_remaining_set_exprs(expr, precedence)
14989 }
14990
14991 fn parse_remaining_set_exprs(
14995 &mut self,
14996 mut expr: SetExpr,
14997 precedence: u8,
14998 ) -> Result<Box<SetExpr>, ParserError> {
14999 loop {
15000 let op = self.parse_set_operator(&self.peek_token().token);
15002 let next_precedence = match op {
15003 Some(SetOperator::Union) | Some(SetOperator::Except) | Some(SetOperator::Minus) => {
15005 10
15006 }
15007 Some(SetOperator::Intersect) => 20,
15009 None => break,
15011 };
15012 if precedence >= next_precedence {
15013 break;
15014 }
15015 self.next_token(); let set_quantifier = self.parse_set_quantifier(&op);
15017 expr = SetExpr::SetOperation {
15018 left: Box::new(expr),
15019 op: op.unwrap(),
15020 set_quantifier,
15021 right: self.parse_query_body(next_precedence)?,
15022 };
15023 }
15024
15025 Ok(expr.into())
15026 }
15027
15028 pub fn parse_set_operator(&mut self, token: &Token) -> Option<SetOperator> {
15030 match token {
15031 Token::Word(w) if w.keyword == Keyword::UNION => Some(SetOperator::Union),
15032 Token::Word(w) if w.keyword == Keyword::EXCEPT => Some(SetOperator::Except),
15033 Token::Word(w) if w.keyword == Keyword::INTERSECT => Some(SetOperator::Intersect),
15034 Token::Word(w) if w.keyword == Keyword::MINUS => Some(SetOperator::Minus),
15035 _ => None,
15036 }
15037 }
15038
15039 pub fn parse_set_quantifier(&mut self, op: &Option<SetOperator>) -> SetQuantifier {
15041 match op {
15042 Some(
15043 SetOperator::Except
15044 | SetOperator::Intersect
15045 | SetOperator::Union
15046 | SetOperator::Minus,
15047 ) => {
15048 if self.parse_keywords(&[Keyword::DISTINCT, Keyword::BY, Keyword::NAME]) {
15049 SetQuantifier::DistinctByName
15050 } else if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
15051 SetQuantifier::ByName
15052 } else if self.parse_keyword(Keyword::ALL) {
15053 if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
15054 SetQuantifier::AllByName
15055 } else {
15056 SetQuantifier::All
15057 }
15058 } else if self.parse_keyword(Keyword::DISTINCT) {
15059 SetQuantifier::Distinct
15060 } else {
15061 SetQuantifier::None
15062 }
15063 }
15064 _ => SetQuantifier::None,
15065 }
15066 }
15067
    /// Parses a `SELECT` statement body (without any leading `WITH` clause or
    /// trailing `ORDER BY`/`LIMIT`, which the caller handles). Also supports
    /// FROM-first dialects, where the statement may begin with `FROM` and the
    /// `SELECT` list may follow — or be absent entirely.
    ///
    /// The clauses are consumed in strict source order; do not reorder the
    /// parsing steps below.
    pub fn parse_select(&mut self) -> Result<Select, ParserError> {
        let mut from_first = None;

        // FROM-first form: `FROM t [SELECT ...]`. If no SELECT follows, the
        // statement is just a bare FROM and we return immediately with an
        // otherwise-empty Select.
        if self.dialect.supports_from_first_select() && self.peek_keyword(Keyword::FROM) {
            let from_token = self.expect_keyword(Keyword::FROM)?;
            let from = self.parse_table_with_joins()?;
            if !self.peek_keyword(Keyword::SELECT) {
                return Ok(Select {
                    // No SELECT keyword exists; attach the FROM token instead.
                    select_token: AttachedToken(from_token),
                    optimizer_hints: vec![],
                    distinct: None,
                    select_modifiers: None,
                    top: None,
                    top_before_distinct: false,
                    projection: vec![],
                    exclude: None,
                    into: None,
                    from,
                    lateral_views: vec![],
                    prewhere: None,
                    selection: None,
                    group_by: GroupByExpr::Expressions(vec![], vec![]),
                    cluster_by: vec![],
                    distribute_by: vec![],
                    sort_by: vec![],
                    having: None,
                    named_window: vec![],
                    window_before_qualify: false,
                    qualify: None,
                    value_table_mode: None,
                    connect_by: vec![],
                    flavor: SelectFlavor::FromFirstNoSelect,
                });
            }
            from_first = Some(from);
        }

        let select_token = self.expect_keyword(Keyword::SELECT)?;
        // Comment-embedded optimizer hints (e.g. `/*+ ... */`) directly after
        // SELECT, for dialects that support them.
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        // BigQuery `AS VALUE` / `AS STRUCT` value-table modes.
        let value_table_mode = self.parse_value_table_mode()?;

        // MySQL-style select modifiers may themselves contain ALL/DISTINCT.
        let (select_modifiers, distinct_select_modifier) =
            if self.dialect.supports_select_modifiers() {
                self.parse_select_modifiers()?
            } else {
                (None, None)
            };

        // Some dialects put TOP before ALL/DISTINCT, others after; handle both
        // placements and record which one was used.
        let mut top_before_distinct = false;
        let mut top = None;
        if self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
            top = Some(self.parse_top()?);
            top_before_distinct = true;
        }

        // A distinct quantifier found among the select modifiers wins;
        // otherwise parse ALL/DISTINCT here.
        let distinct = if distinct_select_modifier.is_some() {
            distinct_select_modifier
        } else {
            self.parse_all_or_distinct()?
        };

        if !self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
            top = Some(self.parse_top()?);
        }

        // Some dialects allow `SELECT FROM ...` with no projection list.
        let projection =
            if self.dialect.supports_empty_projections() && self.peek_keyword(Keyword::FROM) {
                vec![]
            } else {
                self.parse_projection()?
            };

        let exclude = if self.dialect.supports_select_exclude() {
            self.parse_optional_select_item_exclude()?
        } else {
            None
        };

        // `SELECT ... INTO target` (before FROM).
        let into = if self.parse_keyword(Keyword::INTO) {
            Some(self.parse_select_into()?)
        } else {
            None
        };

        // Use the FROM parsed up front (FROM-first form) if present; otherwise
        // parse an optional FROM clause here.
        let (from, from_first) = if let Some(from) = from_first.take() {
            (from, true)
        } else if self.parse_keyword(Keyword::FROM) {
            (self.parse_table_with_joins()?, false)
        } else {
            (vec![], false)
        };

        // Hive-style `LATERAL VIEW [OUTER] expr name [AS col, ...]`, possibly
        // repeated.
        let mut lateral_views = vec![];
        loop {
            if self.parse_keywords(&[Keyword::LATERAL, Keyword::VIEW]) {
                let outer = self.parse_keyword(Keyword::OUTER);
                let lateral_view = self.parse_expr()?;
                let lateral_view_name = self.parse_object_name(false)?;
                // Aliases stop at the keywords that may follow a lateral view.
                let lateral_col_alias = self
                    .parse_comma_separated(|parser| {
                        parser.parse_optional_alias(&[
                            Keyword::WHERE,
                            Keyword::GROUP,
                            Keyword::CLUSTER,
                            Keyword::HAVING,
                            Keyword::LATERAL,
                        ])
                    })?
                    .into_iter()
                    .flatten()
                    .collect();

                lateral_views.push(LateralView {
                    lateral_view,
                    lateral_view_name,
                    lateral_col_alias,
                    outer,
                });
            } else {
                break;
            }
        }

        // ClickHouse-style PREWHERE, before WHERE.
        let prewhere = if self.dialect.supports_prewhere() && self.parse_keyword(Keyword::PREWHERE)
        {
            Some(self.parse_expr()?)
        } else {
            None
        };

        let selection = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        // Hierarchical-query clauses (START WITH / CONNECT BY).
        let connect_by = self.maybe_parse_connect_by()?;

        let group_by = self
            .parse_optional_group_by()?
            .unwrap_or_else(|| GroupByExpr::Expressions(vec![], vec![]));

        let cluster_by = if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let distribute_by = if self.parse_keywords(&[Keyword::DISTRIBUTE, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let sort_by = if self.parse_keywords(&[Keyword::SORT, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        let having = if self.parse_keyword(Keyword::HAVING) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        // WINDOW and QUALIFY may appear in either order; record which came
        // first so the statement can be round-tripped faithfully.
        let (named_windows, qualify, window_before_qualify) = if self.parse_keyword(Keyword::WINDOW)
        {
            let named_windows = self.parse_comma_separated(Parser::parse_named_window)?;
            if self.parse_keyword(Keyword::QUALIFY) {
                (named_windows, Some(self.parse_expr()?), true)
            } else {
                (named_windows, None, true)
            }
        } else if self.parse_keyword(Keyword::QUALIFY) {
            let qualify = Some(self.parse_expr()?);
            if self.parse_keyword(Keyword::WINDOW) {
                (
                    self.parse_comma_separated(Parser::parse_named_window)?,
                    qualify,
                    false,
                )
            } else {
                (Default::default(), qualify, false)
            }
        } else {
            Default::default()
        };

        Ok(Select {
            select_token: AttachedToken(select_token),
            optimizer_hints,
            distinct,
            select_modifiers,
            top,
            top_before_distinct,
            projection,
            exclude,
            into,
            from,
            lateral_views,
            prewhere,
            selection,
            group_by,
            cluster_by,
            distribute_by,
            sort_by,
            having,
            named_window: named_windows,
            window_before_qualify,
            qualify,
            value_table_mode,
            connect_by,
            flavor: if from_first {
                SelectFlavor::FromFirst
            } else {
                SelectFlavor::Standard
            },
        })
    }
15295
15296 fn maybe_parse_optimizer_hints(&mut self) -> Result<Vec<OptimizerHint>, ParserError> {
15305 let supports_hints = self.dialect.supports_comment_optimizer_hint();
15306 if !supports_hints {
15307 return Ok(vec![]);
15308 }
15309 let mut hints = vec![];
15310 loop {
15311 let t = self.peek_nth_token_no_skip_ref(0);
15312 let Token::Whitespace(ws) = &t.token else {
15313 break;
15314 };
15315 match ws {
15316 Whitespace::SingleLineComment { comment, prefix } => {
15317 if let Some((hint_prefix, text)) = Self::extract_hint_prefix_and_text(comment) {
15318 hints.push(OptimizerHint {
15319 prefix: hint_prefix,
15320 text,
15321 style: OptimizerHintStyle::SingleLine {
15322 prefix: prefix.clone(),
15323 },
15324 });
15325 }
15326 self.next_token_no_skip();
15327 }
15328 Whitespace::MultiLineComment(comment) => {
15329 if let Some((hint_prefix, text)) = Self::extract_hint_prefix_and_text(comment) {
15330 hints.push(OptimizerHint {
15331 prefix: hint_prefix,
15332 text,
15333 style: OptimizerHintStyle::MultiLine,
15334 });
15335 }
15336 self.next_token_no_skip();
15337 }
15338 Whitespace::Space | Whitespace::Tab | Whitespace::Newline => {
15339 self.next_token_no_skip();
15340 }
15341 }
15342 }
15343 Ok(hints)
15344 }
15345
15346 fn extract_hint_prefix_and_text(comment: &str) -> Option<(String, String)> {
15349 let (before_plus, text) = comment.split_once('+')?;
15350 if before_plus.chars().all(|c| c.is_ascii_alphanumeric()) {
15351 Some((before_plus.to_string(), text.to_string()))
15352 } else {
15353 None
15354 }
15355 }
15356
    /// Parses MySQL-style SELECT modifiers (`HIGH_PRIORITY`, `STRAIGHT_JOIN`,
    /// `SQL_*`, ...), which may be freely interleaved with
    /// `ALL`/`DISTINCT`/`DISTINCTROW`. Returns the collected modifiers
    /// (`None` when none were set) together with any distinct quantifier
    /// found among them.
    fn parse_select_modifiers(
        &mut self,
    ) -> Result<(Option<SelectModifiers>, Option<Distinct>), ParserError> {
        let mut modifiers = SelectModifiers::default();
        let mut distinct = None;

        let keywords = &[
            Keyword::ALL,
            Keyword::DISTINCT,
            Keyword::DISTINCTROW,
            Keyword::HIGH_PRIORITY,
            Keyword::STRAIGHT_JOIN,
            Keyword::SQL_SMALL_RESULT,
            Keyword::SQL_BIG_RESULT,
            Keyword::SQL_BUFFER_RESULT,
            Keyword::SQL_NO_CACHE,
            Keyword::SQL_CALC_FOUND_ROWS,
        ];

        while let Some(keyword) = self.parse_one_of_keywords(keywords) {
            match keyword {
                // First ALL/DISTINCT encountered becomes the quantifier: step
                // back one token so parse_all_or_distinct can re-read it.
                Keyword::ALL | Keyword::DISTINCT if distinct.is_none() => {
                    self.prev_token();
                    distinct = self.parse_all_or_distinct()?;
                }
                // DISTINCTROW is a synonym for DISTINCT.
                Keyword::DISTINCTROW if distinct.is_none() => {
                    distinct = Some(Distinct::Distinct);
                }
                Keyword::HIGH_PRIORITY => modifiers.high_priority = true,
                Keyword::STRAIGHT_JOIN => modifiers.straight_join = true,
                Keyword::SQL_SMALL_RESULT => modifiers.sql_small_result = true,
                Keyword::SQL_BIG_RESULT => modifiers.sql_big_result = true,
                Keyword::SQL_BUFFER_RESULT => modifiers.sql_buffer_result = true,
                Keyword::SQL_NO_CACHE => modifiers.sql_no_cache = true,
                Keyword::SQL_CALC_FOUND_ROWS => modifiers.sql_calc_found_rows = true,
                // Reached when ALL/DISTINCT/DISTINCTROW appears after the
                // quantifier has already been set: un-consume it and error.
                _ => {
                    self.prev_token();
                    return self.expected_ref(
                        "HIGH_PRIORITY, STRAIGHT_JOIN, or other MySQL select modifier",
                        self.peek_token_ref(),
                    );
                }
            }
        }

        // Collapse an untouched modifier set to None so the AST records
        // "no modifiers" rather than an all-false struct.
        let select_modifiers = if modifiers.is_any_set() {
            Some(modifiers)
        } else {
            None
        };
        Ok((select_modifiers, distinct))
    }
15418
15419 fn parse_value_table_mode(&mut self) -> Result<Option<ValueTableMode>, ParserError> {
15420 if !dialect_of!(self is BigQueryDialect) {
15421 return Ok(None);
15422 }
15423
15424 let mode = if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::VALUE]) {
15425 Some(ValueTableMode::DistinctAsValue)
15426 } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::STRUCT]) {
15427 Some(ValueTableMode::DistinctAsStruct)
15428 } else if self.parse_keywords(&[Keyword::AS, Keyword::VALUE])
15429 || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::VALUE])
15430 {
15431 Some(ValueTableMode::AsValue)
15432 } else if self.parse_keywords(&[Keyword::AS, Keyword::STRUCT])
15433 || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::STRUCT])
15434 {
15435 Some(ValueTableMode::AsStruct)
15436 } else if self.parse_keyword(Keyword::AS) {
15437 self.expected_ref("VALUE or STRUCT", self.peek_token_ref())?
15438 } else {
15439 None
15440 };
15441
15442 Ok(mode)
15443 }
15444
15445 fn with_state<T, F>(&mut self, state: ParserState, mut f: F) -> Result<T, ParserError>
15449 where
15450 F: FnMut(&mut Parser) -> Result<T, ParserError>,
15451 {
15452 let current_state = self.state;
15453 self.state = state;
15454 let res = f(self);
15455 self.state = current_state;
15456 res
15457 }
15458
    /// Parses any hierarchical-query clauses — `START WITH <expr>` and
    /// `CONNECT BY [NOCYCLE] <expr, ...>` — in whatever order they appear.
    /// Returns an empty vec when neither clause is present.
    pub fn maybe_parse_connect_by(&mut self) -> Result<Vec<ConnectByKind>, ParserError> {
        // Typically at most two clauses (one START WITH + one CONNECT BY).
        let mut clauses = Vec::with_capacity(2);
        loop {
            if let Some(idx) = self.parse_keywords_indexed(&[Keyword::START, Keyword::WITH]) {
                clauses.push(ConnectByKind::StartWith {
                    // Keep the START token for span/round-trip information.
                    start_token: self.token_at(idx).clone().into(),
                    condition: self.parse_expr()?.into(),
                });
            } else if let Some(idx) = self.parse_keywords_indexed(&[Keyword::CONNECT, Keyword::BY])
            {
                clauses.push(ConnectByKind::ConnectBy {
                    connect_token: self.token_at(idx).clone().into(),
                    nocycle: self.parse_keyword(Keyword::NOCYCLE),
                    // NOTE(review): the ConnectBy state presumably changes how
                    // expressions are parsed inside the relationship list
                    // (e.g. PRIOR handling) — confirm in parse_expr.
                    relationships: self.with_state(ParserState::ConnectBy, |parser| {
                        parser.parse_comma_separated(Parser::parse_expr)
                    })?,
                });
            } else {
                break;
            }
        }
        Ok(clauses)
    }
15483
15484 pub fn parse_as_table(&mut self) -> Result<Table, ParserError> {
15486 let token1 = self.next_token();
15487 let token2 = self.next_token();
15488 let token3 = self.next_token();
15489
15490 let table_name;
15491 let schema_name;
15492 if token2 == Token::Period {
15493 match token1.token {
15494 Token::Word(w) => {
15495 schema_name = w.value;
15496 }
15497 _ => {
15498 return self.expected("Schema name", token1);
15499 }
15500 }
15501 match token3.token {
15502 Token::Word(w) => {
15503 table_name = w.value;
15504 }
15505 _ => {
15506 return self.expected("Table name", token3);
15507 }
15508 }
15509 Ok(Table {
15510 table_name: Some(table_name),
15511 schema_name: Some(schema_name),
15512 })
15513 } else {
15514 match token1.token {
15515 Token::Word(w) => {
15516 table_name = w.value;
15517 }
15518 _ => {
15519 return self.expected("Table name", token1);
15520 }
15521 }
15522 Ok(Table {
15523 table_name: Some(table_name),
15524 schema_name: None,
15525 })
15526 }
15527 }
15528
15529 fn parse_set_role(
15531 &mut self,
15532 modifier: Option<ContextModifier>,
15533 ) -> Result<Statement, ParserError> {
15534 self.expect_keyword_is(Keyword::ROLE)?;
15535
15536 let role_name = if self.parse_keyword(Keyword::NONE) {
15537 None
15538 } else {
15539 Some(self.parse_identifier()?)
15540 };
15541 Ok(Statement::Set(Set::SetRole {
15542 context_modifier: modifier,
15543 role_name,
15544 }))
15545 }
15546
15547 fn parse_set_values(
15548 &mut self,
15549 parenthesized_assignment: bool,
15550 ) -> Result<Vec<Expr>, ParserError> {
15551 let mut values = vec![];
15552
15553 if parenthesized_assignment {
15554 self.expect_token(&Token::LParen)?;
15555 }
15556
15557 loop {
15558 let value = if let Some(expr) = self.try_parse_expr_sub_query()? {
15559 expr
15560 } else if let Ok(expr) = self.parse_expr() {
15561 expr
15562 } else {
15563 self.expected_ref("variable value", self.peek_token_ref())?
15564 };
15565
15566 values.push(value);
15567 if self.consume_token(&Token::Comma) {
15568 continue;
15569 }
15570
15571 if parenthesized_assignment {
15572 self.expect_token(&Token::RParen)?;
15573 }
15574 return Ok(values);
15575 }
15576 }
15577
15578 fn parse_context_modifier(&mut self) -> Option<ContextModifier> {
15579 let modifier =
15580 self.parse_one_of_keywords(&[Keyword::SESSION, Keyword::LOCAL, Keyword::GLOBAL])?;
15581
15582 Self::keyword_to_modifier(modifier)
15583 }
15584
15585 fn parse_set_assignment(&mut self) -> Result<SetAssignment, ParserError> {
15587 let scope = self.parse_context_modifier();
15588
15589 let name = if self.dialect.supports_parenthesized_set_variables()
15590 && self.consume_token(&Token::LParen)
15591 {
15592 self.expected_ref("Unparenthesized assignment", self.peek_token_ref())?
15596 } else {
15597 self.parse_object_name(false)?
15598 };
15599
15600 if !(self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO)) {
15601 return self.expected_ref("assignment operator", self.peek_token_ref());
15602 }
15603
15604 let value = self.parse_expr()?;
15605
15606 Ok(SetAssignment { scope, name, value })
15607 }
15608
    /// Parses a `SET ...` statement (the `SET` keyword has already been
    /// consumed). Dispatches between the many SET forms: `SET ROLE`,
    /// `SET TIME ZONE`, `SET NAMES`, transaction characteristics,
    /// `SET [SESSION] AUTHORIZATION`, comma-separated assignments, and plain
    /// `variable = value` / `variable TO value` assignments.
    ///
    /// Dispatch order and the `prev_token` backtracking below are
    /// significant; do not reorder.
    fn parse_set(&mut self) -> Result<Statement, ParserError> {
        // Hive `SET HIVEVAR:name = value` form.
        let hivevar = self.parse_keyword(Keyword::HIVEVAR);

        // Optional SESSION/LOCAL/GLOBAL scope (mutually exclusive with
        // HIVEVAR).
        let scope = if !hivevar {
            self.parse_context_modifier()
        } else {
            None
        };

        if hivevar {
            self.expect_token(&Token::Colon)?;
        }

        // `SET [scope] ROLE ...` — tried speculatively; backtracks on failure.
        if let Some(set_role_stmt) = self.maybe_parse(|parser| parser.parse_set_role(scope))? {
            return Ok(set_role_stmt);
        }

        // `SET TIME ZONE ...` / `SET TIMEZONE ...`: with an explicit `=`/`TO`
        // it is treated as an ordinary assignment to TIMEZONE, otherwise as
        // the dedicated SET TIME ZONE form.
        if self.parse_keywords(&[Keyword::TIME, Keyword::ZONE])
            || self.parse_keyword(Keyword::TIMEZONE)
        {
            if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
                return Ok(Set::SingleAssignment {
                    scope,
                    hivevar,
                    variable: ObjectName::from(vec!["TIMEZONE".into()]),
                    values: self.parse_set_values(false)?,
                }
                .into());
            } else {
                return Ok(Set::SetTimeZone {
                    local: scope == Some(ContextModifier::Local),
                    value: self.parse_expr()?,
                }
                .into());
            }
        } else if self.dialect.supports_set_names() && self.parse_keyword(Keyword::NAMES) {
            // `SET NAMES DEFAULT` or `SET NAMES <charset> [COLLATE <coll>]`.
            if self.parse_keyword(Keyword::DEFAULT) {
                return Ok(Set::SetNamesDefault {}.into());
            }
            let charset_name = self.parse_identifier()?;
            let collation_name = if self.parse_one_of_keywords(&[Keyword::COLLATE]).is_some() {
                Some(self.parse_literal_string()?)
            } else {
                None
            };

            return Ok(Set::SetNames {
                charset_name,
                collation_name,
            }
            .into());
        } else if self.parse_keyword(Keyword::CHARACTERISTICS) {
            // `SET CHARACTERISTICS AS TRANSACTION <modes>` (session-level).
            self.expect_keywords(&[Keyword::AS, Keyword::TRANSACTION])?;
            return Ok(Set::SetTransaction {
                modes: self.parse_transaction_modes()?,
                snapshot: None,
                session: true,
            }
            .into());
        } else if self.parse_keyword(Keyword::TRANSACTION) {
            // `SET TRANSACTION SNAPSHOT <id>` or `SET TRANSACTION <modes>`.
            if self.parse_keyword(Keyword::SNAPSHOT) {
                let snapshot_id = self.parse_value()?;
                return Ok(Set::SetTransaction {
                    modes: vec![],
                    snapshot: Some(snapshot_id),
                    session: false,
                }
                .into());
            }
            return Ok(Set::SetTransaction {
                modes: self.parse_transaction_modes()?,
                snapshot: None,
                session: false,
            }
            .into());
        } else if self.parse_keyword(Keyword::AUTHORIZATION) {
            // `SET SESSION AUTHORIZATION ...` requires an explicit scope.
            let scope = match scope {
                Some(s) => s,
                None => {
                    return self.expected_at(
                        "SESSION, LOCAL, or other scope modifier before AUTHORIZATION",
                        self.get_current_index(),
                    )
                }
            };
            let auth_value = if self.parse_keyword(Keyword::DEFAULT) {
                SetSessionAuthorizationParamKind::Default
            } else {
                let value = self.parse_identifier()?;
                SetSessionAuthorizationParamKind::User(value)
            };
            return Ok(Set::SetSessionAuthorization(SetSessionAuthorizationParam {
                scope,
                kind: auth_value,
            })
            .into());
        }

        // Comma-separated assignments (`SET a = 1, b = 2`): step back over the
        // scope keyword so each assignment can parse its own scope.
        if self.dialect.supports_comma_separated_set_assignments() {
            if scope.is_some() {
                self.prev_token();
            }

            if let Some(assignments) = self
                .maybe_parse(|parser| parser.parse_comma_separated(Parser::parse_set_assignment))?
            {
                return if assignments.len() > 1 {
                    Ok(Set::MultipleAssignments { assignments }.into())
                } else {
                    // Exactly one assignment: flatten to SingleAssignment.
                    let SetAssignment { scope, name, value } =
                        assignments.into_iter().next().ok_or_else(|| {
                            ParserError::ParserError("Expected at least one assignment".to_string())
                        })?;

                    Ok(Set::SingleAssignment {
                        scope,
                        hivevar,
                        variable: name,
                        values: vec![value],
                    }
                    .into())
                };
            }
        }

        // Fallback: one variable (or a parenthesized list of variables) on the
        // left-hand side.
        let variables = if self.dialect.supports_parenthesized_set_variables()
            && self.consume_token(&Token::LParen)
        {
            let vars = OneOrManyWithParens::Many(
                self.parse_comma_separated(|parser: &mut Parser<'a>| parser.parse_identifier())?
                    .into_iter()
                    .map(|ident| ObjectName::from(vec![ident]))
                    .collect(),
            );
            self.expect_token(&Token::RParen)?;
            vars
        } else {
            OneOrManyWithParens::One(self.parse_object_name(false)?)
        };

        if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
            let stmt = match variables {
                OneOrManyWithParens::One(var) => Set::SingleAssignment {
                    scope,
                    hivevar,
                    variable: var,
                    values: self.parse_set_values(false)?,
                },
                OneOrManyWithParens::Many(vars) => Set::ParenthesizedAssignments {
                    variables: vars,
                    values: self.parse_set_values(true)?,
                },
            };

            return Ok(stmt.into());
        }

        // Dialects with operator-less SET (e.g. `SET STATISTICS IO ON`):
        // un-consume the name token and re-parse as session parameters.
        if self.dialect.supports_set_stmt_without_operator() {
            self.prev_token();
            return self.parse_set_session_params();
        };

        self.expected_ref("equals sign or TO", self.peek_token_ref())
    }
15778
    /// Parses dialect-specific session parameter assignments written after
    /// `SET` without an explicit `=`/`TO` operator, e.g. MSSQL
    /// `SET STATISTICS IO ON` or `SET IDENTITY_INSERT t ON`.
    ///
    /// Falls back to a generic `name[, name...] <expr>` form when none of the
    /// recognized parameter keywords match.
    pub fn parse_set_session_params(&mut self) -> Result<Statement, ParserError> {
        if self.parse_keyword(Keyword::STATISTICS) {
            // SET STATISTICS {IO | PROFILE | TIME | XML} {ON | OFF}
            let topic = match self.parse_one_of_keywords(&[
                Keyword::IO,
                Keyword::PROFILE,
                Keyword::TIME,
                Keyword::XML,
            ]) {
                Some(Keyword::IO) => SessionParamStatsTopic::IO,
                Some(Keyword::PROFILE) => SessionParamStatsTopic::Profile,
                Some(Keyword::TIME) => SessionParamStatsTopic::Time,
                Some(Keyword::XML) => SessionParamStatsTopic::Xml,
                _ => return self.expected_ref("IO, PROFILE, TIME or XML", self.peek_token_ref()),
            };
            let value = self.parse_session_param_value()?;
            Ok(
                Set::SetSessionParam(SetSessionParamKind::Statistics(SetSessionParamStatistics {
                    topic,
                    value,
                }))
                .into(),
            )
        } else if self.parse_keyword(Keyword::IDENTITY_INSERT) {
            // SET IDENTITY_INSERT <table> {ON | OFF}
            let obj = self.parse_object_name(false)?;
            let value = self.parse_session_param_value()?;
            Ok(Set::SetSessionParam(SetSessionParamKind::IdentityInsert(
                SetSessionParamIdentityInsert { obj, value },
            ))
            .into())
        } else if self.parse_keyword(Keyword::OFFSETS) {
            // SET OFFSETS <keyword-list> {ON | OFF}; each list entry must be a
            // bare word token, stored in textual form.
            let keywords = self.parse_comma_separated(|parser| {
                let next_token = parser.next_token();
                match &next_token.token {
                    Token::Word(w) => Ok(w.to_string()),
                    _ => parser.expected("SQL keyword", next_token),
                }
            })?;
            let value = self.parse_session_param_value()?;
            Ok(
                Set::SetSessionParam(SetSessionParamKind::Offsets(SetSessionParamOffsets {
                    keywords,
                    value,
                }))
                .into(),
            )
        } else {
            // Generic fallback: one or more parameter names followed by an
            // arbitrary expression, both kept as strings.
            let names = self.parse_comma_separated(|parser| {
                let next_token = parser.next_token();
                match next_token.token {
                    Token::Word(w) => Ok(w.to_string()),
                    _ => parser.expected("Session param name", next_token),
                }
            })?;
            let value = self.parse_expr()?.to_string();
            Ok(
                Set::SetSessionParam(SetSessionParamKind::Generic(SetSessionParamGeneric {
                    names,
                    value,
                }))
                .into(),
            )
        }
    }
15843
15844 fn parse_session_param_value(&mut self) -> Result<SessionParamValue, ParserError> {
15845 if self.parse_keyword(Keyword::ON) {
15846 Ok(SessionParamValue::On)
15847 } else if self.parse_keyword(Keyword::OFF) {
15848 Ok(SessionParamValue::Off)
15849 } else {
15850 self.expected_ref("ON or OFF", self.peek_token_ref())
15851 }
15852 }
15853
    /// Parses a `SHOW ...` statement; the `SHOW` keyword has already been
    /// consumed. The next keyword(s) select the concrete SHOW variant.
    ///
    /// Modifier keywords (TERSE/EXTENDED/FULL/SESSION/GLOBAL/EXTERNAL) are
    /// consumed up-front and passed to the variant-specific sub-parsers.
    pub fn parse_show(&mut self) -> Result<Statement, ParserError> {
        let terse = self.parse_keyword(Keyword::TERSE);
        let extended = self.parse_keyword(Keyword::EXTENDED);
        let full = self.parse_keyword(Keyword::FULL);
        let session = self.parse_keyword(Keyword::SESSION);
        let global = self.parse_keyword(Keyword::GLOBAL);
        let external = self.parse_keyword(Keyword::EXTERNAL);
        if self
            .parse_one_of_keywords(&[Keyword::COLUMNS, Keyword::FIELDS])
            .is_some()
        {
            Ok(self.parse_show_columns(extended, full)?)
        } else if self.parse_keyword(Keyword::TABLES) {
            Ok(self.parse_show_tables(terse, extended, full, external)?)
        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEWS]) {
            Ok(self.parse_show_views(terse, true)?)
        } else if self.parse_keyword(Keyword::VIEWS) {
            Ok(self.parse_show_views(terse, false)?)
        } else if self.parse_keyword(Keyword::FUNCTIONS) {
            Ok(self.parse_show_functions()?)
        } else if self.parse_keyword(Keyword::PROCESSLIST) {
            Ok(Statement::ShowProcessList { full })
        } else if extended || full {
            // Only the branches above accept EXTENDED/FULL; reject here so the
            // remaining branches cannot silently ignore those modifiers.
            Err(ParserError::ParserError(
                "EXTENDED/FULL are not supported with this type of SHOW query".to_string(),
            ))
        } else if self.parse_one_of_keywords(&[Keyword::CREATE]).is_some() {
            Ok(self.parse_show_create()?)
        } else if self.parse_keyword(Keyword::COLLATION) {
            Ok(self.parse_show_collation()?)
        } else if self.parse_keyword(Keyword::VARIABLES)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            // NOTE(review): if VARIABLES/STATUS is consumed but the dialect
            // check fails, the keyword is not pushed back before the next
            // branch runs — confirm this fall-through is intended.
            Ok(Statement::ShowVariables {
                filter: self.parse_show_statement_filter()?,
                session,
                global,
            })
        } else if self.parse_keyword(Keyword::STATUS)
            && dialect_of!(self is MySqlDialect | GenericDialect)
        {
            Ok(Statement::ShowStatus {
                filter: self.parse_show_statement_filter()?,
                session,
                global,
            })
        } else if self.parse_keyword(Keyword::CATALOGS) {
            self.parse_show_catalogs(terse)
        } else if self.parse_keyword(Keyword::DATABASES) {
            self.parse_show_databases(terse)
        } else if self.parse_keyword(Keyword::SCHEMAS) {
            self.parse_show_schemas(terse)
        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
            self.parse_show_charset(false)
        } else if self.parse_keyword(Keyword::CHARSET) {
            self.parse_show_charset(true)
        } else {
            // Anything else is treated as `SHOW <variable>`.
            Ok(Statement::ShowVariable {
                variable: self.parse_identifiers()?,
            })
        }
    }
15917
15918 fn parse_show_charset(&mut self, is_shorthand: bool) -> Result<Statement, ParserError> {
15919 Ok(Statement::ShowCharset(ShowCharset {
15921 is_shorthand,
15922 filter: self.parse_show_statement_filter()?,
15923 }))
15924 }
15925
15926 fn parse_show_catalogs(&mut self, terse: bool) -> Result<Statement, ParserError> {
15927 let history = self.parse_keyword(Keyword::HISTORY);
15928 let show_options = self.parse_show_stmt_options()?;
15929 Ok(Statement::ShowCatalogs {
15930 terse,
15931 history,
15932 show_options,
15933 })
15934 }
15935
15936 fn parse_show_databases(&mut self, terse: bool) -> Result<Statement, ParserError> {
15937 let history = self.parse_keyword(Keyword::HISTORY);
15938 let show_options = self.parse_show_stmt_options()?;
15939 Ok(Statement::ShowDatabases {
15940 terse,
15941 history,
15942 show_options,
15943 })
15944 }
15945
15946 fn parse_show_schemas(&mut self, terse: bool) -> Result<Statement, ParserError> {
15947 let history = self.parse_keyword(Keyword::HISTORY);
15948 let show_options = self.parse_show_stmt_options()?;
15949 Ok(Statement::ShowSchemas {
15950 terse,
15951 history,
15952 show_options,
15953 })
15954 }
15955
15956 pub fn parse_show_create(&mut self) -> Result<Statement, ParserError> {
15958 let obj_type = match self.expect_one_of_keywords(&[
15959 Keyword::TABLE,
15960 Keyword::TRIGGER,
15961 Keyword::FUNCTION,
15962 Keyword::PROCEDURE,
15963 Keyword::EVENT,
15964 Keyword::VIEW,
15965 ])? {
15966 Keyword::TABLE => Ok(ShowCreateObject::Table),
15967 Keyword::TRIGGER => Ok(ShowCreateObject::Trigger),
15968 Keyword::FUNCTION => Ok(ShowCreateObject::Function),
15969 Keyword::PROCEDURE => Ok(ShowCreateObject::Procedure),
15970 Keyword::EVENT => Ok(ShowCreateObject::Event),
15971 Keyword::VIEW => Ok(ShowCreateObject::View),
15972 keyword => Err(ParserError::ParserError(format!(
15973 "Unable to map keyword to ShowCreateObject: {keyword:?}"
15974 ))),
15975 }?;
15976
15977 let obj_name = self.parse_object_name(false)?;
15978
15979 Ok(Statement::ShowCreate { obj_type, obj_name })
15980 }
15981
15982 pub fn parse_show_columns(
15984 &mut self,
15985 extended: bool,
15986 full: bool,
15987 ) -> Result<Statement, ParserError> {
15988 let show_options = self.parse_show_stmt_options()?;
15989 Ok(Statement::ShowColumns {
15990 extended,
15991 full,
15992 show_options,
15993 })
15994 }
15995
15996 fn parse_show_tables(
15997 &mut self,
15998 terse: bool,
15999 extended: bool,
16000 full: bool,
16001 external: bool,
16002 ) -> Result<Statement, ParserError> {
16003 let history = !external && self.parse_keyword(Keyword::HISTORY);
16004 let show_options = self.parse_show_stmt_options()?;
16005 Ok(Statement::ShowTables {
16006 terse,
16007 history,
16008 extended,
16009 full,
16010 external,
16011 show_options,
16012 })
16013 }
16014
16015 fn parse_show_views(
16016 &mut self,
16017 terse: bool,
16018 materialized: bool,
16019 ) -> Result<Statement, ParserError> {
16020 let show_options = self.parse_show_stmt_options()?;
16021 Ok(Statement::ShowViews {
16022 materialized,
16023 terse,
16024 show_options,
16025 })
16026 }
16027
16028 pub fn parse_show_functions(&mut self) -> Result<Statement, ParserError> {
16030 let filter = self.parse_show_statement_filter()?;
16031 Ok(Statement::ShowFunctions { filter })
16032 }
16033
16034 pub fn parse_show_collation(&mut self) -> Result<Statement, ParserError> {
16036 let filter = self.parse_show_statement_filter()?;
16037 Ok(Statement::ShowCollation { filter })
16038 }
16039
16040 pub fn parse_show_statement_filter(
16042 &mut self,
16043 ) -> Result<Option<ShowStatementFilter>, ParserError> {
16044 if self.parse_keyword(Keyword::LIKE) {
16045 Ok(Some(ShowStatementFilter::Like(
16046 self.parse_literal_string()?,
16047 )))
16048 } else if self.parse_keyword(Keyword::ILIKE) {
16049 Ok(Some(ShowStatementFilter::ILike(
16050 self.parse_literal_string()?,
16051 )))
16052 } else if self.parse_keyword(Keyword::WHERE) {
16053 Ok(Some(ShowStatementFilter::Where(self.parse_expr()?)))
16054 } else {
16055 self.maybe_parse(|parser| -> Result<String, ParserError> {
16056 parser.parse_literal_string()
16057 })?
16058 .map_or(Ok(None), |filter| {
16059 Ok(Some(ShowStatementFilter::NoKeyword(filter)))
16060 })
16061 }
16062 }
16063
16064 pub fn parse_use(&mut self) -> Result<Statement, ParserError> {
16066 let parsed_keyword = if dialect_of!(self is HiveDialect) {
16068 if self.parse_keyword(Keyword::DEFAULT) {
16070 return Ok(Statement::Use(Use::Default));
16071 }
16072 None } else if dialect_of!(self is DatabricksDialect) {
16074 self.parse_one_of_keywords(&[Keyword::CATALOG, Keyword::DATABASE, Keyword::SCHEMA])
16075 } else if dialect_of!(self is SnowflakeDialect) {
16076 self.parse_one_of_keywords(&[
16077 Keyword::DATABASE,
16078 Keyword::SCHEMA,
16079 Keyword::WAREHOUSE,
16080 Keyword::ROLE,
16081 Keyword::SECONDARY,
16082 ])
16083 } else {
16084 None };
16086
16087 let result = if matches!(parsed_keyword, Some(Keyword::SECONDARY)) {
16088 self.parse_secondary_roles()?
16089 } else {
16090 let obj_name = self.parse_object_name(false)?;
16091 match parsed_keyword {
16092 Some(Keyword::CATALOG) => Use::Catalog(obj_name),
16093 Some(Keyword::DATABASE) => Use::Database(obj_name),
16094 Some(Keyword::SCHEMA) => Use::Schema(obj_name),
16095 Some(Keyword::WAREHOUSE) => Use::Warehouse(obj_name),
16096 Some(Keyword::ROLE) => Use::Role(obj_name),
16097 _ => Use::Object(obj_name),
16098 }
16099 };
16100
16101 Ok(Statement::Use(result))
16102 }
16103
16104 fn parse_secondary_roles(&mut self) -> Result<Use, ParserError> {
16105 self.expect_one_of_keywords(&[Keyword::ROLES, Keyword::ROLE])?;
16106 if self.parse_keyword(Keyword::NONE) {
16107 Ok(Use::SecondaryRoles(SecondaryRoles::None))
16108 } else if self.parse_keyword(Keyword::ALL) {
16109 Ok(Use::SecondaryRoles(SecondaryRoles::All))
16110 } else {
16111 let roles = self.parse_comma_separated(|parser| parser.parse_identifier())?;
16112 Ok(Use::SecondaryRoles(SecondaryRoles::List(roles)))
16113 }
16114 }
16115
16116 pub fn parse_table_and_joins(&mut self) -> Result<TableWithJoins, ParserError> {
16118 let relation = self.parse_table_factor()?;
16119 let joins = self.parse_joins()?;
16123 Ok(TableWithJoins { relation, joins })
16124 }
16125
    /// Parses the (possibly empty) sequence of joins following a table factor.
    /// Loops until the next token is not a join keyword.
    fn parse_joins(&mut self) -> Result<Vec<Join>, ParserError> {
        let mut joins = vec![];
        loop {
            // Optional GLOBAL prefix on each join.
            let global = self.parse_keyword(Keyword::GLOBAL);
            let join = if self.parse_keyword(Keyword::CROSS) {
                let join_operator = if self.parse_keyword(Keyword::JOIN) {
                    JoinOperator::CrossJoin(JoinConstraint::None)
                } else if self.parse_keyword(Keyword::APPLY) {
                    JoinOperator::CrossApply
                } else {
                    return self.expected_ref("JOIN or APPLY after CROSS", self.peek_token_ref());
                };
                let relation = self.parse_table_factor()?;
                // Some dialects permit an ON/USING constraint on CROSS JOIN.
                let join_operator = if matches!(join_operator, JoinOperator::CrossJoin(_))
                    && self.dialect.supports_cross_join_constraint()
                {
                    let constraint = self.parse_join_constraint(false)?;
                    JoinOperator::CrossJoin(constraint)
                } else {
                    join_operator
                };
                Join {
                    relation,
                    global,
                    join_operator,
                }
            } else if self.parse_keyword(Keyword::OUTER) {
                // OUTER APPLY <factor>
                self.expect_keyword_is(Keyword::APPLY)?;
                Join {
                    relation: self.parse_table_factor()?,
                    global,
                    join_operator: JoinOperator::OuterApply,
                }
            } else if self.parse_keyword(Keyword::ASOF) {
                // ASOF JOIN <factor> MATCH_CONDITION (<expr>) [constraint]
                self.expect_keyword_is(Keyword::JOIN)?;
                let relation = self.parse_table_factor()?;
                self.expect_keyword_is(Keyword::MATCH_CONDITION)?;
                let match_condition = self.parse_parenthesized(Self::parse_expr)?;
                Join {
                    relation,
                    global,
                    join_operator: JoinOperator::AsOf {
                        match_condition,
                        constraint: self.parse_join_constraint(false)?,
                    },
                }
            } else {
                let natural = self.parse_keyword(Keyword::NATURAL);
                // Peek (without consuming) the next keyword to pick a join type.
                let peek_keyword = if let Token::Word(w) = &self.peek_token_ref().token {
                    w.keyword
                } else {
                    Keyword::NoKeyword
                };

                // Each arm yields a JoinOperator variant constructor; it is
                // applied to the join constraint after the relation is parsed
                // (see `join_operator_type(join_constraint)` below).
                let join_operator_type = match peek_keyword {
                    Keyword::INNER | Keyword::JOIN => {
                        let inner = self.parse_keyword(Keyword::INNER); self.expect_keyword_is(Keyword::JOIN)?;
                        if inner {
                            JoinOperator::Inner
                        } else {
                            JoinOperator::Join
                        }
                    }
                    kw @ Keyword::LEFT | kw @ Keyword::RIGHT => {
                        // Consume the LEFT/RIGHT token itself.
                        let _ = self.next_token(); let is_left = kw == Keyword::LEFT;
                        let join_type = self.parse_one_of_keywords(&[
                            Keyword::OUTER,
                            Keyword::SEMI,
                            Keyword::ANTI,
                            Keyword::JOIN,
                        ]);
                        match join_type {
                            Some(Keyword::OUTER) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftOuter
                                } else {
                                    JoinOperator::RightOuter
                                }
                            }
                            Some(Keyword::SEMI) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftSemi
                                } else {
                                    JoinOperator::RightSemi
                                }
                            }
                            Some(Keyword::ANTI) => {
                                self.expect_keyword_is(Keyword::JOIN)?;
                                if is_left {
                                    JoinOperator::LeftAnti
                                } else {
                                    JoinOperator::RightAnti
                                }
                            }
                            Some(Keyword::JOIN) => {
                                if is_left {
                                    JoinOperator::Left
                                } else {
                                    JoinOperator::Right
                                }
                            }
                            _ => {
                                return Err(ParserError::ParserError(format!(
                                    "expected OUTER, SEMI, ANTI or JOIN after {kw:?}"
                                )))
                            }
                        }
                    }
                    Keyword::ANTI => {
                        let _ = self.next_token(); self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::Anti
                    }
                    Keyword::SEMI => {
                        let _ = self.next_token(); self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::Semi
                    }
                    Keyword::FULL => {
                        // OUTER is optional in FULL [OUTER] JOIN.
                        let _ = self.next_token(); let _ = self.parse_keyword(Keyword::OUTER); self.expect_keyword_is(Keyword::JOIN)?;
                        JoinOperator::FullOuter
                    }
                    Keyword::OUTER => {
                        return self.expected_ref("LEFT, RIGHT, or FULL", self.peek_token_ref());
                    }
                    Keyword::STRAIGHT_JOIN => {
                        let _ = self.next_token(); JoinOperator::StraightJoin
                    }
                    _ if natural => {
                        return self
                            .expected_ref("a join type after NATURAL", self.peek_token_ref());
                    }
                    // Not a join keyword: the join list ends here.
                    _ => break,
                };
                let mut relation = self.parse_table_factor()?;

                if !self
                    .dialect
                    .supports_left_associative_joins_without_parens()
                    && self.peek_parens_less_nested_join()
                {
                    // Dialect requires right association: fold the remaining
                    // joins into a nested join under the current relation.
                    let joins = self.parse_joins()?;
                    relation = TableFactor::NestedJoin {
                        table_with_joins: Box::new(TableWithJoins { relation, joins }),
                        alias: None,
                    };
                }

                let join_constraint = self.parse_join_constraint(natural)?;
                Join {
                    relation,
                    global,
                    join_operator: join_operator_type(join_constraint),
                }
            };
            joins.push(join);
        }
        Ok(joins)
    }
16294
16295 fn peek_parens_less_nested_join(&self) -> bool {
16296 matches!(
16297 self.peek_token_ref().token,
16298 Token::Word(Word {
16299 keyword: Keyword::JOIN
16300 | Keyword::INNER
16301 | Keyword::LEFT
16302 | Keyword::RIGHT
16303 | Keyword::FULL,
16304 ..
16305 })
16306 )
16307 }
16308
    /// Parses a single table factor in a FROM clause: a derived table,
    /// LATERAL/table function, nested join, VALUES, UNNEST,
    /// JSON_TABLE/OPENJSON/XMLTABLE/SEMANTIC_VIEW, a Snowflake stage, or a
    /// plain (possibly decorated) table name.
    #[cfg_attr(feature = "recursive-protection", recursive::recursive)]
    pub fn parse_table_factor(&mut self) -> Result<TableFactor, ParserError> {
        // Bound recursion depth for deeply nested factors.
        let _guard = self.recursion_counter.try_decrease()?;
        if self.parse_keyword(Keyword::LATERAL) {
            if self.consume_token(&Token::LParen) {
                // LATERAL (<subquery>)
                self.parse_derived_table_factor(Lateral)
            } else {
                // LATERAL <function>(<args>)
                let name = self.parse_object_name(false)?;
                self.expect_token(&Token::LParen)?;
                let args = self.parse_optional_args()?;
                let alias = self.maybe_parse_table_alias()?;
                Ok(TableFactor::Function {
                    lateral: true,
                    name,
                    args,
                    alias,
                })
            }
        } else if self.parse_keyword(Keyword::TABLE) {
            // TABLE(<expr>) table function.
            self.expect_token(&Token::LParen)?;
            let expr = self.parse_expr()?;
            self.expect_token(&Token::RParen)?;
            let alias = self.maybe_parse_table_alias()?;
            Ok(TableFactor::TableFunction { expr, alias })
        } else if self.consume_token(&Token::LParen) {
            // A parenthesized factor is ambiguous: derived table (subquery)
            // or a parenthesized join. Try the subquery first; maybe_parse
            // backtracks on failure.
            if let Some(mut table) =
                self.maybe_parse(|parser| parser.parse_derived_table_factor(NotLateral))?
            {
                // A derived table may be decorated with PIVOT/UNPIVOT chains.
                while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT])
                {
                    table = match kw {
                        Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
                        Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
                        unexpected_keyword => return Err(ParserError::ParserError(
                            format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in pivot/unpivot"),
                        )),
                    }
                }
                return Ok(table);
            }

            // Not a subquery: parse the parenthesized contents as joins.
            let mut table_and_joins = self.parse_table_and_joins()?;

            #[allow(clippy::if_same_then_else)]
            if !table_and_joins.joins.is_empty() {
                // (A JOIN B ...) — a genuine nested join.
                self.expect_token(&Token::RParen)?;
                let alias = self.maybe_parse_table_alias()?;
                Ok(TableFactor::NestedJoin {
                    table_with_joins: Box::new(table_and_joins),
                    alias,
                }) } else if let TableFactor::NestedJoin {
                table_with_joins: _,
                alias: _,
            } = &table_and_joins.relation
            {
                // ((A JOIN B)) — the inner relation is itself a nested join.
                self.expect_token(&Token::RParen)?;
                let alias = self.maybe_parse_table_alias()?;
                Ok(TableFactor::NestedJoin {
                    table_with_joins: Box::new(table_and_joins),
                    alias,
                })
            } else if self.dialect.supports_parens_around_table_factor() {
                // (<table factor>) — dialects that allow redundant parens.
                self.expect_token(&Token::RParen)?;

                if let Some(outer_alias) = self.maybe_parse_table_alias()? {
                    // Push the alias written outside the parens down onto the
                    // inner relation; a second alias is an error.
                    match &mut table_and_joins.relation {
                        TableFactor::Derived { alias, .. }
                        | TableFactor::Table { alias, .. }
                        | TableFactor::Function { alias, .. }
                        | TableFactor::UNNEST { alias, .. }
                        | TableFactor::JsonTable { alias, .. }
                        | TableFactor::XmlTable { alias, .. }
                        | TableFactor::OpenJsonTable { alias, .. }
                        | TableFactor::TableFunction { alias, .. }
                        | TableFactor::Pivot { alias, .. }
                        | TableFactor::Unpivot { alias, .. }
                        | TableFactor::MatchRecognize { alias, .. }
                        | TableFactor::SemanticView { alias, .. }
                        | TableFactor::NestedJoin { alias, .. } => {
                            if let Some(inner_alias) = alias {
                                return Err(ParserError::ParserError(format!(
                                    "duplicate alias {inner_alias}"
                                )));
                            }
                            alias.replace(outer_alias);
                        }
                    };
                }
                Ok(table_and_joins.relation)
            } else {
                self.expected_ref("joined table", self.peek_token_ref())
            }
        } else if self.dialect.supports_values_as_table_factor()
            && matches!(
                self.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::VALUES,
                        ..
                    }),
                    Token::LParen
                ]
            )
        {
            // VALUES (...) used directly as a table factor; wrapped in a
            // minimal derived-table Query node.
            self.expect_keyword_is(Keyword::VALUES)?;

            let values = SetExpr::Values(self.parse_values(false, false)?);
            let alias = self.maybe_parse_table_alias()?;
            Ok(TableFactor::Derived {
                lateral: false,
                subquery: Box::new(Query {
                    with: None,
                    body: Box::new(values),
                    order_by: None,
                    limit_clause: None,
                    fetch: None,
                    locks: vec![],
                    for_clause: None,
                    settings: None,
                    format_clause: None,
                    pipe_operators: vec![],
                }),
                alias,
                sample: None,
            })
        } else if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
            && self.parse_keyword(Keyword::UNNEST)
        {
            // UNNEST(<exprs>) [WITH ORDINALITY] [alias] [WITH OFFSET [alias]]
            self.expect_token(&Token::LParen)?;
            let array_exprs = self.parse_comma_separated(Parser::parse_expr)?;
            self.expect_token(&Token::RParen)?;

            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
            let alias = match self.maybe_parse_table_alias() {
                Ok(Some(alias)) => Some(alias),
                Ok(None) => None,
                Err(e) => return Err(e),
            };

            // NOTE(review): this uses expect_keywords (error mapped to false)
            // rather than parse_keywords — confirm it does not leave a
            // partially consumed WITH behind when OFFSET is absent.
            let with_offset = match self.expect_keywords(&[Keyword::WITH, Keyword::OFFSET]) {
                Ok(()) => true,
                Err(_) => false,
            };

            let with_offset_alias = if with_offset {
                match self.parse_optional_alias(keywords::RESERVED_FOR_COLUMN_ALIAS) {
                    Ok(Some(alias)) => Some(alias),
                    Ok(None) => None,
                    Err(e) => return Err(e),
                }
            } else {
                None
            };

            Ok(TableFactor::UNNEST {
                alias,
                array_exprs,
                with_offset,
                with_offset_alias,
                with_ordinality,
            })
        } else if self.parse_keyword_with_tokens(Keyword::JSON_TABLE, &[Token::LParen]) {
            // JSON_TABLE(<expr>, <path> COLUMNS (<defs>))
            let json_expr = self.parse_expr()?;
            self.expect_token(&Token::Comma)?;
            let json_path = self.parse_value()?;
            self.expect_keyword_is(Keyword::COLUMNS)?;
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(Parser::parse_json_table_column_def)?;
            self.expect_token(&Token::RParen)?;
            self.expect_token(&Token::RParen)?;
            let alias = self.maybe_parse_table_alias()?;
            Ok(TableFactor::JsonTable {
                json_expr,
                json_path,
                columns,
                alias,
            })
        } else if self.parse_keyword_with_tokens(Keyword::OPENJSON, &[Token::LParen]) {
            // Push the consumed token back; the helper re-parses from LParen.
            self.prev_token();
            self.parse_open_json_table_factor()
        } else if self.parse_keyword_with_tokens(Keyword::XMLTABLE, &[Token::LParen]) {
            self.prev_token();
            self.parse_xml_table_factor()
        } else if self.dialect.supports_semantic_view_table_factor()
            && self.peek_keyword_with_tokens(Keyword::SEMANTIC_VIEW, &[Token::LParen])
        {
            self.parse_semantic_view_table_factor()
        } else if self.peek_token_ref().token == Token::AtSign {
            // Snowflake stage reference, e.g. FROM @my_stage.
            self.parse_snowflake_stage_table_factor()
        } else {
            // Plain table name, possibly decorated with PartiQL path,
            // PARTITION list, version, function args, sampling, alias,
            // hints, PIVOT/UNPIVOT and MATCH_RECOGNIZE.
            let name = self.parse_object_name(true)?;

            let json_path = match &self.peek_token_ref().token {
                Token::LBracket if self.dialect.supports_partiql() => Some(self.parse_json_path()?),
                _ => None,
            };

            let partitions: Vec<Ident> = if dialect_of!(self is MySqlDialect | GenericDialect)
                && self.parse_keyword(Keyword::PARTITION)
            {
                self.parse_parenthesized_identifiers()?
            } else {
                vec![]
            };

            let version = self.maybe_parse_table_version()?;

            // Parenthesized args make the name a table-valued function call.
            let args = if self.consume_token(&Token::LParen) {
                Some(self.parse_table_function_args()?)
            } else {
                None
            };

            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);

            // Dialects differ on whether the sample clause precedes or
            // follows the table alias.
            let mut sample = None;
            if self.dialect.supports_table_sample_before_alias() {
                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
                    sample = Some(TableSampleKind::BeforeTableAlias(parsed_sample));
                }
            }

            let alias = self.maybe_parse_table_alias()?;

            let index_hints = if self.dialect.supports_table_hints() {
                self.maybe_parse(|p| p.parse_table_index_hints())?
                    .unwrap_or(vec![])
            } else {
                vec![]
            };

            // WITH (<hints>); a bare WITH (no paren) is pushed back untouched.
            let mut with_hints = vec![];
            if self.parse_keyword(Keyword::WITH) {
                if self.consume_token(&Token::LParen) {
                    with_hints = self.parse_comma_separated(Parser::parse_expr)?;
                    self.expect_token(&Token::RParen)?;
                } else {
                    self.prev_token();
                }
            };

            if !self.dialect.supports_table_sample_before_alias() {
                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
                    sample = Some(TableSampleKind::AfterTableAlias(parsed_sample));
                }
            }

            let mut table = TableFactor::Table {
                name,
                alias,
                args,
                with_hints,
                version,
                partitions,
                with_ordinality,
                json_path,
                sample,
                index_hints,
            };

            // Trailing PIVOT/UNPIVOT chains wrap the factor repeatedly.
            while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT]) {
                table = match kw {
                    Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
                    Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
                    unexpected_keyword => return Err(ParserError::ParserError(
                        format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in pivot/unpivot"),
                    )),
                }
            }

            if self.dialect.supports_match_recognize()
                && self.parse_keyword(Keyword::MATCH_RECOGNIZE)
            {
                table = self.parse_match_recognize(table)?;
            }

            Ok(table)
        }
    }
16645
16646 fn parse_snowflake_stage_table_factor(&mut self) -> Result<TableFactor, ParserError> {
16651 let name = crate::dialect::parse_snowflake_stage_name(self)?;
16653
16654 let args = if self.consume_token(&Token::LParen) {
16656 Some(self.parse_table_function_args()?)
16657 } else {
16658 None
16659 };
16660
16661 let alias = self.maybe_parse_table_alias()?;
16662
16663 Ok(TableFactor::Table {
16664 name,
16665 alias,
16666 args,
16667 with_hints: vec![],
16668 version: None,
16669 partitions: vec![],
16670 with_ordinality: false,
16671 json_path: None,
16672 sample: None,
16673 index_hints: vec![],
16674 })
16675 }
16676
16677 fn maybe_parse_table_sample(&mut self) -> Result<Option<Box<TableSample>>, ParserError> {
16678 let modifier = if self.parse_keyword(Keyword::TABLESAMPLE) {
16679 TableSampleModifier::TableSample
16680 } else if self.parse_keyword(Keyword::SAMPLE) {
16681 TableSampleModifier::Sample
16682 } else {
16683 return Ok(None);
16684 };
16685 self.parse_table_sample(modifier).map(Some)
16686 }
16687
    /// Parses the body of a TABLESAMPLE/SAMPLE clause; `modifier` records
    /// which introducing keyword was used.
    fn parse_table_sample(
        &mut self,
        modifier: TableSampleModifier,
    ) -> Result<Box<TableSample>, ParserError> {
        // Optional sampling method name.
        let name = match self.parse_one_of_keywords(&[
            Keyword::BERNOULLI,
            Keyword::ROW,
            Keyword::SYSTEM,
            Keyword::BLOCK,
        ]) {
            Some(Keyword::BERNOULLI) => Some(TableSampleMethod::Bernoulli),
            Some(Keyword::ROW) => Some(TableSampleMethod::Row),
            Some(Keyword::SYSTEM) => Some(TableSampleMethod::System),
            Some(Keyword::BLOCK) => Some(TableSampleMethod::Block),
            _ => None,
        };

        let parenthesized = self.consume_token(&Token::LParen);

        // Either Hive-style bucket sampling `(BUCKET x OUT OF y [ON expr])`,
        // or a plain quantity with an optional ROWS/PERCENT unit.
        let (quantity, bucket) = if parenthesized && self.parse_keyword(Keyword::BUCKET) {
            let selected_bucket = self.parse_number_value()?;
            self.expect_keywords(&[Keyword::OUT, Keyword::OF])?;
            let total = self.parse_number_value()?;
            let on = if self.parse_keyword(Keyword::ON) {
                Some(self.parse_expr()?)
            } else {
                None
            };
            (
                None,
                Some(TableSampleBucket {
                    bucket: selected_bucket,
                    total,
                    on,
                }),
            )
        } else {
            let value = match self.maybe_parse(|p| p.parse_expr())? {
                Some(num) => num,
                None => {
                    // Not an expression: accept a bare word (e.g. a byte
                    // length like `100M`) and keep it as a placeholder value.
                    let next_token = self.next_token();
                    if let Token::Word(w) = next_token.token {
                        Expr::Value(Value::Placeholder(w.value).with_span(next_token.span))
                    } else {
                        return parser_err!(
                            "Expecting number or byte length e.g. 100M",
                            self.peek_token_ref().span.start
                        );
                    }
                }
            };
            let unit = if self.parse_keyword(Keyword::ROWS) {
                Some(TableSampleUnit::Rows)
            } else if self.parse_keyword(Keyword::PERCENT) {
                Some(TableSampleUnit::Percent)
            } else {
                None
            };
            (
                Some(TableSampleQuantity {
                    parenthesized,
                    value,
                    unit,
                }),
                None,
            )
        };
        if parenthesized {
            self.expect_token(&Token::RParen)?;
        }

        // Optional deterministic seed: REPEATABLE(n) or SEED(n).
        let seed = if self.parse_keyword(Keyword::REPEATABLE) {
            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Repeatable)?)
        } else if self.parse_keyword(Keyword::SEED) {
            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Seed)?)
        } else {
            None
        };

        // Optional OFFSET expression.
        let offset = if self.parse_keyword(Keyword::OFFSET) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        Ok(Box::new(TableSample {
            modifier,
            name,
            quantity,
            seed,
            bucket,
            offset,
        }))
    }
16782
16783 fn parse_table_sample_seed(
16784 &mut self,
16785 modifier: TableSampleSeedModifier,
16786 ) -> Result<TableSampleSeed, ParserError> {
16787 self.expect_token(&Token::LParen)?;
16788 let value = self.parse_number_value()?;
16789 self.expect_token(&Token::RParen)?;
16790 Ok(TableSampleSeed { modifier, value })
16791 }
16792
16793 fn parse_open_json_table_factor(&mut self) -> Result<TableFactor, ParserError> {
16796 self.expect_token(&Token::LParen)?;
16797 let json_expr = self.parse_expr()?;
16798 let json_path = if self.consume_token(&Token::Comma) {
16799 Some(self.parse_value()?)
16800 } else {
16801 None
16802 };
16803 self.expect_token(&Token::RParen)?;
16804 let columns = if self.parse_keyword(Keyword::WITH) {
16805 self.expect_token(&Token::LParen)?;
16806 let columns = self.parse_comma_separated(Parser::parse_openjson_table_column_def)?;
16807 self.expect_token(&Token::RParen)?;
16808 columns
16809 } else {
16810 Vec::new()
16811 };
16812 let alias = self.maybe_parse_table_alias()?;
16813 Ok(TableFactor::OpenJsonTable {
16814 json_expr,
16815 json_path,
16816 columns,
16817 alias,
16818 })
16819 }
16820
16821 fn parse_xml_table_factor(&mut self) -> Result<TableFactor, ParserError> {
16822 self.expect_token(&Token::LParen)?;
16823 let namespaces = if self.parse_keyword(Keyword::XMLNAMESPACES) {
16824 self.expect_token(&Token::LParen)?;
16825 let namespaces = self.parse_comma_separated(Parser::parse_xml_namespace_definition)?;
16826 self.expect_token(&Token::RParen)?;
16827 self.expect_token(&Token::Comma)?;
16828 namespaces
16829 } else {
16830 vec![]
16831 };
16832 let row_expression = self.parse_expr()?;
16833 let passing = self.parse_xml_passing_clause()?;
16834 self.expect_keyword_is(Keyword::COLUMNS)?;
16835 let columns = self.parse_comma_separated(Parser::parse_xml_table_column)?;
16836 self.expect_token(&Token::RParen)?;
16837 let alias = self.maybe_parse_table_alias()?;
16838 Ok(TableFactor::XmlTable {
16839 namespaces,
16840 row_expression,
16841 passing,
16842 columns,
16843 alias,
16844 })
16845 }
16846
16847 fn parse_xml_namespace_definition(&mut self) -> Result<XmlNamespaceDefinition, ParserError> {
16848 let uri = self.parse_expr()?;
16849 self.expect_keyword_is(Keyword::AS)?;
16850 let name = self.parse_identifier()?;
16851 Ok(XmlNamespaceDefinition { uri, name })
16852 }
16853
    /// Parses a single column definition inside `XMLTABLE(... COLUMNS ...)`.
    ///
    /// A column is either `name FOR ORDINALITY`, or
    /// `name <type> [PATH <expr>] [DEFAULT <expr>] [NOT NULL | NULL]`.
    fn parse_xml_table_column(&mut self) -> Result<XmlTableColumn, ParserError> {
        let name = self.parse_identifier()?;

        let option = if self.parse_keyword(Keyword::FOR) {
            // `FOR` must be followed by `ORDINALITY` (row-numbering column).
            self.expect_keyword(Keyword::ORDINALITY)?;
            XmlTableColumnOption::ForOrdinality
        } else {
            let r#type = self.parse_data_type()?;
            let mut path = None;
            let mut default = None;

            if self.parse_keyword(Keyword::PATH) {
                path = Some(self.parse_expr()?);
            }

            if self.parse_keyword(Keyword::DEFAULT) {
                default = Some(self.parse_expr()?);
            }

            let not_null = self.parse_keywords(&[Keyword::NOT, Keyword::NULL]);
            if !not_null {
                // An explicit `NULL` is accepted but carries no information:
                // nullable is already the default, so the keyword is consumed
                // and its result discarded.
                let _ = self.parse_keyword(Keyword::NULL);
            }

            XmlTableColumnOption::NamedInfo {
                r#type,
                path,
                default,
                nullable: !not_null,
            }
        };
        Ok(XmlTableColumn { name, option })
    }
16888
    /// Parses an optional `PASSING [BY VALUE] <expr> [AS <alias>], ...`
    /// clause of `XMLTABLE`. Returns an empty argument list when the
    /// `PASSING` keyword is absent.
    fn parse_xml_passing_clause(&mut self) -> Result<XmlPassingClause, ParserError> {
        let mut arguments = vec![];
        if self.parse_keyword(Keyword::PASSING) {
            loop {
                // NOTE(review): if `BY` parses but `VALUE` does not follow,
                // the `expect_keyword` error is discarded via `is_ok()` and
                // `BY` remains consumed — confirm this leniency is intended.
                let by_value =
                    self.parse_keyword(Keyword::BY) && self.expect_keyword(Keyword::VALUE).is_ok();
                let expr = self.parse_expr()?;
                let alias = if self.parse_keyword(Keyword::AS) {
                    Some(self.parse_identifier()?)
                } else {
                    None
                };
                arguments.push(XmlPassingArgument {
                    expr,
                    alias,
                    by_value,
                });
                // Arguments are comma-separated; stop at the first non-comma.
                if !self.consume_token(&Token::Comma) {
                    break;
                }
            }
        }
        Ok(XmlPassingClause { arguments })
    }
16913
    /// Parses a `SEMANTIC_VIEW( <name> [DIMENSIONS ...] [METRICS ...]
    /// [FACTS ...] [WHERE ...] ) [alias]` table factor.
    ///
    /// The DIMENSIONS / METRICS / FACTS / WHERE clauses may appear in any
    /// order, but each at most once; a second occurrence is a parse error.
    fn parse_semantic_view_table_factor(&mut self) -> Result<TableFactor, ParserError> {
        self.expect_keyword(Keyword::SEMANTIC_VIEW)?;
        self.expect_token(&Token::LParen)?;

        let name = self.parse_object_name(true)?;

        // Each clause defaults to empty/absent; non-empty doubles as the
        // "already seen" flag for the duplicate checks below.
        let mut dimensions = Vec::new();
        let mut metrics = Vec::new();
        let mut facts = Vec::new();
        let mut where_clause = None;

        while self.peek_token_ref().token != Token::RParen {
            if self.parse_keyword(Keyword::DIMENSIONS) {
                if !dimensions.is_empty() {
                    return Err(ParserError::ParserError(
                        "DIMENSIONS clause can only be specified once".to_string(),
                    ));
                }
                dimensions = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
            } else if self.parse_keyword(Keyword::METRICS) {
                if !metrics.is_empty() {
                    return Err(ParserError::ParserError(
                        "METRICS clause can only be specified once".to_string(),
                    ));
                }
                metrics = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
            } else if self.parse_keyword(Keyword::FACTS) {
                if !facts.is_empty() {
                    return Err(ParserError::ParserError(
                        "FACTS clause can only be specified once".to_string(),
                    ));
                }
                facts = self.parse_comma_separated(Parser::parse_wildcard_expr)?;
            } else if self.parse_keyword(Keyword::WHERE) {
                if where_clause.is_some() {
                    return Err(ParserError::ParserError(
                        "WHERE clause can only be specified once".to_string(),
                    ));
                }
                where_clause = Some(self.parse_expr()?);
            } else {
                // Unknown token inside the parens: report it with its span.
                let tok = self.peek_token_ref();
                return parser_err!(
                    format!(
                        "Expected one of DIMENSIONS, METRICS, FACTS or WHERE, got {}",
                        tok.token
                    ),
                    tok.span.start
                )?;
            }
        }

        self.expect_token(&Token::RParen)?;

        let alias = self.maybe_parse_table_alias()?;

        Ok(TableFactor::SemanticView {
            name,
            dimensions,
            metrics,
            facts,
            where_clause,
            alias,
        })
    }
16981
    /// Parses a `MATCH_RECOGNIZE (...)` clause applied to `table`; parsing
    /// starts at the opening parenthesis.
    ///
    /// Clauses are consumed in this fixed order: optional PARTITION BY,
    /// ORDER BY, MEASURES, rows-per-match, and AFTER MATCH SKIP, followed by
    /// the mandatory PATTERN and DEFINE clauses, then an optional alias.
    fn parse_match_recognize(&mut self, table: TableFactor) -> Result<TableFactor, ParserError> {
        self.expect_token(&Token::LParen)?;

        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };

        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        // Each measure is `<expr> [AS] <alias>`; the AS keyword is optional
        // but the alias itself is required.
        let measures = if self.parse_keyword(Keyword::MEASURES) {
            self.parse_comma_separated(|p| {
                let expr = p.parse_expr()?;
                let _ = p.parse_keyword(Keyword::AS);
                let alias = p.parse_identifier()?;
                Ok(Measure { expr, alias })
            })?
        } else {
            vec![]
        };

        // `ONE ROW PER MATCH` or `ALL ROWS PER MATCH` with an optional
        // empty-match mode; absent entirely when neither form is present.
        let rows_per_match =
            if self.parse_keywords(&[Keyword::ONE, Keyword::ROW, Keyword::PER, Keyword::MATCH]) {
                Some(RowsPerMatch::OneRow)
            } else if self.parse_keywords(&[
                Keyword::ALL,
                Keyword::ROWS,
                Keyword::PER,
                Keyword::MATCH,
            ]) {
                Some(RowsPerMatch::AllRows(
                    if self.parse_keywords(&[Keyword::SHOW, Keyword::EMPTY, Keyword::MATCHES]) {
                        Some(EmptyMatchesMode::Show)
                    } else if self.parse_keywords(&[
                        Keyword::OMIT,
                        Keyword::EMPTY,
                        Keyword::MATCHES,
                    ]) {
                        Some(EmptyMatchesMode::Omit)
                    } else if self.parse_keywords(&[
                        Keyword::WITH,
                        Keyword::UNMATCHED,
                        Keyword::ROWS,
                    ]) {
                        Some(EmptyMatchesMode::WithUnmatched)
                    } else {
                        None
                    },
                ))
            } else {
                None
            };

        // After `AFTER MATCH SKIP`, one of the four skip options is required;
        // anything else is a parse error.
        let after_match_skip =
            if self.parse_keywords(&[Keyword::AFTER, Keyword::MATCH, Keyword::SKIP]) {
                if self.parse_keywords(&[Keyword::PAST, Keyword::LAST, Keyword::ROW]) {
                    Some(AfterMatchSkip::PastLastRow)
                } else if self.parse_keywords(&[Keyword::TO, Keyword::NEXT, Keyword::ROW]) {
                    Some(AfterMatchSkip::ToNextRow)
                } else if self.parse_keywords(&[Keyword::TO, Keyword::FIRST]) {
                    Some(AfterMatchSkip::ToFirst(self.parse_identifier()?))
                } else if self.parse_keywords(&[Keyword::TO, Keyword::LAST]) {
                    Some(AfterMatchSkip::ToLast(self.parse_identifier()?))
                } else {
                    let found = self.next_token();
                    return self.expected("after match skip option", found);
                }
            } else {
                None
            };

        self.expect_keyword_is(Keyword::PATTERN)?;
        let pattern = self.parse_parenthesized(Self::parse_pattern)?;

        self.expect_keyword_is(Keyword::DEFINE)?;

        // DEFINE entries are `<symbol> AS <expr>`; here AS is mandatory.
        let symbols = self.parse_comma_separated(|p| {
            let symbol = p.parse_identifier()?;
            p.expect_keyword_is(Keyword::AS)?;
            let definition = p.parse_expr()?;
            Ok(SymbolDefinition { symbol, definition })
        })?;

        self.expect_token(&Token::RParen)?;

        let alias = self.maybe_parse_table_alias()?;

        Ok(TableFactor::MatchRecognize {
            table: Box::new(table),
            partition_by,
            order_by,
            measures,
            rows_per_match,
            after_match_skip,
            pattern,
            symbols,
            alias,
        })
    }
17086
    /// Parses a primary (unquantified) element of a `MATCH_RECOGNIZE`
    /// pattern: `^`, `$`, an exclusion `{- sym -}`, `PERMUTE(...)`, a
    /// parenthesized group, or a plain symbol name.
    fn parse_base_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
        match self.next_token().token {
            Token::Caret => Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::Start)),
            // `$` reaches the parser as a placeholder token, not an operator.
            Token::Placeholder(s) if s == "$" => {
                Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::End))
            }
            Token::LBrace => {
                // Exclusion syntax: `{- symbol -}`.
                self.expect_token(&Token::Minus)?;
                let symbol = self.parse_identifier().map(MatchRecognizeSymbol::Named)?;
                self.expect_token(&Token::Minus)?;
                self.expect_token(&Token::RBrace)?;
                Ok(MatchRecognizePattern::Exclude(symbol))
            }
            // PERMUTE is matched as a bare, unquoted word rather than a
            // reserved keyword.
            Token::Word(Word {
                value,
                quote_style: None,
                ..
            }) if value == "PERMUTE" => {
                self.expect_token(&Token::LParen)?;
                let symbols = self.parse_comma_separated(|p| {
                    p.parse_identifier().map(MatchRecognizeSymbol::Named)
                })?;
                self.expect_token(&Token::RParen)?;
                Ok(MatchRecognizePattern::Permute(symbols))
            }
            Token::LParen => {
                let pattern = self.parse_pattern()?;
                self.expect_token(&Token::RParen)?;
                Ok(MatchRecognizePattern::Group(Box::new(pattern)))
            }
            _ => {
                // Anything else: push the token back and try to read it as a
                // named symbol identifier.
                self.prev_token();
                self.parse_identifier()
                    .map(MatchRecognizeSymbol::Named)
                    .map(MatchRecognizePattern::Symbol)
            }
        }
    }
17125
    /// Parses a base pattern followed by zero or more repetition
    /// quantifiers: `*`, `+`, `?`, `{n}`, `{n,}`, `{,m}`, or `{n,m}`.
    /// Each quantifier wraps the accumulated pattern in `Repetition`.
    fn parse_repetition_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
        let mut pattern = self.parse_base_pattern()?;
        loop {
            let token = self.next_token();
            let quantifier = match token.token {
                Token::Mul => RepetitionQuantifier::ZeroOrMore,
                Token::Plus => RepetitionQuantifier::OneOrMore,
                // `?` reaches the parser as a placeholder token.
                Token::Placeholder(s) if s == "?" => RepetitionQuantifier::AtMostOne,
                Token::LBrace => {
                    // Inside `{...}`: shadowed `token` is the first token
                    // after the brace; its span is used for number parsing
                    // diagnostics below.
                    let token = self.next_token();
                    match token.token {
                        // `{,m}` — upper bound only.
                        Token::Comma => {
                            let next_token = self.next_token();
                            let Token::Number(n, _) = next_token.token else {
                                return self.expected("literal number", next_token);
                            };
                            self.expect_token(&Token::RBrace)?;
                            RepetitionQuantifier::AtMost(Self::parse(n, token.span.start)?)
                        }
                        // `{n,...` — lower bound followed by a comma; the
                        // comma is consumed by the guard itself.
                        Token::Number(n, _) if self.consume_token(&Token::Comma) => {
                            let next_token = self.next_token();
                            match next_token.token {
                                // `{n,m}` — full range.
                                Token::Number(m, _) => {
                                    self.expect_token(&Token::RBrace)?;
                                    RepetitionQuantifier::Range(
                                        Self::parse(n, token.span.start)?,
                                        Self::parse(m, token.span.start)?,
                                    )
                                }
                                // `{n,}` — lower bound only.
                                Token::RBrace => {
                                    RepetitionQuantifier::AtLeast(Self::parse(n, token.span.start)?)
                                }
                                _ => {
                                    return self.expected("} or upper bound", next_token);
                                }
                            }
                        }
                        // `{n}` — exact count.
                        Token::Number(n, _) => {
                            self.expect_token(&Token::RBrace)?;
                            RepetitionQuantifier::Exactly(Self::parse(n, token.span.start)?)
                        }
                        _ => return self.expected("quantifier range", token),
                    }
                }
                _ => {
                    // Not a quantifier: push the token back and stop.
                    self.prev_token();
                    break;
                }
            };
            pattern = MatchRecognizePattern::Repetition(Box::new(pattern), quantifier);
        }
        Ok(pattern)
    }
17180
17181 fn parse_concat_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
17182 let mut patterns = vec![self.parse_repetition_pattern()?];
17183 while !matches!(self.peek_token_ref().token, Token::RParen | Token::Pipe) {
17184 patterns.push(self.parse_repetition_pattern()?);
17185 }
17186 match <[MatchRecognizePattern; 1]>::try_from(patterns) {
17187 Ok([pattern]) => Ok(pattern),
17188 Err(patterns) => Ok(MatchRecognizePattern::Concat(patterns)),
17189 }
17190 }
17191
17192 fn parse_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
17193 let pattern = self.parse_concat_pattern()?;
17194 if self.consume_token(&Token::Pipe) {
17195 match self.parse_pattern()? {
17196 MatchRecognizePattern::Alternation(mut patterns) => {
17198 patterns.insert(0, pattern);
17199 Ok(MatchRecognizePattern::Alternation(patterns))
17200 }
17201 next => Ok(MatchRecognizePattern::Alternation(vec![pattern, next])),
17202 }
17203 } else {
17204 Ok(pattern)
17205 }
17206 }
17207
    /// Attempts to parse a time-travel / versioning clause following a table
    /// name. Returns `None` when the dialect does not support versioning or
    /// when no versioning syntax is present.
    ///
    /// Recognized forms: `FOR SYSTEM_TIME AS OF <expr>`, `CHANGES(...)`,
    /// `AT(...)` / `BEFORE(...)` function calls, `TIMESTAMP AS OF <expr>`,
    /// and `VERSION AS OF <number>`.
    pub fn maybe_parse_table_version(&mut self) -> Result<Option<TableVersion>, ParserError> {
        if self.dialect.supports_table_versioning() {
            if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
            {
                let expr = self.parse_expr()?;
                return Ok(Some(TableVersion::ForSystemTimeAsOf(expr)));
            } else if self.peek_keyword(Keyword::CHANGES) {
                return self.parse_table_version_changes().map(Some);
            } else if self.peek_keyword(Keyword::AT) || self.peek_keyword(Keyword::BEFORE) {
                // AT(...)/BEFORE(...) are parsed as ordinary function calls.
                let func_name = self.parse_object_name(true)?;
                let func = self.parse_function(func_name)?;
                return Ok(Some(TableVersion::Function(func)));
            } else if self.parse_keywords(&[Keyword::TIMESTAMP, Keyword::AS, Keyword::OF]) {
                let expr = self.parse_expr()?;
                return Ok(Some(TableVersion::TimestampAsOf(expr)));
            } else if self.parse_keywords(&[Keyword::VERSION, Keyword::AS, Keyword::OF]) {
                // VERSION AS OF takes a literal number, not a general expr.
                let expr = Expr::Value(self.parse_number_value()?);
                return Ok(Some(TableVersion::VersionAsOf(expr)));
            }
        }
        Ok(None)
    }
17231
17232 fn parse_table_version_changes(&mut self) -> Result<TableVersion, ParserError> {
17243 let changes_name = self.parse_object_name(true)?;
17244 let changes = self.parse_function(changes_name)?;
17245 let at_name = self.parse_object_name(true)?;
17246 let at = self.parse_function(at_name)?;
17247 let end = if self.peek_keyword(Keyword::END) {
17248 let end_name = self.parse_object_name(true)?;
17249 Some(self.parse_function(end_name)?)
17250 } else {
17251 None
17252 };
17253 Ok(TableVersion::Changes { changes, at, end })
17254 }
17255
    /// Parses one column definition of a `JSON_TABLE(... COLUMNS ...)`
    /// clause.
    ///
    /// Supported forms: `NESTED [PATH] '<path>' COLUMNS (...)` (recursive),
    /// `<name> FOR ORDINALITY`, and
    /// `<name> <type> [EXISTS] PATH '<path>' [<handling> ON EMPTY]
    /// [<handling> ON ERROR]`.
    pub fn parse_json_table_column_def(&mut self) -> Result<JsonTableColumn, ParserError> {
        if self.parse_keyword(Keyword::NESTED) {
            // The PATH keyword is optional before the path value.
            let _has_path_keyword = self.parse_keyword(Keyword::PATH);
            let path = self.parse_value()?;
            self.expect_keyword_is(Keyword::COLUMNS)?;
            // Nested columns recurse into this same definition parser.
            let columns = self.parse_parenthesized(|p| {
                p.parse_comma_separated(Self::parse_json_table_column_def)
            })?;
            return Ok(JsonTableColumn::Nested(JsonTableNestedColumn {
                path,
                columns,
            }));
        }
        let name = self.parse_identifier()?;
        if self.parse_keyword(Keyword::FOR) {
            self.expect_keyword_is(Keyword::ORDINALITY)?;
            return Ok(JsonTableColumn::ForOrdinality(name));
        }
        let r#type = self.parse_data_type()?;
        let exists = self.parse_keyword(Keyword::EXISTS);
        self.expect_keyword_is(Keyword::PATH)?;
        let path = self.parse_value()?;
        let mut on_empty = None;
        let mut on_error = None;
        // Both ON EMPTY and ON ERROR handlers may appear, in either order;
        // the helper consumes up to and including the ON keyword.
        while let Some(error_handling) = self.parse_json_table_column_error_handling()? {
            if self.parse_keyword(Keyword::EMPTY) {
                on_empty = Some(error_handling);
            } else {
                self.expect_keyword_is(Keyword::ERROR)?;
                on_error = Some(error_handling);
            }
        }
        Ok(JsonTableColumn::Named(JsonTableNamedColumn {
            name,
            r#type,
            path,
            exists,
            on_empty,
            on_error,
        }))
    }
17299
17300 pub fn parse_openjson_table_column_def(&mut self) -> Result<OpenJsonTableColumn, ParserError> {
17308 let name = self.parse_identifier()?;
17309 let r#type = self.parse_data_type()?;
17310 let path = if let Token::SingleQuotedString(path) = self.peek_token().token {
17311 self.next_token();
17312 Some(path)
17313 } else {
17314 None
17315 };
17316 let as_json = self.parse_keyword(Keyword::AS);
17317 if as_json {
17318 self.expect_keyword_is(Keyword::JSON)?;
17319 }
17320 Ok(OpenJsonTableColumn {
17321 name,
17322 r#type,
17323 path,
17324 as_json,
17325 })
17326 }
17327
17328 fn parse_json_table_column_error_handling(
17329 &mut self,
17330 ) -> Result<Option<JsonTableColumnErrorHandling>, ParserError> {
17331 let res = if self.parse_keyword(Keyword::NULL) {
17332 JsonTableColumnErrorHandling::Null
17333 } else if self.parse_keyword(Keyword::ERROR) {
17334 JsonTableColumnErrorHandling::Error
17335 } else if self.parse_keyword(Keyword::DEFAULT) {
17336 JsonTableColumnErrorHandling::Default(self.parse_value()?)
17337 } else {
17338 return Ok(None);
17339 };
17340 self.expect_keyword_is(Keyword::ON)?;
17341 Ok(Some(res))
17342 }
17343
17344 pub fn parse_derived_table_factor(
17346 &mut self,
17347 lateral: IsLateral,
17348 ) -> Result<TableFactor, ParserError> {
17349 let subquery = self.parse_query()?;
17350 self.expect_token(&Token::RParen)?;
17351 let alias = self.maybe_parse_table_alias()?;
17352
17353 let sample = self
17355 .maybe_parse_table_sample()?
17356 .map(TableSampleKind::AfterTableAlias);
17357
17358 Ok(TableFactor::Derived {
17359 lateral: match lateral {
17360 Lateral => true,
17361 NotLateral => false,
17362 },
17363 subquery,
17364 alias,
17365 sample,
17366 })
17367 }
17368
17369 pub fn parse_expr_with_alias(&mut self) -> Result<ExprWithAlias, ParserError> {
17392 let expr = self.parse_expr()?;
17393 let alias = if self.parse_keyword(Keyword::AS) {
17394 Some(self.parse_identifier()?)
17395 } else {
17396 None
17397 };
17398
17399 Ok(ExprWithAlias { expr, alias })
17400 }
17401
17402 fn parse_expr_with_alias_optional_as_keyword(&mut self) -> Result<ExprWithAlias, ParserError> {
17406 let expr = self.parse_expr()?;
17407 let alias = self.parse_identifier_optional_alias()?;
17408 Ok(ExprWithAlias { expr, alias })
17409 }
17410
    /// Parses one aggregate function call in a `PIVOT(...)` clause, with an
    /// optional alias.
    fn parse_pivot_aggregate_function(&mut self) -> Result<ExprWithAlias, ParserError> {
        let function_name = match self.next_token().token {
            Token::Word(w) => Ok(w.value),
            _ => self.expected_ref("a function identifier", self.peek_token_ref()),
        }?;
        let expr = self.parse_function(ObjectName::from(vec![Ident::new(function_name)]))?;
        let alias = {
            // Reject `FOR` as an alias so the pivot's own `FOR` clause
            // terminates the aggregate list instead of being swallowed as an
            // implicit alias; otherwise defer to the dialect's alias rules.
            fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
                kw != &Keyword::FOR && parser.dialect.is_select_item_alias(explicit, kw, parser)
            }
            self.parse_optional_alias_inner(None, validator)?
        };
        Ok(ExprWithAlias { expr, alias })
    }
17427
17428 pub fn parse_pivot_table_factor(
17430 &mut self,
17431 table: TableFactor,
17432 ) -> Result<TableFactor, ParserError> {
17433 self.expect_token(&Token::LParen)?;
17434 let aggregate_functions =
17435 self.parse_comma_separated(Self::parse_pivot_aggregate_function)?;
17436 self.expect_keyword_is(Keyword::FOR)?;
17437 let value_column = if self.peek_token_ref().token == Token::LParen {
17438 self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
17439 p.parse_subexpr(self.dialect.prec_value(Precedence::Between))
17440 })?
17441 } else {
17442 vec![self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?]
17443 };
17444 self.expect_keyword_is(Keyword::IN)?;
17445
17446 self.expect_token(&Token::LParen)?;
17447 let value_source = if self.parse_keyword(Keyword::ANY) {
17448 let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
17449 self.parse_comma_separated(Parser::parse_order_by_expr)?
17450 } else {
17451 vec![]
17452 };
17453 PivotValueSource::Any(order_by)
17454 } else if self.peek_sub_query() {
17455 PivotValueSource::Subquery(self.parse_query()?)
17456 } else {
17457 PivotValueSource::List(
17458 self.parse_comma_separated(Self::parse_expr_with_alias_optional_as_keyword)?,
17459 )
17460 };
17461 self.expect_token(&Token::RParen)?;
17462
17463 let default_on_null =
17464 if self.parse_keywords(&[Keyword::DEFAULT, Keyword::ON, Keyword::NULL]) {
17465 self.expect_token(&Token::LParen)?;
17466 let expr = self.parse_expr()?;
17467 self.expect_token(&Token::RParen)?;
17468 Some(expr)
17469 } else {
17470 None
17471 };
17472
17473 self.expect_token(&Token::RParen)?;
17474 let alias = self.maybe_parse_table_alias()?;
17475 Ok(TableFactor::Pivot {
17476 table: Box::new(table),
17477 aggregate_functions,
17478 value_column,
17479 value_source,
17480 default_on_null,
17481 alias,
17482 })
17483 }
17484
17485 pub fn parse_unpivot_table_factor(
17487 &mut self,
17488 table: TableFactor,
17489 ) -> Result<TableFactor, ParserError> {
17490 let null_inclusion = if self.parse_keyword(Keyword::INCLUDE) {
17491 self.expect_keyword_is(Keyword::NULLS)?;
17492 Some(NullInclusion::IncludeNulls)
17493 } else if self.parse_keyword(Keyword::EXCLUDE) {
17494 self.expect_keyword_is(Keyword::NULLS)?;
17495 Some(NullInclusion::ExcludeNulls)
17496 } else {
17497 None
17498 };
17499 self.expect_token(&Token::LParen)?;
17500 let value = self.parse_expr()?;
17501 self.expect_keyword_is(Keyword::FOR)?;
17502 let name = self.parse_identifier()?;
17503 self.expect_keyword_is(Keyword::IN)?;
17504 let columns = self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
17505 p.parse_expr_with_alias()
17506 })?;
17507 self.expect_token(&Token::RParen)?;
17508 let alias = self.maybe_parse_table_alias()?;
17509 Ok(TableFactor::Unpivot {
17510 table: Box::new(table),
17511 value,
17512 null_inclusion,
17513 name,
17514 columns,
17515 alias,
17516 })
17517 }
17518
17519 pub fn parse_join_constraint(&mut self, natural: bool) -> Result<JoinConstraint, ParserError> {
17521 if natural {
17522 Ok(JoinConstraint::Natural)
17523 } else if self.parse_keyword(Keyword::ON) {
17524 let constraint = self.parse_expr()?;
17525 Ok(JoinConstraint::On(constraint))
17526 } else if self.parse_keyword(Keyword::USING) {
17527 let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
17528 Ok(JoinConstraint::Using(columns))
17529 } else {
17530 Ok(JoinConstraint::None)
17531 }
17533 }
17534
17535 pub fn parse_grant(&mut self) -> Result<Grant, ParserError> {
17537 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
17538
17539 self.expect_keyword_is(Keyword::TO)?;
17540 let grantees = self.parse_grantees()?;
17541
17542 let with_grant_option =
17543 self.parse_keywords(&[Keyword::WITH, Keyword::GRANT, Keyword::OPTION]);
17544
17545 let current_grants =
17546 if self.parse_keywords(&[Keyword::COPY, Keyword::CURRENT, Keyword::GRANTS]) {
17547 Some(CurrentGrantsKind::CopyCurrentGrants)
17548 } else if self.parse_keywords(&[Keyword::REVOKE, Keyword::CURRENT, Keyword::GRANTS]) {
17549 Some(CurrentGrantsKind::RevokeCurrentGrants)
17550 } else {
17551 None
17552 };
17553
17554 let as_grantor = if self.parse_keywords(&[Keyword::AS]) {
17555 Some(self.parse_identifier()?)
17556 } else {
17557 None
17558 };
17559
17560 let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
17561 Some(self.parse_identifier()?)
17562 } else {
17563 None
17564 };
17565
17566 Ok(Grant {
17567 privileges,
17568 objects,
17569 grantees,
17570 with_grant_option,
17571 as_grantor,
17572 granted_by,
17573 current_grants,
17574 })
17575 }
17576
    /// Parses the comma-separated grantee list of a GRANT/DENY/REVOKE
    /// statement.
    ///
    /// Each grantee may be prefixed by a type keyword (ROLE, USER, SHARE,
    /// GROUP, PUBLIC, DATABASE ROLE, APPLICATION [ROLE]); when omitted, the
    /// type of the previous list element is carried forward.
    fn parse_grantees(&mut self) -> Result<Vec<Grantee>, ParserError> {
        let mut values = vec![];
        let mut grantee_type = GranteesType::None;
        loop {
            let new_grantee_type = if self.parse_keyword(Keyword::ROLE) {
                GranteesType::Role
            } else if self.parse_keyword(Keyword::USER) {
                GranteesType::User
            } else if self.parse_keyword(Keyword::SHARE) {
                GranteesType::Share
            } else if self.parse_keyword(Keyword::GROUP) {
                GranteesType::Group
            } else if self.parse_keyword(Keyword::PUBLIC) {
                GranteesType::Public
            } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
                GranteesType::DatabaseRole
            } else if self.parse_keywords(&[Keyword::APPLICATION, Keyword::ROLE]) {
                GranteesType::ApplicationRole
            } else if self.parse_keyword(Keyword::APPLICATION) {
                GranteesType::Application
            } else {
                // No type keyword: inherit the previous element's type.
                grantee_type.clone() };

            // If the dialect reserves the parsed type keyword, the consumed
            // word was actually the grantee's name — push it back so it is
            // re-read by `parse_grantee_name` below.
            // NOTE(review): on the inherit path above no token was consumed,
            // so a reserved inherited type would rewind one token too far —
            // confirm reserved types can never be carried forward.
            if self
                .dialect
                .get_reserved_grantees_types()
                .contains(&new_grantee_type)
            {
                self.prev_token();
            } else {
                grantee_type = new_grantee_type;
            }

            let grantee = if grantee_type == GranteesType::Public {
                // PUBLIC takes no name.
                Grantee {
                    grantee_type: grantee_type.clone(),
                    name: None,
                }
            } else {
                let mut name = self.parse_grantee_name()?;
                if self.consume_token(&Token::Colon) {
                    // `namespace:name` form — collapse into a single
                    // identifier containing the colon.
                    let ident = self.parse_identifier()?;
                    if let GranteeName::ObjectName(namespace) = name {
                        name = GranteeName::ObjectName(ObjectName::from(vec![Ident::new(
                            format!("{namespace}:{ident}"),
                        )]));
                    };
                }
                Grantee {
                    grantee_type: grantee_type.clone(),
                    name: Some(name),
                }
            };

            values.push(grantee);

            if !self.consume_token(&Token::Comma) {
                break;
            }
        }

        Ok(values)
    }
17644
17645 pub fn parse_grant_deny_revoke_privileges_objects(
17647 &mut self,
17648 ) -> Result<(Privileges, Option<GrantObjects>), ParserError> {
17649 let privileges = if self.parse_keyword(Keyword::ALL) {
17650 Privileges::All {
17651 with_privileges_keyword: self.parse_keyword(Keyword::PRIVILEGES),
17652 }
17653 } else {
17654 let actions = self.parse_actions_list()?;
17655 Privileges::Actions(actions)
17656 };
17657
17658 let objects = if self.parse_keyword(Keyword::ON) {
17659 if self.parse_keywords(&[Keyword::ALL, Keyword::TABLES, Keyword::IN, Keyword::SCHEMA]) {
17660 Some(GrantObjects::AllTablesInSchema {
17661 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17662 })
17663 } else if self.parse_keywords(&[
17664 Keyword::ALL,
17665 Keyword::EXTERNAL,
17666 Keyword::TABLES,
17667 Keyword::IN,
17668 Keyword::SCHEMA,
17669 ]) {
17670 Some(GrantObjects::AllExternalTablesInSchema {
17671 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17672 })
17673 } else if self.parse_keywords(&[
17674 Keyword::ALL,
17675 Keyword::VIEWS,
17676 Keyword::IN,
17677 Keyword::SCHEMA,
17678 ]) {
17679 Some(GrantObjects::AllViewsInSchema {
17680 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17681 })
17682 } else if self.parse_keywords(&[
17683 Keyword::ALL,
17684 Keyword::MATERIALIZED,
17685 Keyword::VIEWS,
17686 Keyword::IN,
17687 Keyword::SCHEMA,
17688 ]) {
17689 Some(GrantObjects::AllMaterializedViewsInSchema {
17690 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17691 })
17692 } else if self.parse_keywords(&[
17693 Keyword::ALL,
17694 Keyword::FUNCTIONS,
17695 Keyword::IN,
17696 Keyword::SCHEMA,
17697 ]) {
17698 Some(GrantObjects::AllFunctionsInSchema {
17699 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17700 })
17701 } else if self.parse_keywords(&[
17702 Keyword::FUTURE,
17703 Keyword::SCHEMAS,
17704 Keyword::IN,
17705 Keyword::DATABASE,
17706 ]) {
17707 Some(GrantObjects::FutureSchemasInDatabase {
17708 databases: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17709 })
17710 } else if self.parse_keywords(&[
17711 Keyword::FUTURE,
17712 Keyword::TABLES,
17713 Keyword::IN,
17714 Keyword::SCHEMA,
17715 ]) {
17716 Some(GrantObjects::FutureTablesInSchema {
17717 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17718 })
17719 } else if self.parse_keywords(&[
17720 Keyword::FUTURE,
17721 Keyword::EXTERNAL,
17722 Keyword::TABLES,
17723 Keyword::IN,
17724 Keyword::SCHEMA,
17725 ]) {
17726 Some(GrantObjects::FutureExternalTablesInSchema {
17727 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17728 })
17729 } else if self.parse_keywords(&[
17730 Keyword::FUTURE,
17731 Keyword::VIEWS,
17732 Keyword::IN,
17733 Keyword::SCHEMA,
17734 ]) {
17735 Some(GrantObjects::FutureViewsInSchema {
17736 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17737 })
17738 } else if self.parse_keywords(&[
17739 Keyword::FUTURE,
17740 Keyword::MATERIALIZED,
17741 Keyword::VIEWS,
17742 Keyword::IN,
17743 Keyword::SCHEMA,
17744 ]) {
17745 Some(GrantObjects::FutureMaterializedViewsInSchema {
17746 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17747 })
17748 } else if self.parse_keywords(&[
17749 Keyword::ALL,
17750 Keyword::SEQUENCES,
17751 Keyword::IN,
17752 Keyword::SCHEMA,
17753 ]) {
17754 Some(GrantObjects::AllSequencesInSchema {
17755 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17756 })
17757 } else if self.parse_keywords(&[
17758 Keyword::FUTURE,
17759 Keyword::SEQUENCES,
17760 Keyword::IN,
17761 Keyword::SCHEMA,
17762 ]) {
17763 Some(GrantObjects::FutureSequencesInSchema {
17764 schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
17765 })
17766 } else if self.parse_keywords(&[Keyword::RESOURCE, Keyword::MONITOR]) {
17767 Some(GrantObjects::ResourceMonitors(
17768 self.parse_comma_separated(|p| p.parse_object_name(false))?,
17769 ))
17770 } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
17771 Some(GrantObjects::ComputePools(
17772 self.parse_comma_separated(|p| p.parse_object_name(false))?,
17773 ))
17774 } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
17775 Some(GrantObjects::FailoverGroup(
17776 self.parse_comma_separated(|p| p.parse_object_name(false))?,
17777 ))
17778 } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
17779 Some(GrantObjects::ReplicationGroup(
17780 self.parse_comma_separated(|p| p.parse_object_name(false))?,
17781 ))
17782 } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
17783 Some(GrantObjects::ExternalVolumes(
17784 self.parse_comma_separated(|p| p.parse_object_name(false))?,
17785 ))
17786 } else {
17787 let object_type = self.parse_one_of_keywords(&[
17788 Keyword::SEQUENCE,
17789 Keyword::DATABASE,
17790 Keyword::SCHEMA,
17791 Keyword::TABLE,
17792 Keyword::VIEW,
17793 Keyword::WAREHOUSE,
17794 Keyword::INTEGRATION,
17795 Keyword::VIEW,
17796 Keyword::WAREHOUSE,
17797 Keyword::INTEGRATION,
17798 Keyword::USER,
17799 Keyword::CONNECTION,
17800 Keyword::PROCEDURE,
17801 Keyword::FUNCTION,
17802 ]);
17803 let objects =
17804 self.parse_comma_separated(|p| p.parse_object_name_inner(false, true));
17805 match object_type {
17806 Some(Keyword::DATABASE) => Some(GrantObjects::Databases(objects?)),
17807 Some(Keyword::SCHEMA) => Some(GrantObjects::Schemas(objects?)),
17808 Some(Keyword::SEQUENCE) => Some(GrantObjects::Sequences(objects?)),
17809 Some(Keyword::WAREHOUSE) => Some(GrantObjects::Warehouses(objects?)),
17810 Some(Keyword::INTEGRATION) => Some(GrantObjects::Integrations(objects?)),
17811 Some(Keyword::VIEW) => Some(GrantObjects::Views(objects?)),
17812 Some(Keyword::USER) => Some(GrantObjects::Users(objects?)),
17813 Some(Keyword::CONNECTION) => Some(GrantObjects::Connections(objects?)),
17814 kw @ (Some(Keyword::PROCEDURE) | Some(Keyword::FUNCTION)) => {
17815 if let Some(name) = objects?.first() {
17816 self.parse_grant_procedure_or_function(name, &kw)?
17817 } else {
17818 self.expected_ref("procedure or function name", self.peek_token_ref())?
17819 }
17820 }
17821 Some(Keyword::TABLE) | None => Some(GrantObjects::Tables(objects?)),
17822 Some(unexpected_keyword) => return Err(ParserError::ParserError(
17823 format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in grant objects"),
17824 )),
17825 }
17826 }
17827 } else {
17828 None
17829 };
17830
17831 Ok((privileges, objects))
17832 }
17833
17834 fn parse_grant_procedure_or_function(
17835 &mut self,
17836 name: &ObjectName,
17837 kw: &Option<Keyword>,
17838 ) -> Result<Option<GrantObjects>, ParserError> {
17839 let arg_types = if self.consume_token(&Token::LParen) {
17840 let list = self.parse_comma_separated0(Self::parse_data_type, Token::RParen)?;
17841 self.expect_token(&Token::RParen)?;
17842 list
17843 } else {
17844 vec![]
17845 };
17846 match kw {
17847 Some(Keyword::PROCEDURE) => Ok(Some(GrantObjects::Procedure {
17848 name: name.clone(),
17849 arg_types,
17850 })),
17851 Some(Keyword::FUNCTION) => Ok(Some(GrantObjects::Function {
17852 name: name.clone(),
17853 arg_types,
17854 })),
17855 _ => self.expected_ref("procedure or function keywords", self.peek_token_ref())?,
17856 }
17857 }
17858
    /// Parses a single privilege in a `GRANT`/`REVOKE`/`DENY` privilege list
    /// and returns it as an [`Action`].
    ///
    /// Multi-keyword privileges are tried before single-keyword ones so that a
    /// shared leading keyword (e.g. `READ SESSION` vs. `READ`) is not claimed
    /// too early by a shorter match. Each successful branch consumes its
    /// keywords from the token stream.
    pub fn parse_grant_permission(&mut self) -> Result<Action, ParserError> {
        // Helper: parse an optional parenthesized column list; an absent or
        // empty list is normalized to `None`.
        fn parse_columns(parser: &mut Parser) -> Result<Option<Vec<Ident>>, ParserError> {
            let columns = parser.parse_parenthesized_column_list(Optional, false)?;
            if columns.is_empty() {
                Ok(None)
            } else {
                Ok(Some(columns))
            }
        }

        // Multi-keyword privileges first.
        if self.parse_keywords(&[Keyword::IMPORTED, Keyword::PRIVILEGES]) {
            Ok(Action::ImportedPrivileges)
        } else if self.parse_keywords(&[Keyword::ADD, Keyword::SEARCH, Keyword::OPTIMIZATION]) {
            Ok(Action::AddSearchOptimization)
        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::LISTING]) {
            Ok(Action::AttachListing)
        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::POLICY]) {
            Ok(Action::AttachPolicy)
        } else if self.parse_keywords(&[Keyword::BIND, Keyword::SERVICE, Keyword::ENDPOINT]) {
            Ok(Action::BindServiceEndpoint)
        } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
            let role = self.parse_object_name(false)?;
            Ok(Action::DatabaseRole { role })
        } else if self.parse_keywords(&[Keyword::EVOLVE, Keyword::SCHEMA]) {
            Ok(Action::EvolveSchema)
        } else if self.parse_keywords(&[Keyword::IMPORT, Keyword::SHARE]) {
            Ok(Action::ImportShare)
        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::VERSIONS]) {
            Ok(Action::ManageVersions)
        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::RELEASES]) {
            Ok(Action::ManageReleases)
        } else if self.parse_keywords(&[Keyword::OVERRIDE, Keyword::SHARE, Keyword::RESTRICTIONS]) {
            Ok(Action::OverrideShareRestrictions)
        } else if self.parse_keywords(&[
            Keyword::PURCHASE,
            Keyword::DATA,
            Keyword::EXCHANGE,
            Keyword::LISTING,
        ]) {
            Ok(Action::PurchaseDataExchangeListing)
        } else if self.parse_keywords(&[Keyword::RESOLVE, Keyword::ALL]) {
            Ok(Action::ResolveAll)
        } else if self.parse_keywords(&[Keyword::READ, Keyword::SESSION]) {
            Ok(Action::ReadSession)

        // Single-keyword privileges, some taking an optional sub-clause.
        } else if self.parse_keyword(Keyword::APPLY) {
            let apply_type = self.parse_action_apply_type()?;
            Ok(Action::Apply { apply_type })
        } else if self.parse_keyword(Keyword::APPLYBUDGET) {
            Ok(Action::ApplyBudget)
        } else if self.parse_keyword(Keyword::AUDIT) {
            Ok(Action::Audit)
        } else if self.parse_keyword(Keyword::CONNECT) {
            Ok(Action::Connect)
        } else if self.parse_keyword(Keyword::CREATE) {
            let obj_type = self.maybe_parse_action_create_object_type();
            Ok(Action::Create { obj_type })
        } else if self.parse_keyword(Keyword::DELETE) {
            Ok(Action::Delete)
        } else if self.parse_keyword(Keyword::EXEC) {
            let obj_type = self.maybe_parse_action_execute_obj_type();
            Ok(Action::Exec { obj_type })
        } else if self.parse_keyword(Keyword::EXECUTE) {
            let obj_type = self.maybe_parse_action_execute_obj_type();
            Ok(Action::Execute { obj_type })
        } else if self.parse_keyword(Keyword::FAILOVER) {
            Ok(Action::Failover)
        } else if self.parse_keyword(Keyword::INSERT) {
            Ok(Action::Insert {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::MANAGE) {
            let manage_type = self.parse_action_manage_type()?;
            Ok(Action::Manage { manage_type })
        } else if self.parse_keyword(Keyword::MODIFY) {
            let modify_type = self.parse_action_modify_type();
            Ok(Action::Modify { modify_type })
        } else if self.parse_keyword(Keyword::MONITOR) {
            let monitor_type = self.parse_action_monitor_type();
            Ok(Action::Monitor { monitor_type })
        } else if self.parse_keyword(Keyword::OPERATE) {
            Ok(Action::Operate)
        } else if self.parse_keyword(Keyword::REFERENCES) {
            Ok(Action::References {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::READ) {
            Ok(Action::Read)
        } else if self.parse_keyword(Keyword::REPLICATE) {
            Ok(Action::Replicate)
        } else if self.parse_keyword(Keyword::ROLE) {
            let role = self.parse_object_name(false)?;
            Ok(Action::Role { role })
        } else if self.parse_keyword(Keyword::SELECT) {
            Ok(Action::Select {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::TEMPORARY) {
            Ok(Action::Temporary)
        } else if self.parse_keyword(Keyword::TRIGGER) {
            Ok(Action::Trigger)
        } else if self.parse_keyword(Keyword::TRUNCATE) {
            Ok(Action::Truncate)
        } else if self.parse_keyword(Keyword::UPDATE) {
            Ok(Action::Update {
                columns: parse_columns(self)?,
            })
        } else if self.parse_keyword(Keyword::USAGE) {
            Ok(Action::Usage)
        } else if self.parse_keyword(Keyword::OWNERSHIP) {
            Ok(Action::Ownership)
        } else if self.parse_keyword(Keyword::DROP) {
            Ok(Action::Drop)
        } else {
            self.expected_ref("a privilege keyword", self.peek_token_ref())?
        }
    }
17979
    /// Parses the optional object type following a `GRANT CREATE` privilege.
    ///
    /// Returns `None` (consuming nothing) when the next tokens do not name a
    /// known object type. Multi-keyword types are tried before single-keyword
    /// ones so that, e.g., `APPLICATION PACKAGE` is not consumed as a bare
    /// `APPLICATION`.
    fn maybe_parse_action_create_object_type(&mut self) -> Option<ActionCreateObjectType> {
        // Multi-keyword object types first.
        if self.parse_keywords(&[Keyword::APPLICATION, Keyword::PACKAGE]) {
            Some(ActionCreateObjectType::ApplicationPackage)
        } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
            Some(ActionCreateObjectType::ComputePool)
        } else if self.parse_keywords(&[Keyword::DATA, Keyword::EXCHANGE, Keyword::LISTING]) {
            Some(ActionCreateObjectType::DataExchangeListing)
        } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
            Some(ActionCreateObjectType::ExternalVolume)
        } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
            Some(ActionCreateObjectType::FailoverGroup)
        } else if self.parse_keywords(&[Keyword::NETWORK, Keyword::POLICY]) {
            Some(ActionCreateObjectType::NetworkPolicy)
        } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::LISTING]) {
            // NOTE(review): `OrganiationListing` spelling follows the AST
            // variant name declared elsewhere in the crate.
            Some(ActionCreateObjectType::OrganiationListing)
        } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
            Some(ActionCreateObjectType::ReplicationGroup)
        }
        // Single-keyword object types.
        else if self.parse_keyword(Keyword::ACCOUNT) {
            Some(ActionCreateObjectType::Account)
        } else if self.parse_keyword(Keyword::APPLICATION) {
            Some(ActionCreateObjectType::Application)
        } else if self.parse_keyword(Keyword::DATABASE) {
            Some(ActionCreateObjectType::Database)
        } else if self.parse_keyword(Keyword::INTEGRATION) {
            Some(ActionCreateObjectType::Integration)
        } else if self.parse_keyword(Keyword::ROLE) {
            Some(ActionCreateObjectType::Role)
        } else if self.parse_keyword(Keyword::SCHEMA) {
            Some(ActionCreateObjectType::Schema)
        } else if self.parse_keyword(Keyword::SHARE) {
            Some(ActionCreateObjectType::Share)
        } else if self.parse_keyword(Keyword::USER) {
            Some(ActionCreateObjectType::User)
        } else if self.parse_keyword(Keyword::WAREHOUSE) {
            Some(ActionCreateObjectType::Warehouse)
        } else {
            None
        }
    }
18022
    /// Parses the required object type following a `GRANT APPLY` privilege
    /// (e.g. `APPLY MASKING POLICY`), or errors if none matches.
    fn parse_action_apply_type(&mut self) -> Result<ActionApplyType, ParserError> {
        if self.parse_keywords(&[Keyword::AGGREGATION, Keyword::POLICY]) {
            Ok(ActionApplyType::AggregationPolicy)
        } else if self.parse_keywords(&[Keyword::AUTHENTICATION, Keyword::POLICY]) {
            Ok(ActionApplyType::AuthenticationPolicy)
        } else if self.parse_keywords(&[Keyword::JOIN, Keyword::POLICY]) {
            Ok(ActionApplyType::JoinPolicy)
        } else if self.parse_keywords(&[Keyword::MASKING, Keyword::POLICY]) {
            Ok(ActionApplyType::MaskingPolicy)
        } else if self.parse_keywords(&[Keyword::PACKAGES, Keyword::POLICY]) {
            Ok(ActionApplyType::PackagesPolicy)
        } else if self.parse_keywords(&[Keyword::PASSWORD, Keyword::POLICY]) {
            Ok(ActionApplyType::PasswordPolicy)
        } else if self.parse_keywords(&[Keyword::PROJECTION, Keyword::POLICY]) {
            Ok(ActionApplyType::ProjectionPolicy)
        } else if self.parse_keywords(&[Keyword::ROW, Keyword::ACCESS, Keyword::POLICY]) {
            Ok(ActionApplyType::RowAccessPolicy)
        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::POLICY]) {
            Ok(ActionApplyType::SessionPolicy)
        } else if self.parse_keyword(Keyword::TAG) {
            Ok(ActionApplyType::Tag)
        } else {
            self.expected_ref("GRANT APPLY type", self.peek_token_ref())
        }
    }
18048
    /// Parses the optional object type following `GRANT EXEC`/`EXECUTE`.
    ///
    /// Multi-keyword forms are tried first so `MANAGED ALERT`/`MANAGED TASK`
    /// are not shadowed by the bare `ALERT`/`TASK` matches below. Returns
    /// `None` (consuming nothing) when no type keyword follows.
    fn maybe_parse_action_execute_obj_type(&mut self) -> Option<ActionExecuteObjectType> {
        if self.parse_keywords(&[Keyword::DATA, Keyword::METRIC, Keyword::FUNCTION]) {
            Some(ActionExecuteObjectType::DataMetricFunction)
        } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::ALERT]) {
            Some(ActionExecuteObjectType::ManagedAlert)
        } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::TASK]) {
            Some(ActionExecuteObjectType::ManagedTask)
        } else if self.parse_keyword(Keyword::ALERT) {
            Some(ActionExecuteObjectType::Alert)
        } else if self.parse_keyword(Keyword::TASK) {
            Some(ActionExecuteObjectType::Task)
        } else {
            None
        }
    }
18064
    /// Parses the required object type following a `GRANT MANAGE` privilege,
    /// or errors if none matches.
    fn parse_action_manage_type(&mut self) -> Result<ActionManageType, ParserError> {
        if self.parse_keywords(&[Keyword::ACCOUNT, Keyword::SUPPORT, Keyword::CASES]) {
            Ok(ActionManageType::AccountSupportCases)
        } else if self.parse_keywords(&[Keyword::EVENT, Keyword::SHARING]) {
            Ok(ActionManageType::EventSharing)
        } else if self.parse_keywords(&[Keyword::LISTING, Keyword::AUTO, Keyword::FULFILLMENT]) {
            Ok(ActionManageType::ListingAutoFulfillment)
        } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::SUPPORT, Keyword::CASES]) {
            Ok(ActionManageType::OrganizationSupportCases)
        } else if self.parse_keywords(&[Keyword::USER, Keyword::SUPPORT, Keyword::CASES]) {
            Ok(ActionManageType::UserSupportCases)
        } else if self.parse_keyword(Keyword::GRANTS) {
            Ok(ActionManageType::Grants)
        } else if self.parse_keyword(Keyword::WAREHOUSES) {
            Ok(ActionManageType::Warehouses)
        } else {
            self.expected_ref("GRANT MANAGE type", self.peek_token_ref())
        }
    }
18084
    /// Parses the optional object type following a `GRANT MODIFY` privilege.
    ///
    /// Returns `None` (consuming nothing) when no known type follows. The
    /// `SESSION ...` forms start with a distinct keyword, so they cannot be
    /// shadowed by the two-keyword matches above them.
    fn parse_action_modify_type(&mut self) -> Option<ActionModifyType> {
        if self.parse_keywords(&[Keyword::LOG, Keyword::LEVEL]) {
            Some(ActionModifyType::LogLevel)
        } else if self.parse_keywords(&[Keyword::TRACE, Keyword::LEVEL]) {
            Some(ActionModifyType::TraceLevel)
        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::LOG, Keyword::LEVEL]) {
            Some(ActionModifyType::SessionLogLevel)
        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::TRACE, Keyword::LEVEL]) {
            Some(ActionModifyType::SessionTraceLevel)
        } else {
            None
        }
    }
18098
18099 fn parse_action_monitor_type(&mut self) -> Option<ActionMonitorType> {
18100 if self.parse_keyword(Keyword::EXECUTION) {
18101 Some(ActionMonitorType::Execution)
18102 } else if self.parse_keyword(Keyword::SECURITY) {
18103 Some(ActionMonitorType::Security)
18104 } else if self.parse_keyword(Keyword::USAGE) {
18105 Some(ActionMonitorType::Usage)
18106 } else {
18107 None
18108 }
18109 }
18110
18111 pub fn parse_grantee_name(&mut self) -> Result<GranteeName, ParserError> {
18113 let mut name = self.parse_object_name(false)?;
18114 if self.dialect.supports_user_host_grantee()
18115 && name.0.len() == 1
18116 && name.0[0].as_ident().is_some()
18117 && self.consume_token(&Token::AtSign)
18118 {
18119 let user = name.0.pop().unwrap().as_ident().unwrap().clone();
18120 let host = self.parse_identifier()?;
18121 Ok(GranteeName::UserHost { user, host })
18122 } else {
18123 Ok(GranteeName::ObjectName(name))
18124 }
18125 }
18126
18127 pub fn parse_deny(&mut self) -> Result<Statement, ParserError> {
18129 self.expect_keyword(Keyword::DENY)?;
18130
18131 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
18132 let objects = match objects {
18133 Some(o) => o,
18134 None => {
18135 return parser_err!(
18136 "DENY statements must specify an object",
18137 self.peek_token_ref().span.start
18138 )
18139 }
18140 };
18141
18142 self.expect_keyword_is(Keyword::TO)?;
18143 let grantees = self.parse_grantees()?;
18144 let cascade = self.parse_cascade_option();
18145 let granted_by = if self.parse_keywords(&[Keyword::AS]) {
18146 Some(self.parse_identifier()?)
18147 } else {
18148 None
18149 };
18150
18151 Ok(Statement::Deny(DenyStatement {
18152 privileges,
18153 objects,
18154 grantees,
18155 cascade,
18156 granted_by,
18157 }))
18158 }
18159
18160 pub fn parse_revoke(&mut self) -> Result<Revoke, ParserError> {
18162 let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
18163
18164 self.expect_keyword_is(Keyword::FROM)?;
18165 let grantees = self.parse_grantees()?;
18166
18167 let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
18168 Some(self.parse_identifier()?)
18169 } else {
18170 None
18171 };
18172
18173 let cascade = self.parse_cascade_option();
18174
18175 Ok(Revoke {
18176 privileges,
18177 objects,
18178 grantees,
18179 granted_by,
18180 cascade,
18181 })
18182 }
18183
18184 pub fn parse_replace(
18186 &mut self,
18187 replace_token: TokenWithSpan,
18188 ) -> Result<Statement, ParserError> {
18189 if !dialect_of!(self is MySqlDialect | GenericDialect) {
18190 return parser_err!(
18191 "Unsupported statement REPLACE",
18192 self.peek_token_ref().span.start
18193 );
18194 }
18195
18196 let mut insert = self.parse_insert(replace_token)?;
18197 if let Statement::Insert(Insert { replace_into, .. }) = &mut insert {
18198 *replace_into = true;
18199 }
18200
18201 Ok(insert)
18202 }
18203
18204 fn parse_insert_setexpr_boxed(
18208 &mut self,
18209 insert_token: TokenWithSpan,
18210 ) -> Result<Box<SetExpr>, ParserError> {
18211 Ok(Box::new(SetExpr::Insert(self.parse_insert(insert_token)?)))
18212 }
18213
    /// Parses an `INSERT` statement (and dialect variants, including Hive's
    /// `INSERT OVERWRITE DIRECTORY`), starting after the `INSERT`/`REPLACE`
    /// keyword token passed in as `insert_token`.
    ///
    /// The order of the clause parses below mirrors the surface grammar and is
    /// load-bearing: each `parse_*` call consumes tokens on success.
    pub fn parse_insert(&mut self, insert_token: TokenWithSpan) -> Result<Statement, ParserError> {
        let optimizer_hints = self.maybe_parse_optimizer_hints()?;
        // SQLite-style `OR REPLACE|ROLLBACK|ABORT|FAIL|IGNORE`.
        let or = self.parse_conflict_clause();
        // MySQL priority modifiers; only probed for MySQL/Generic dialects.
        let priority = if !dialect_of!(self is MySqlDialect | GenericDialect) {
            None
        } else if self.parse_keyword(Keyword::LOW_PRIORITY) {
            Some(MysqlInsertPriority::LowPriority)
        } else if self.parse_keyword(Keyword::DELAYED) {
            Some(MysqlInsertPriority::Delayed)
        } else if self.parse_keyword(Keyword::HIGH_PRIORITY) {
            Some(MysqlInsertPriority::HighPriority)
        } else {
            None
        };

        let ignore = dialect_of!(self is MySqlDialect | GenericDialect)
            && self.parse_keyword(Keyword::IGNORE);

        // Set to true by `parse_replace` after this returns, for `REPLACE INTO`.
        let replace_into = false;

        let overwrite = self.parse_keyword(Keyword::OVERWRITE);
        let into = self.parse_keyword(Keyword::INTO);

        let local = self.parse_keyword(Keyword::LOCAL);

        // Hive: `INSERT OVERWRITE [LOCAL] DIRECTORY '<path>' [STORED AS <fmt>] <query>`.
        if self.parse_keyword(Keyword::DIRECTORY) {
            let path = self.parse_literal_string()?;
            let file_format = if self.parse_keywords(&[Keyword::STORED, Keyword::AS]) {
                Some(self.parse_file_format()?)
            } else {
                None
            };
            let source = self.parse_query()?;
            Ok(Statement::Directory {
                local,
                path,
                overwrite,
                file_format,
                source,
            })
        } else {
            // Optional `TABLE` keyword before the target (recorded as
            // `has_table_keyword`).
            let table = self.parse_keyword(Keyword::TABLE);
            let table_object = self.parse_table_object()?;

            // Optional table alias, only for dialects that support it and only
            // when the next tokens cannot be the start of the source
            // (subquery, DEFAULT, or VALUES).
            let table_alias = if self.dialect.supports_insert_table_alias()
                && !self.peek_sub_query()
                && self
                    .peek_one_of_keywords(&[Keyword::DEFAULT, Keyword::VALUES])
                    .is_none()
            {
                if self.parse_keyword(Keyword::AS) {
                    Some(TableAliasWithoutColumns {
                        explicit: true,
                        alias: self.parse_identifier()?,
                    })
                } else {
                    self.maybe_parse(|parser| parser.parse_identifier())?
                        .map(|alias| TableAliasWithoutColumns {
                            explicit: false,
                            alias,
                        })
                }
            } else {
                None
            };

            let is_mysql = dialect_of!(self is MySqlDialect);

            // `DEFAULT VALUES` short-circuits: no columns, no source.
            let (columns, partitioned, after_columns, output, source, assignments) = if self
                .parse_keywords(&[Keyword::DEFAULT, Keyword::VALUES])
            {
                (vec![], None, vec![], None, None, vec![])
            } else {
                // A column list is only possible when the source is not an
                // immediately following `(SELECT ...)` subquery.
                let (columns, partitioned, after_columns) = if !self.peek_subquery_start() {
                    let columns =
                        self.parse_parenthesized_qualified_column_list(Optional, is_mysql)?;

                    let partitioned = self.parse_insert_partition()?;
                    // Hive-only: a second column list after the partition clause.
                    let after_columns = if dialect_of!(self is HiveDialect) {
                        self.parse_parenthesized_column_list(Optional, false)?
                    } else {
                        vec![]
                    };
                    (columns, partitioned, after_columns)
                } else {
                    Default::default()
                };

                let output = self.maybe_parse_output_clause()?;

                // Source of rows: none when FORMAT/SETTINGS follows directly,
                // a `SET` assignment list for dialects that support it, or a
                // full query otherwise.
                let (source, assignments) = if self.peek_keyword(Keyword::FORMAT)
                    || self.peek_keyword(Keyword::SETTINGS)
                {
                    (None, vec![])
                } else if self.dialect.supports_insert_set() && self.parse_keyword(Keyword::SET) {
                    (None, self.parse_comma_separated(Parser::parse_assignment)?)
                } else {
                    (Some(self.parse_query()?), vec![])
                };

                (
                    columns,
                    partitioned,
                    after_columns,
                    output,
                    source,
                    assignments,
                )
            };

            // Dialect-specific trailing `SETTINGS ...` and `FORMAT <ident>`.
            let (format_clause, settings) = if self.dialect.supports_insert_format() {
                let settings = self.parse_settings()?;

                let format = if self.parse_keyword(Keyword::FORMAT) {
                    Some(self.parse_input_format_clause()?)
                } else {
                    None
                };

                (format, settings)
            } else {
                Default::default()
            };

            // MySQL `AS row_alias (col_aliases)` for the inserted row.
            let insert_alias = if dialect_of!(self is MySqlDialect | GenericDialect)
                && self.parse_keyword(Keyword::AS)
            {
                let row_alias = self.parse_object_name(false)?;
                let col_aliases = Some(self.parse_parenthesized_column_list(Optional, false)?);
                Some(InsertAliases {
                    row_alias,
                    col_aliases,
                })
            } else {
                None
            };

            // `ON CONFLICT ... DO NOTHING|DO UPDATE SET ...` or
            // `ON DUPLICATE KEY UPDATE ...`.
            let on = if self.parse_keyword(Keyword::ON) {
                if self.parse_keyword(Keyword::CONFLICT) {
                    let conflict_target =
                        if self.parse_keywords(&[Keyword::ON, Keyword::CONSTRAINT]) {
                            Some(ConflictTarget::OnConstraint(self.parse_object_name(false)?))
                        } else if self.peek_token_ref().token == Token::LParen {
                            Some(ConflictTarget::Columns(
                                self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
                            ))
                        } else {
                            None
                        };

                    self.expect_keyword_is(Keyword::DO)?;
                    let action = if self.parse_keyword(Keyword::NOTHING) {
                        OnConflictAction::DoNothing
                    } else {
                        self.expect_keyword_is(Keyword::UPDATE)?;
                        self.expect_keyword_is(Keyword::SET)?;
                        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
                        let selection = if self.parse_keyword(Keyword::WHERE) {
                            Some(self.parse_expr()?)
                        } else {
                            None
                        };
                        OnConflictAction::DoUpdate(DoUpdate {
                            assignments,
                            selection,
                        })
                    };

                    Some(OnInsert::OnConflict(OnConflict {
                        conflict_target,
                        action,
                    }))
                } else {
                    self.expect_keyword_is(Keyword::DUPLICATE)?;
                    self.expect_keyword_is(Keyword::KEY)?;
                    self.expect_keyword_is(Keyword::UPDATE)?;
                    let l = self.parse_comma_separated(Parser::parse_assignment)?;

                    Some(OnInsert::DuplicateKeyUpdate(l))
                }
            } else {
                None
            };

            let returning = if self.parse_keyword(Keyword::RETURNING) {
                Some(self.parse_comma_separated(Parser::parse_select_item)?)
            } else {
                None
            };

            Ok(Insert {
                insert_token: insert_token.into(),
                optimizer_hints,
                or,
                table: table_object,
                table_alias,
                ignore,
                into,
                overwrite,
                partitioned,
                columns,
                after_columns,
                source,
                assignments,
                has_table_keyword: table,
                on,
                returning,
                output,
                replace_into,
                priority,
                insert_alias,
                settings,
                format_clause,
                multi_table_insert_type: None,
                multi_table_into_clauses: vec![],
                multi_table_when_clauses: vec![],
                multi_table_else_clause: None,
            }
            .into())
        }
    }
18440
18441 pub fn parse_input_format_clause(&mut self) -> Result<InputFormatClause, ParserError> {
18445 let ident = self.parse_identifier()?;
18446 let values = self
18447 .maybe_parse(|p| p.parse_comma_separated(|p| p.parse_expr()))?
18448 .unwrap_or_default();
18449
18450 Ok(InputFormatClause { ident, values })
18451 }
18452
18453 fn peek_subquery_start(&mut self) -> bool {
18456 matches!(
18457 self.peek_tokens_ref(),
18458 [
18459 TokenWithSpan {
18460 token: Token::LParen,
18461 ..
18462 },
18463 TokenWithSpan {
18464 token: Token::Word(Word {
18465 keyword: Keyword::SELECT,
18466 ..
18467 }),
18468 ..
18469 },
18470 ]
18471 )
18472 }
18473
18474 fn peek_subquery_or_cte_start(&mut self) -> bool {
18478 matches!(
18479 self.peek_tokens_ref(),
18480 [
18481 TokenWithSpan {
18482 token: Token::LParen,
18483 ..
18484 },
18485 TokenWithSpan {
18486 token: Token::Word(Word {
18487 keyword: Keyword::SELECT | Keyword::WITH,
18488 ..
18489 }),
18490 ..
18491 },
18492 ]
18493 )
18494 }
18495
    /// Parses an optional SQLite-style conflict clause: `OR REPLACE`,
    /// `OR ROLLBACK`, `OR ABORT`, `OR FAIL`, `OR IGNORE`, or a bare `REPLACE`.
    ///
    /// Returns `None` (consuming nothing) when no clause is present. The
    /// two-keyword `OR ...` forms are tried before the bare `REPLACE`.
    fn parse_conflict_clause(&mut self) -> Option<SqliteOnConflict> {
        if self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]) {
            Some(SqliteOnConflict::Replace)
        } else if self.parse_keywords(&[Keyword::OR, Keyword::ROLLBACK]) {
            Some(SqliteOnConflict::Rollback)
        } else if self.parse_keywords(&[Keyword::OR, Keyword::ABORT]) {
            Some(SqliteOnConflict::Abort)
        } else if self.parse_keywords(&[Keyword::OR, Keyword::FAIL]) {
            Some(SqliteOnConflict::Fail)
        } else if self.parse_keywords(&[Keyword::OR, Keyword::IGNORE]) {
            Some(SqliteOnConflict::Ignore)
        } else if self.parse_keyword(Keyword::REPLACE) {
            Some(SqliteOnConflict::Replace)
        } else {
            None
        }
    }
18513
18514 pub fn parse_insert_partition(&mut self) -> Result<Option<Vec<Expr>>, ParserError> {
18516 if self.parse_keyword(Keyword::PARTITION) {
18517 self.expect_token(&Token::LParen)?;
18518 let partition_cols = Some(self.parse_comma_separated(Parser::parse_expr)?);
18519 self.expect_token(&Token::RParen)?;
18520 Ok(partition_cols)
18521 } else {
18522 Ok(None)
18523 }
18524 }
18525
18526 pub fn parse_load_data_table_format(
18528 &mut self,
18529 ) -> Result<Option<HiveLoadDataFormat>, ParserError> {
18530 if self.parse_keyword(Keyword::INPUTFORMAT) {
18531 let input_format = self.parse_expr()?;
18532 self.expect_keyword_is(Keyword::SERDE)?;
18533 let serde = self.parse_expr()?;
18534 Ok(Some(HiveLoadDataFormat {
18535 input_format,
18536 serde,
18537 }))
18538 } else {
18539 Ok(None)
18540 }
18541 }
18542
18543 fn parse_update_setexpr_boxed(
18547 &mut self,
18548 update_token: TokenWithSpan,
18549 ) -> Result<Box<SetExpr>, ParserError> {
18550 Ok(Box::new(SetExpr::Update(self.parse_update(update_token)?)))
18551 }
18552
18553 pub fn parse_update(&mut self, update_token: TokenWithSpan) -> Result<Statement, ParserError> {
18555 let optimizer_hints = self.maybe_parse_optimizer_hints()?;
18556 let or = self.parse_conflict_clause();
18557 let table = self.parse_table_and_joins()?;
18558 let from_before_set = if self.parse_keyword(Keyword::FROM) {
18559 Some(UpdateTableFromKind::BeforeSet(
18560 self.parse_table_with_joins()?,
18561 ))
18562 } else {
18563 None
18564 };
18565 self.expect_keyword(Keyword::SET)?;
18566 let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
18567
18568 let output = self.maybe_parse_output_clause()?;
18569
18570 let from = if from_before_set.is_none() && self.parse_keyword(Keyword::FROM) {
18571 Some(UpdateTableFromKind::AfterSet(
18572 self.parse_table_with_joins()?,
18573 ))
18574 } else {
18575 from_before_set
18576 };
18577 let selection = if self.parse_keyword(Keyword::WHERE) {
18578 Some(self.parse_expr()?)
18579 } else {
18580 None
18581 };
18582 let returning = if self.parse_keyword(Keyword::RETURNING) {
18583 Some(self.parse_comma_separated(Parser::parse_select_item)?)
18584 } else {
18585 None
18586 };
18587 let order_by = if self.dialect.supports_update_order_by()
18588 && self.parse_keywords(&[Keyword::ORDER, Keyword::BY])
18589 {
18590 self.parse_comma_separated(Parser::parse_order_by_expr)?
18591 } else {
18592 vec![]
18593 };
18594 let limit = if self.parse_keyword(Keyword::LIMIT) {
18595 Some(self.parse_expr()?)
18596 } else {
18597 None
18598 };
18599 Ok(Update {
18600 update_token: update_token.into(),
18601 optimizer_hints,
18602 table,
18603 assignments,
18604 from,
18605 selection,
18606 returning,
18607 output,
18608 or,
18609 order_by,
18610 limit,
18611 }
18612 .into())
18613 }
18614
18615 pub fn parse_assignment(&mut self) -> Result<Assignment, ParserError> {
18617 let target = self.parse_assignment_target()?;
18618 self.expect_token(&Token::Eq)?;
18619 let value = self.parse_expr()?;
18620 Ok(Assignment { target, value })
18621 }
18622
18623 pub fn parse_assignment_target(&mut self) -> Result<AssignmentTarget, ParserError> {
18625 if self.consume_token(&Token::LParen) {
18626 let columns = self.parse_comma_separated(|p| p.parse_object_name(false))?;
18627 self.expect_token(&Token::RParen)?;
18628 Ok(AssignmentTarget::Tuple(columns))
18629 } else {
18630 let column = self.parse_object_name(false)?;
18631 Ok(AssignmentTarget::ColumnName(column))
18632 }
18633 }
18634
    /// Parses a single function-call argument: first tries a named argument
    /// (`name <op> value`) via `maybe_parse` (which backtracks on failure),
    /// then falls back to an unnamed wildcard/expression argument.
    pub fn parse_function_args(&mut self) -> Result<FunctionArg, ParserError> {
        // Named-argument attempt; `maybe_parse` restores the token position if
        // the closure fails, so a plain expression is re-parsed below.
        let arg = if self.dialect.supports_named_fn_args_with_expr_name() {
            // Dialects that allow a full expression as the argument name.
            self.maybe_parse(|p| {
                let name = p.parse_expr()?;
                let operator = p.parse_function_named_arg_operator()?;
                let arg = p.parse_wildcard_expr()?.into();
                Ok(FunctionArg::ExprNamed {
                    name,
                    arg,
                    operator,
                })
            })?
        } else {
            // Otherwise the name must be a plain identifier.
            self.maybe_parse(|p| {
                let name = p.parse_identifier()?;
                let operator = p.parse_function_named_arg_operator()?;
                let arg = p.parse_wildcard_expr()?.into();
                Ok(FunctionArg::Named {
                    name,
                    arg,
                    operator,
                })
            })?
        };
        if let Some(arg) = arg {
            return Ok(arg);
        }
        // Unnamed argument: an expression, possibly a wildcard.
        let wildcard_expr = self.parse_wildcard_expr()?;
        let arg_expr: FunctionArgExpr = match wildcard_expr {
            Expr::Wildcard(ref token) if self.dialect.supports_select_wildcard_exclude() => {
                // A `*` argument may carry EXCLUDE/EXCEPT/REPLACE/RENAME/ILIKE
                // options in some dialects; only wrap when at least one is set.
                let opts = self.parse_wildcard_additional_options(token.0.clone())?;
                if opts.opt_exclude.is_some()
                    || opts.opt_except.is_some()
                    || opts.opt_replace.is_some()
                    || opts.opt_rename.is_some()
                    || opts.opt_ilike.is_some()
                {
                    FunctionArgExpr::WildcardWithOptions(opts)
                } else {
                    wildcard_expr.into()
                }
            }
            other => other.into(),
        };
        Ok(FunctionArg::Unnamed(arg_expr))
    }
18684
    /// Parses the operator separating a named function argument from its value
    /// (e.g. `name => value`), honoring per-dialect support flags.
    ///
    /// On failure the consumed token is pushed back with `prev_token` before
    /// returning an error, so callers inside `maybe_parse` can backtrack.
    fn parse_function_named_arg_operator(&mut self) -> Result<FunctionArgOperator, ParserError> {
        // The `VALUE` keyword form is accepted unconditionally.
        if self.parse_keyword(Keyword::VALUE) {
            return Ok(FunctionArgOperator::Value);
        }
        let tok = self.next_token();
        match tok.token {
            Token::RArrow if self.dialect.supports_named_fn_args_with_rarrow_operator() => {
                Ok(FunctionArgOperator::RightArrow)
            }
            Token::Eq if self.dialect.supports_named_fn_args_with_eq_operator() => {
                Ok(FunctionArgOperator::Equals)
            }
            Token::Assignment
                if self
                    .dialect
                    .supports_named_fn_args_with_assignment_operator() =>
            {
                Ok(FunctionArgOperator::Assignment)
            }
            Token::Colon if self.dialect.supports_named_fn_args_with_colon_operator() => {
                Ok(FunctionArgOperator::Colon)
            }
            _ => {
                // Not a recognized operator: restore the token position.
                self.prev_token();
                self.expected("argument operator", tok)
            }
        }
    }
18713
18714 pub fn parse_optional_args(&mut self) -> Result<Vec<FunctionArg>, ParserError> {
18716 if self.consume_token(&Token::RParen) {
18717 Ok(vec![])
18718 } else {
18719 let args = self.parse_comma_separated(Parser::parse_function_args)?;
18720 self.expect_token(&Token::RParen)?;
18721 Ok(args)
18722 }
18723 }
18724
    /// Parses the argument list of a table-valued function call (opening `(`
    /// already consumed), up to and including the closing `)`.
    ///
    /// A `SETTINGS` clause — when `parse_settings` recognizes one — ends the
    /// argument list; arguments are therefore parsed one at a time so the
    /// clause can be detected at any position.
    fn parse_table_function_args(&mut self) -> Result<TableFunctionArgs, ParserError> {
        // Empty argument list: `()`.
        if self.consume_token(&Token::RParen) {
            return Ok(TableFunctionArgs {
                args: vec![],
                settings: None,
            });
        }
        let mut args = vec![];
        let settings = loop {
            if let Some(settings) = self.parse_settings()? {
                break Some(settings);
            }
            args.push(self.parse_function_args()?);
            if self.is_parse_comma_separated_end() {
                break None;
            }
        };
        self.expect_token(&Token::RParen)?;
        Ok(TableFunctionArgs { args, settings })
    }
18745
    /// Parses a full function argument list (opening `(` already consumed):
    /// optional ALL/DISTINCT, the arguments themselves, and the various
    /// trailing clauses (null treatment, ORDER BY, LIMIT, HAVING MIN/MAX,
    /// SEPARATOR, ON OVERFLOW, JSON null/RETURNING clauses).
    ///
    /// The JSON clauses are probed both before and after the arguments, since
    /// dialects allow them in either position.
    fn parse_function_argument_list(&mut self) -> Result<FunctionArgumentList, ParserError> {
        let mut clauses = vec![];

        // Leading JSON clauses (before any arguments).
        if let Some(null_clause) = self.parse_json_null_clause() {
            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
        }

        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
            clauses.push(FunctionArgumentClause::JsonReturningClause(
                json_returning_clause,
            ));
        }

        // Empty argument list: `()` possibly with only the clauses above.
        if self.consume_token(&Token::RParen) {
            return Ok(FunctionArgumentList {
                duplicate_treatment: None,
                args: vec![],
                clauses,
            });
        }

        let duplicate_treatment = self.parse_duplicate_treatment()?;
        let args = self.parse_comma_separated(Parser::parse_function_args)?;

        if self.dialect.supports_window_function_null_treatment_arg() {
            if let Some(null_treatment) = self.parse_null_treatment()? {
                clauses.push(FunctionArgumentClause::IgnoreOrRespectNulls(null_treatment));
            }
        }

        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            clauses.push(FunctionArgumentClause::OrderBy(
                self.parse_comma_separated(Parser::parse_order_by_expr)?,
            ));
        }

        if self.parse_keyword(Keyword::LIMIT) {
            clauses.push(FunctionArgumentClause::Limit(self.parse_expr()?));
        }

        // BigQuery-style `HAVING MIN|MAX <expr>` bound.
        if dialect_of!(self is GenericDialect | BigQueryDialect)
            && self.parse_keyword(Keyword::HAVING)
        {
            let kind = match self.expect_one_of_keywords(&[Keyword::MIN, Keyword::MAX])? {
                Keyword::MIN => HavingBoundKind::Min,
                Keyword::MAX => HavingBoundKind::Max,
                // Unreachable: `expect_one_of_keywords` only returns from the list.
                unexpected_keyword => return Err(ParserError::ParserError(
                    format!("Internal parser error: unexpected keyword `{unexpected_keyword}` in having bound"),
                )),
            };
            clauses.push(FunctionArgumentClause::Having(HavingBound(
                kind,
                self.parse_expr()?,
            )))
        }

        // MySQL GROUP_CONCAT-style `SEPARATOR <value>`.
        if dialect_of!(self is GenericDialect | MySqlDialect)
            && self.parse_keyword(Keyword::SEPARATOR)
        {
            clauses.push(FunctionArgumentClause::Separator(self.parse_value()?));
        }

        if let Some(on_overflow) = self.parse_listagg_on_overflow()? {
            clauses.push(FunctionArgumentClause::OnOverflow(on_overflow));
        }

        // Trailing JSON clauses (after the arguments).
        if let Some(null_clause) = self.parse_json_null_clause() {
            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
        }

        if let Some(json_returning_clause) = self.maybe_parse_json_returning_clause()? {
            clauses.push(FunctionArgumentClause::JsonReturningClause(
                json_returning_clause,
            ));
        }

        self.expect_token(&Token::RParen)?;
        Ok(FunctionArgumentList {
            duplicate_treatment,
            args,
            clauses,
        })
    }
18839
18840 fn parse_json_null_clause(&mut self) -> Option<JsonNullClause> {
18841 if self.parse_keywords(&[Keyword::ABSENT, Keyword::ON, Keyword::NULL]) {
18842 Some(JsonNullClause::AbsentOnNull)
18843 } else if self.parse_keywords(&[Keyword::NULL, Keyword::ON, Keyword::NULL]) {
18844 Some(JsonNullClause::NullOnNull)
18845 } else {
18846 None
18847 }
18848 }
18849
18850 fn maybe_parse_json_returning_clause(
18851 &mut self,
18852 ) -> Result<Option<JsonReturningClause>, ParserError> {
18853 if self.parse_keyword(Keyword::RETURNING) {
18854 let data_type = self.parse_data_type()?;
18855 Ok(Some(JsonReturningClause { data_type }))
18856 } else {
18857 Ok(None)
18858 }
18859 }
18860
18861 fn parse_duplicate_treatment(&mut self) -> Result<Option<DuplicateTreatment>, ParserError> {
18862 let loc = self.peek_token_ref().span.start;
18863 match (
18864 self.parse_keyword(Keyword::ALL),
18865 self.parse_keyword(Keyword::DISTINCT),
18866 ) {
18867 (true, false) => Ok(Some(DuplicateTreatment::All)),
18868 (false, true) => Ok(Some(DuplicateTreatment::Distinct)),
18869 (false, false) => Ok(None),
18870 (true, true) => parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc),
18871 }
18872 }
18873
    /// Parses one comma-separated item of a `SELECT` projection list:
    /// a wildcard (`*`, `t.*`), an expression, or an expression with one or
    /// more aliases, each possibly carrying dialect-specific wildcard
    /// modifiers.
    pub fn parse_select_item(&mut self) -> Result<SelectItem, ParserError> {
        // Some dialects allow a reserved operator keyword to prefix a select
        // item; it is captured here as an identifier rendered via `Debug`
        // formatting of the keyword.
        let prefix = self
            .parse_one_of_keywords(
                self.dialect
                    .get_reserved_keywords_for_select_item_operator(),
            )
            .map(|keyword| Ident::new(format!("{keyword:?}")));

        match self.parse_wildcard_expr()? {
            // `t.*` (optionally followed by ILIKE/EXCLUDE/EXCEPT/... options)
            Expr::QualifiedWildcard(prefix, token) => Ok(SelectItem::QualifiedWildcard(
                SelectItemQualifiedWildcardKind::ObjectName(prefix),
                self.parse_wildcard_additional_options(token.0)?,
            )),
            // Bare `*` with optional wildcard options.
            Expr::Wildcard(token) => Ok(SelectItem::Wildcard(
                self.parse_wildcard_additional_options(token.0)?,
            )),
            // An unquoted bare `from` here almost certainly means the
            // projection list ended early; report it instead of silently
            // treating FROM as a column.
            Expr::Identifier(v) if v.value.to_lowercase() == "from" && v.quote_style.is_none() => {
                parser_err!(
                    format!("Expected an expression, found: {}", v),
                    self.peek_token_ref().span.start
                )
            }
            // `alias = expr` assignment-style aliasing, for dialects that
            // support it and only when the left side is a plain identifier.
            Expr::BinaryOp {
                left,
                op: BinaryOperator::Eq,
                right,
            } if self.dialect.supports_eq_alias_assignment()
                && matches!(left.as_ref(), Expr::Identifier(_)) =>
            {
                // The guard above already proved `left` is an identifier;
                // this `let else` only defends against that invariant breaking.
                let Expr::Identifier(alias) = *left else {
                    return parser_err!(
                        "BUG: expected identifier expression as alias",
                        self.peek_token_ref().span.start
                    );
                };
                Ok(SelectItem::ExprWithAlias {
                    expr: *right,
                    alias,
                })
            }
            // `<expr>.*` — the guard consumes the `.` and `*` tokens itself.
            expr if self.dialect.supports_select_expr_star()
                && self.consume_tokens(&[Token::Period, Token::Mul]) =>
            {
                let wildcard_token = self.get_previous_token().clone();
                Ok(SelectItem::QualifiedWildcard(
                    SelectItemQualifiedWildcardKind::Expr(expr),
                    self.parse_wildcard_additional_options(wildcard_token)?,
                ))
            }
            // `<expr> AS (a, b, ...)` multi-column alias; only peeked here,
            // the AS and parenthesized list are consumed in the body.
            expr if self.dialect.supports_select_item_multi_column_alias()
                && self.peek_keyword(Keyword::AS)
                && self.peek_nth_token(1).token == Token::LParen =>
            {
                self.expect_keyword(Keyword::AS)?;
                self.expect_token(&Token::LParen)?;
                let aliases = self.parse_comma_separated(|p| p.parse_identifier())?;
                self.expect_token(&Token::RParen)?;
                Ok(SelectItem::ExprWithAliases {
                    expr: maybe_prefixed_expr(expr, prefix),
                    aliases,
                })
            }
            // Plain expression, with an optional single `[AS] alias`.
            expr => self
                .maybe_parse_select_item_alias()
                .map(|alias| match alias {
                    Some(alias) => SelectItem::ExprWithAlias {
                        expr: maybe_prefixed_expr(expr, prefix),
                        alias,
                    },
                    None => SelectItem::UnnamedExpr(maybe_prefixed_expr(expr, prefix)),
                }),
        }
    }
18948
    /// Parses the dialect-specific modifiers that may follow a `*` or `t.*`
    /// in a projection, in this fixed order: `ILIKE`, `EXCLUDE`, `EXCEPT`,
    /// `REPLACE`, `RENAME`, then an optional alias. Each modifier is only
    /// attempted when the active dialect supports it.
    pub fn parse_wildcard_additional_options(
        &mut self,
        wildcard_token: TokenWithSpan,
    ) -> Result<WildcardAdditionalOptions, ParserError> {
        let opt_ilike = if self.dialect.supports_select_wildcard_ilike() {
            self.parse_optional_select_item_ilike()?
        } else {
            None
        };
        // EXCLUDE is only attempted when no ILIKE was parsed: the two
        // modifiers are mutually exclusive here.
        let opt_exclude = if opt_ilike.is_none() && self.dialect.supports_select_wildcard_exclude()
        {
            self.parse_optional_select_item_exclude()?
        } else {
            None
        };
        let opt_except = if self.dialect.supports_select_wildcard_except() {
            self.parse_optional_select_item_except()?
        } else {
            None
        };
        let opt_replace = if self.dialect.supports_select_wildcard_replace() {
            self.parse_optional_select_item_replace()?
        } else {
            None
        };
        let opt_rename = if self.dialect.supports_select_wildcard_rename() {
            self.parse_optional_select_item_rename()?
        } else {
            None
        };

        let opt_alias = if self.dialect.supports_select_wildcard_with_alias() {
            self.maybe_parse_select_item_alias()?
        } else {
            None
        };

        Ok(WildcardAdditionalOptions {
            wildcard_token: wildcard_token.into(),
            opt_ilike,
            opt_exclude,
            opt_except,
            opt_rename,
            opt_replace,
            opt_alias,
        })
    }
18999
19000 pub fn parse_optional_select_item_ilike(
19004 &mut self,
19005 ) -> Result<Option<IlikeSelectItem>, ParserError> {
19006 let opt_ilike = if self.parse_keyword(Keyword::ILIKE) {
19007 let next_token = self.next_token();
19008 let pattern = match next_token.token {
19009 Token::SingleQuotedString(s) => s,
19010 _ => return self.expected("ilike pattern", next_token),
19011 };
19012 Some(IlikeSelectItem { pattern })
19013 } else {
19014 None
19015 };
19016 Ok(opt_ilike)
19017 }
19018
19019 pub fn parse_optional_select_item_exclude(
19023 &mut self,
19024 ) -> Result<Option<ExcludeSelectItem>, ParserError> {
19025 let opt_exclude = if self.parse_keyword(Keyword::EXCLUDE) {
19026 if self.consume_token(&Token::LParen) {
19027 let columns =
19028 self.parse_comma_separated(|parser| parser.parse_object_name(false))?;
19029 self.expect_token(&Token::RParen)?;
19030 Some(ExcludeSelectItem::Multiple(columns))
19031 } else {
19032 let column = self.parse_object_name(false)?;
19033 Some(ExcludeSelectItem::Single(column))
19034 }
19035 } else {
19036 None
19037 };
19038
19039 Ok(opt_exclude)
19040 }
19041
19042 pub fn parse_optional_select_item_except(
19046 &mut self,
19047 ) -> Result<Option<ExceptSelectItem>, ParserError> {
19048 let opt_except = if self.parse_keyword(Keyword::EXCEPT) {
19049 if self.peek_token_ref().token == Token::LParen {
19050 let idents = self.parse_parenthesized_column_list(Mandatory, false)?;
19051 match &idents[..] {
19052 [] => {
19053 return self.expected_ref(
19054 "at least one column should be parsed by the expect clause",
19055 self.peek_token_ref(),
19056 )?;
19057 }
19058 [first, idents @ ..] => Some(ExceptSelectItem {
19059 first_element: first.clone(),
19060 additional_elements: idents.to_vec(),
19061 }),
19062 }
19063 } else {
19064 let ident = self.parse_identifier()?;
19066 Some(ExceptSelectItem {
19067 first_element: ident,
19068 additional_elements: vec![],
19069 })
19070 }
19071 } else {
19072 None
19073 };
19074
19075 Ok(opt_except)
19076 }
19077
19078 pub fn parse_optional_select_item_rename(
19080 &mut self,
19081 ) -> Result<Option<RenameSelectItem>, ParserError> {
19082 let opt_rename = if self.parse_keyword(Keyword::RENAME) {
19083 if self.consume_token(&Token::LParen) {
19084 let idents =
19085 self.parse_comma_separated(|parser| parser.parse_identifier_with_alias())?;
19086 self.expect_token(&Token::RParen)?;
19087 Some(RenameSelectItem::Multiple(idents))
19088 } else {
19089 let ident = self.parse_identifier_with_alias()?;
19090 Some(RenameSelectItem::Single(ident))
19091 }
19092 } else {
19093 None
19094 };
19095
19096 Ok(opt_rename)
19097 }
19098
19099 pub fn parse_optional_select_item_replace(
19101 &mut self,
19102 ) -> Result<Option<ReplaceSelectItem>, ParserError> {
19103 let opt_replace = if self.parse_keyword(Keyword::REPLACE) {
19104 if self.consume_token(&Token::LParen) {
19105 let items = self.parse_comma_separated(|parser| {
19106 Ok(Box::new(parser.parse_replace_elements()?))
19107 })?;
19108 self.expect_token(&Token::RParen)?;
19109 Some(ReplaceSelectItem { items })
19110 } else {
19111 let tok = self.next_token();
19112 return self.expected("( after REPLACE but", tok);
19113 }
19114 } else {
19115 None
19116 };
19117
19118 Ok(opt_replace)
19119 }
19120 pub fn parse_replace_elements(&mut self) -> Result<ReplaceSelectElement, ParserError> {
19122 let expr = self.parse_expr()?;
19123 let as_keyword = self.parse_keyword(Keyword::AS);
19124 let ident = self.parse_identifier()?;
19125 Ok(ReplaceSelectElement {
19126 expr,
19127 column_name: ident,
19128 as_keyword,
19129 })
19130 }
19131
19132 pub fn parse_asc_desc(&mut self) -> Option<bool> {
19135 if self.parse_keyword(Keyword::ASC) {
19136 Some(true)
19137 } else if self.parse_keyword(Keyword::DESC) {
19138 Some(false)
19139 } else {
19140 None
19141 }
19142 }
19143
19144 pub fn parse_order_by_expr(&mut self) -> Result<OrderByExpr, ParserError> {
19146 self.parse_order_by_expr_inner(false)
19147 .map(|(order_by, _)| order_by)
19148 }
19149
19150 pub fn parse_create_index_expr(&mut self) -> Result<IndexColumn, ParserError> {
19152 self.parse_order_by_expr_inner(true)
19153 .map(|(column, operator_class)| IndexColumn {
19154 column,
19155 operator_class,
19156 })
19157 }
19158
    /// Shared body of [`Self::parse_order_by_expr`] and
    /// [`Self::parse_create_index_expr`]: parses `<expr>
    /// [operator_class] [ASC | DESC] [NULLS FIRST | LAST] [WITH FILL ...]`,
    /// returning the order-by expression plus the operator class (always
    /// `None` unless `with_operator_class` is set).
    fn parse_order_by_expr_inner(
        &mut self,
        with_operator_class: bool,
    ) -> Result<(OrderByExpr, Option<ObjectName>), ParserError> {
        let expr = self.parse_expr()?;

        let operator_class: Option<ObjectName> = if with_operator_class {
            // An operator class is a bare object name; only attempt it when
            // the next keyword cannot start the ordering options, otherwise
            // e.g. `ASC` would be misread as the class name.
            if self
                .peek_one_of_keywords(&[Keyword::ASC, Keyword::DESC, Keyword::NULLS, Keyword::WITH])
                .is_some()
            {
                None
            } else {
                // Speculative parse: rolls back if no object name is present.
                self.maybe_parse(|parser| parser.parse_object_name(false))?
            }
        } else {
            None
        };

        let options = self.parse_order_by_options()?;

        let with_fill = if self.dialect.supports_with_fill()
            && self.parse_keywords(&[Keyword::WITH, Keyword::FILL])
        {
            Some(self.parse_with_fill()?)
        } else {
            None
        };

        Ok((
            OrderByExpr {
                expr,
                options,
                with_fill,
            },
            operator_class,
        ))
    }
19199
19200 fn parse_order_by_options(&mut self) -> Result<OrderByOptions, ParserError> {
19201 let asc = self.parse_asc_desc();
19202
19203 let nulls_first = if self.parse_keywords(&[Keyword::NULLS, Keyword::FIRST]) {
19204 Some(true)
19205 } else if self.parse_keywords(&[Keyword::NULLS, Keyword::LAST]) {
19206 Some(false)
19207 } else {
19208 None
19209 };
19210
19211 Ok(OrderByOptions { asc, nulls_first })
19212 }
19213
19214 pub fn parse_with_fill(&mut self) -> Result<WithFill, ParserError> {
19218 let from = if self.parse_keyword(Keyword::FROM) {
19219 Some(self.parse_expr()?)
19220 } else {
19221 None
19222 };
19223
19224 let to = if self.parse_keyword(Keyword::TO) {
19225 Some(self.parse_expr()?)
19226 } else {
19227 None
19228 };
19229
19230 let step = if self.parse_keyword(Keyword::STEP) {
19231 Some(self.parse_expr()?)
19232 } else {
19233 None
19234 };
19235
19236 Ok(WithFill { from, to, step })
19237 }
19238
19239 pub fn parse_interpolations(&mut self) -> Result<Option<Interpolate>, ParserError> {
19242 if !self.parse_keyword(Keyword::INTERPOLATE) {
19243 return Ok(None);
19244 }
19245
19246 if self.consume_token(&Token::LParen) {
19247 let interpolations =
19248 self.parse_comma_separated0(|p| p.parse_interpolation(), Token::RParen)?;
19249 self.expect_token(&Token::RParen)?;
19250 return Ok(Some(Interpolate {
19252 exprs: Some(interpolations),
19253 }));
19254 }
19255
19256 Ok(Some(Interpolate { exprs: None }))
19258 }
19259
19260 pub fn parse_interpolation(&mut self) -> Result<InterpolateExpr, ParserError> {
19262 let column = self.parse_identifier()?;
19263 let expr = if self.parse_keyword(Keyword::AS) {
19264 Some(self.parse_expr()?)
19265 } else {
19266 None
19267 };
19268 Ok(InterpolateExpr { column, expr })
19269 }
19270
19271 pub fn parse_top(&mut self) -> Result<Top, ParserError> {
19274 let quantity = if self.consume_token(&Token::LParen) {
19275 let quantity = self.parse_expr()?;
19276 self.expect_token(&Token::RParen)?;
19277 Some(TopQuantity::Expr(quantity))
19278 } else {
19279 let next_token = self.next_token();
19280 let quantity = match next_token.token {
19281 Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start)?,
19282 _ => self.expected("literal int", next_token)?,
19283 };
19284 Some(TopQuantity::Constant(quantity))
19285 };
19286
19287 let percent = self.parse_keyword(Keyword::PERCENT);
19288
19289 let with_ties = self.parse_keywords(&[Keyword::WITH, Keyword::TIES]);
19290
19291 Ok(Top {
19292 with_ties,
19293 percent,
19294 quantity,
19295 })
19296 }
19297
19298 pub fn parse_limit(&mut self) -> Result<Option<Expr>, ParserError> {
19300 if self.parse_keyword(Keyword::ALL) {
19301 Ok(None)
19302 } else {
19303 Ok(Some(self.parse_expr()?))
19304 }
19305 }
19306
19307 pub fn parse_offset(&mut self) -> Result<Offset, ParserError> {
19309 let value = self.parse_expr()?;
19310 let rows = if self.parse_keyword(Keyword::ROW) {
19311 OffsetRows::Row
19312 } else if self.parse_keyword(Keyword::ROWS) {
19313 OffsetRows::Rows
19314 } else {
19315 OffsetRows::None
19316 };
19317 Ok(Offset { value, rows })
19318 }
19319
19320 pub fn parse_fetch(&mut self) -> Result<Fetch, ParserError> {
19322 let _ = self.parse_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT]);
19323
19324 let (quantity, percent) = if self
19325 .parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])
19326 .is_some()
19327 {
19328 (None, false)
19329 } else {
19330 let quantity = Expr::Value(self.parse_value()?);
19331 let percent = self.parse_keyword(Keyword::PERCENT);
19332 let _ = self.parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS]);
19333 (Some(quantity), percent)
19334 };
19335
19336 let with_ties = if self.parse_keyword(Keyword::ONLY) {
19337 false
19338 } else {
19339 self.parse_keywords(&[Keyword::WITH, Keyword::TIES])
19340 };
19341
19342 Ok(Fetch {
19343 with_ties,
19344 percent,
19345 quantity,
19346 })
19347 }
19348
19349 pub fn parse_lock(&mut self) -> Result<LockClause, ParserError> {
19351 let lock_type = match self.expect_one_of_keywords(&[Keyword::UPDATE, Keyword::SHARE])? {
19352 Keyword::UPDATE => LockType::Update,
19353 Keyword::SHARE => LockType::Share,
19354 unexpected_keyword => return Err(ParserError::ParserError(
19355 format!("Internal parser error: expected any of {{UPDATE, SHARE}}, got {unexpected_keyword:?}"),
19356 )),
19357 };
19358 let of = if self.parse_keyword(Keyword::OF) {
19359 Some(self.parse_object_name(false)?)
19360 } else {
19361 None
19362 };
19363 let nonblock = if self.parse_keyword(Keyword::NOWAIT) {
19364 Some(NonBlock::Nowait)
19365 } else if self.parse_keywords(&[Keyword::SKIP, Keyword::LOCKED]) {
19366 Some(NonBlock::SkipLocked)
19367 } else {
19368 None
19369 };
19370 Ok(LockClause {
19371 lock_type,
19372 of,
19373 nonblock,
19374 })
19375 }
19376
19377 pub fn parse_lock_statement(&mut self) -> Result<Lock, ParserError> {
19379 self.expect_keyword(Keyword::LOCK)?;
19380
19381 if self.peek_keyword(Keyword::TABLES) {
19382 return self.expected_ref("TABLE or a table name", self.peek_token_ref());
19383 }
19384
19385 let _ = self.parse_keyword(Keyword::TABLE);
19386 let tables = self.parse_comma_separated(Parser::parse_lock_table_target)?;
19387 let lock_mode = if self.parse_keyword(Keyword::IN) {
19388 let lock_mode = self.parse_lock_table_mode()?;
19389 self.expect_keyword(Keyword::MODE)?;
19390 Some(lock_mode)
19391 } else {
19392 None
19393 };
19394 let nowait = self.parse_keyword(Keyword::NOWAIT);
19395
19396 Ok(Lock {
19397 tables,
19398 lock_mode,
19399 nowait,
19400 })
19401 }
19402
19403 fn parse_lock_table_target(&mut self) -> Result<LockTableTarget, ParserError> {
19404 let only = self.parse_keyword(Keyword::ONLY);
19405 let name = self.parse_object_name(false)?;
19406 let has_asterisk = self.consume_token(&Token::Mul);
19407
19408 Ok(LockTableTarget {
19409 name,
19410 only,
19411 has_asterisk,
19412 })
19413 }
19414
    /// Parses a `LOCK TABLE ... IN <mode> MODE` lock-mode name.
    ///
    /// The multi-word modes are tried before their single-word prefixes
    /// (e.g. `SHARE UPDATE EXCLUSIVE` and `SHARE ROW EXCLUSIVE` before
    /// `SHARE`), so the try-order below is load-bearing and must not be
    /// rearranged.
    fn parse_lock_table_mode(&mut self) -> Result<LockTableMode, ParserError> {
        if self.parse_keywords(&[Keyword::ACCESS, Keyword::SHARE]) {
            Ok(LockTableMode::AccessShare)
        } else if self.parse_keywords(&[Keyword::ACCESS, Keyword::EXCLUSIVE]) {
            Ok(LockTableMode::AccessExclusive)
        } else if self.parse_keywords(&[Keyword::ROW, Keyword::SHARE]) {
            Ok(LockTableMode::RowShare)
        } else if self.parse_keywords(&[Keyword::ROW, Keyword::EXCLUSIVE]) {
            Ok(LockTableMode::RowExclusive)
        } else if self.parse_keywords(&[Keyword::SHARE, Keyword::UPDATE, Keyword::EXCLUSIVE]) {
            Ok(LockTableMode::ShareUpdateExclusive)
        } else if self.parse_keywords(&[Keyword::SHARE, Keyword::ROW, Keyword::EXCLUSIVE]) {
            Ok(LockTableMode::ShareRowExclusive)
        } else if self.parse_keyword(Keyword::SHARE) {
            Ok(LockTableMode::Share)
        } else if self.parse_keyword(Keyword::EXCLUSIVE) {
            Ok(LockTableMode::Exclusive)
        } else {
            self.expected_ref("a PostgreSQL LOCK TABLE mode", self.peek_token_ref())
        }
    }
19436
19437 pub fn parse_values(
19439 &mut self,
19440 allow_empty: bool,
19441 value_keyword: bool,
19442 ) -> Result<Values, ParserError> {
19443 let mut explicit_row = false;
19444
19445 let rows = self.parse_comma_separated(|parser| {
19446 if parser.parse_keyword(Keyword::ROW) {
19447 explicit_row = true;
19448 }
19449
19450 parser.expect_token(&Token::LParen)?;
19451 if allow_empty && parser.peek_token().token == Token::RParen {
19452 parser.next_token();
19453 Ok(vec![])
19454 } else {
19455 let exprs = parser.parse_comma_separated(Parser::parse_expr)?;
19456 parser.expect_token(&Token::RParen)?;
19457 Ok(exprs)
19458 }
19459 })?;
19460 Ok(Values {
19461 explicit_row,
19462 rows,
19463 value_keyword,
19464 })
19465 }
19466
19467 pub fn parse_start_transaction(&mut self) -> Result<Statement, ParserError> {
19469 self.expect_keyword_is(Keyword::TRANSACTION)?;
19470 Ok(Statement::StartTransaction {
19471 modes: self.parse_transaction_modes()?,
19472 begin: false,
19473 transaction: Some(BeginTransactionKind::Transaction),
19474 modifier: None,
19475 statements: vec![],
19476 exception: None,
19477 has_end_keyword: false,
19478 })
19479 }
19480
19481 pub(crate) fn parse_transaction_modifier(&mut self) -> Option<TransactionModifier> {
19483 if !self.dialect.supports_start_transaction_modifier() {
19484 None
19485 } else if self.parse_keyword(Keyword::DEFERRED) {
19486 Some(TransactionModifier::Deferred)
19487 } else if self.parse_keyword(Keyword::IMMEDIATE) {
19488 Some(TransactionModifier::Immediate)
19489 } else if self.parse_keyword(Keyword::EXCLUSIVE) {
19490 Some(TransactionModifier::Exclusive)
19491 } else if self.parse_keyword(Keyword::TRY) {
19492 Some(TransactionModifier::Try)
19493 } else if self.parse_keyword(Keyword::CATCH) {
19494 Some(TransactionModifier::Catch)
19495 } else {
19496 None
19497 }
19498 }
19499
19500 pub fn parse_begin(&mut self) -> Result<Statement, ParserError> {
19502 let modifier = self.parse_transaction_modifier();
19503 let transaction =
19504 match self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK, Keyword::TRAN])
19505 {
19506 Some(Keyword::TRANSACTION) => Some(BeginTransactionKind::Transaction),
19507 Some(Keyword::WORK) => Some(BeginTransactionKind::Work),
19508 Some(Keyword::TRAN) => Some(BeginTransactionKind::Tran),
19509 _ => None,
19510 };
19511 Ok(Statement::StartTransaction {
19512 modes: self.parse_transaction_modes()?,
19513 begin: true,
19514 transaction,
19515 modifier,
19516 statements: vec![],
19517 exception: None,
19518 has_end_keyword: false,
19519 })
19520 }
19521
    /// Parses the body of a `BEGIN ... [EXCEPTION WHEN ... THEN ...] END`
    /// block (the `BEGIN` keyword is already consumed): the statement list,
    /// then an optional `EXCEPTION` section of one or more `WHEN <ident>
    /// [OR <ident>]... THEN <statements>` handlers, then the closing `END`.
    pub fn parse_begin_exception_end(&mut self) -> Result<Statement, ParserError> {
        // Statements run until either EXCEPTION or END appears.
        let statements = self.parse_statement_list(&[Keyword::EXCEPTION, Keyword::END])?;

        let exception = if self.parse_keyword(Keyword::EXCEPTION) {
            let mut when = Vec::new();

            // Each iteration parses one WHEN handler; END closes the section.
            while !self.peek_keyword(Keyword::END) {
                self.expect_keyword(Keyword::WHEN)?;

                let mut idents = Vec::new();

                // Collect `<ident> [OR <ident>]...` until THEN is consumed.
                while !self.parse_keyword(Keyword::THEN) {
                    let ident = self.parse_identifier()?;
                    idents.push(ident);

                    // OR is optional between names; a failed expectation
                    // is rolled back by maybe_parse rather than erroring.
                    self.maybe_parse(|p| p.expect_keyword(Keyword::OR))?;
                }

                // Handler body runs until the next WHEN or the final END.
                let statements = self.parse_statement_list(&[Keyword::WHEN, Keyword::END])?;

                when.push(ExceptionWhen { idents, statements });
            }

            Some(when)
        } else {
            None
        };

        self.expect_keyword(Keyword::END)?;

        Ok(Statement::StartTransaction {
            begin: true,
            statements,
            exception,
            has_end_keyword: true,
            transaction: None,
            modifier: None,
            modes: Default::default(),
        })
    }
19567
19568 pub fn parse_end(&mut self) -> Result<Statement, ParserError> {
19570 let modifier = if !self.dialect.supports_end_transaction_modifier() {
19571 None
19572 } else if self.parse_keyword(Keyword::TRY) {
19573 Some(TransactionModifier::Try)
19574 } else if self.parse_keyword(Keyword::CATCH) {
19575 Some(TransactionModifier::Catch)
19576 } else {
19577 None
19578 };
19579 Ok(Statement::Commit {
19580 chain: self.parse_commit_rollback_chain()?,
19581 end: true,
19582 modifier,
19583 })
19584 }
19585
    /// Parses a (possibly empty) comma-separated list of transaction modes:
    /// `ISOLATION LEVEL <level>`, `READ ONLY`, or `READ WRITE`.
    ///
    /// A trailing comma makes the next mode mandatory (`required`); with no
    /// comma the loop simply stops at the first token that is not a mode.
    pub fn parse_transaction_modes(&mut self) -> Result<Vec<TransactionMode>, ParserError> {
        let mut modes = vec![];
        let mut required = false;
        loop {
            let mode = if self.parse_keywords(&[Keyword::ISOLATION, Keyword::LEVEL]) {
                let iso_level = if self.parse_keywords(&[Keyword::READ, Keyword::UNCOMMITTED]) {
                    TransactionIsolationLevel::ReadUncommitted
                } else if self.parse_keywords(&[Keyword::READ, Keyword::COMMITTED]) {
                    TransactionIsolationLevel::ReadCommitted
                } else if self.parse_keywords(&[Keyword::REPEATABLE, Keyword::READ]) {
                    TransactionIsolationLevel::RepeatableRead
                } else if self.parse_keyword(Keyword::SERIALIZABLE) {
                    TransactionIsolationLevel::Serializable
                } else if self.parse_keyword(Keyword::SNAPSHOT) {
                    TransactionIsolationLevel::Snapshot
                } else {
                    self.expected_ref("isolation level", self.peek_token_ref())?
                };
                TransactionMode::IsolationLevel(iso_level)
            } else if self.parse_keywords(&[Keyword::READ, Keyword::ONLY]) {
                TransactionMode::AccessMode(TransactionAccessMode::ReadOnly)
            } else if self.parse_keywords(&[Keyword::READ, Keyword::WRITE]) {
                TransactionMode::AccessMode(TransactionAccessMode::ReadWrite)
            } else if required {
                // A comma promised another mode; none followed.
                self.expected_ref("transaction mode", self.peek_token_ref())?
            } else {
                break;
            };
            modes.push(mode);
            required = self.consume_token(&Token::Comma);
        }
        Ok(modes)
    }
19624
19625 pub fn parse_commit(&mut self) -> Result<Statement, ParserError> {
19627 Ok(Statement::Commit {
19628 chain: self.parse_commit_rollback_chain()?,
19629 end: false,
19630 modifier: None,
19631 })
19632 }
19633
19634 pub fn parse_rollback(&mut self) -> Result<Statement, ParserError> {
19636 let chain = self.parse_commit_rollback_chain()?;
19637 let savepoint = self.parse_rollback_savepoint()?;
19638
19639 Ok(Statement::Rollback { chain, savepoint })
19640 }
19641
19642 pub fn parse_commit_rollback_chain(&mut self) -> Result<bool, ParserError> {
19644 let _ = self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK, Keyword::TRAN]);
19645 if self.parse_keyword(Keyword::AND) {
19646 let chain = !self.parse_keyword(Keyword::NO);
19647 self.expect_keyword_is(Keyword::CHAIN)?;
19648 Ok(chain)
19649 } else {
19650 Ok(false)
19651 }
19652 }
19653
19654 pub fn parse_rollback_savepoint(&mut self) -> Result<Option<Ident>, ParserError> {
19656 if self.parse_keyword(Keyword::TO) {
19657 let _ = self.parse_keyword(Keyword::SAVEPOINT);
19658 let savepoint = self.parse_identifier()?;
19659
19660 Ok(Some(savepoint))
19661 } else {
19662 Ok(None)
19663 }
19664 }
19665
19666 pub fn parse_raiserror(&mut self) -> Result<Statement, ParserError> {
19668 self.expect_token(&Token::LParen)?;
19669 let message = Box::new(self.parse_expr()?);
19670 self.expect_token(&Token::Comma)?;
19671 let severity = Box::new(self.parse_expr()?);
19672 self.expect_token(&Token::Comma)?;
19673 let state = Box::new(self.parse_expr()?);
19674 let arguments = if self.consume_token(&Token::Comma) {
19675 self.parse_comma_separated(Parser::parse_expr)?
19676 } else {
19677 vec![]
19678 };
19679 self.expect_token(&Token::RParen)?;
19680 let options = if self.parse_keyword(Keyword::WITH) {
19681 self.parse_comma_separated(Parser::parse_raiserror_option)?
19682 } else {
19683 vec![]
19684 };
19685 Ok(Statement::RaisError {
19686 message,
19687 severity,
19688 state,
19689 arguments,
19690 options,
19691 })
19692 }
19693
19694 pub fn parse_raiserror_option(&mut self) -> Result<RaisErrorOption, ParserError> {
19696 match self.expect_one_of_keywords(&[Keyword::LOG, Keyword::NOWAIT, Keyword::SETERROR])? {
19697 Keyword::LOG => Ok(RaisErrorOption::Log),
19698 Keyword::NOWAIT => Ok(RaisErrorOption::NoWait),
19699 Keyword::SETERROR => Ok(RaisErrorOption::SetError),
19700 _ => self.expected_ref(
19701 "LOG, NOWAIT OR SETERROR raiserror option",
19702 self.peek_token_ref(),
19703 ),
19704 }
19705 }
19706
19707 pub fn parse_throw(&mut self) -> Result<ThrowStatement, ParserError> {
19711 self.expect_keyword_is(Keyword::THROW)?;
19712
19713 let error_number = self.maybe_parse(|p| p.parse_expr().map(Box::new))?;
19714 let (message, state) = if error_number.is_some() {
19715 self.expect_token(&Token::Comma)?;
19716 let message = Box::new(self.parse_expr()?);
19717 self.expect_token(&Token::Comma)?;
19718 let state = Box::new(self.parse_expr()?);
19719 (Some(message), Some(state))
19720 } else {
19721 (None, None)
19722 };
19723
19724 Ok(ThrowStatement {
19725 error_number,
19726 message,
19727 state,
19728 })
19729 }
19730
19731 pub fn parse_deallocate(&mut self) -> Result<Statement, ParserError> {
19733 let prepare = self.parse_keyword(Keyword::PREPARE);
19734 let name = self.parse_identifier()?;
19735 Ok(Statement::Deallocate { name, prepare })
19736 }
19737
    /// Parses the body of an `EXECUTE [IMMEDIATE] [<name>] [(<params>)]
    /// [INTO ...] [USING ...] [OUTPUT] [DEFAULT]` statement (the `EXECUTE`
    /// keyword is already consumed).
    pub fn parse_execute(&mut self) -> Result<Statement, ParserError> {
        let immediate =
            self.dialect.supports_execute_immediate() && self.parse_keyword(Keyword::IMMEDIATE);

        // No statement name after IMMEDIATE, or when the parameters start
        // immediately with `(`.
        let name = if immediate || matches!(self.peek_token_ref().token, Token::LParen) {
            None
        } else {
            Some(self.parse_object_name(false)?)
        };

        let has_parentheses = self.consume_token(&Token::LParen);

        // Pick the sentinel token at which the (possibly empty) parameter
        // list ends: `)` when parenthesized; otherwise EOF, one of the
        // follow-on clause keywords, or `;`.
        let end_kws = &[Keyword::USING, Keyword::OUTPUT, Keyword::DEFAULT];
        let end_token = match (has_parentheses, self.peek_token().token) {
            (true, _) => Token::RParen,
            (false, Token::EOF) => Token::EOF,
            (false, Token::Word(w)) if end_kws.contains(&w.keyword) => Token::Word(w),
            (false, _) => Token::SemiColon,
        };

        let parameters = self.parse_comma_separated0(Parser::parse_expr, end_token)?;

        if has_parentheses {
            self.expect_token(&Token::RParen)?;
        }

        let into = if self.parse_keyword(Keyword::INTO) {
            self.parse_comma_separated(Self::parse_identifier)?
        } else {
            vec![]
        };

        let using = if self.parse_keyword(Keyword::USING) {
            self.parse_comma_separated(Self::parse_expr_with_alias)?
        } else {
            vec![]
        };

        let output = self.parse_keyword(Keyword::OUTPUT);

        let default = self.parse_keyword(Keyword::DEFAULT);

        Ok(Statement::Execute {
            immediate,
            name,
            parameters,
            has_parentheses,
            into,
            using,
            output,
            default,
        })
    }
19797
19798 pub fn parse_prepare(&mut self) -> Result<Statement, ParserError> {
19800 let name = self.parse_identifier()?;
19801
19802 let mut data_types = vec![];
19803 if self.consume_token(&Token::LParen) {
19804 data_types = self.parse_comma_separated(Parser::parse_data_type)?;
19805 self.expect_token(&Token::RParen)?;
19806 }
19807
19808 self.expect_keyword_is(Keyword::AS)?;
19809 let statement = Box::new(self.parse_statement()?);
19810 Ok(Statement::Prepare {
19811 name,
19812 data_types,
19813 statement,
19814 })
19815 }
19816
19817 pub fn parse_unload(&mut self) -> Result<Statement, ParserError> {
19819 self.expect_keyword(Keyword::UNLOAD)?;
19820 self.expect_token(&Token::LParen)?;
19821 let (query, query_text) =
19822 if matches!(self.peek_token_ref().token, Token::SingleQuotedString(_)) {
19823 (None, Some(self.parse_literal_string()?))
19824 } else {
19825 (Some(self.parse_query()?), None)
19826 };
19827 self.expect_token(&Token::RParen)?;
19828
19829 self.expect_keyword_is(Keyword::TO)?;
19830 let to = self.parse_identifier()?;
19831 let auth = if self.parse_keyword(Keyword::IAM_ROLE) {
19832 Some(self.parse_iam_role_kind()?)
19833 } else {
19834 None
19835 };
19836 let with = self.parse_options(Keyword::WITH)?;
19837 let mut options = vec![];
19838 while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
19839 options.push(opt);
19840 }
19841 Ok(Statement::Unload {
19842 query,
19843 query_text,
19844 to,
19845 auth,
19846 with,
19847 options,
19848 })
19849 }
19850
19851 fn parse_select_into(&mut self) -> Result<SelectInto, ParserError> {
19852 let temporary = self
19853 .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
19854 .is_some();
19855 let unlogged = self.parse_keyword(Keyword::UNLOGGED);
19856 let table = self.parse_keyword(Keyword::TABLE);
19857 let name = self.parse_object_name(false)?;
19858
19859 Ok(SelectInto {
19860 temporary,
19861 unlogged,
19862 table,
19863 name,
19864 })
19865 }
19866
19867 fn parse_pragma_value(&mut self) -> Result<ValueWithSpan, ParserError> {
19868 let v = self.parse_value()?;
19869 match &v.value {
19870 Value::SingleQuotedString(_) => Ok(v),
19871 Value::DoubleQuotedString(_) => Ok(v),
19872 Value::Number(_, _) => Ok(v),
19873 Value::Placeholder(_) => Ok(v),
19874 _ => {
19875 self.prev_token();
19876 self.expected_ref("number or string or ? placeholder", self.peek_token_ref())
19877 }
19878 }
19879 }
19880
19881 pub fn parse_pragma(&mut self) -> Result<Statement, ParserError> {
19883 let name = self.parse_object_name(false)?;
19884 if self.consume_token(&Token::LParen) {
19885 let value = self.parse_pragma_value()?;
19886 self.expect_token(&Token::RParen)?;
19887 Ok(Statement::Pragma {
19888 name,
19889 value: Some(value),
19890 is_eq: false,
19891 })
19892 } else if self.consume_token(&Token::Eq) {
19893 Ok(Statement::Pragma {
19894 name,
19895 value: Some(self.parse_pragma_value()?),
19896 is_eq: true,
19897 })
19898 } else {
19899 Ok(Statement::Pragma {
19900 name,
19901 value: None,
19902 is_eq: false,
19903 })
19904 }
19905 }
19906
19907 pub fn parse_install(&mut self) -> Result<Statement, ParserError> {
19909 let extension_name = self.parse_identifier()?;
19910
19911 Ok(Statement::Install { extension_name })
19912 }
19913
    /// Parses the body of a `LOAD` statement (the `LOAD` keyword is already
    /// consumed): either `LOAD <extension>` for dialects with extension
    /// loading, or `LOAD DATA [LOCAL] INPATH '<path>' [OVERWRITE] INTO
    /// TABLE <name> [PARTITION ...] [<format>]` for dialects with LOAD DATA.
    pub fn parse_load(&mut self) -> Result<Statement, ParserError> {
        if self.dialect.supports_load_extension() {
            let extension_name = self.parse_identifier()?;
            Ok(Statement::Load { extension_name })
        // NOTE(review): `parse_keyword(DATA)` runs before the
        // `supports_load_data()` check, so the DATA token is consumed even
        // when the dialect rejects LOAD DATA and parsing then falls through
        // to the error branch below — confirm this ordering is intended.
        } else if self.parse_keyword(Keyword::DATA) && self.dialect.supports_load_data() {
            let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
            self.expect_keyword_is(Keyword::INPATH)?;
            let inpath = self.parse_literal_string()?;
            let overwrite = self.parse_one_of_keywords(&[Keyword::OVERWRITE]).is_some();
            self.expect_keyword_is(Keyword::INTO)?;
            self.expect_keyword_is(Keyword::TABLE)?;
            let table_name = self.parse_object_name(false)?;
            let partitioned = self.parse_insert_partition()?;
            let table_format = self.parse_load_data_table_format()?;
            Ok(Statement::LoadData {
                local,
                inpath,
                overwrite,
                table_name,
                partitioned,
                table_format,
            })
        } else {
            self.expected_ref(
                "`DATA` or an extension name after `LOAD`",
                self.peek_token_ref(),
            )
        }
    }
19944
    /// Parse an `OPTIMIZE [TABLE]` statement body (after the `OPTIMIZE`
    /// keyword): `[TABLE] <name> [ON CLUSTER <cluster>]
    /// [PARTITION <expr> | PARTITION ID <id>] [FINAL]
    /// [DEDUPLICATE [BY <expr>]] [WHERE <predicate>] [ZORDER BY (<exprs>)]`.
    pub fn parse_optimize_table(&mut self) -> Result<Statement, ParserError> {
        // `TABLE` is optional; whether it appeared is kept for round-tripping.
        let has_table_keyword = self.parse_keyword(Keyword::TABLE);

        let name = self.parse_object_name(false)?;

        let on_cluster = self.parse_optional_on_cluster()?;

        // `PARTITION ID <id>` names a partition directly; bare `PARTITION`
        // takes a selecting expression.
        let partition = if self.parse_keyword(Keyword::PARTITION) {
            if self.parse_keyword(Keyword::ID) {
                Some(Partition::Identifier(self.parse_identifier()?))
            } else {
                Some(Partition::Expr(self.parse_expr()?))
            }
        } else {
            None
        };

        let include_final = self.parse_keyword(Keyword::FINAL);

        // Bare `DEDUPLICATE` deduplicates on everything; `DEDUPLICATE BY`
        // restricts it to an expression.
        let deduplicate = if self.parse_keyword(Keyword::DEDUPLICATE) {
            if self.parse_keyword(Keyword::BY) {
                Some(Deduplicate::ByExpression(self.parse_expr()?))
            } else {
                Some(Deduplicate::All)
            }
        } else {
            None
        };

        let predicate = if self.parse_keyword(Keyword::WHERE) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        // `ZORDER BY` requires a parenthesized expression list.
        let zorder = if self.parse_keywords(&[Keyword::ZORDER, Keyword::BY]) {
            self.expect_token(&Token::LParen)?;
            let columns = self.parse_comma_separated(|p| p.parse_expr())?;
            self.expect_token(&Token::RParen)?;
            Some(columns)
        } else {
            None
        };

        Ok(Statement::OptimizeTable {
            name,
            has_table_keyword,
            on_cluster,
            partition,
            include_final,
            deduplicate,
            predicate,
            zorder,
        })
    }
20013
20014 pub fn parse_create_sequence(&mut self, temporary: bool) -> Result<Statement, ParserError> {
20020 let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
20022 let name = self.parse_object_name(false)?;
20024 let mut data_type: Option<DataType> = None;
20026 if self.parse_keywords(&[Keyword::AS]) {
20027 data_type = Some(self.parse_data_type()?)
20028 }
20029 let sequence_options = self.parse_create_sequence_options()?;
20030 let owned_by = if self.parse_keywords(&[Keyword::OWNED, Keyword::BY]) {
20032 if self.parse_keywords(&[Keyword::NONE]) {
20033 Some(ObjectName::from(vec![Ident::new("NONE")]))
20034 } else {
20035 Some(self.parse_object_name(false)?)
20036 }
20037 } else {
20038 None
20039 };
20040 Ok(Statement::CreateSequence {
20041 temporary,
20042 if_not_exists,
20043 name,
20044 data_type,
20045 sequence_options,
20046 owned_by,
20047 })
20048 }
20049
    /// Parse the option list of `CREATE SEQUENCE`:
    /// `[INCREMENT [BY] n] [MINVALUE n | NO MINVALUE] [MAXVALUE n | NO MAXVALUE]
    ///  [START [WITH] n] [CACHE n] [CYCLE | NO CYCLE]`.
    ///
    /// Options are only recognized in the order above, each at most once.
    fn parse_create_sequence_options(&mut self) -> Result<Vec<SequenceOptions>, ParserError> {
        let mut sequence_options = vec![];
        // The bool in `IncrementBy` records whether the optional `BY` keyword
        // was written, so the statement round-trips verbatim.
        if self.parse_keywords(&[Keyword::INCREMENT]) {
            if self.parse_keywords(&[Keyword::BY]) {
                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, true));
            } else {
                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, false));
            }
        }
        // `MinValue(None)` / `MaxValue(None)` encode the explicit `NO ...` form.
        if self.parse_keyword(Keyword::MINVALUE) {
            sequence_options.push(SequenceOptions::MinValue(Some(self.parse_number()?)));
        } else if self.parse_keywords(&[Keyword::NO, Keyword::MINVALUE]) {
            sequence_options.push(SequenceOptions::MinValue(None));
        }
        if self.parse_keywords(&[Keyword::MAXVALUE]) {
            sequence_options.push(SequenceOptions::MaxValue(Some(self.parse_number()?)));
        } else if self.parse_keywords(&[Keyword::NO, Keyword::MAXVALUE]) {
            sequence_options.push(SequenceOptions::MaxValue(None));
        }

        // Like `IncrementBy`, the bool records the optional `WITH` keyword.
        if self.parse_keywords(&[Keyword::START]) {
            if self.parse_keywords(&[Keyword::WITH]) {
                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, true));
            } else {
                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, false));
            }
        }
        if self.parse_keywords(&[Keyword::CACHE]) {
            sequence_options.push(SequenceOptions::Cache(self.parse_number()?));
        }
        // `Cycle(true)` means the `NO` keyword was present (`NO CYCLE`);
        // `Cycle(false)` is plain `CYCLE`.
        if self.parse_keywords(&[Keyword::NO, Keyword::CYCLE]) {
            sequence_options.push(SequenceOptions::Cycle(true));
        } else if self.parse_keywords(&[Keyword::CYCLE]) {
            sequence_options.push(SequenceOptions::Cycle(false));
        }

        Ok(sequence_options)
    }
20094
20095 pub fn parse_pg_create_server(&mut self) -> Result<Statement, ParserError> {
20099 let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
20100 let name = self.parse_object_name(false)?;
20101
20102 let server_type = if self.parse_keyword(Keyword::TYPE) {
20103 Some(self.parse_identifier()?)
20104 } else {
20105 None
20106 };
20107
20108 let version = if self.parse_keyword(Keyword::VERSION) {
20109 Some(self.parse_identifier()?)
20110 } else {
20111 None
20112 };
20113
20114 self.expect_keywords(&[Keyword::FOREIGN, Keyword::DATA, Keyword::WRAPPER])?;
20115 let foreign_data_wrapper = self.parse_object_name(false)?;
20116
20117 let mut options = None;
20118 if self.parse_keyword(Keyword::OPTIONS) {
20119 self.expect_token(&Token::LParen)?;
20120 options = Some(self.parse_comma_separated(|p| {
20121 let key = p.parse_identifier()?;
20122 let value = p.parse_identifier()?;
20123 Ok(CreateServerOption { key, value })
20124 })?);
20125 self.expect_token(&Token::RParen)?;
20126 }
20127
20128 Ok(Statement::CreateServer(CreateServerStatement {
20129 name,
20130 if_not_exists: ine,
20131 server_type,
20132 version,
20133 foreign_data_wrapper,
20134 options,
20135 }))
20136 }
20137
20138 pub fn parse_create_foreign_data_wrapper(
20142 &mut self,
20143 ) -> Result<CreateForeignDataWrapper, ParserError> {
20144 let name = self.parse_identifier()?;
20145
20146 let handler = if self.parse_keyword(Keyword::HANDLER) {
20147 Some(FdwRoutineClause::Function(self.parse_object_name(false)?))
20148 } else if self.parse_keywords(&[Keyword::NO, Keyword::HANDLER]) {
20149 Some(FdwRoutineClause::NoFunction)
20150 } else {
20151 None
20152 };
20153
20154 let validator = if self.parse_keyword(Keyword::VALIDATOR) {
20155 Some(FdwRoutineClause::Function(self.parse_object_name(false)?))
20156 } else if self.parse_keywords(&[Keyword::NO, Keyword::VALIDATOR]) {
20157 Some(FdwRoutineClause::NoFunction)
20158 } else {
20159 None
20160 };
20161
20162 let options = if self.parse_keyword(Keyword::OPTIONS) {
20163 self.expect_token(&Token::LParen)?;
20164 let opts = self.parse_comma_separated(|p| {
20165 let key = p.parse_identifier()?;
20166 let value = p.parse_identifier()?;
20167 Ok(CreateServerOption { key, value })
20168 })?;
20169 self.expect_token(&Token::RParen)?;
20170 Some(opts)
20171 } else {
20172 None
20173 };
20174
20175 Ok(CreateForeignDataWrapper {
20176 name,
20177 handler,
20178 validator,
20179 options,
20180 })
20181 }
20182
    /// Parse a `CREATE FOREIGN TABLE` statement body (after the keywords):
    /// `[IF NOT EXISTS] <name> (<columns>) SERVER <server>
    ///  [OPTIONS (<key> <value>, ...)]`.
    pub fn parse_create_foreign_table(
        &mut self,
    ) -> Result<CreateForeignTable, ParserError> {
        let if_not_exists =
            self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let name = self.parse_object_name(false)?;
        // NOTE(review): any table constraints returned by `parse_columns` are
        // discarded here — confirm the AST intentionally keeps only columns.
        let (columns, _constraints) = self.parse_columns()?;
        self.expect_keyword_is(Keyword::SERVER)?;
        let server_name = self.parse_identifier()?;

        // Optional `OPTIONS (key value, ...)` list of bare identifier pairs.
        let options = if self.parse_keyword(Keyword::OPTIONS) {
            self.expect_token(&Token::LParen)?;
            let opts = self.parse_comma_separated(|p| {
                let key = p.parse_identifier()?;
                let value = p.parse_identifier()?;
                Ok(CreateServerOption { key, value })
            })?;
            self.expect_token(&Token::RParen)?;
            Some(opts)
        } else {
            None
        };

        Ok(CreateForeignTable {
            name,
            if_not_exists,
            columns,
            server_name,
            options,
        })
    }
20217
    /// Parse a `CREATE PUBLICATION` statement body (after the keywords):
    /// `<name> [FOR ALL TABLES | FOR TABLE <t>, ... |
    ///  FOR TABLES IN SCHEMA <s>, ...] [WITH (<options>)]`.
    pub fn parse_create_publication(&mut self) -> Result<CreatePublication, ParserError> {
        let name = self.parse_identifier()?;

        // The publication target is optional; `FOR`, once seen, must be
        // followed by one of the three recognized target forms.
        let target = if self.parse_keyword(Keyword::FOR) {
            if self.parse_keywords(&[Keyword::ALL, Keyword::TABLES]) {
                Some(PublicationTarget::AllTables)
            } else if self.parse_keyword(Keyword::TABLE) {
                let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
                Some(PublicationTarget::Tables(tables))
            } else if self.parse_keywords(&[Keyword::TABLES, Keyword::IN, Keyword::SCHEMA]) {
                let schemas = self.parse_comma_separated(|p| p.parse_identifier())?;
                Some(PublicationTarget::TablesInSchema(schemas))
            } else {
                return self.expected_ref(
                    "ALL TABLES, TABLE, or TABLES IN SCHEMA after FOR",
                    self.peek_token_ref(),
                );
            }
        } else {
            None
        };

        let with_options = self.parse_options(Keyword::WITH)?;

        Ok(CreatePublication {
            name,
            target,
            with_options,
        })
    }
20251
    /// Parse a `CREATE SUBSCRIPTION` statement body (after the keywords):
    /// `<name> CONNECTION '<conninfo>' PUBLICATION <pub>, ... [WITH (<options>)]`.
    pub fn parse_create_subscription(&mut self) -> Result<CreateSubscription, ParserError> {
        let name = self.parse_identifier()?;
        self.expect_keyword_is(Keyword::CONNECTION)?;
        // Only the literal value of the connection string is kept (span dropped).
        let connection = self.parse_value()?.value;
        self.expect_keyword_is(Keyword::PUBLICATION)?;
        let publications = self.parse_comma_separated(|p| p.parse_identifier())?;
        let with_options = self.parse_options(Keyword::WITH)?;

        Ok(CreateSubscription {
            name,
            connection,
            publications,
            with_options,
        })
    }
20270
    /// Return the parser's current position in the token stream
    /// (an index into the internal token buffer).
    pub fn index(&self) -> usize {
        self.index
    }
20275
    /// Parse one definition of a `WINDOW` clause: `<name> AS ( <spec> )`, or
    /// `<name> AS <other_window_name>` on dialects that support referencing
    /// another named window.
    pub fn parse_named_window(&mut self) -> Result<NamedWindowDefinition, ParserError> {
        let ident = self.parse_identifier()?;
        self.expect_keyword_is(Keyword::AS)?;

        let window_expr = if self.consume_token(&Token::LParen) {
            NamedWindowExpr::WindowSpec(self.parse_window_spec()?)
        } else if self.dialect.supports_window_clause_named_window_reference() {
            NamedWindowExpr::NamedWindow(self.parse_identifier()?)
        } else {
            return self.expected_ref("(", self.peek_token_ref());
        };

        Ok(NamedWindowDefinition(ident, window_expr))
    }
20291
    /// Parse a `CREATE [OR ALTER] PROCEDURE` statement body (after the
    /// keywords): `<name> [(<params>)] [LANGUAGE <lang>] AS <body>`.
    ///
    /// `or_alter` records whether the caller consumed `OR ALTER`.
    pub fn parse_create_procedure(&mut self, or_alter: bool) -> Result<Statement, ParserError> {
        let name = self.parse_object_name(false)?;
        let params = self.parse_optional_procedure_parameters()?;

        let language = if self.parse_keyword(Keyword::LANGUAGE) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::AS)?;

        // The body runs until a terminating `END` keyword.
        let body = self.parse_conditional_statements(&[Keyword::END])?;

        Ok(Statement::CreateProcedure {
            name,
            or_alter,
            params,
            language,
            body,
        })
    }
20315
    /// Parse a window specification — the body of `OVER ( ... )` or of a
    /// named `WINDOW` definition — consuming the closing `)`.
    pub fn parse_window_spec(&mut self) -> Result<WindowSpec, ParserError> {
        // A leading non-keyword word names a base window to inherit from
        // (e.g. `OVER (w ORDER BY x)`).
        let window_name = match &self.peek_token_ref().token {
            Token::Word(word) if word.keyword == Keyword::NoKeyword => {
                self.parse_optional_ident()?
            }
            _ => None,
        };

        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_expr)?
        } else {
            vec![]
        };
        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
            self.parse_comma_separated(Parser::parse_order_by_expr)?
        } else {
            vec![]
        };

        // Anything remaining before `)` must be a window frame clause.
        let window_frame = if !self.consume_token(&Token::RParen) {
            let window_frame = self.parse_window_frame()?;
            self.expect_token(&Token::RParen)?;
            Some(window_frame)
        } else {
            None
        };
        Ok(WindowSpec {
            window_name,
            partition_by,
            order_by,
            window_frame,
        })
    }
20350
20351 pub fn parse_create_type(&mut self) -> Result<Statement, ParserError> {
20353 let name = self.parse_object_name(false)?;
20354
20355 let has_as = self.parse_keyword(Keyword::AS);
20357
20358 if !has_as {
20359 if self.consume_token(&Token::LParen) {
20361 let options = self.parse_create_type_sql_definition_options()?;
20363 self.expect_token(&Token::RParen)?;
20364 return Ok(Statement::CreateType {
20365 name,
20366 representation: Some(UserDefinedTypeRepresentation::SqlDefinition { options }),
20367 });
20368 }
20369
20370 return Ok(Statement::CreateType {
20372 name,
20373 representation: None,
20374 });
20375 }
20376
20377 if self.parse_keyword(Keyword::ENUM) {
20379 self.parse_create_type_enum(name)
20381 } else if self.parse_keyword(Keyword::RANGE) {
20382 self.parse_create_type_range(name)
20384 } else if self.consume_token(&Token::LParen) {
20385 self.parse_create_type_composite(name)
20387 } else {
20388 self.expected_ref("ENUM, RANGE, or '(' after AS", self.peek_token_ref())
20389 }
20390 }
20391
20392 fn parse_create_type_composite(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
20396 if self.consume_token(&Token::RParen) {
20397 return Ok(Statement::CreateType {
20399 name,
20400 representation: Some(UserDefinedTypeRepresentation::Composite {
20401 attributes: vec![],
20402 }),
20403 });
20404 }
20405
20406 let mut attributes = vec![];
20407 loop {
20408 let attr_name = self.parse_identifier()?;
20409 let attr_data_type = self.parse_data_type()?;
20410 let attr_collation = if self.parse_keyword(Keyword::COLLATE) {
20411 Some(self.parse_object_name(false)?)
20412 } else {
20413 None
20414 };
20415 attributes.push(UserDefinedTypeCompositeAttributeDef {
20416 name: attr_name,
20417 data_type: attr_data_type,
20418 collation: attr_collation,
20419 });
20420
20421 if !self.consume_token(&Token::Comma) {
20422 break;
20423 }
20424 }
20425 self.expect_token(&Token::RParen)?;
20426
20427 Ok(Statement::CreateType {
20428 name,
20429 representation: Some(UserDefinedTypeRepresentation::Composite { attributes }),
20430 })
20431 }
20432
20433 pub fn parse_create_type_enum(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
20437 self.expect_token(&Token::LParen)?;
20438 let labels = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
20439 self.expect_token(&Token::RParen)?;
20440
20441 Ok(Statement::CreateType {
20442 name,
20443 representation: Some(UserDefinedTypeRepresentation::Enum { labels }),
20444 })
20445 }
20446
20447 fn parse_create_type_range(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
20451 self.expect_token(&Token::LParen)?;
20452 let options = self.parse_comma_separated0(|p| p.parse_range_option(), Token::RParen)?;
20453 self.expect_token(&Token::RParen)?;
20454
20455 Ok(Statement::CreateType {
20456 name,
20457 representation: Some(UserDefinedTypeRepresentation::Range { options }),
20458 })
20459 }
20460
20461 fn parse_range_option(&mut self) -> Result<UserDefinedTypeRangeOption, ParserError> {
20463 let keyword = self.parse_one_of_keywords(&[
20464 Keyword::SUBTYPE,
20465 Keyword::SUBTYPE_OPCLASS,
20466 Keyword::COLLATION,
20467 Keyword::CANONICAL,
20468 Keyword::SUBTYPE_DIFF,
20469 Keyword::MULTIRANGE_TYPE_NAME,
20470 ]);
20471
20472 match keyword {
20473 Some(Keyword::SUBTYPE) => {
20474 self.expect_token(&Token::Eq)?;
20475 let data_type = self.parse_data_type()?;
20476 Ok(UserDefinedTypeRangeOption::Subtype(data_type))
20477 }
20478 Some(Keyword::SUBTYPE_OPCLASS) => {
20479 self.expect_token(&Token::Eq)?;
20480 let name = self.parse_object_name(false)?;
20481 Ok(UserDefinedTypeRangeOption::SubtypeOpClass(name))
20482 }
20483 Some(Keyword::COLLATION) => {
20484 self.expect_token(&Token::Eq)?;
20485 let name = self.parse_object_name(false)?;
20486 Ok(UserDefinedTypeRangeOption::Collation(name))
20487 }
20488 Some(Keyword::CANONICAL) => {
20489 self.expect_token(&Token::Eq)?;
20490 let name = self.parse_object_name(false)?;
20491 Ok(UserDefinedTypeRangeOption::Canonical(name))
20492 }
20493 Some(Keyword::SUBTYPE_DIFF) => {
20494 self.expect_token(&Token::Eq)?;
20495 let name = self.parse_object_name(false)?;
20496 Ok(UserDefinedTypeRangeOption::SubtypeDiff(name))
20497 }
20498 Some(Keyword::MULTIRANGE_TYPE_NAME) => {
20499 self.expect_token(&Token::Eq)?;
20500 let name = self.parse_object_name(false)?;
20501 Ok(UserDefinedTypeRangeOption::MultirangeTypeName(name))
20502 }
20503 _ => self.expected_ref("range option keyword", self.peek_token_ref()),
20504 }
20505 }
20506
20507 fn parse_create_type_sql_definition_options(
20509 &mut self,
20510 ) -> Result<Vec<UserDefinedTypeSqlDefinitionOption>, ParserError> {
20511 self.parse_comma_separated0(|p| p.parse_sql_definition_option(), Token::RParen)
20512 }
20513
    /// Parse a single `<keyword> = <value>` option of a SQL-definition
    /// `CREATE TYPE <name> ( ... )` (base type), e.g. `INPUT = in_fn`.
    ///
    /// `PASSEDBYVALUE` is the only option with no `= <value>` part.
    fn parse_sql_definition_option(
        &mut self,
    ) -> Result<UserDefinedTypeSqlDefinitionOption, ParserError> {
        let keyword = self.parse_one_of_keywords(&[
            Keyword::INPUT,
            Keyword::OUTPUT,
            Keyword::RECEIVE,
            Keyword::SEND,
            Keyword::TYPMOD_IN,
            Keyword::TYPMOD_OUT,
            Keyword::ANALYZE,
            Keyword::SUBSCRIPT,
            Keyword::INTERNALLENGTH,
            Keyword::PASSEDBYVALUE,
            Keyword::ALIGNMENT,
            Keyword::STORAGE,
            Keyword::LIKE,
            Keyword::CATEGORY,
            Keyword::PREFERRED,
            Keyword::DEFAULT,
            Keyword::ELEMENT,
            Keyword::DELIMITER,
            Keyword::COLLATABLE,
        ]);

        match keyword {
            Some(Keyword::INPUT) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Input(name))
            }
            Some(Keyword::OUTPUT) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Output(name))
            }
            Some(Keyword::RECEIVE) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Receive(name))
            }
            Some(Keyword::SEND) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Send(name))
            }
            Some(Keyword::TYPMOD_IN) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::TypmodIn(name))
            }
            Some(Keyword::TYPMOD_OUT) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::TypmodOut(name))
            }
            Some(Keyword::ANALYZE) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Analyze(name))
            }
            Some(Keyword::SUBSCRIPT) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Subscript(name))
            }
            // `INTERNALLENGTH = { VARIABLE | <n> }`.
            Some(Keyword::INTERNALLENGTH) => {
                self.expect_token(&Token::Eq)?;
                if self.parse_keyword(Keyword::VARIABLE) {
                    Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
                        UserDefinedTypeInternalLength::Variable,
                    ))
                } else {
                    let value = self.parse_literal_uint()?;
                    Ok(UserDefinedTypeSqlDefinitionOption::InternalLength(
                        UserDefinedTypeInternalLength::Fixed(value),
                    ))
                }
            }
            // Bare flag: no `=` follows.
            Some(Keyword::PASSEDBYVALUE) => Ok(UserDefinedTypeSqlDefinitionOption::PassedByValue),
            Some(Keyword::ALIGNMENT) => {
                self.expect_token(&Token::Eq)?;
                let align_keyword = self.parse_one_of_keywords(&[
                    Keyword::CHAR,
                    Keyword::INT2,
                    Keyword::INT4,
                    Keyword::DOUBLE,
                ]);
                match align_keyword {
                    Some(Keyword::CHAR) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
                        Alignment::Char,
                    )),
                    Some(Keyword::INT2) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
                        Alignment::Int2,
                    )),
                    Some(Keyword::INT4) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
                        Alignment::Int4,
                    )),
                    Some(Keyword::DOUBLE) => Ok(UserDefinedTypeSqlDefinitionOption::Alignment(
                        Alignment::Double,
                    )),
                    _ => self.expected_ref(
                        "alignment value (char, int2, int4, or double)",
                        self.peek_token_ref(),
                    ),
                }
            }
            Some(Keyword::STORAGE) => {
                self.expect_token(&Token::Eq)?;
                let storage_keyword = self.parse_one_of_keywords(&[
                    Keyword::PLAIN,
                    Keyword::EXTERNAL,
                    Keyword::EXTENDED,
                    Keyword::MAIN,
                ]);
                match storage_keyword {
                    Some(Keyword::PLAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
                        UserDefinedTypeStorage::Plain,
                    )),
                    Some(Keyword::EXTERNAL) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
                        UserDefinedTypeStorage::External,
                    )),
                    Some(Keyword::EXTENDED) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
                        UserDefinedTypeStorage::Extended,
                    )),
                    Some(Keyword::MAIN) => Ok(UserDefinedTypeSqlDefinitionOption::Storage(
                        UserDefinedTypeStorage::Main,
                    )),
                    _ => self.expected_ref(
                        "storage value (plain, external, extended, or main)",
                        self.peek_token_ref(),
                    ),
                }
            }
            Some(Keyword::LIKE) => {
                self.expect_token(&Token::Eq)?;
                let name = self.parse_object_name(false)?;
                Ok(UserDefinedTypeSqlDefinitionOption::Like(name))
            }
            // `CATEGORY = '<c>'` — only the first character of the literal is
            // kept; an empty literal is an error.
            Some(Keyword::CATEGORY) => {
                self.expect_token(&Token::Eq)?;
                let category_str = self.parse_literal_string()?;
                let category_char = category_str.chars().next().ok_or_else(|| {
                    ParserError::ParserError(
                        "CATEGORY value must be a single character".to_string(),
                    )
                })?;
                Ok(UserDefinedTypeSqlDefinitionOption::Category(category_char))
            }
            Some(Keyword::PREFERRED) => {
                self.expect_token(&Token::Eq)?;
                // NOTE(review): when neither TRUE nor FALSE follows `=`, this
                // expression yields `true` without consuming a token — confirm
                // a missing boolean is meant to default to true.
                let value =
                    self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
                Ok(UserDefinedTypeSqlDefinitionOption::Preferred(value))
            }
            Some(Keyword::DEFAULT) => {
                self.expect_token(&Token::Eq)?;
                let expr = self.parse_expr()?;
                Ok(UserDefinedTypeSqlDefinitionOption::Default(expr))
            }
            Some(Keyword::ELEMENT) => {
                self.expect_token(&Token::Eq)?;
                let data_type = self.parse_data_type()?;
                Ok(UserDefinedTypeSqlDefinitionOption::Element(data_type))
            }
            Some(Keyword::DELIMITER) => {
                self.expect_token(&Token::Eq)?;
                let delimiter = self.parse_literal_string()?;
                Ok(UserDefinedTypeSqlDefinitionOption::Delimiter(delimiter))
            }
            Some(Keyword::COLLATABLE) => {
                self.expect_token(&Token::Eq)?;
                // NOTE(review): same missing-boolean behavior as PREFERRED above.
                let value =
                    self.parse_keyword(Keyword::TRUE) || !self.parse_keyword(Keyword::FALSE);
                Ok(UserDefinedTypeSqlDefinitionOption::Collatable(value))
            }
            _ => self.expected_ref("SQL definition option keyword", self.peek_token_ref()),
        }
    }
20694
20695 fn parse_parenthesized_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
20696 self.expect_token(&Token::LParen)?;
20697 let idents = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
20698 self.expect_token(&Token::RParen)?;
20699 Ok(idents)
20700 }
20701
20702 fn parse_column_position(&mut self) -> Result<Option<MySQLColumnPosition>, ParserError> {
20703 if dialect_of!(self is MySqlDialect | GenericDialect) {
20704 if self.parse_keyword(Keyword::FIRST) {
20705 Ok(Some(MySQLColumnPosition::First))
20706 } else if self.parse_keyword(Keyword::AFTER) {
20707 let ident = self.parse_identifier()?;
20708 Ok(Some(MySQLColumnPosition::After(ident)))
20709 } else {
20710 Ok(None)
20711 }
20712 } else {
20713 Ok(None)
20714 }
20715 }
20716
20717 fn parse_print(&mut self) -> Result<Statement, ParserError> {
20719 Ok(Statement::Print(PrintStatement {
20720 message: Box::new(self.parse_expr()?),
20721 }))
20722 }
20723
20724 fn parse_waitfor(&mut self) -> Result<Statement, ParserError> {
20728 let wait_type = if self.parse_keyword(Keyword::DELAY) {
20729 WaitForType::Delay
20730 } else if self.parse_keyword(Keyword::TIME) {
20731 WaitForType::Time
20732 } else {
20733 return self.expected_ref("DELAY or TIME", self.peek_token_ref());
20734 };
20735 let expr = self.parse_expr()?;
20736 Ok(Statement::WaitFor(WaitForStatement { wait_type, expr }))
20737 }
20738
20739 fn parse_return(&mut self) -> Result<Statement, ParserError> {
20741 match self.maybe_parse(|p| p.parse_expr())? {
20742 Some(expr) => Ok(Statement::Return(ReturnStatement {
20743 value: Some(ReturnStatementValue::Expr(expr)),
20744 })),
20745 None => Ok(Statement::Return(ReturnStatement { value: None })),
20746 }
20747 }
20748
    /// Parse an `EXPORT DATA [WITH CONNECTION <conn>] OPTIONS (<opts>) AS
    /// <query>` statement.
    ///
    /// Note: unlike most `parse_*` helpers, the leading `EXPORT DATA`
    /// keywords are expected to still be unconsumed.
    fn parse_export_data(&mut self) -> Result<Statement, ParserError> {
        self.expect_keywords(&[Keyword::EXPORT, Keyword::DATA])?;

        let connection = if self.parse_keywords(&[Keyword::WITH, Keyword::CONNECTION]) {
            Some(self.parse_object_name(false)?)
        } else {
            None
        };
        self.expect_keyword(Keyword::OPTIONS)?;
        self.expect_token(&Token::LParen)?;
        let options = self.parse_comma_separated(|p| p.parse_sql_option())?;
        self.expect_token(&Token::RParen)?;
        self.expect_keyword(Keyword::AS)?;
        let query = self.parse_query()?;
        Ok(Statement::ExportData(ExportData {
            options,
            query,
            connection,
        }))
    }
20772
    /// Parse a `VACUUM` statement:
    /// `VACUUM [FULL] [SORT ONLY] [DELETE ONLY] [REINDEX] [RECLUSTER]
    ///  [<table> [TO <n> PERCENT] [BOOST]]`.
    ///
    /// Note: unlike most `parse_*` helpers, the leading `VACUUM` keyword is
    /// expected to still be unconsumed.
    fn parse_vacuum(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword(Keyword::VACUUM)?;
        let full = self.parse_keyword(Keyword::FULL);
        let sort_only = self.parse_keywords(&[Keyword::SORT, Keyword::ONLY]);
        let delete_only = self.parse_keywords(&[Keyword::DELETE, Keyword::ONLY]);
        let reindex = self.parse_keyword(Keyword::REINDEX);
        let recluster = self.parse_keyword(Keyword::RECLUSTER);
        // The threshold and boost modifiers are only parsed when a table name
        // is present; without one all three default to empty.
        let (table_name, threshold, boost) =
            match self.maybe_parse(|p| p.parse_object_name(false))? {
                Some(table_name) => {
                    let threshold = if self.parse_keyword(Keyword::TO) {
                        let value = self.parse_value()?;
                        self.expect_keyword(Keyword::PERCENT)?;
                        Some(value)
                    } else {
                        None
                    };
                    let boost = self.parse_keyword(Keyword::BOOST);
                    (Some(table_name), threshold, boost)
                }
                _ => (None, None, false),
            };
        Ok(Statement::Vacuum(VacuumStatement {
            full,
            sort_only,
            delete_only,
            reindex,
            recluster,
            table_name,
            threshold,
            boost,
        }))
    }
20806
    /// Consume the parser and return its full underlying token buffer.
    pub fn into_tokens(self) -> Vec<TokenWithSpan> {
        self.tokens
    }
20811
    /// Return true if the upcoming tokens start a subquery (`SELECT` or
    /// `WITH`), without consuming anything.
    fn peek_sub_query(&mut self) -> bool {
        self.peek_one_of_keywords(&[Keyword::SELECT, Keyword::WITH])
            .is_some()
    }
20817
20818 pub(crate) fn parse_show_stmt_options(&mut self) -> Result<ShowStatementOptions, ParserError> {
20819 let show_in;
20820 let mut filter_position = None;
20821 if self.dialect.supports_show_like_before_in() {
20822 if let Some(filter) = self.parse_show_statement_filter()? {
20823 filter_position = Some(ShowStatementFilterPosition::Infix(filter));
20824 }
20825 show_in = self.maybe_parse_show_stmt_in()?;
20826 } else {
20827 show_in = self.maybe_parse_show_stmt_in()?;
20828 if let Some(filter) = self.parse_show_statement_filter()? {
20829 filter_position = Some(ShowStatementFilterPosition::Suffix(filter));
20830 }
20831 }
20832 let starts_with = self.maybe_parse_show_stmt_starts_with()?;
20833 let limit = self.maybe_parse_show_stmt_limit()?;
20834 let from = self.maybe_parse_show_stmt_from()?;
20835 Ok(ShowStatementOptions {
20836 filter_position,
20837 show_in,
20838 starts_with,
20839 limit,
20840 limit_from: from,
20841 })
20842 }
20843
    /// Parse an optional `FROM`/`IN` scope clause of a `SHOW` statement,
    /// e.g. `IN DATABASE db` or `FROM schema`. Returns `Ok(None)` when the
    /// next token is neither `FROM` nor `IN`.
    fn maybe_parse_show_stmt_in(&mut self) -> Result<Option<ShowStatementIn>, ParserError> {
        let clause = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) {
            Some(Keyword::FROM) => ShowStatementInClause::FROM,
            Some(Keyword::IN) => ShowStatementInClause::IN,
            None => return Ok(None),
            _ => return self.expected_ref("FROM or IN", self.peek_token_ref()),
        };

        let (parent_type, parent_name) = match self.parse_one_of_keywords(&[
            Keyword::ACCOUNT,
            Keyword::DATABASE,
            Keyword::SCHEMA,
            Keyword::TABLE,
            Keyword::VIEW,
        ]) {
            // `IN DATABASE` / `IN SCHEMA` immediately followed by
            // `STARTS WITH` or `LIMIT` carries no parent name — the keyword
            // alone scopes the statement.
            Some(Keyword::DATABASE)
                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
                    | self.peek_keyword(Keyword::LIMIT) =>
            {
                (Some(ShowStatementInParentType::Database), None)
            }
            Some(Keyword::SCHEMA)
                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
                    | self.peek_keyword(Keyword::LIMIT) =>
            {
                (Some(ShowStatementInParentType::Schema), None)
            }
            Some(parent_kw) => {
                // Parent-type keyword with an optional object name after it.
                let parent_name = self.maybe_parse(|p| p.parse_object_name(false))?;
                match parent_kw {
                    Keyword::ACCOUNT => (Some(ShowStatementInParentType::Account), parent_name),
                    Keyword::DATABASE => (Some(ShowStatementInParentType::Database), parent_name),
                    Keyword::SCHEMA => (Some(ShowStatementInParentType::Schema), parent_name),
                    Keyword::TABLE => (Some(ShowStatementInParentType::Table), parent_name),
                    Keyword::VIEW => (Some(ShowStatementInParentType::View), parent_name),
                    _ => {
                        return self.expected_ref(
                            "one of ACCOUNT, DATABASE, SCHEMA, TABLE or VIEW",
                            self.peek_token_ref(),
                        )
                    }
                }
            }
            None => {
                // No parent-type keyword: a bare object name, optionally
                // followed by another `FROM`/`IN <identifier>` which is
                // prepended as the outermost name part.
                let mut parent_name = self.parse_object_name(false)?;
                if self
                    .parse_one_of_keywords(&[Keyword::FROM, Keyword::IN])
                    .is_some()
                {
                    parent_name
                        .0
                        .insert(0, ObjectNamePart::Identifier(self.parse_identifier()?));
                }
                (None, Some(parent_name))
            }
        };

        Ok(Some(ShowStatementIn {
            clause,
            parent_type,
            parent_name,
        }))
    }
20913
20914 fn maybe_parse_show_stmt_starts_with(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
20915 if self.parse_keywords(&[Keyword::STARTS, Keyword::WITH]) {
20916 Ok(Some(self.parse_value()?))
20917 } else {
20918 Ok(None)
20919 }
20920 }
20921
20922 fn maybe_parse_show_stmt_limit(&mut self) -> Result<Option<Expr>, ParserError> {
20923 if self.parse_keyword(Keyword::LIMIT) {
20924 Ok(self.parse_limit()?)
20925 } else {
20926 Ok(None)
20927 }
20928 }
20929
20930 fn maybe_parse_show_stmt_from(&mut self) -> Result<Option<ValueWithSpan>, ParserError> {
20931 if self.parse_keyword(Keyword::FROM) {
20932 Ok(Some(self.parse_value()?))
20933 } else {
20934 Ok(None)
20935 }
20936 }
20937
    /// True while the parser state is `ColumnDefinition` (i.e. currently
    /// parsing a column definition).
    pub(crate) fn in_column_definition_state(&self) -> bool {
        matches!(self.state, ColumnDefinition)
    }
20941
20942 pub(crate) fn parse_key_value_options(
20947 &mut self,
20948 parenthesized: bool,
20949 end_words: &[Keyword],
20950 ) -> Result<KeyValueOptions, ParserError> {
20951 let mut options: Vec<KeyValueOption> = Vec::new();
20952 let mut delimiter = KeyValueOptionsDelimiter::Space;
20953 if parenthesized {
20954 self.expect_token(&Token::LParen)?;
20955 }
20956 loop {
20957 match self.next_token().token {
20958 Token::RParen => {
20959 if parenthesized {
20960 break;
20961 } else {
20962 return self.expected_ref(" another option or EOF", self.peek_token_ref());
20963 }
20964 }
20965 Token::EOF | Token::SemiColon => break,
20966 Token::Comma => {
20967 delimiter = KeyValueOptionsDelimiter::Comma;
20968 continue;
20969 }
20970 Token::Word(w) if !end_words.contains(&w.keyword) => {
20971 options.push(self.parse_key_value_option(&w)?)
20972 }
20973 Token::Word(w) if end_words.contains(&w.keyword) => {
20974 self.prev_token();
20975 break;
20976 }
20977 _ => {
20978 return self.expected_ref(
20979 "another option, EOF, SemiColon, Comma or ')'",
20980 self.peek_token_ref(),
20981 )
20982 }
20983 };
20984 }
20985
20986 Ok(KeyValueOptions { delimiter, options })
20987 }
20988
    /// Parse the `= <value>` part of a single key/value option; `key` has
    /// already been consumed by the caller and the `=` is required.
    ///
    /// The value may be a quoted string, TRUE/FALSE, a number, any other bare
    /// word (stored as a placeholder), a parenthesized value list, or a nested
    /// parenthesized option group.
    pub(crate) fn parse_key_value_option(
        &mut self,
        key: &Word,
    ) -> Result<KeyValueOption, ParserError> {
        self.expect_token(&Token::Eq)?;
        let peeked_token = self.peek_token();
        match peeked_token.token {
            Token::SingleQuotedString(_) => Ok(KeyValueOption {
                option_name: key.value.clone(),
                option_value: KeyValueOptionKind::Single(self.parse_value()?),
            }),
            Token::Word(word)
                if word.keyword == Keyword::TRUE || word.keyword == Keyword::FALSE =>
            {
                Ok(KeyValueOption {
                    option_name: key.value.clone(),
                    option_value: KeyValueOptionKind::Single(self.parse_value()?),
                })
            }
            Token::Number(..) => Ok(KeyValueOption {
                option_name: key.value.clone(),
                option_value: KeyValueOptionKind::Single(self.parse_value()?),
            }),
            // Any other bare word is kept verbatim as a placeholder value.
            Token::Word(word) => {
                self.next_token();
                Ok(KeyValueOption {
                    option_name: key.value.clone(),
                    option_value: KeyValueOptionKind::Single(
                        Value::Placeholder(word.value.clone()).with_span(peeked_token.span),
                    ),
                })
            }
            Token::LParen => {
                // First try `( v1, v2, ... )` as a list of plain values; on
                // failure (maybe_parse backtracks) re-parse the parentheses as
                // a nested `( key = value ... )` option group.
                match self.maybe_parse(|parser| {
                    parser.expect_token(&Token::LParen)?;
                    let values = parser.parse_comma_separated0(|p| p.parse_value(), Token::RParen);
                    parser.expect_token(&Token::RParen)?;
                    values
                })? {
                    Some(values) => Ok(KeyValueOption {
                        option_name: key.value.clone(),
                        option_value: KeyValueOptionKind::Multi(values),
                    }),
                    None => Ok(KeyValueOption {
                        option_name: key.value.clone(),
                        option_value: KeyValueOptionKind::KeyValueOptions(Box::new(
                            self.parse_key_value_options(true, &[])?,
                        )),
                    }),
                }
            }
            _ => self.expected_ref("expected option value", self.peek_token_ref()),
        }
    }
21047
21048 fn parse_reset(&mut self) -> Result<ResetStatement, ParserError> {
21050 if self.parse_keyword(Keyword::ALL) {
21051 return Ok(ResetStatement { reset: Reset::ALL });
21052 }
21053
21054 let obj = self.parse_object_name(false)?;
21055 Ok(ResetStatement {
21056 reset: Reset::ConfigurationParameter(obj),
21057 })
21058 }
21059}
21060
21061fn maybe_prefixed_expr(expr: Expr, prefix: Option<Ident>) -> Expr {
21062 if let Some(prefix) = prefix {
21063 Expr::Prefixed {
21064 prefix,
21065 value: Box::new(expr),
21066 }
21067 } else {
21068 expr
21069 }
21070}
21071
21072impl Word {
21073 pub fn to_ident(&self, span: Span) -> Ident {
21079 Ident {
21080 value: self.value.clone(),
21081 quote_style: self.quote_style,
21082 span,
21083 }
21084 }
21085
21086 pub fn into_ident(self, span: Span) -> Ident {
21091 Ident {
21092 value: self.value,
21093 quote_style: self.quote_style,
21094 span,
21095 }
21096 }
21097}
21098
21099#[cfg(test)]
21100mod tests {
21101 use crate::test_utils::{all_dialects, TestedDialects};
21102
21103 use super::*;
21104
    #[test]
    fn test_prev_index() {
        // Round-trips the parser's token cursor: `peek_token` must not
        // advance, `next_token` advances one token, and `prev_token`
        // rewinds one token -- including across the trailing EOF tokens.
        let sql = "SELECT version";
        all_dialects().run_parser_method(sql, |parser| {
            assert_eq!(parser.peek_token(), Token::make_keyword("SELECT"));
            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
            parser.prev_token();
            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            parser.prev_token();
            assert_eq!(parser.peek_token(), Token::make_word("version", None));
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            assert_eq!(parser.peek_token(), Token::EOF);
            parser.prev_token();
            // Stepping past the end keeps returning EOF; prev_token still
            // rewinds afterwards.
            assert_eq!(parser.next_token(), Token::make_word("version", None));
            assert_eq!(parser.next_token(), Token::EOF);
            assert_eq!(parser.next_token(), Token::EOF);
            parser.prev_token();
        });
    }
21125
    #[test]
    fn test_peek_tokens() {
        // `peek_tokens` returns a fixed-size array of upcoming tokens
        // without advancing the cursor, padding with EOF past the end of
        // input.
        all_dialects().run_parser_method("SELECT foo AS bar FROM baz", |parser| {
            assert!(matches!(
                parser.peek_tokens(),
                [Token::Word(Word {
                    keyword: Keyword::SELECT,
                    ..
                })]
            ));

            assert!(matches!(
                parser.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::SELECT,
                        ..
                    }),
                    Token::Word(_),
                    Token::Word(Word {
                        keyword: Keyword::AS,
                        ..
                    }),
                ]
            ));

            // Advance past "SELECT foo AS bar".
            for _ in 0..4 {
                parser.next_token();
            }

            assert!(matches!(
                parser.peek_tokens(),
                [
                    Token::Word(Word {
                        keyword: Keyword::FROM,
                        ..
                    }),
                    Token::Word(_),
                    Token::EOF,
                    Token::EOF,
                ]
            ))
        })
    }
21170
21171 #[cfg(test)]
21172 mod test_parse_data_type {
21173 use crate::ast::{
21174 CharLengthUnits, CharacterLength, DataType, ExactNumberInfo, ObjectName, TimezoneInfo,
21175 };
21176 use crate::dialect::{AnsiDialect, GenericDialect, PostgreSqlDialect};
21177 use crate::test_utils::TestedDialects;
21178
        /// Asserts that `$input` parses (via `parse_data_type`) to
        /// `$expected_type`, and that the parsed type's `Display` output
        /// round-trips back to the original input string.
        macro_rules! test_parse_data_type {
            ($dialect:expr, $input:expr, $expected_type:expr $(,)?) => {{
                $dialect.run_parser_method(&*$input, |parser| {
                    let data_type = parser.parse_data_type().unwrap();
                    assert_eq!($expected_type, data_type);
                    assert_eq!($input.to_string(), data_type.to_string());
                });
            }};
        }
21188
        #[test]
        fn test_ansii_character_string_types() {
            // ANSI character string types: CHARACTER/CHAR [VARYING] and
            // VARCHAR, with optional length and CHARACTERS/OCTETS units.
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(dialect, "CHARACTER", DataType::Character(None));

            test_parse_data_type!(
                dialect,
                "CHARACTER(20)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER(20 CHARACTERS)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER(20 OCTETS)",
                DataType::Character(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(dialect, "CHAR", DataType::Char(None));

            test_parse_data_type!(
                dialect,
                "CHAR(20)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR(20 CHARACTERS)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR(20 OCTETS)",
                DataType::Char(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20 CHARACTERS)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHARACTER VARYING(20 OCTETS)",
                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20 CHARACTERS)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Characters)
                }))
            );

            test_parse_data_type!(
                dialect,
                "CHAR VARYING(20 OCTETS)",
                DataType::CharVarying(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: Some(CharLengthUnits::Octets)
                }))
            );

            test_parse_data_type!(
                dialect,
                "VARCHAR(20)",
                DataType::Varchar(Some(CharacterLength::IntegerLength {
                    length: 20,
                    unit: None
                }))
            );
        }
21316
        #[test]
        fn test_ansii_character_large_object_types() {
            // CLOB family: CHARACTER/CHAR LARGE OBJECT and CLOB, each with
            // an optional length.
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(
                dialect,
                "CHARACTER LARGE OBJECT",
                DataType::CharacterLargeObject(None)
            );
            test_parse_data_type!(
                dialect,
                "CHARACTER LARGE OBJECT(20)",
                DataType::CharacterLargeObject(Some(20))
            );

            test_parse_data_type!(
                dialect,
                "CHAR LARGE OBJECT",
                DataType::CharLargeObject(None)
            );
            test_parse_data_type!(
                dialect,
                "CHAR LARGE OBJECT(20)",
                DataType::CharLargeObject(Some(20))
            );

            test_parse_data_type!(dialect, "CLOB", DataType::Clob(None));
            test_parse_data_type!(dialect, "CLOB(20)", DataType::Clob(Some(20)));
        }
21348
        #[test]
        fn test_parse_custom_types() {
            // Unknown type names fall back to DataType::Custom, keeping any
            // parenthesized modifiers as raw strings.
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(
                dialect,
                "GEOMETRY",
                DataType::Custom(ObjectName::from(vec!["GEOMETRY".into()]), vec![])
            );

            test_parse_data_type!(
                dialect,
                "GEOMETRY(POINT)",
                DataType::Custom(
                    ObjectName::from(vec!["GEOMETRY".into()]),
                    vec!["POINT".to_string()]
                )
            );

            test_parse_data_type!(
                dialect,
                "GEOMETRY(POINT, 4326)",
                DataType::Custom(
                    ObjectName::from(vec!["GEOMETRY".into()]),
                    vec!["POINT".to_string(), "4326".to_string()]
                )
            );
        }
21378
        #[test]
        fn test_ansii_exact_numeric_types() {
            // NUMERIC/DECIMAL/DEC with no precision, precision only, and
            // precision + scale -- including negative scales and an
            // explicit `+` sign on the scale.
            let dialect = TestedDialects::new(vec![
                Box::new(GenericDialect {}),
                Box::new(AnsiDialect {}),
                Box::new(PostgreSqlDialect {}),
            ]);

            test_parse_data_type!(dialect, "NUMERIC", DataType::Numeric(ExactNumberInfo::None));

            test_parse_data_type!(
                dialect,
                "NUMERIC(2)",
                DataType::Numeric(ExactNumberInfo::Precision(2))
            );

            test_parse_data_type!(
                dialect,
                "NUMERIC(2,10)",
                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(2, 10))
            );

            test_parse_data_type!(dialect, "DECIMAL", DataType::Decimal(ExactNumberInfo::None));

            test_parse_data_type!(
                dialect,
                "DECIMAL(2)",
                DataType::Decimal(ExactNumberInfo::Precision(2))
            );

            test_parse_data_type!(
                dialect,
                "DECIMAL(2,10)",
                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(2, 10))
            );

            test_parse_data_type!(dialect, "DEC", DataType::Dec(ExactNumberInfo::None));

            test_parse_data_type!(
                dialect,
                "DEC(2)",
                DataType::Dec(ExactNumberInfo::Precision(2))
            );

            test_parse_data_type!(
                dialect,
                "DEC(2,10)",
                DataType::Dec(ExactNumberInfo::PrecisionAndScale(2, 10))
            );

            // Negative scales are accepted and round-trip unchanged.
            test_parse_data_type!(
                dialect,
                "NUMERIC(10,-2)",
                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -2))
            );

            test_parse_data_type!(
                dialect,
                "DECIMAL(1000,-10)",
                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(1000, -10))
            );

            test_parse_data_type!(
                dialect,
                "DEC(5,-1000)",
                DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -1000))
            );

            test_parse_data_type!(
                dialect,
                "NUMERIC(10,-5)",
                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, -5))
            );

            test_parse_data_type!(
                dialect,
                "DECIMAL(20,-10)",
                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(20, -10))
            );

            test_parse_data_type!(
                dialect,
                "DEC(5,-2)",
                DataType::Dec(ExactNumberInfo::PrecisionAndScale(5, -2))
            );

            // An explicit `+` on the scale parses, but is not preserved in
            // the display output (so the round-trip macro can't be used).
            dialect.run_parser_method("NUMERIC(10,+5)", |parser| {
                let data_type = parser.parse_data_type().unwrap();
                assert_eq!(
                    DataType::Numeric(ExactNumberInfo::PrecisionAndScale(10, 5)),
                    data_type
                );
                assert_eq!("NUMERIC(10,5)", data_type.to_string());
            });
        }
21477
        #[test]
        fn test_ansii_date_type() {
            // DATE, TIME and TIMESTAMP with optional precision and
            // WITH/WITHOUT TIME ZONE qualifiers.
            let dialect =
                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);

            test_parse_data_type!(dialect, "DATE", DataType::Date);

            test_parse_data_type!(dialect, "TIME", DataType::Time(None, TimezoneInfo::None));

            test_parse_data_type!(
                dialect,
                "TIME(6)",
                DataType::Time(Some(6), TimezoneInfo::None)
            );

            test_parse_data_type!(
                dialect,
                "TIME WITH TIME ZONE",
                DataType::Time(None, TimezoneInfo::WithTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIME(6) WITH TIME ZONE",
                DataType::Time(Some(6), TimezoneInfo::WithTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIME WITHOUT TIME ZONE",
                DataType::Time(None, TimezoneInfo::WithoutTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIME(6) WITHOUT TIME ZONE",
                DataType::Time(Some(6), TimezoneInfo::WithoutTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIMESTAMP",
                DataType::Timestamp(None, TimezoneInfo::None)
            );

            test_parse_data_type!(
                dialect,
                "TIMESTAMP(22)",
                DataType::Timestamp(Some(22), TimezoneInfo::None)
            );

            test_parse_data_type!(
                dialect,
                "TIMESTAMP(22) WITH TIME ZONE",
                DataType::Timestamp(Some(22), TimezoneInfo::WithTimeZone)
            );

            test_parse_data_type!(
                dialect,
                "TIMESTAMP(33) WITHOUT TIME ZONE",
                DataType::Timestamp(Some(33), TimezoneInfo::WithoutTimeZone)
            );
        }
21542 }
21543
    #[test]
    fn test_parse_schema_name() {
        // Asserts that `$input` parses to `$expected_name` and that the
        // schema name displays back as the original input.
        macro_rules! test_parse_schema_name {
            ($input:expr, $expected_name:expr $(,)?) => {{
                all_dialects().run_parser_method(&*$input, |parser| {
                    let schema_name = parser.parse_schema_name().unwrap();
                    assert_eq!(schema_name, $expected_name);
                    assert_eq!(schema_name.to_string(), $input.to_string());
                });
            }};
        }

        let dummy_name = ObjectName::from(vec![Ident::new("dummy_name")]);
        let dummy_authorization = Ident::new("dummy_authorization");

        // `<name>` alone.
        test_parse_schema_name!(
            format!("{dummy_name}"),
            SchemaName::Simple(dummy_name.clone())
        );

        // `AUTHORIZATION <owner>` with no schema name.
        test_parse_schema_name!(
            format!("AUTHORIZATION {dummy_authorization}"),
            SchemaName::UnnamedAuthorization(dummy_authorization.clone()),
        );
        // `<name> AUTHORIZATION <owner>`.
        test_parse_schema_name!(
            format!("{dummy_name} AUTHORIZATION {dummy_authorization}"),
            SchemaName::NamedAuthorization(dummy_name.clone(), dummy_authorization.clone()),
        );
    }
21576
    #[test]
    fn mysql_parse_index_table_constraint() {
        // MySQL-style inline INDEX/KEY table constraints, with optional
        // name and `USING BTREE`/`USING HASH` index type.

        // Asserts that `$input` parses to `$expected` and that the
        // constraint displays back as the original input.
        macro_rules! test_parse_table_constraint {
            ($dialect:expr, $input:expr, $expected:expr $(,)?) => {{
                $dialect.run_parser_method(&*$input, |parser| {
                    let constraint = parser.parse_optional_table_constraint().unwrap().unwrap();
                    assert_eq!(constraint, $expected);
                    assert_eq!(constraint.to_string(), $input.to_string());
                });
            }};
        }

        // Builds the expected index column: a bare identifier with no
        // ordering options and no operator class.
        fn mk_expected_col(name: &str) -> IndexColumn {
            IndexColumn {
                column: OrderByExpr {
                    expr: Expr::Identifier(name.into()),
                    options: OrderByOptions {
                        asc: None,
                        nulls_first: None,
                    },
                    with_fill: None,
                },
                operator_class: None,
            }
        }

        let dialect =
            TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(MySqlDialect {})]);

        test_parse_table_constraint!(
            dialect,
            "INDEX (c1)",
            IndexConstraint {
                display_as_key: false,
                name: None,
                index_type: None,
                columns: vec![mk_expected_col("c1")],
                index_options: vec![],
            }
            .into()
        );

        // `KEY` is a display-only synonym for `INDEX`.
        test_parse_table_constraint!(
            dialect,
            "KEY (c1)",
            IndexConstraint {
                display_as_key: true,
                name: None,
                index_type: None,
                columns: vec![mk_expected_col("c1")],
                index_options: vec![],
            }
            .into()
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX 'index' (c1, c2)",
            TableConstraint::Index(IndexConstraint {
                display_as_key: false,
                name: Some(Ident::with_quote('\'', "index")),
                index_type: None,
                columns: vec![mk_expected_col("c1"), mk_expected_col("c2")],
                index_options: vec![],
            })
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX USING BTREE (c1)",
            IndexConstraint {
                display_as_key: false,
                name: None,
                index_type: Some(IndexType::BTree),
                columns: vec![mk_expected_col("c1")],
                index_options: vec![],
            }
            .into()
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX USING HASH (c1)",
            IndexConstraint {
                display_as_key: false,
                name: None,
                index_type: Some(IndexType::Hash),
                columns: vec![mk_expected_col("c1")],
                index_options: vec![],
            }
            .into()
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX idx_name USING BTREE (c1)",
            IndexConstraint {
                display_as_key: false,
                name: Some(Ident::new("idx_name")),
                index_type: Some(IndexType::BTree),
                columns: vec![mk_expected_col("c1")],
                index_options: vec![],
            }
            .into()
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX idx_name USING HASH (c1)",
            IndexConstraint {
                display_as_key: false,
                name: Some(Ident::new("idx_name")),
                index_type: Some(IndexType::Hash),
                columns: vec![mk_expected_col("c1")],
                index_options: vec![],
            }
            .into()
        );
    }
21698
    #[test]
    fn test_tokenizer_error_loc() {
        // Tokenizer errors must report the line/column of the offending
        // character (here the unterminated single quote).
        let sql = "foo '";
        let ast = Parser::parse_sql(&GenericDialect, sql);
        assert_eq!(
            ast,
            Err(ParserError::TokenizerError(
                "Unterminated string literal at Line: 1, Column: 5".to_string()
            ))
        );
    }
21710
    #[test]
    fn test_parser_error_loc() {
        // Parser errors must report the line/column of the unexpected
        // token.
        let sql = "SELECT this is a syntax error";
        let ast = Parser::parse_sql(&GenericDialect, sql);
        assert_eq!(
            ast,
            Err(ParserError::ParserError(
                "Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS, found: a at Line: 1, Column: 16"
                    .to_string()
            ))
        );
    }
21723
    #[test]
    fn test_nested_explain_error() {
        // EXPLAIN cannot itself be the target of another EXPLAIN.
        let sql = "EXPLAIN EXPLAIN SELECT 1";
        let ast = Parser::parse_sql(&GenericDialect, sql);
        assert_eq!(
            ast,
            Err(ParserError::ParserError(
                "Explain must be root of the plan".to_string()
            ))
        );
    }
21735
    #[test]
    fn test_parse_multipart_identifier_positive() {
        let dialect = TestedDialects::new(vec![Box::new(GenericDialect {})]);

        // A quoted part may contain dots, parentheses and doubled (escaped)
        // quotes; the parts split only on unquoted periods.
        let expected = vec![
            Ident {
                value: "CATALOG".to_string(),
                quote_style: None,
                span: Span::empty(),
            },
            Ident {
                value: "F(o)o. \"bar".to_string(),
                quote_style: Some('"'),
                span: Span::empty(),
            },
            Ident {
                value: "table".to_string(),
                quote_style: None,
                span: Span::empty(),
            },
        ];
        dialect.run_parser_method(r#"CATALOG."F(o)o. ""bar".table"#, |parser| {
            let actual = parser.parse_multipart_identifier().unwrap();
            assert_eq!(expected, actual);
        });

        // Whitespace around the separating periods is ignored.
        let expected = vec![
            Ident {
                value: "CATALOG".to_string(),
                quote_style: None,
                span: Span::empty(),
            },
            Ident {
                value: "table".to_string(),
                quote_style: None,
                span: Span::empty(),
            },
        ];
        dialect.run_parser_method("CATALOG . table", |parser| {
            let actual = parser.parse_multipart_identifier().unwrap();
            assert_eq!(expected, actual);
        });
    }
21781
    #[test]
    fn test_parse_multipart_identifier_negative() {
        // Asserts that parsing `$input` as a multipart identifier fails
        // with exactly `$expected_err`.
        macro_rules! test_parse_multipart_identifier_error {
            ($input:expr, $expected_err:expr $(,)?) => {{
                all_dialects().run_parser_method(&*$input, |parser| {
                    let actual_err = parser.parse_multipart_identifier().unwrap_err();
                    assert_eq!(actual_err.to_string(), $expected_err);
                });
            }};
        }

        test_parse_multipart_identifier_error!(
            "",
            "sql parser error: Empty input when parsing identifier",
        );

        test_parse_multipart_identifier_error!(
            "*schema.table",
            "sql parser error: Unexpected token in identifier: *",
        );

        test_parse_multipart_identifier_error!(
            "schema.table*",
            "sql parser error: Unexpected token in identifier: *",
        );

        test_parse_multipart_identifier_error!(
            "schema.table.",
            "sql parser error: Trailing period in identifier",
        );

        test_parse_multipart_identifier_error!(
            "schema.*",
            "sql parser error: Unexpected token following period in identifier: *",
        );
    }
21818
21819 #[test]
21820 fn test_mysql_partition_selection() {
21821 let sql = "SELECT * FROM employees PARTITION (p0, p2)";
21822 let expected = vec!["p0", "p2"];
21823
21824 let ast: Vec<Statement> = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
21825 assert_eq!(ast.len(), 1);
21826 if let Statement::Query(v) = &ast[0] {
21827 if let SetExpr::Select(select) = &*v.body {
21828 assert_eq!(select.from.len(), 1);
21829 let from: &TableWithJoins = &select.from[0];
21830 let table_factor = &from.relation;
21831 if let TableFactor::Table { partitions, .. } = table_factor {
21832 let actual: Vec<&str> = partitions
21833 .iter()
21834 .map(|ident| ident.value.as_str())
21835 .collect();
21836 assert_eq!(expected, actual);
21837 }
21838 }
21839 } else {
21840 panic!("fail to parse mysql partition selection");
21841 }
21842 }
21843
    #[test]
    fn test_replace_into_placeholders() {
        // `&a` is not a valid placeholder token in the generic dialect, so
        // the whole statement must fail to parse.
        let sql = "REPLACE INTO t (a) VALUES (&a)";

        assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
    }
21850
    #[test]
    fn test_replace_into_set_placeholder() {
        // A bare `?` is not a valid assignment list after `SET`.
        let sql = "REPLACE INTO t SET ?";

        assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
    }
21857
21858 #[test]
21859 fn test_replace_incomplete() {
21860 let sql = r#"REPLACE"#;
21861
21862 assert!(Parser::parse_sql(&MySqlDialect {}, sql).is_err());
21863 }
21864
    #[test]
    fn test_placeholder_invalid_whitespace() {
        // Whitespace (or a comment) between `:` and the placeholder name
        // must make parsing fail rather than silently joining the tokens.
        for w in [" ", "/*invalid*/"] {
            let sql = format!("\nSELECT\n :{w}fooBar");
            assert!(Parser::parse_sql(&GenericDialect, &sql).is_err());
        }
    }
21872}